Merge branch 'master' of gitfub.space:caspervk/nightr (commit 00aff53d3a)
@ -1,41 +1,66 @@
|
|||
import inspect
|
||||
import statistics
|
||||
from dataclasses import asdict
|
||||
from datetime import timedelta
|
||||
from typing import List
|
||||
|
||||
import requests_cache
|
||||
from flask import Flask, jsonify
|
||||
|
||||
from server.nightr.strategies import dmi, steam
|
||||
from server.nightr.util import Context
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
requests_cache.install_cache("requests_cache.sqlite", expire_after=timedelta(minutes=10))
|
||||
|
||||
|
||||
strategies = {
|
||||
# name: (weight, probability function)
|
||||
"dmi": (1.0, dmi.probability),
|
||||
"steam": (0.5, steam.lol),
|
||||
"dmi": (0.5, dmi.probability),
|
||||
"steam": (1.0, steam.probability),
|
||||
}
|
||||
|
||||
|
||||
@app.route("/", methods=["GET", "POST"])
|
||||
def probabilities():
|
||||
phone_data = None # TODO
|
||||
phone_data = {} # TODO: get from POST request
|
||||
context = Context(**phone_data)
|
||||
|
||||
probs = []
|
||||
predictions: List[dict] = []
|
||||
for name, (weight, strategy) in strategies.items():
|
||||
try:
|
||||
prob = strategy(phone_data)
|
||||
prediction = strategy(context)
|
||||
except Exception as e:
|
||||
print(f"Strategy {name} failed: {e}")
|
||||
continue
|
||||
probs.append({
|
||||
predictions.append({
|
||||
"name": name,
|
||||
"doc": inspect.getdoc(strategy),
|
||||
"prob": prob * weight,
|
||||
"description": inspect.getdoc(strategy),
|
||||
"weight": weight,
|
||||
"weighted_probability": prediction.probability * weight,
|
||||
"night": prediction.probability > 0.5,
|
||||
**asdict(prediction),
|
||||
})
|
||||
|
||||
mean = statistics.mean(p["weighted_probability"] for p in predictions)
|
||||
median = statistics.median(p["weighted_probability"] for p in predictions)
|
||||
night = mean > 0.5
|
||||
|
||||
# Calculate contributions of predictions
|
||||
consensus_weight_sum = sum(p["weight"] for p in predictions if p["night"] == night)
|
||||
for prediction in predictions:
|
||||
# If this prediction agrees with the consensus it contributed
|
||||
if prediction["night"] == night:
|
||||
prediction["contribution"] = prediction["weight"] / consensus_weight_sum
|
||||
else:
|
||||
prediction["contribution"] = 0.0
|
||||
|
||||
return jsonify({
|
||||
"strategies": probs,
|
||||
"mean": statistics.mean(p["prob"] for p in probs),
|
||||
"median": statistics.median(p["prob"] for p in probs),
|
||||
"predictions": predictions,
|
||||
"weighted_probabilities_mean": mean,
|
||||
"weighted_probabilities_median": median,
|
||||
"night": night,
|
||||
})
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
from server.nightr.util import Context, Prediction
|
||||
|
||||
def probability(phone_data) -> float:
|
||||
|
||||
def probability(context: Context) -> Prediction:
|
||||
"""
|
||||
The data from DMI.
|
||||
"""
|
||||
return 0.63
|
||||
p = Prediction()
|
||||
p.probability = 0.7
|
||||
p.reasons.append("It is raining in Tønder")
|
||||
|
||||
return p
|
||||
|
|
|
@ -1,3 +1,12 @@
|
|||
from server.nightr.util import Context, Prediction
|
||||
|
||||
def lol(phone_data) -> float:
|
||||
return 0.21
|
||||
|
||||
def probability(context: Context) -> Prediction:
|
||||
"""
|
||||
How many players are currently online on Steam.
|
||||
"""
|
||||
p = Prediction()
|
||||
p.probability = 0.2
|
||||
p.reasons.append("CSGO has more than 10.000 online players")
|
||||
|
||||
return p
|
||||
|
|
server/nightr/util.py (new file, 14 lines)
|
@ -0,0 +1,14 @@
|
|||
from dataclasses import dataclass, field
|
||||
from typing import List, Tuple
|
||||
|
||||
|
||||
@dataclass
|
||||
class Context:
|
||||
battery: float = 1.0
|
||||
coordinates: Tuple[float, float] = (0.0, 0.0)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Prediction:
|
||||
probability: float = 0.5
|
||||
reasons: List[str] = field(default_factory=list)
|
|
@ -1,2 +1,3 @@
|
|||
Flask==1.0.2
requests==2.21.0
requests-cache==0.4.13
|
||||
|
|
Loading…
Reference in a new issue