Merged app

Viktor Søndergaard 2019-04-06 18:40:34 +02:00
commit 39b1a43a53
27 changed files with 529 additions and 136 deletions

.gitignore (vendored), 4 lines changed
View File

@@ -519,3 +519,7 @@ tags
.history
# End of https://www.gitignore.io/api/vim,emacs,android,pycharm+all,androidstudio,visualstudiocode,python,java,angular
# Custom
requests_cache.sqlite

View File

@@ -3948,6 +3948,14 @@
}
}
},
"nativescript-geolocation": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/nativescript-geolocation/-/nativescript-geolocation-5.0.0.tgz",
"integrity": "sha512-olFTkG68Y0pkqtxyaPoHalZSHgXcg3iL9q+r9gcEY5c7QY8sCtfdO/T5FhHeQlDu0YrrZhx2Ke20dUczuePmUA==",
"requires": {
"nativescript-permissions": "~1.2.3"
}
},
"nativescript-hook": {
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/nativescript-hook/-/nativescript-hook-0.2.5.tgz",
@@ -3978,6 +3986,11 @@
"resolved": "https://registry.npmjs.org/nativescript-intl/-/nativescript-intl-3.0.0.tgz",
"integrity": "sha1-gu6b59N3Fys8QpVzRyMDdijhhqc="
},
"nativescript-permissions": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/nativescript-permissions/-/nativescript-permissions-1.2.3.tgz",
"integrity": "sha1-4+ZVRfmP5IjdVXj3/5DrrjCI5wA="
},
"nativescript-theme-core": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/nativescript-theme-core/-/nativescript-theme-core-1.0.4.tgz",

View File

@@ -22,6 +22,7 @@
"@angular/platform-browser-dynamic": "~7.2.0",
"@angular/router": "~7.2.0",
"nativescript-angular": "~7.2.0",
"nativescript-geolocation": "^5.0.0",
"nativescript-theme-core": "~1.0.4",
"reflect-metadata": "~0.1.12",
"rxjs": "~6.3.0",

View File

@@ -5,5 +5,7 @@
<ns-my-button (tap)=onTap($event) text="Nightr"></ns-my-button>
</StackLayout>
<ns-locationButton></ns-locationButton>
</AbsoluteLayout>

View File

@@ -2,6 +2,7 @@ import { Component } from "@angular/core";
import * as dialogs from "tns-core-modules/ui/dialogs";
import { MyHttpPostService } from './services/my-http-post-service'
import { TouchGestureEventData, GestureEventData } from 'tns-core-modules/ui/gestures'
import { isEnabled, enableLocationRequest, getCurrentLocation, watchLocation, distance, clearWatch } from "nativescript-geolocation";
@Component({
selector: "ns-app",

View File

@@ -4,6 +4,7 @@ import { NativeScriptModule } from "nativescript-angular/nativescript.module";
import { AppComponent } from "./app.component";
import { MyButtonComponent } from './component/my-button/my-button.component';
import { NativeScriptHttpClientModule } from "nativescript-angular/http-client";
import { MyLocationButtonComponent } from './component/locationButton/locationButton.component';
// Uncomment and add to NgModule imports if you need to use two-way binding
// import { NativeScriptFormsModule } from "nativescript-angular/forms";
@@ -21,6 +22,7 @@ import { NativeScriptHttpClientModule } from "nativescript-angular/http-client";
],
declarations: [
AppComponent,
MyLocationButtonComponent,
MyButtonComponent,
],
providers: [],

View File

@@ -0,0 +1 @@
/* Add mobile styles for the component here. */

View File

@@ -0,0 +1,4 @@
<StackLayout>
<Button text="{{title}}" class="btn btn-primary" (tap)="onTap()"></Button>
<Label text="{{lat}}"></Label>
</StackLayout>

View File

@@ -0,0 +1,25 @@
import { Component, OnInit } from '@angular/core';
import { MyGeoLocationService} from '../../services/my-geo-location.service';
@Component({
selector: 'ns-locationButton',
templateUrl: './locationButton.component.html',
styleUrls: ['./locationButton.component.css'],
moduleId: module.id,
})
export class MyLocationButtonComponent implements OnInit {
title = "Click to get location!";
lat = "start";
geoLocationService = new MyGeoLocationService();
constructor() {
}
ngOnInit() {
}
onTap() {
this.geoLocationService.getLocation().then(location => {
this.lat = ""+location.latitude;
}).catch(error => {
});
}
}

View File

@@ -0,0 +1,35 @@
import { Injectable } from '@angular/core';
import { isEnabled, enableLocationRequest, getCurrentLocation, watchLocation, distance, clearWatch, Location } from "nativescript-geolocation";
import { stringify } from '@angular/core/src/render3/util';
@Injectable({
providedIn: 'root'
})
export class MyGeoLocationService {
loc: Location;
constructor() {
}
getLocation(): Promise<Location> {
this.isLocationEnabled();
var result = getCurrentLocation({
desiredAccuracy: 3,
timeout: 5000
});
return result;
}
private isLocationEnabled() {
isEnabled().then(function (isEnabled) {
if (!isEnabled) {
enableLocationRequest().then(function () {
}, function (e) {
alert("Error: " + (e.message || e));
});
}
}, function (e) {
alert("Error: " + (e.message || e));
});
}
}

View File

@@ -4,15 +4,13 @@ FIRST_RUN=$?
# Create and enter virtual environment
if (( $FIRST_RUN )); then
echo Creating virtual environment
python3 -m venv venv
python3.7 -m venv venv
fi
source venv/bin/activate
# Install required python packages
if (( $FIRST_RUN )); then
echo Installing required Python packages
pip install -Ur requirements.txt
fi
echo Installing required Python packages
pip install -Ur requirements.txt
function run() {
python -m nightr

View File

@@ -1,24 +1,33 @@
import inspect
import statistics
import timeit
from dataclasses import asdict
from datetime import timedelta
from logging import DEBUG
from typing import List
import requests_cache
from flask import Flask, jsonify
from flask import Flask, jsonify, logging
from server.nightr.strategies import dmi, steam
from server.nightr.util import Context
from .strategies import miloStrats, iss, cars_in_traffic, tide_strat, upstairs_neighbour
from .util import Context
app = Flask(__name__)
logger = logging.create_logger(app)
logger.setLevel(DEBUG)
requests_cache.install_cache("requests_cache.sqlite", expire_after=timedelta(minutes=10))
requests_cache.install_cache("requests_cache", expire_after=timedelta(minutes=10))
strategies = {
# name: (weight, probability function)
"dmi": (0.5, dmi.probability),
"steam": (1.0, steam.probability),
"tv2news": miloStrats.tv2newsStrat,
"australia": miloStrats.australiaStrat,
"camera": miloStrats.camImgStrat,
"iss": iss.night_on_iss,
"cars_in_traffic": cars_in_traffic.cars_in_traffic,
"tide": tide_strat.is_tide,
"upstairs_neighbour": upstairs_neighbour.check_games,
}
@@ -28,17 +37,22 @@ def probabilities():
context = Context(**phone_data)
predictions: List[dict] = []
for name, (weight, strategy) in strategies.items():
for name, strategy in strategies.items():
try:
start = timeit.default_timer()
prediction = strategy(context)
stop = timeit.default_timer()
logger.debug("Execution time for %s: %ss", name, stop - start)
except Exception as e:
print(f"Strategy {name} failed: {e}")
logger.warning("Strategy '%s' failed:", name)
logger.exception(e)
continue
predictions.append({
"name": name,
"description": inspect.getdoc(strategy),
"weight": weight,
"weighted_probability": prediction.probability * weight,
"weight": prediction.weight,
"weighted_probability": prediction.probability * prediction.weight,
"night": prediction.probability > 0.5,
**asdict(prediction),
})
@@ -47,6 +61,12 @@ def probabilities():
median = statistics.median(p["weighted_probability"] for p in predictions)
night = mean > 0.5
# Invert if we're in Australia
if context.in_australia:
night = not night
for prediction in predictions:
prediction["night"] = not prediction["night"]
# Calculate contributions of predictions
consensus_weight_sum = sum(p["weight"] for p in predictions if p["night"] == night)
for prediction in predictions:
@@ -65,7 +85,7 @@ def probabilities():
def main():
app.run(host='0.0.0.0')
app.run(host='0.0.0.0', debug=True)
if __name__ == '__main__':
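
For reference, the aggregation that probabilities() now performs can be sketched in isolation. The prediction dicts and the in_australia flag below are invented stand-ins for the values built in the endpoint; field names mirror the diff, but the contribution bookkeeping in the real code is omitted.

import statistics

# Hypothetical stand-ins for the dicts built in probabilities(); values are invented.
predictions = [
    {"name": "iss", "weight": 1.0, "probability": 0.9},
    {"name": "camera", "weight": 0.7, "probability": 1.0},
    {"name": "tv2news", "weight": 0.7, "probability": 0.0},
]

for p in predictions:
    p["weighted_probability"] = p["probability"] * p["weight"]
    p["night"] = p["probability"] > 0.5

mean = statistics.mean(p["weighted_probability"] for p in predictions)
median = statistics.median(p["weighted_probability"] for p in predictions)
night = mean > 0.5

in_australia = False  # stand-in for Context.in_australia; flips the verdict when True
if in_australia:
    night = not night
    for p in predictions:
        p["night"] = not p["night"]

print(night, mean, median)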

View File

@@ -1,6 +1,8 @@
import requests
from datetime import datetime
import time
from datetime import datetime
import requests
def main():
filename = "dotaplayers " + str(datetime.now()) + ".csv"
@@ -16,5 +18,6 @@ def main():
f.close()
time.sleep(100)
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,40 @@
import requests
from ..util import Prediction, Context
def cars_in_traffic(context: Context) -> Prediction:
"""
How many cars are currently driving around Aarhus?
"""
r = requests.get('https://portal.opendata.dk/api/3/action/datastore_search?resource_id=b3eeb0ff-c8a8-4824-99d6-e0a3747c8b0d')
night_avr = 3.38
day_avr = 6.98
p = Prediction()
data = r.json()
sum = 0
len = 0
for lel in data['result']['records']:
sum += lel['vehicleCount']
len += 1
if sum > 0:
curr_avg = sum / len  # average vehicle count per record
else:
curr_avg = 0
diff = day_avr - night_avr
if curr_avg >= day_avr:
p.reasons.append(f"Because {curr_avg} cars are driving around Aarhus right now and {day_avr} is the expected number for daytime")
p.probability = 0.0
elif curr_avg <= night_avr:
p.reasons.append(f"Because {curr_avg} cars are driving around Aarhus right now and {night_avr} is the expected number for nighttime")
p.probability = 1.0
else:
p.reasons.append(f"Because average for daytime is {day_avr} and average for nighttime is {night_avr}, but the current average is {curr_avg}")
res = (day_avr - curr_avg) / diff  # interpolate: 1.0 at the night average, 0.0 at the day average
p.probability = res
return p
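
A small worked example of the interpolation used by cars_in_traffic, written as a standalone helper with a made-up current average; night_probability is not a function in the repository, it only illustrates the branching above.

night_avr = 3.38  # expected average vehicle count per record at night
day_avr = 6.98    # expected average vehicle count per record during the day

def night_probability(curr_avg: float) -> float:
    """Clamp to [0, 1] and interpolate linearly between the day and night averages."""
    if curr_avg >= day_avr:
        return 0.0
    if curr_avg <= night_avr:
        return 1.0
    return (day_avr - curr_avg) / (day_avr - night_avr)

print(night_probability(5.18))  # 0.5: exactly halfway between the two averages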

View File

@@ -1,12 +0,0 @@
from server.nightr.util import Context, Prediction
def probability(context: Context) -> Prediction:
"""
The data from DMI.
"""
p = Prediction()
p.probability = 0.7
p.reasons.append("It is raining in Tønder")
return p

View File

@@ -0,0 +1,88 @@
import itertools
import logging
from datetime import datetime
from math import pi, sqrt, sin, cos, atan2
import pytz
import requests
from timezonefinder import TimezoneFinder
from ..util import Context, Prediction
tf = TimezoneFinder(in_memory=True)
def night_on_iss(context: Context) -> Prediction:
"""
It is night if it is night on the ISS and it is currently orbiting above us.
"""
p = Prediction()
if not context.flat_earth:
iss_position = requests.get("http://api.open-notify.org/iss-now.json").json()["iss_position"]
the_iss = "The ISS"
iss_position_description = "on board the ISS"
else:
p.reasons.append("The ISS is (obviously) located in Hollywood")
the_iss = "Hollywood"
iss_position = {'latitude': 34.092808, 'longitude': -118.328659} # Hollywood
iss_position_description = "in the Hollywood studio"
phone_position = context.position
# Calculate ratio: a number between 0 and 1 saying how close we are to the ISS
distance = haversine(iss_position, phone_position)
max_distance = 40075 / 2 # the furthest you can be from any position is half of the earth's circumference
ratio = distance / max_distance
# We're in the same "timezone" as the ISS if we're on the same half of the earth
on_iss_time = ratio < 0.5
side = "same" if on_iss_time else "other"
p.reasons.append(f"{the_iss} is {int(distance)} km away, so we are on the {side} side of the earth.")
for i in itertools.count(1):
iss_tz = tf.closest_timezone_at(lng=float(iss_position["longitude"]),
lat=float(iss_position["latitude"]),
delta_degree=i)
if iss_tz is not None:
break
iss_time = datetime.now(pytz.timezone(iss_tz))
iss_night = iss_time.hour < 6 or iss_time.hour > 22
# iss_night on_iss_time night
# 0 0 1
# 0 1 0
# 1 0 0
# 1 1 1
night = iss_night == on_iss_time
iss_time_description = "nighttime" if iss_night else "daytime"
time_description = "nighttime" if night else "daytime"
p.probability = float(night)
p.reasons.append(f"It is {iss_time_description} {iss_position_description}.")
p.reasons.append(f"Therefore, it must be {time_description} where we are.")
return p
def haversine(pos1, pos2):
"""
Distance between two GPS coordinates.
https://stackoverflow.com/a/18144531
"""
lat1 = float(pos1["latitude"])
long1 = float(pos1["longitude"])
lat2 = float(pos2["latitude"])
long2 = float(pos2["longitude"])
degree_to_rad = float(pi / 180.0)
d_lat = (lat2 - lat1) * degree_to_rad
d_long = (long2 - long1) * degree_to_rad
a = pow(sin(d_lat / 2), 2) + cos(lat1 * degree_to_rad) * cos(lat2 * degree_to_rad) * pow(sin(d_long / 2), 2)
c = 2 * atan2(sqrt(a), sqrt(1 - a))
km = 6367 * c
return km
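
The haversine helper can be exercised on its own. The coordinates below (Aarhus and the Hollywood fallback position) are illustrative, not taken from the live ISS feed, and the function is repeated here only so the snippet runs stand-alone.

from math import pi, sqrt, sin, cos, atan2

def haversine(pos1, pos2):
    """Great-circle distance in km between two {"latitude", "longitude"} dicts."""
    lat1, long1 = float(pos1["latitude"]), float(pos1["longitude"])
    lat2, long2 = float(pos2["latitude"]), float(pos2["longitude"])
    degree_to_rad = pi / 180.0
    d_lat = (lat2 - lat1) * degree_to_rad
    d_long = (long2 - long1) * degree_to_rad
    a = sin(d_lat / 2) ** 2 + cos(lat1 * degree_to_rad) * cos(lat2 * degree_to_rad) * sin(d_long / 2) ** 2
    return 6367 * 2 * atan2(sqrt(a), sqrt(1 - a))

aarhus = {"latitude": 56.1629, "longitude": 10.2039}
hollywood = {"latitude": 34.092808, "longitude": -118.328659}

distance = haversine(aarhus, hollywood)
ratio = distance / (40075 / 2)  # fraction of the maximum possible separation on Earth
print(round(distance), "km away; same side of the earth:", ratio < 0.5)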

View File

@@ -0,0 +1,11 @@
import requests
from bs4 import BeautifulSoup
def is_restaurant_open(name):
r = requests.get("https://www.just-eat.dk/area/8000-%C3%A5rhusc")
soup = BeautifulSoup(r.content, features='html5lib')
print(soup.find('div', {'data-test-id': 'listingGroupOpen'}))
is_restaurant_open("stop2shop")

View File

@@ -1,14 +1,22 @@
from datetime import datetime
from pathlib import Path
import requests
import cv2
from datetime import datetime, timedelta
from pytz import timezone
from server.nightr.util import Context, Prediction
from ..util import Context, Prediction
def camImgStrat(context : Context) -> Prediction:
img = cv2.imread('night.jpg',0)
"""
The contents of the camera image
"""
img = cv2.imread(str(Path(__file__).parent.joinpath("night.jpg")), 0)
average = img.mean(axis=0).mean(axis=0)
print(average)
p = Prediction()
p.weight = 0.7
if average < 100:
p.probability = 1.0
p.reasons.append('Image was dark')
@@ -17,15 +25,43 @@ def camImgStrat(context : Context) -> Prediction:
p.probability = 0.0
return p
def australiaStrat(context : Context) -> Prediction:
"""
Using time in Australia
"""
australia = timezone('Australia/Melbourne')
t = datetime.now().astimezone(australia)
hour = t.hour
p = Prediction()
if hour > 22 or hour < 6:
p.probability = 1.0
p.reasons.append('It\'s day-time in Australia')
else:
p.probability = 0.0
p.reasons.append('It\'s night-time in Australia')
else:
p.probability = 1.0
p.reasons.append('It\'s day-time in Australia')
return p
def tv2newsStrat(context : Context) -> Prediction:
r = requests.get('http://mpx.services.tv2.dk/api/latest')
data = r.json()
publish_dates = [(x['pubDate'])//1000 for x in data][:10]
delta_times = []
for i in range(len(publish_dates)):
if i == 0 : continue
delta_times.append(publish_dates[i-1] - publish_dates[i])
avg_delta = 0
for d in delta_times:
avg_delta += d
avg_timestamp = avg_delta // len(delta_times) // 60
p = Prediction()
if avg_timestamp < 0:
p.weight = 0.0
else:
p.weight = 0.7
p.probability = 1.0 if avg_timestamp > 50 else 0.0
p.reasons.append('There were ' + ('few' if avg_timestamp > 50 else 'many') + ' recent articles on TV2 News')
return p
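
tv2newsStrat boils down to the average gap, in minutes, between the ten most recent pubDate values. A self-contained version of that calculation with invented timestamps (epoch seconds, newest first):

publish_dates = [1554550000, 1554546400, 1554542800, 1554539200]  # made-up, one hour apart

delta_times = [publish_dates[i - 1] - publish_dates[i] for i in range(1, len(publish_dates))]
avg_gap_minutes = sum(delta_times) // len(delta_times) // 60

probability_of_night = 1.0 if avg_gap_minutes > 50 else 0.0
print(avg_gap_minutes, probability_of_night)  # 60 minutes between articles -> few articles -> night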

View File

@@ -0,0 +1 @@
{"help": "https://portal.opendata.dk/api/3/action/help_show?name=datastore_search", "success": true, "result": {"include_total": true, "resource_id": "2a82a145-0195-4081-a13c-b0e587e9b89c", "fields": [{"type": "int", "id": "_id"}, {"type": "text", "id": "date"}, {"type": "text", "id": "garageCode"}, {"type": "int4", "id": "totalSpaces"}, {"type": "int4", "id": "vehicleCount"}], "records_format": "objects", "records": [{"_id": 1, "date": "2019/04/06 14:30:01", "garageCode": "NORREPORT", "totalSpaces": 80, "vehicleCount": 61}, {"_id": 2, "date": "2019/04/06 14:30:01", "garageCode": "SCANDCENTER", "totalSpaces": 1240, "vehicleCount": 1033}, {"_id": 6, "date": "2019/04/06 14:30:01", "garageCode": "SALLING", "totalSpaces": 700, "vehicleCount": 575}, {"_id": 7, "date": "2019/04/06 14:30:01", "garageCode": "DOKK1", "totalSpaces": 1000, "vehicleCount": 0}, {"_id": 8, "date": "2019/04/06 14:30:01", "garageCode": "Navitas", "totalSpaces": 449, "vehicleCount": 208}, {"_id": 9, "date": "2019/04/06 14:30:01", "garageCode": "NewBusgadehuset", "totalSpaces": 105, "vehicleCount": 101}, {"_id": 3, "date": "2019/04/06 14:30:01", "garageCode": "BRUUNS", "totalSpaces": 953, "vehicleCount": 598}, {"_id": 4, "date": "2019/04/06 14:30:01", "garageCode": "MAGASIN", "totalSpaces": 378, "vehicleCount": 361}, {"_id": 5, "date": "2019/04/06 14:30:01", "garageCode": "KALKVAERKSVEJ", "totalSpaces": 210, "vehicleCount": 278}, {"_id": 10, "date": "2019/04/06 14:30:01", "garageCode": "Urban Level 1", "totalSpaces": 319, "vehicleCount": 99}, {"_id": 11, "date": "2019/04/06 14:30:01", "garageCode": "Urban Level 2+3", "totalSpaces": 654, "vehicleCount": 170}], "_links": {"start": "/api/3/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", "next": "/api/3/action/datastore_search?offset=100&resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c"}, "total": 11}}

View File

@@ -1,63 +1,7 @@
import requests
from bs4 import BeautifulSoup
import pandas as pd
import urllib.request
from datetime import datetime, timedelta
import json
def determine_month():
ds = pd.read_excel(urllib.request.urlopen('https://sundogbaelt.dk/wp-content/uploads/2019/04/trafiktal-maaned.xls'))
cur_year = 2019
amount_of_cur_year = sum([x == cur_year for x in ds['År']])
cur_year_total = sum(ds['Total'][1:amount_of_cur_year+1])
last_year_total = sum(ds['Total'][amount_of_cur_year+1:amount_of_cur_year+13])
return (12/(last_year_total//cur_year_total))+1
def is_tide():
month = determine_month()
tide_data = requests.get('https://www.dmi.dk/fileadmin/user_upload/Bruger_upload/Tidevand/2019/Aarhus.t.txt')
lines = tide_data.text[570:].split('\n')
tuples = [x.split('\t') for x in lines]
lel = [[datetime.strptime(x[0], '%Y%m%d%H%M'), x[1]] for x in tuples[:-1]]
matches = [[x[0], int(x[1])] for x in lel if x[0].month == month]
all_the_data = requests.get('https://www.dmi.dk/NinJo2DmiDk/ninjo2dmidk?cmd=odj&stations=22331&datatype=obs')
current_water_level = json.loads(all_the_data.content)[0]['values'][-1]['value']
# Generate average of when the water is high
last_match = matches[0]
moments = []
for idx, water_level in enumerate(matches[1:]):
#print(last_match[1], water_level[1])
diff = abs(last_match[1]) + abs(water_level[1])
time_diff = (water_level[0] - last_match[0]).seconds
average_inc = time_diff/diff
average_delta = timedelta(seconds=average_inc)
if last_match[1] < 0: # Increasing
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] += 1
elif last_match[1] > 0: # Decreasing
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] -= 1
last_match = water_level
moments.append(time[0])
night = sum([1 for x in moments if 6 >= x.hour or x.hour >= 22])
return night / len(moments)
import requests
from bs4 import BeautifulSoup
def tmp():
@@ -66,32 +10,13 @@ def tmp():
json.dump(r.json(), f)
def read_tmp():
with open('traffic_data_13_23.json') as f:
data = json.load(f)
number = sum([cars['vehicleCount'] for cars in data['result']['records']])
print(number / len(data['result']['records']))
def scrape_traffic():
r = requests.get('https://portal.opendata.dk/api/3/action/datastore_search?resource_id=b3eeb0ff-c8a8-4824-99d6-e0a3747c8b0d')
night_avr = 3.38
day_avr = None
data = r.json()
sum = 0
len = 0
for lel in data['result']['records']:
sum += lel['vehicleCount']
len += 1
curr_avg = len / sum
diff= day_avr - night_avr
if curr_avg >= day_avr:
return 0.0
elif curr_avg <= night_avr:
return 1.0
res = 1 - curr_avg / diff
assert(res < 1 and res > 0)
return res
def scrape_weather():
@@ -112,3 +37,5 @@ def scrape_dmi_aarhus():
return 0.0
#adak_latest_time, adak_latest_temp_aarhus = max(adak_timeserie.items(), key= lambda x : x[0])
read_tmp()

View File

@@ -1,12 +0,0 @@
from server.nightr.util import Context, Prediction
def probability(context: Context) -> Prediction:
"""
How many players are currently online on Steam.
"""
p = Prediction()
p.probability = 0.2
p.reasons.append("CSGO has more than 10.000 online players")
return p

View File

@@ -0,0 +1,22 @@
import pandas as pd
import urllib.request
import json
import requests
def determine_month():
ds = pd.read_excel(urllib.request.urlopen('https://sundogbaelt.dk/wp-content/uploads/2019/04/trafiktal-maaned.xls'))
cur_year = 2019
amount_of_cur_year = sum([x == cur_year for x in ds['År']])
cur_year_total = sum(ds['Total'][1:amount_of_cur_year+1])
last_year_total = sum(ds['Total'][amount_of_cur_year+1:amount_of_cur_year+13])
return ((12/(last_year_total//cur_year_total))+1), cur_year_total, last_year_total
def write_json(url, data_name, time):
r = requests.get(url)
with open(f"{data_name}_{time}.json", 'w') as f:
json.dump(r.json(), f)

View File

@@ -0,0 +1,44 @@
from sklearn import svm
from sklearn.externals import joblib
import requests
import glob
import json
import numpy as np
from server.nightr.strategies.strat_utils import write_json
def find_data(time):
write_json("https://portal.opendata.dk/api/3/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", "parking_aarhus", time)
def load_data():
X = []
Y = []
for filename in glob.glob("parking_aarhus*"):
p_class = '2330' in filename
with open(filename) as file:
data = json.load(file)
records = data['result']['records']
frequencies = [house['vehicleCount'] / house['totalSpaces'] for house in records]
X.append(frequencies)
Y.append(int(p_class))
return np.array(X), np.array(Y)
def train():
X, Y = load_data()
classifier = svm.SVC(C=10, gamma=0.01, probability=True)
classifier.fit(X, Y)
joblib.dump(classifier, "nightness_classifier.pkl")
def predict(X):
classifier = joblib.load("nightness_classifier.pkl")
prob = classifier.predict_proba(X)
return prob[0, 1]
train()
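
Once trained, the classifier expects a single row of occupancy ratios (vehicleCount / totalSpaces) in the same garage order as the training files. Below is a hypothetical feature row built from a trimmed response of the same shape as the parking endpoint returns; the predict call itself is left as a comment because it needs the pickled model on disk.

# A trimmed, invented response in the shape returned by the parking datastore_search endpoint.
sample = {"result": {"records": [
    {"garageCode": "NORREPORT", "totalSpaces": 80, "vehicleCount": 61},
    {"garageCode": "DOKK1", "totalSpaces": 1000, "vehicleCount": 12},
]}}

records = sample["result"]["records"]
frequencies = [[r["vehicleCount"] / r["totalSpaces"] for r in records]]  # shape (1, n_garages)
print(frequencies)

# predict(np.array(frequencies)) would then return the probability of the "night" class,
# assuming nightness_classifier.pkl was produced by train() on files with the same garage order.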

View File

@@ -0,0 +1,86 @@
import calendar
from datetime import datetime, timedelta
import json
import requests
from .strat_utils import determine_month
from ..util import Context, Prediction
def is_tide(context: Context) -> Prediction:
"""
Determine whether or not it is night in Aarhus based on the current water level and which month we are in,
where the month is derived from the number of cars driving across the Storebæltsbro.
"""
p = Prediction()
month, cur_year_total_cars, last_year_total_cars = determine_month()
month = int(month)
p.reasons.append(f"Because the month is f{calendar.month_name[month]}")
p.reasons.append(f"Because the number of cars having driven on the Storbæltsbro is f{cur_year_total_cars}")
p.reasons.append(f"And because the number of cars having driven over it in the last year is f{last_year_total_cars}")
tide_data = requests.get('https://www.dmi.dk/fileadmin/user_upload/Bruger_upload/Tidevand/2019/Aarhus.t.txt')
lines = tide_data.text[570:].split('\n')
tuples = [x.split('\t') for x in lines]
lel = [[datetime.strptime(x[0], '%Y%m%d%H%M'), x[1]] for x in tuples[:-1]]
matches = [[x[0], int(x[1])] for x in lel if x[0].month == month]
all_the_data = requests.get('https://www.dmi.dk/NinJo2DmiDk/ninjo2dmidk?cmd=odj&stations=22331&datatype=obs')
current_water_level = int(json.loads(all_the_data.content)[0]['values'][-1]['value'])
# Generate average of when the water is high
last_match = matches[0]
moments = []
for idx, water_level in enumerate(matches[1:]):
#print(last_match[1], water_level[1])
diff = abs(last_match[1]) + abs(water_level[1])
time_diff = (water_level[0] - last_match[0]).seconds
average_inc = time_diff/diff
average_delta = timedelta(seconds=average_inc)
if last_match[1] < 0 and last_match[1] < current_water_level: # Increasing
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] += 1
elif last_match[1] < 0 and last_match[1] > current_water_level:
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] -= 1
elif last_match[1] > 0 and last_match[1] > current_water_level: # Decreasing
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] -= 1
elif last_match[1] > 0 and last_match[1] < current_water_level:
time = last_match
while time[1] != current_water_level:
time[0] += average_delta
time[1] += 1
last_match = water_level
moments.append(time[0])
night = sum([1 for x in moments if 6 >= x.hour or x.hour >= 22])
p.reasons.append(f"And because the number of times the water is at the current level at nighttime is: {night}, compared to the total amount of times in {calendar.month_name[month]}, being {len(moments)}")
p.probability = night / len(moments)
return p
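
The hairy part of is_tide is the stepping loop: given two consecutive tide extremes and the current water level, it estimates when the water passes that level by moving one centimetre at a time at the average rate between the extremes. A simplified, self-contained version of a single such step, with invented numbers:

from datetime import datetime, timedelta

# Two consecutive tide extremes (time, level in cm) and a current level between them; all invented.
low = (datetime(2019, 4, 6, 3, 0), -55)
high = (datetime(2019, 4, 6, 9, 13), 58)
current_water_level = 10

# Average seconds it takes the water to move one centimetre between the two extremes.
seconds_per_cm = (high[0] - low[0]).seconds / (abs(low[1]) + abs(high[1]))

# Step from the low extreme upwards until the current level is reached.
time, level = low
while level != current_water_level:
    time += timedelta(seconds=seconds_per_cm)
    level += 1

print(time)  # estimated moment the water passes the current level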

View File

@@ -0,0 +1,40 @@
import requests
from bs4 import BeautifulSoup
from datetime import datetime
from ..util import Prediction, Context
def update():
requests.post('https://euw.op.gg/summoner/ajax/renew.json/', data={'summonerId': 34009256})
def check_games(context: Context) -> Prediction:
"""
Is Alexander's upstairs neighbour currently playing League of Legends?
"""
update()
r = requests.get('https://euw.op.gg/summoner/userName=Im+Eating+Pros')
#if not "is not in an active game" in str(r.content):
# return 1.0
p = Prediction()
soup = BeautifulSoup(r.content, features='html5lib')
timestamp = int(soup.find('div', {'class': 'GameItemList'}).find('div', {'class': 'GameItem'})['data-game-time'])
last_played_game = datetime.fromtimestamp(timestamp)
last_game_in_hours = (((datetime.now() - last_played_game).seconds)/60/60)
if last_game_in_hours < 2:
p.reasons.append("Alexanders upstairs neighbour is currently playing league")
p.probability = 0.8
else:
last_game_in_hours = min(24.0, last_game_in_hours)
p.reasons.append(f"Alexanders upstairs neighbour has not played league for {last_game_in_hours} hours!")
p.probability = 1 - (last_game_in_hours / 24)
return p
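
The fallback branch maps hours since the neighbour's last game linearly onto a probability, capped at one day. A couple of illustrative values; probability_from_idle_hours is not in the repository, it just mirrors that branch.

def probability_from_idle_hours(last_game_in_hours: float) -> float:
    """Mirror of the else-branch: cap at 24 h, then decay linearly from 1.0 to 0.0."""
    hours = min(24.0, last_game_in_hours)
    return 1 - (hours / 24)

print(probability_from_idle_hours(6))   # 0.75
print(probability_from_idle_hours(30))  # 0.0 (capped at 24 hours)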

View File

@@ -1,14 +1,19 @@
from dataclasses import dataclass, field
from typing import List, Tuple
from typing import List, Dict
@dataclass
class Context:
battery: float = 1.0
coordinates: Tuple[float, float] = (0.0, 0.0)
position: Dict[str, float] = field(default_factory=lambda: {'latitude': 53.0, 'longitude': 9.0})
# App settings
in_australia: bool = False
flat_earth: bool = False
@dataclass
class Prediction:
probability: float = 0.5
weight: float = 1.0
reasons: List[str] = field(default_factory=list)
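
A strategy receives a Context and returns a Prediction. Below is a minimal sketch of that contract, with the two dataclasses re-declared locally so it runs on its own; example_strategy is a toy, not part of the app.

from dataclasses import dataclass, field
from typing import Dict, List

@dataclass
class Context:
    battery: float = 1.0
    position: Dict[str, float] = field(default_factory=lambda: {"latitude": 53.0, "longitude": 9.0})
    in_australia: bool = False
    flat_earth: bool = False

@dataclass
class Prediction:
    probability: float = 0.5
    weight: float = 1.0
    reasons: List[str] = field(default_factory=list)

def example_strategy(context: Context) -> Prediction:
    """Toy strategy: pretend a nearly empty battery means it is late at night."""
    p = Prediction()
    p.probability = 1.0 if context.battery < 0.2 else 0.3
    p.reasons.append(f"Battery is at {context.battery:.0%}")
    return p

print(example_strategy(Context(battery=0.1)))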

View File

@@ -1,3 +1,11 @@
Flask==1.0.2
requests==2.21.0
requests-cache==0.4.13
Flask
requests
requests-cache
pytz
beautifulsoup4
pandas
opencv-python
timezonefinder
scikit-learn
html5lib
xlrd