Merge remote-tracking branch 'origin/master'
commit 482e0a9ab9
1  server/nightr/strategies/parking_aarhus_1430.json  Normal file
@@ -0,0 +1 @@
{"help": "https://portal.opendata.dk/api/3/action/help_show?name=datastore_search", "success": true, "result": {"include_total": true, "resource_id": "2a82a145-0195-4081-a13c-b0e587e9b89c", "fields": [{"type": "int", "id": "_id"}, {"type": "text", "id": "date"}, {"type": "text", "id": "garageCode"}, {"type": "int4", "id": "totalSpaces"}, {"type": "int4", "id": "vehicleCount"}], "records_format": "objects", "records": [{"_id": 1, "date": "2019/04/06 14:30:01", "garageCode": "NORREPORT", "totalSpaces": 80, "vehicleCount": 61}, {"_id": 2, "date": "2019/04/06 14:30:01", "garageCode": "SCANDCENTER", "totalSpaces": 1240, "vehicleCount": 1033}, {"_id": 6, "date": "2019/04/06 14:30:01", "garageCode": "SALLING", "totalSpaces": 700, "vehicleCount": 575}, {"_id": 7, "date": "2019/04/06 14:30:01", "garageCode": "DOKK1", "totalSpaces": 1000, "vehicleCount": 0}, {"_id": 8, "date": "2019/04/06 14:30:01", "garageCode": "Navitas", "totalSpaces": 449, "vehicleCount": 208}, {"_id": 9, "date": "2019/04/06 14:30:01", "garageCode": "NewBusgadehuset", "totalSpaces": 105, "vehicleCount": 101}, {"_id": 3, "date": "2019/04/06 14:30:01", "garageCode": "BRUUNS", "totalSpaces": 953, "vehicleCount": 598}, {"_id": 4, "date": "2019/04/06 14:30:01", "garageCode": "MAGASIN", "totalSpaces": 378, "vehicleCount": 361}, {"_id": 5, "date": "2019/04/06 14:30:01", "garageCode": "KALKVAERKSVEJ", "totalSpaces": 210, "vehicleCount": 278}, {"_id": 10, "date": "2019/04/06 14:30:01", "garageCode": "Urban Level 1", "totalSpaces": 319, "vehicleCount": 99}, {"_id": 11, "date": "2019/04/06 14:30:01", "garageCode": "Urban Level 2+3", "totalSpaces": 654, "vehicleCount": 170}], "_links": {"start": "/api/3/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", "next": "/api/3/action/datastore_search?offset=100&resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c"}, "total": 11}}
server/nightr/strategies/strat_utils.py
@@ -1,6 +1,7 @@
import pandas as pd
import urllib.request
import json
import requests


def determine_month():
    # Monthly traffic figures (trafiktal-maaned) published by Sund & Bælt.
    ds = pd.read_excel(urllib.request.urlopen('https://sundogbaelt.dk/wp-content/uploads/2019/04/trafiktal-maaned.xls'))
@@ -12,3 +13,10 @@ def determine_month():
    last_year_total = sum(ds['Total'][amount_of_cur_year+1:amount_of_cur_year+13])

    # Rough estimate of the current month, derived from the ratio of last year's
    # traffic total to this year's, returned together with both totals.
    return ((12/(last_year_total//cur_year_total))+1), cur_year_total, last_year_total


def write_json(url, data_name, time):
    # Fetch a JSON endpoint and store the response as "<data_name>_<time>.json".
    r = requests.get(url)
    with open(f"{data_name}_{time}.json", 'w') as f:
        json.dump(r.json(), f)
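A minimal way to exercise the two helpers above (a sketch, not part of the commit; the "1430" tag simply mirrors the snapshot file added in this commit):

# hypothetical usage sketch, not part of this commit
from server.nightr.strategies.strat_utils import write_json, determine_month

PARKING_URL = ("https://portal.opendata.dk/api/3/action/datastore_search"
               "?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c")

write_json(PARKING_URL, "parking_aarhus", "1430")        # -> parking_aarhus_1430.json
month_estimate, cur_total, last_total = determine_month()
print(month_estimate, cur_total, last_total)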
44  server/nightr/strategies/svm_strat.py  Normal file
@@ -0,0 +1,44 @@
from sklearn import svm
from sklearn.externals import joblib
import requests
import glob
import json
import numpy as np

from server.nightr.strategies.strat_utils import write_json


def find_data(time):
    # Snapshot the Aarhus parking-garage feed to "parking_aarhus_<time>.json".
    write_json("https://portal.opendata.dk/api/3/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", "parking_aarhus", time)


def load_data():
    X = []
    Y = []

    for filename in glob.glob("parking_aarhus*"):
        # Snapshots whose filename contains "2330" (the 23:30 collections) form the positive "night" class.
        p_class = '2330' in filename

        with open(filename) as file:
            data = json.load(file)

        records = data['result']['records']
        # One feature per garage: fraction of its spaces currently occupied.
        frequencies = [house['vehicleCount'] / house['totalSpaces'] for house in records]
        X.append(frequencies)
        Y.append(int(p_class))

    return np.array(X), np.array(Y)


def train():
    X, Y = load_data()
    classifier = svm.SVC(C=10, gamma=0.01, probability=True)
    classifier.fit(X, Y)
    joblib.dump(classifier, "nightness_classifier.pkl")


def predict(X):
    classifier = joblib.load("nightness_classifier.pkl")
    prob = classifier.predict_proba(X)
    # Probability of class 1, i.e. "night".
    return prob[0, 1]


# Train (and persist) the classifier as soon as the module is imported.
train()
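Because svm_strat.py calls train() at import time, the parking_aarhus_* snapshots (e.g. one daytime and one 23:30 collection via write_json/find_data) must already be on disk before the module is imported. A hypothetical prediction run, with file names assumed rather than taken from the commit, then looks like:

# hypothetical usage sketch, not part of this commit
# assumes parking_aarhus_1430.json, parking_aarhus_2330.json, ... already exist in the working directory
from server.nightr.strategies import svm_strat   # importing trains and saves nightness_classifier.pkl

X, _ = svm_strat.load_data()
print(svm_strat.predict(X[:1]))                  # probability that this snapshot looks like "night"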