backend.py
from flask import Flask, jsonify, request
from nodes import (data_network, read_data, data_tree,
                   filter_categorical_cols, sort_cols, decorate_id_tree)
import pickle
import csv
from collections import defaultdict

# Allow cross-origin requests (CORS) so a locally served frontend can call this API.
from flask_cors import CORS

app = Flask(__name__)
cors = CORS(app)
data = read_data('HealthHack2016_Morgana_Data_permuted.csv')

# variables.csv columns: variable, name
with open('variables.csv') as f:
    variables = [row for row in csv.DictReader(f)]
var_map = {r['variable']: r['name'] for r in variables}

# attributes.csv columns: variable, value, name, shortname
with open('attributes.csv') as f:
    attributes = [row for row in csv.DictReader(f)]
var_attr_name = defaultdict(dict)
for row in attributes:
    variable = row['variable']
    name = row['shortname']
    # pandas reads the data values in as floats, so coerce the CSV strings to
    # match; fall back to the raw string for non-numeric values.
    try:
        attribute = float(row['value'])
    except (TypeError, ValueError):
        attribute = row['value']
    var_attr_name[variable][attribute] = name
var_attr_name = dict(var_attr_name)

# Keep only the categorical columns that have attribute names, in sorted order.
cols = filter_categorical_cols(data)
cols = [col for col in cols if col in var_attr_name]
cols = sort_cols(data, cols)
data = data[['id'] + cols]
# Load the cached network results if a previous run saved them.
try:
    with open('network.dat', 'rb') as f:
        network_memo = pickle.load(f)
except FileNotFoundError:
    network_memo = {}
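# The cache key is the exact tuple of columns, so deleting network.dat (and
# sun.dat below) invalidates the caches after the underlying data changes.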
@app.route("/api/network")
def network(cols=cols):
    limit = int(request.args.get('limit') or 40)
    exclude = request.args.get('exclude')
    if exclude:
        # exclude is treated as a comma-separated list of column names to drop.
        excluded = set(exclude.split(','))
        cols = [col for col in cols if col not in excluded]
    cols = cols[:limit]
    # Compute the network once per column subset and persist it to disk.
    if tuple(cols) not in network_memo:
        network_memo[tuple(cols)] = data_network(
            data, cols, [var_map[col] for col in cols])
        with open('network.dat', 'wb') as f:
            pickle.dump(network_memo, f)
    return jsonify(network_memo[tuple(cols)])
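# Example request (hypothetical column name), assuming the dev server on port 5000:
#   curl 'http://localhost:5000/api/network?limit=10&exclude=smoking_status'
# The first call for a given column subset computes and caches the graph;
# repeat calls are served straight from network_memo.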
try:
    with open('sun.dat', 'rb') as f:
        sun_memo = pickle.load(f)
except FileNotFoundError:
    sun_memo = {}
@app.route("/api/sunburst")
def sunburst(cols=cols):
    limit = int(request.args.get('limit') or 5)
    exclude = request.args.get('exclude')
    if exclude:
        excluded = set(exclude.split(','))
        cols = [col for col in cols if col not in excluded]
    cols = cols[:limit]
    if tuple(cols) not in sun_memo:
        sun_memo[tuple(cols)] = data_tree(data, cols, var_attr_name)
        with open('sun.dat', 'wb') as f:
            pickle.dump(sun_memo, f)
    return jsonify(sun_memo[tuple(cols)])
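# Example request, assuming the dev server on port 5000:
#   curl 'http://localhost:5000/api/sunburst?limit=3'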
@app.route("/api/patient", methods=['GET', 'POST'])
def patient():
    # The selector maps variable names to required values; "*" matches anything.
    selector = request.get_json(force=True)
    out_data = data[['id'] + list(selector.keys())]
    for var, val in selector.items():
        if val != "*":
            out_data = out_data[out_data[var] == val]
    # Write the filtered rows to disk for a separate static file server to serve.
    with open('data.csv', 'w') as f:
        out_data.to_csv(f)
    return jsonify({'url': 'http://localhost:8000/data.csv'})
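# Example request (hypothetical variable names and values):
#   curl -X POST http://localhost:5000/api/patient \
#        -H 'Content-Type: application/json' \
#        -d '{"diabetes": 1.0, "sex": "*"}'
# The returned URL assumes a static file server on port 8000, e.g.
#   python -m http.server 8000
# run from this directory.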
if __name__ == "__main__":
    app.run()
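# A minimal way to run locally: `python backend.py` starts Flask's
# development server on http://127.0.0.1:5000 by default.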