Skip to content

Commit

Permalink
Uploaded public code and data
Browse files Browse the repository at this point in the history
  • Loading branch information
Belen Carolina Saldias Fuentes committed Jan 11, 2019
1 parent ee21947 commit bd4f4df
Show file tree
Hide file tree
Showing 11 changed files with 27,561 additions and 0 deletions.
1,228 changes: 1,228 additions & 0 deletions 00. PYMC | BBVI - Bayesian YN question type.ipynb

Large diffs are not rendered by default.

869 changes: 869 additions & 0 deletions 01. PYMC - Bayesian ABCD question type.ipynb

Large diffs are not rendered by default.

360 changes: 360 additions & 0 deletions animals/objects.csv

Large diffs are not rendered by default.

24 changes: 24 additions & 0 deletions animals/users.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
initial_yn_questions,id
879,6
890,4
889,15
886,3
880,8
902,16
911,13
903,19
924,18
927,7
908,11
881,14
922,27
858,17
895,23
888,20
885,21
896,12
920,2
868,26
906,10
887,22
940,5
1,423 changes: 1,423 additions & 0 deletions animals/votes_abcd.csv

Large diffs are not rendered by default.

12,818 changes: 12,818 additions & 0 deletions animals/votes_yn.csv

Large diffs are not rendered by default.

359 changes: 359 additions & 0 deletions catalina/objects.csv

Large diffs are not rendered by default.

20 changes: 20 additions & 0 deletions catalina/users.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
initial_yn_questions,id
868,19
883,15
899,18
883,11
879,10
872,21
913,2
912,7
899,5
873,20
885,9
919,12
925,8
905,17
883,16
902,6
880,4
856,13
880,14
1,038 changes: 1,038 additions & 0 deletions catalina/votes_abcd.csv

Large diffs are not rendered by default.

9,348 changes: 9,348 additions & 0 deletions catalina/votes_yn.csv

Large diffs are not rendered by default.

74 changes: 74 additions & 0 deletions custom_metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
import numpy as np
from matplotlib import pyplot as plt
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.metrics import f1_score

class ListTable(list):
    """A list of rows that renders itself as an HTML table in notebooks.

    Each element is treated as a row (an iterable of cell values); the
    ``_repr_html_`` hook lets IPython/Jupyter display the table directly.
    """

    def _repr_html_(self):
        """Build the HTML string consumed by the notebook display system."""
        pieces = ["<table>"]
        for row in self:
            pieces.append("<tr>")
            pieces.extend("<td>{0}</td>".format(cell) for cell in row)
            pieces.append("</tr>")
        pieces.append("</table>")
        return ''.join(pieces)

def draw_theta(theta, labels=None):
    """Row-normalize a confusion matrix, print it, and plot it as a heatmap.

    Parameters
    ----------
    theta : np.array
        Square confusion matrix of raw counts; each row is normalized to
        per-class proportions before printing/drawing.
    labels : list of str, optional
        Class names used as tick labels on both axes. Defaults to no
        labels (empty list).
    """
    if labels is None:
        labels = []

    # Normalize each row so cells show proportions of the true class.
    # NOTE(review): an all-zero row (a class with no samples) would divide
    # by zero here — confirm callers never pass such a matrix.
    row_sums = theta.sum(axis=1)
    new_matrix = theta / row_sums[:, np.newaxis]
    print(np.round(new_matrix, 3))

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.imshow(new_matrix, cmap=plt.cm.Blues, interpolation='nearest')
    plt.title('Confusion matrix\n')
    ticks = list(range(len(theta)))  # one tick per class
    ax.set_xticks(ticks)
    ax.set_yticks(ticks)
    ax.set_xticklabels(labels)
    ax.set_yticklabels(labels)
    plt.ylabel('True Class')
    plt.xlabel('Predicted Class')
    plt.show()

def draw_metrics(y_true, y_pred, labels):
    """Plot the confusion matrix and return a per-class metrics table.

    Parameters
    ----------
    y_true, y_pred : array-like
        Ground-truth and predicted class labels.
    labels : sequence of str
        Class names, in the column order of scikit-learn's per-class
        scores (``average=None``).

    Returns
    -------
    ListTable
        Rows of precision / recall / F1 per class (rounded to 3 decimals),
        rendered as an HTML table in notebooks. Accuracy is printed rather
        than tabulated because it is a single scalar.
    """
    draw_theta(confusion_matrix(y_true, y_pred), labels)
    table = ListTable()
    # Fixed typo in the printed message: was "Acurracy score".
    print("Accuracy score", accuracy_score(y_true, y_pred))
    table.append(["Metrics / Classes"] + list(labels))
    table.append(["Precision score"] + list(np.round(precision_score(y_true, y_pred, average=None), 3)))
    table.append(["Recall score"] + list(np.round(recall_score(y_true, y_pred, average=None), 3)))
    table.append(["F score"] + list(np.round(f1_score(y_true, y_pred, average=None), 3)))
    return table

def metrics(y_true, y_pred):
    """Return per-class scores keyed 'p' (precision), 'r' (recall), 'f' (F1).

    Each value is the array produced by the corresponding scikit-learn
    scorer with ``average=None`` (one entry per class).
    """
    scorers = (
        ('p', precision_score),
        ('r', recall_score),
        ('f', f1_score),
    )
    return {key: scorer(y_true, y_pred, average=None) for key, scorer in scorers}

0 comments on commit bd4f4df

Please sign in to comment.