# Adapted from https://github.com/FakeNewsChallenge/fnc-1/blob/master/scorer.py
# Original credit - @bgalbraith

LABELS = ['agree', 'disagree', 'discuss', 'unrelated']
LABELS_RELATED = ['unrelated', 'related']
RELATED = LABELS[0:3]
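
# FNC-1 weighted scoring: a correctly classified 'unrelated' pair earns 0.25,
# a pair correctly kept within the related stances (agree/disagree/discuss)
# earns 0.25, and an exact match on a related stance earns a further 0.75,
# so a perfect prediction on a related pair is worth 1.0 in total.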
def score_submission(gold_labels, test_labels):
    score = 0.0
    cm = [[0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0],
          [0, 0, 0, 0]]

    for i, (g, t) in enumerate(zip(gold_labels, test_labels)):
        g_stance, t_stance = g, t
        if g_stance == t_stance:
            score += 0.25
            if g_stance != 'unrelated':
                # exact match on a related stance earns the extra 0.50
                score += 0.50
        if g_stance in RELATED and t_stance in RELATED:
            score += 0.25

        cm[LABELS.index(g_stance)][LABELS.index(t_stance)] += 1

    return score, cm
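
# For example, score_submission(['unrelated', 'agree'], ['unrelated', 'discuss'])
# returns a score of 0.5: the correct 'unrelated' earns 0.25, and the second
# pair earns 0.25 for being kept within the related stances even though the
# exact stance is wrong.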

def print_confusion_matrix(cm):
    lines = []
    header = "|{:^11}|{:^11}|{:^11}|{:^11}|{:^11}|".format('', *LABELS)
    line_len = len(header)
    lines.append("-"*line_len)
    lines.append(header)
    lines.append("-"*line_len)

    hit = 0
    total = 0
    for i, row in enumerate(cm):
        hit += row[i]
        total += sum(row)
        lines.append("|{:^11}|{:^11}|{:^11}|{:^11}|{:^11}|".format(LABELS[i],
                                                                   *row))
    lines.append("-"*line_len)
    print('\n'.join(lines))

def report_score(actual, predicted):
    score, cm = score_submission(actual, predicted)
    best_score, _ = score_submission(actual, actual)

    print_confusion_matrix(cm)
    print("Score: " + str(score) + " out of " + str(best_score) +
          "\t(" + str(score*100/best_score) + "%)")
    return score*100/best_score

if __name__ == "__main__":
    actual = [0, 0, 0, 0, 1, 1, 0, 3, 3]
    predicted = [0, 0, 0, 0, 1, 1, 2, 3, 3]

    report_score([LABELS[e] for e in actual], [LABELS[e] for e in predicted])
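
# A minimal usage sketch from another module, assuming this file is saved as
# score.py and that `gold` and `predicted` are lists of stance strings
# produced elsewhere (both names are illustrative):
#
#   from score import report_score
#
#   gold = ['agree', 'unrelated', 'discuss']
#   predicted = ['agree', 'unrelated', 'disagree']
#   relative_score = report_score(gold, predicted)  # prints the confusion
#                                                   # matrix, returns score %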