evaluate.py
import numpy as np
import metric


def print_evl(predict, label):
    """
    Utility function that prints the evaluation results:
        accuracy
        recall
        precision
        kappa
        contingency table

    Args:
        predict: predictions
        label: ground-truth labels

    Returns:
        None
    """
    print("Accuracy: {}".format(metric.accuracy(predict, label)))
    print("Recall: {}".format(metric.recall(predict, label)))
    print("Precision: {}".format(metric.precision(predict, label)))
    print("Kappa: {}".format(metric.kappa(predict, label)))
    print(
        "Contingency Table:\n"
        "{0:5d} {2:5d}\n"
        "{3:5d} {1:5d}\n"
        "Number of Inputs: {4}"
        .format(*metric.basic_metrics(predict, label))
    )
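
# Quick sanity-check example (a minimal sketch, not part of the original file):
# the arrays below are made-up binary predictions/labels, and the behaviour of
# the local `metric` helpers is assumed from how they are called above.
#
#     predict = np.array([1, 0, 1, 1])
#     label = np.array([1, 0, 0, 1])
#     print_evl(predict, label)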


def evl_result(val_df):
    """
    Utility function that analyses the results from a model.
    Prints accuracy, recall, precision, kappa and the contingency table,
    evaluated:
        per image
        per study using the highest score
        per study using the lowest score
        per study using the average score

    Args:
        val_df: result dataframe with "prediction", "label" and "study" columns

    Returns:
        None
    """
    print("****** Evaluation per Image")
    print_evl(
        np.asarray(val_df["prediction"].tolist()),
        np.asarray(val_df["label"].tolist())
    )
    print("****** Evaluation per Study Using Highest Score")
    print_evl(
        np.asarray(val_df.groupby("study")["prediction"].max().tolist()),
        np.asarray(val_df.groupby("study")["label"].mean().tolist())
    )
    print("****** Evaluation per Study Using Lowest Score")
    print_evl(
        np.asarray(val_df.groupby("study")["prediction"].min().tolist()),
        np.asarray(val_df.groupby("study")["label"].mean().tolist())
    )
    print("****** Evaluation per Study Using Average Score")
    print_evl(
        np.asarray(val_df.groupby("study")["prediction"].mean().tolist()),
        np.asarray(val_df.groupby("study")["label"].mean().tolist())
    )
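

# Illustrative usage sketch (assumption: `val_df` is a pandas DataFrame with
# "study", "prediction" and "label" columns, as implied by evl_result above).
# The demo DataFrame below is made up purely for demonstration.
if __name__ == "__main__":
    import pandas as pd

    demo_df = pd.DataFrame({
        "study": ["s1", "s1", "s2", "s2"],
        "prediction": [1, 0, 0, 0],
        "label": [1, 1, 0, 0],
    })
    evl_result(demo_df)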