-
Notifications
You must be signed in to change notification settings - Fork 0
/
evaluation.py
41 lines (30 loc) · 1.55 KB
/
evaluation.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import torch.nn.functional as F
import torch
from sklearn.metrics import f1_score, roc_auc_score
from utils import fair_metric
def evaluate(x, classifier, hp, encoder, data, args):
    """Evaluate the encoder+classifier pipeline on the validation and test splits.

    Runs a forward pass under ``torch.no_grad`` and reports, per split
    ('val'/'test'): accuracy, AUC-ROC, F1, and the two fairness measures
    (statistical parity, equal opportunity) produced by ``utils.fair_metric``.

    Note: ``x``, ``hp`` and ``args`` are accepted for call-site
    compatibility but are not used in this function.

    Returns:
        Tuple of five dicts keyed by 'val'/'test':
        (accs, auc_rocs, F1s, paritys, equalitys).
    """
    classifier.eval()
    encoder.eval()
    with torch.no_grad():
        # Forward pass: node embeddings, then per-node classification scores.
        h = encoder(data.x, data.edge_index, data.adj_norm_sp)
        output = classifier(h)

    accs, auc_rocs, F1s, paritys, equalitys = {}, {}, {}, {}, {}
    for split, mask in (("val", data.val_mask), ("test", data.test_mask)):
        labels = data.y[mask]
        # Binary prediction: positive class iff raw score > 0.
        preds = (output[mask].squeeze() > 0).type_as(data.y)
        labels_np = labels.cpu().numpy()
        preds_np = preds.cpu().numpy()

        accs[split] = preds.eq(labels).sum().item() / mask.sum().item()
        F1s[split] = f1_score(labels_np, preds_np)
        # AUC-ROC is rank-invariant, so raw (unsquashed) scores are fine here.
        auc_rocs[split] = roc_auc_score(
            labels_np, output[mask].detach().cpu().numpy())
        paritys[split], equalitys[split] = fair_metric(
            preds_np, labels_np, data.sens[mask].cpu().numpy())
    return accs, auc_rocs, F1s, paritys, equalitys