-
Notifications
You must be signed in to change notification settings - Fork 2
/
apoz.py
57 lines (43 loc) · 1.72 KB
/
apoz.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import numpy as np
import torch.nn as nn
from vgg import feature_cfgs, classifier_cfgs
from helper import valid
class APoZ:
    """Average Percentage of Zeros (APoZ) tracker for a VGG-style model.

    Registers a forward hook on every ``nn.ReLU`` in ``model.features`` and
    ``model.classifier`` and accumulates, per unit, the fraction of zero
    activations observed over a validation pass (cf. Hu et al.,
    "Network Trimming").  Call :meth:`get_apoz` after construction to run
    validation and obtain the per-layer APoZ averages.
    """

    def __init__(self, model):
        self.model = model
        # Index of the layer the *next* hook invocation belongs to; hooks
        # fire in forward order and wrap around after each full pass.
        self.idx = 0
        self.num_layer = 0
        # Layers have different widths, so keep a 1-D object array whose
        # elements are per-layer float64 accumulator vectors.  float64 is
        # required: the hook adds fractional zero-percentages in place,
        # which would fail (or corrupt) an int accumulator.
        per_layer = []
        for c in feature_cfgs + classifier_cfgs:
            if c == 'M':  # 'M' marks a max-pool entry, not a layer width
                continue
            per_layer.append(np.zeros(c, dtype=np.float64))
            self.num_layer += 1
        # Fill an empty object array explicitly: np.array(per_layer) would
        # either raise on ragged widths or build an unwanted 2-D array when
        # widths happen to match.
        self.apoz = np.empty(self.num_layer, dtype=object)
        for i, vec in enumerate(per_layer):
            self.apoz[i] = vec
        self.register()
        print(f"Layer(ReLU + Linear) {self.num_layer} module register")

    def get_zero_percent_hook(self, module, input, output):
        """Forward hook: accumulate the per-unit fraction of zeros in *output*.

        Supports 4-D conv activations (N, C, H, W) — per-channel zero
        fraction over the spatial dims, averaged over the batch — and 2-D
        linear activations (N, F) — per-feature zero fraction over the batch.

        Raises:
            ValueError: for any other output dimensionality.
        """
        if output.dim() == 4:
            # Fraction of zero spatial positions per (sample, channel),
            # then mean over the batch dimension.
            p_zero = (output == 0).sum(dim=(2, 3)).float() / (output.size(2) * output.size(3))
            self.apoz[self.idx] += p_zero.mean(dim=0).cpu().numpy()
        elif output.dim() == 2:
            # Fraction of samples for which each feature is zero.
            p_zero = (output == 0).sum(dim=0).float() / output.size(0)
            self.apoz[self.idx] += p_zero.cpu().numpy()
        else:
            raise ValueError(f"{output.dim()} dimension is Not Supported")
        # Advance to the next layer; wrap at the end of a forward pass.
        self.idx += 1
        if self.idx == self.num_layer:
            self.idx = 0

    def register(self):
        """Attach the zero-percentage hook to every ReLU in the model."""
        for part in (self.model.features, self.model.classifier):
            for module in part.modules():
                if isinstance(module, nn.ReLU):
                    module.register_forward_hook(self.get_zero_percent_hook)

    def get_apoz(self, loader, criterion):
        """Run validation, report top-1/top-5 accuracy, and return APoZ.

        Each accumulator holds a sum over ``len(loader)`` batches, so the
        returned object array contains per-layer mean zero fractions.
        """
        top1, top5 = valid(self.model,
                           loader,
                           criterion)
        print(f"top1 : {top1} top5 : {top5}")
        return self.apoz / len(loader)