wrn.py
from tensorflow.keras.layers import Convolution2D, AveragePooling2D
from tensorflow.keras.layers import Input, Add, Activation, Flatten, Dense
from tensorflow.keras.models import Model

def initial_conv(input, reg=None):
    # Stem: a single 3x3 convolution projecting the image to 16 channels, followed by swish.
    x = Convolution2D(16, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(input)
    x = Activation('swish')(x)
    return x

def expand_conv(init, base, k, strides=(1, 1), reg=None):
    # Widening block: two 3x3 convolutions at base * k filters, plus a 1x1 projection
    # shortcut so the residual Add() sees matching channel counts and strides.
    x = Convolution2D(base * k, (3, 3), padding='same', strides=strides, kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(init)
    x = Activation('swish')(x)

    x = Convolution2D(base * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    skip = Convolution2D(base * k, (1, 1), padding='same', strides=strides, kernel_initializer='he_normal',
                         kernel_regularizer=reg,
                         use_bias=False)(init)

    m = Add()([x, skip])
    return m

def conv1_block(input, k=1, reg=None):
    # Identity residual block at width 16 * k: swish -> conv -> swish -> conv, no BatchNorm.
    init = input

    x = Activation('swish')(input)
    x = Convolution2D(16 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    x = Activation('swish')(x)
    x = Convolution2D(16 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    m = Add()([init, x])
    return m

def conv2_block(input, k=1, reg=None):
    # Identity residual block at width 32 * k.
    init = input

    x = Activation('swish')(input)
    x = Convolution2D(32 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    x = Activation('swish')(x)
    x = Convolution2D(32 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    m = Add()([init, x])
    return m

def conv3_block(input, k=1, reg=None):
    # Identity residual block at width 64 * k. Defined for a third stage but not
    # called by create_wide_residual_network below.
    init = input

    x = Activation('swish')(input)
    x = Convolution2D(64 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    x = Activation('swish')(x)
    x = Convolution2D(64 * k, (3, 3), padding='same', kernel_initializer='he_normal',
                      kernel_regularizer=reg,
                      use_bias=False)(x)

    m = Add()([init, x])
    return m

def create_wide_residual_network(input_dim, nb_classes=100, N=2, k=1, reg=None):
    # Builds the network: a stem, then two widening stages (16 * k and 32 * k) with
    # N blocks each. `nb_conv` keeps a running count of convolution layers but is
    # not otherwise used.
    ip = Input(shape=input_dim)

    x = initial_conv(ip, reg=reg)
    nb_conv = 4

    x = expand_conv(x, 16, k, reg=reg)
    nb_conv += 2

    for i in range(N - 1):
        x = conv1_block(x, k, reg=reg)
        nb_conv += 2

    x = Activation('swish')(x)
    x = expand_conv(x, 32, k, strides=(2, 2), reg=reg)
    nb_conv += 2

    for i in range(N - 1):
        x = conv2_block(x, k, reg=reg)
        nb_conv += 2

    x = Activation('swish')(x)
    x = AveragePooling2D((8, 8))(x)
    x = Flatten()(x)

    # Final classifier returns raw logits (no softmax activation).
    x = Dense(nb_classes, kernel_regularizer=reg)(x)

    model = Model(ip, x)
    return model
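

# A minimal usage sketch (not part of the original file): builds the model for
# 32x32 RGB inputs such as CIFAR-100. The optimizer, L2 weight decay, and the
# from_logits loss are assumed settings, chosen only to show how the final
# Dense layer (which has no softmax) can be trained.
if __name__ == "__main__":
    from tensorflow.keras.losses import CategoricalCrossentropy
    from tensorflow.keras.regularizers import l2

    model = create_wide_residual_network((32, 32, 3), nb_classes=100, N=2, k=2,
                                         reg=l2(5e-4))
    model.compile(optimizer="adam",
                  loss=CategoricalCrossentropy(from_logits=True),
                  metrics=["accuracy"])
    model.summary()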