SimpleRNN.py
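"""
SimpleRNN.py: a character-level sequence-to-sequence model that learns to add two
integers in [0, 100). Inputs such as "23+7" and targets such as "30" are one-hot
encoded per character and left-padded with '0' to a fixed length; a SimpleRNN
encoder, RepeatVector, SimpleRNN decoder and a TimeDistributed softmax then
predict the sum one character per time step.
"""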
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import TimeDistributed, Dense, Dropout, SimpleRNN, RepeatVector
from tensorflow.keras.callbacks import EarlyStopping, LambdaCallback
from termcolor import colored
# Vocabulary: the ten digits plus the '+' operator; each character is one one-hot feature.
all_chars = "0123456789+"
num_features = len(all_chars)
print("number of features:", num_features)

# Lookup tables for converting between characters and one-hot indices.
char_to_index = dict((c, i) for i, c in enumerate(all_chars))
index_to_char = dict((i, c) for i, c in enumerate(all_chars))
print(index_to_char)
print(char_to_index)
def generate_data():
    # Create a random addition problem, e.g. "23+7", and its answer, both as strings.
    first = np.random.randint(0, 100)
    second = np.random.randint(0, 100)
    example = str(first) + "+" + str(second)
    label = str(first + second)
    return example, label

print(generate_data())  # sanity check: one example/label pair
hidden_units = 128
max_time_steps = 5  # longest input is "99+99" (5 chars); sums have at most 3 digits

# Encoder-decoder: a SimpleRNN encodes the input string into a single vector,
# RepeatVector feeds that vector to the decoder at every output time step, and a
# TimeDistributed softmax predicts one character per step.
model = Sequential([
    SimpleRNN(hidden_units, input_shape=(None, num_features)),
    RepeatVector(max_time_steps),
    SimpleRNN(hidden_units, return_sequences=True),
    TimeDistributed(Dense(num_features, activation='softmax'))
])
model.summary()
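# Expected layer output shapes (a sketch, given hidden_units=128, max_time_steps=5,
# num_features=11): encoder SimpleRNN -> (None, 128), RepeatVector -> (None, 5, 128),
# decoder SimpleRNN -> (None, 5, 128), TimeDistributed(Dense) -> (None, 5, 11).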
model.compile(
    loss="categorical_crossentropy",
    optimizer="adam",
    # track accuracy so the callbacks below can read 'val_accuracy' from the logs
    metrics=["accuracy"]
)
def vectorize_example(example, label):
    # One-hot encode an example/label pair, left-padding shorter strings with '0'
    # so that both are exactly max_time_steps characters long.
    x = np.zeros((max_time_steps, num_features))
    y = np.zeros((max_time_steps, num_features))
    diff_x = max_time_steps - len(example)
    diff_y = max_time_steps - len(label)
    for i, c in enumerate(example):
        x[i + diff_x, char_to_index[c]] = 1
    for i in range(diff_x):
        x[i, char_to_index["0"]] = 1
    for i, c in enumerate(label):
        y[i + diff_y, char_to_index[c]] = 1
    for i in range(diff_y):
        y[i, char_to_index["0"]] = 1
    return x, y

e, l = generate_data()
print(e, l)
x, y = vectorize_example(e, l)
print(x.shape, y.shape)

def devectorize_example(example):
    # Convert a (max_time_steps, num_features) one-hot/probability matrix back to a string.
    result = [index_to_char[np.argmax(vec)] for vec in example]
    return ''.join(result)

print(devectorize_example(x))
print(devectorize_example(y))
def create_dataset(num_examples=2000):
    # Build a dataset of one-hot encoded addition problems and their answers.
    x = np.zeros((num_examples, max_time_steps, num_features))
    y = np.zeros((num_examples, max_time_steps, num_features))
    for i in range(num_examples):
        e, l = generate_data()
        e_v, l_v = vectorize_example(e, l)
        x[i] = e_v
        y[i] = l_v
    return x, y

x, y = create_dataset()
print(x.shape, y.shape)
print(devectorize_example(x[0]))
print(devectorize_example(y[0]))
# Print validation accuracy after each epoch; this needs metrics=["accuracy"] above
# so that 'val_accuracy' appears in the training logs.
l_cb = LambdaCallback(
    on_epoch_end=lambda epoch, logs: print('{:.2f}'.format(logs['val_accuracy']), end=' _ ')
)
es_cb = EarlyStopping(monitor='val_loss', patience=10)

model.fit(x, y, epochs=500, batch_size=256, validation_split=0.2,
          verbose=False, callbacks=[es_cb, l_cb])
# Evaluate on a small held-out set and print each prediction, colored green if it
# matches the true sum and red otherwise.
x_test, y_test = create_dataset(10)
preds = model.predict(x_test)
for i, pred in enumerate(preds):
    y_true = devectorize_example(y_test[i])
    y_hat = devectorize_example(pred)
    col = "green"
    if y_true != y_hat:
        col = "red"
    out = "Input: " + devectorize_example(x_test[i]) + " Output: " + y_true + " Predicted: " + y_hat
    print(colored(out, col))
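
# Optional sketch (not part of the original script): aggregate exact-match accuracy
# over the same 10 test examples, reusing devectorize_example defined above.
full_seq_acc = np.mean([
    devectorize_example(pred) == devectorize_example(y_test[i])
    for i, pred in enumerate(preds)
])
print("exact-match accuracy on the test set: {:.2f}".format(full_seq_acc))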