# sgd_toy_model.py — toy JAX MLP trained with SGD on a small logic-gate dataset.
import time
import jax
import jax.numpy as jnp
import dataset
# Network architecture: 2 inputs -> 2 hidden units -> 1 output.
layer_sizes = [2, 2, 1]
# SGD learning rate (read as a module global by update_params; baked in at jit trace time).
lr = 0.1
# Total number of training iterations.
max_iter = 5000
def init_random_params(layer_sizes, key, init="normal"):
    """Initialize (weight, bias) parameter pairs for each layer of an MLP.

    Args:
        layer_sizes: layer widths, e.g. [2, 2, 1]; consecutive pairs (m, n)
            define a weight of shape (m, n) and a bias of shape (n,).
        key: JAX PRNG key used to drive all random draws.
        init: "normal" (default) or "uniform" sampling distribution.

    Returns:
        A list of (w, b) tuples, one per layer.

    Raises:
        ValueError: if ``init`` is not "uniform" or "normal".
    """
    if init == "uniform":
        sample = jax.random.uniform
    elif init == "normal":
        sample = jax.random.normal
    else:
        raise ValueError("only uniform or normal initialization allowed")
    params = []
    for m, n in zip(layer_sizes[:-1], layer_sizes[1:]):
        # Split the key so every weight/bias draw uses an independent stream.
        # The original reused one key for all draws, which correlates them
        # (same-shape layers would get byte-identical weights).
        key, w_key, b_key = jax.random.split(key, 3)
        params.append((sample(w_key, (m, n)), sample(b_key, (n,))))
    return params
def forward(params, inputs):
    """Run the MLP forward pass.

    Hidden layers apply ReLU; the output layer also passes its logits
    through ReLU, so predictions are non-negative.

    Args:
        params: list of (weight, bias) tuples as built by init_random_params.
        inputs: batch of input rows, shape (batch, layer_sizes[0]).

    Returns:
        Network outputs, shape (batch, layer_sizes[-1]).
    """
    *hidden_layers, (final_w, final_b) = params
    hidden = inputs
    for weight, bias in hidden_layers:
        hidden = jax.nn.relu(jnp.dot(hidden, weight) + bias)
    return jax.nn.relu(jnp.dot(hidden, final_w) + final_b)
def loss(params, x, y):
    """Mean squared error between the network's predictions on x and targets y."""
    residual = forward(params, x) - y
    return jnp.mean(jnp.square(residual))
def accuracy(params, x, y):
    """Fraction of samples whose prediction, rounded to the nearest integer, equals the target."""
    rounded_preds = jnp.rint(forward(params, x))
    return jnp.mean(rounded_preds == y)
@jax.jit
def update_params(params, x, y):
    """One SGD step: move every (w, b) pair against the gradient of ``loss``.

    Uses the module-level learning rate ``lr``; since the function is jitted,
    that value is captured at trace time.
    """
    grads = jax.grad(loss)(params, x, y)
    stepped = []
    for (w, b), (dw, db) in zip(params, grads):
        stepped.append((w - lr * dw, b - lr * db))
    return stepped
# Build the training task. Bind the instance to `data` rather than `dataset`
# so the imported `dataset` module is not shadowed (the original rebound the
# module name, which made the XorDataSet alternative below unusable).
# data = dataset.XorDataSet()
data = dataset.AndDataSet()
# Seed the PRNG from wall-clock time so each run differs.
key = jax.random.PRNGKey(int(time.time()))
params = init_random_params(layer_sizes, key)
for w, b in params:
    print("w: ", w)
    print("b: ", b)
start_time = time.time()
for iteration in range(max_iter):
    # Standard JAX pattern: consume a fresh subkey for this step's sampling
    # and carry `key` forward. The original discarded the subkey and reused
    # the carried key for both sampling and the next split.
    key, subkey = jax.random.split(key)
    # x, y = data.get_samples()
    x, y = data.get_noisy_samples(num=4, key=subkey)
    params = update_params(params, x, y)
    # Log progress every 100 iterations.
    if iteration % 100 == 0:
        print("predict:", forward(params, x))
        print("params", params)
        print("LOSS:", loss(params, x, y))
        iteration_time = time.time() - start_time
        print("Epoch {}, Training Time {:0.2f} sec".format(iteration, iteration_time))
        print("Accuracy {}\n".format(accuracy(params, x, y)))