Modifications for application
csxeba committed Mar 4, 2020
1 parent b23f1f3 commit 183e860
Showing 6 changed files with 57 additions and 22 deletions.
4 changes: 2 additions & 2 deletions Readme.md
@@ -320,10 +320,10 @@ inshape, outshape = images.neurons_required
 
 model = BackpropNetwork(input_shape=inshape, layerstack=(
     ConvLayer(nfilters=10, filterx=3, filtery=3, compiled=True),
-    PoolLayer(fdim=2, compiled=True),
+    PoolLayer(filter_size=2, compiled=True),
     Activation("relu"),
     ConvLayer(nfilters=10, filterx=5, filtery=5, compiled=True),
-    PoolLayer(fdim=3, compiled=True),
+    PoolLayer(filter_size=3, compiled=True),
     Activation("relu"),
     Flatten(),
     DenseLayer(120, activation="tanh"),
2 changes: 1 addition & 1 deletion brainforge/layers/__init__.py
@@ -1,4 +1,4 @@
 from .core import Dense, Activation, Reshape, Flatten
 from .fancy import Highway, DropOut
 from .recurrent import RLayer, LSTM, GRU, ClockworkLayer, Reservoir
-from .tensor import PoolLayer, ConvLayer
+from .tensor import PoolLayer, ConvLayer, GlobalAveragePooling
32 changes: 18 additions & 14 deletions brainforge/layers/tensor.py
@@ -1,18 +1,18 @@
-from brainforge.util import emptyX
+import numpy as np
 
 from .abstract_layer import LayerBase, NoParamMixin
-from ..util import zX, zX_like, white
+from ..util import zX, zX_like, white, scalX
 
 
 class PoolLayer(NoParamMixin, LayerBase):
 
-    def __init__(self, fdim, compiled=True):
+    def __init__(self, filter_size, compiled=True):
         LayerBase.__init__(self, activation="linear", trainable=False)
         if compiled:
             print("Compiling PoolLayer...")
             from ..llatomic.lltensor_op import MaxPoolOp
         else:
             from ..atomic import MaxPoolOp
-        self.fdim = fdim
+        self.fdim = filter_size
         self.filter = None
         self.op = MaxPoolOp()

@@ -43,7 +43,7 @@ def __str__(self):
 class ConvLayer(LayerBase):
 
     def __init__(self, nfilters, filterx=3, filtery=3, compiled=True, **kw):
-        super().__init__(activation=kw.get("activation", "linear"), compiled=compiled, **kw)
+        super().__init__(compiled=compiled, **kw)
         self.nfilters = nfilters
         self.fx = filterx
         self.fy = filtery
@@ -54,7 +54,6 @@ def __init__(self, nfilters, filterx=3, filtery=3, compiled=True, **kw):
 
     def connect(self, brain):
         if self.compiled:
-            print("Compiling ConvLayer...")
             from ..llatomic import ConvolutionOp
         else:
             from ..atomic import ConvolutionOp
@@ -96,15 +95,20 @@ def __str__(self):
 class GlobalAveragePooling(NoParamMixin, LayerBase):
 
     def __init__(self):
-        super().__init__()
-        self.dynamic_input_shape = None
+        LayerBase.__init__(self)
+        NoParamMixin.__init__(self)
+        self.repeats = 0
 
     def feedforward(self, X):
-        self.dynamic_input_shape = X.shape
+        self.repeats = np.prod(X.shape[2:])
         return X.mean(axis=(2, 3))
 
     def backpropagate(self, delta):
-        canvas = emptyX(*self.inputs.shape)
-        nxy = self.dynamic_input_shape[-2] * self.dynamic_input_shape[-1]
-        for mm, cc in ((m, c) for c in range(delta.shape[1]) for m in range(delta.shape[0])):
-            canvas.flat[mm, cc] = delta[mm, cc] / nxy
+        m = len(delta)
+        delta = np.repeat(delta / scalX(self.repeats), self.repeats)
+        delta = delta.reshape((m,) + self.inshape)
+        return delta
+
+    @property
+    def outshape(self):
+        return self.inshape[0],
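
For reference, here is the computation the rewritten GlobalAveragePooling performs, restated as a self-contained NumPy sketch. The function names are illustrative, not brainforge API; the final assertion checks the np.repeat formulation against the per-element spreading that the deleted loop was attempting:

    import numpy as np

    def gap_forward(x):
        # x: (batch, channels, H, W) -> per-channel spatial mean, (batch, channels)
        return x.mean(axis=(2, 3))

    def gap_backward(delta, inshape):
        # delta: (batch, channels); inshape: (channels, H, W).
        # Each of the H*W positions contributed 1/(H*W) to the mean, so the
        # upstream gradient is scaled by 1/(H*W) and tiled back over the map.
        repeats = int(np.prod(inshape[1:]))           # H * W
        spread = np.repeat(delta / repeats, repeats)  # row-major: each element repeated H*W times
        return spread.reshape((len(delta),) + tuple(inshape))

    x = np.random.rand(2, 3, 5, 5).astype("float32")
    d = np.random.rand(2, 3).astype("float32")
    naive = np.empty_like(x)
    for m in range(2):
        for c in range(3):
            naive[m, c] = d[m, c] / 25.0  # uniform spread, what the deleted loop intended
    assert np.allclose(gap_backward(d, x.shape[1:]), naive)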
4 changes: 4 additions & 0 deletions brainforge/util/testing.py
@@ -0,0 +1,4 @@
+class NoBrainer:
+
+    def __init__(self, outshape):
+        self.outshape = outshape
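
This stub stands in for a full network during layer tests: connecting a layer only needs the upstream output shape, which the stub exposes as outshape. A minimal sketch of the intended use, assuming connect() reads the shape from brain.outshape as the new test below suggests:

    from brainforge import layers
    from brainforge.util.testing import NoBrainer

    gap = layers.GlobalAveragePooling()
    gap.connect(NoBrainer(outshape=(3, 5, 5)))  # pretend an upstream layer emits 3 maps of 5x5
    # gap.outshape should now be (3,)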
30 changes: 30 additions & 0 deletions tests/test_layers.py
@@ -0,0 +1,30 @@
+import unittest
+
+import numpy as np
+
+from brainforge import layers
+from brainforge.util import testing
+
+
+class TestGlobalAveragePooling(unittest.TestCase):
+
+    def setUp(self) -> None:
+        self.inputs = np.empty([3, 5, 5], dtype="float32")
+        for channel in range(3):
+            self.inputs[channel] = np.full([5, 5], fill_value=channel+1, dtype="float32")
+        self.outputs = np.array([1., 2., 3.], dtype="float32")
+        self.brain = testing.NoBrainer(outshape=self.inputs.shape)
+        self.layer = layers.GlobalAveragePooling()
+        self.layer.connect(self.brain)
+
+    def test_forward_pass_is_correct(self):
+
+        output = self.layer.feedforward(self.inputs[None, ...])[0]
+        np.testing.assert_equal(output, self.outputs)
+
+    def test_backwards_pass_is_correct(self):
+
+        self.layer.feedforward(self.inputs[None, ...])
+        delta = self.layer.backpropagate(self.outputs[None, ...])[0]
+
+        np.testing.assert_allclose(delta, self.inputs / 25)
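
The expected values in the backward test can be verified by hand: channel c holds the constant c + 1 over a 5x5 map, so the forward mean is exactly c + 1, and backpropagating that same vector assigns each spatial position

    delta[c, i, j] = outputs[c] / (5 * 5) = (c + 1) / 25 = inputs[c, i, j] / 25

which is why the test compares against inputs / 25.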
7 changes: 2 additions & 5 deletions xperiments/xp_conv.py
@@ -9,17 +9,14 @@
 ins, ous = X.shape[1:], Y.shape[1:]
 net = BackpropNetwork(input_shape=ins, layerstack=[
     ConvLayer(32, 3, 3, compiled=1),
-    Activation("tanh"),
+    Activation("relu"),
     ConvLayer(64, 3, 3, compiled=1),
     PoolLayer(2, compiled=1),
-    Activation("tanh"),
+    Activation("relu"),
     Flatten(),
     Dense(ous[0], activation="softmax")
 ], cost="cxent", optimizer="adam")
 
-net.learn_batch(X[-5:], Y[-5:])
-net.age += 1
-
 gradientcheck.run(net, X[:5], Y[:5], epsilon=1e-5, throw=True)
 
 net.fit(X, Y, batch_size=32, epochs=10, metrics=["acc"])
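
The experiment now gradient-checks the freshly initialised network before fitting. The diff does not show gradientcheck's internals, but the technique it stands for is the standard central-difference comparison; a generic, self-contained sketch under that assumption (all names here are illustrative, not brainforge API):

    import numpy as np

    def numeric_gradient(loss_fn, params, epsilon=1e-5):
        # Central-difference estimate of d(loss)/d(params).
        # Assumes params is a contiguous ndarray, so ravel() returns a view
        # and the in-place perturbations are visible to loss_fn.
        grad = np.zeros_like(params)
        flat, gflat = params.ravel(), grad.ravel()
        for i in range(flat.size):
            saved = flat[i]
            flat[i] = saved + epsilon
            loss_plus = loss_fn()
            flat[i] = saved - epsilon
            loss_minus = loss_fn()
            flat[i] = saved
            gflat[i] = (loss_plus - loss_minus) / (2.0 * epsilon)
        return grad

The analytic gradient from backpropagation is then compared against such an estimate, and throw=True presumably raises when the relative error exceeds a tolerance.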
