diff --git a/neon/transforms/activation.py b/neon/transforms/activation.py
index 7dd446ca..20e128c6 100644
--- a/neon/transforms/activation.py
+++ b/neon/transforms/activation.py
@@ -209,7 +209,7 @@ def __init__(self, axis=0, name=None, epsilon=2**-23):
         """
         super(Softmax, self).__init__(name)
         self.epsilon = epsilon
-        self.ax = axis
+        self.axis = axis
 
     def __call__(self, x):
         """
@@ -222,8 +222,8 @@ def __call__(self, x):
             Tensor or optree: Output activation
         """
         return (self.be.reciprocal(self.be.sum(
-            self.be.exp(x - self.be.max(x, axis=self.ax)), axis=self.ax)) *
-            self.be.exp(x - self.be.max(x, axis=self.ax)))
+            self.be.exp(x - self.be.max(x, axis=self.axis)), axis=self.axis)) *
+            self.be.exp(x - self.be.max(x, axis=self.axis)))
 
     def bprop(self, x):
         """
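
Aside from the mechanical rename (the attribute now matches the constructor's `axis` parameter rather than the abbreviated `ax`), the expression touched by the second hunk is the standard numerically stable softmax: the per-axis maximum is subtracted before exponentiating so `exp` cannot overflow, and the result is scaled by the reciprocal of the sum. A minimal NumPy sketch of the same computation, for reference only (the `softmax` helper and test values below are illustrative and not part of neon's `self.be` backend API):

```python
import numpy as np

def softmax(x, axis=0):
    """Numerically stable softmax, mirroring the diff's optree:
    exp(x - max(x)) * reciprocal(sum(exp(x - max(x))))."""
    # Subtracting the per-axis max makes the largest exponent exp(0) = 1,
    # so exp() never overflows even for very large inputs.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    e = np.exp(shifted)
    return e / np.sum(e, axis=axis, keepdims=True)

# With axis=0 (neon's default), each column sums to 1.
x = np.array([[1.0, 1000.0],
              [2.0, 1001.0]])
print(softmax(x))  # finite outputs despite the large values
```

The max-shift leaves the result unchanged because softmax is invariant to adding a constant along the reduction axis; it only moves the computation into a range where `exp` is safe.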