From af0c100b96856c53731979020fcb59042d022bd1 Mon Sep 17 00:00:00 2001
From: Jennifer Myers
Date: Tue, 2 May 2017 17:29:27 -0700
Subject: [PATCH] Fix warning about axis argument to softmax (#741)

---
 neon/transforms/activation.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/neon/transforms/activation.py b/neon/transforms/activation.py
index 7dd446ca..20e128c6 100644
--- a/neon/transforms/activation.py
+++ b/neon/transforms/activation.py
@@ -209,7 +209,7 @@ def __init__(self, axis=0, name=None, epsilon=2**-23):
         """
         super(Softmax, self).__init__(name)
         self.epsilon = epsilon
-        self.ax = axis
+        self.axis = axis
 
     def __call__(self, x):
         """
@@ -222,8 +222,8 @@ def __call__(self, x):
             Tensor or optree: Output activation
         """
         return (self.be.reciprocal(self.be.sum(
-            self.be.exp(x - self.be.max(x, axis=self.ax)), axis=self.ax)) *
-            self.be.exp(x - self.be.max(x, axis=self.ax)))
+            self.be.exp(x - self.be.max(x, axis=self.axis)), axis=self.axis)) *
+            self.be.exp(x - self.be.max(x, axis=self.axis)))
 
     def bprop(self, x):
         """
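
Note on the change (not part of the patch): the return expression above is the
numerically stable softmax, exp(x - max(x)) / sum(exp(x - max(x))), taken along
self.axis. Subtracting the per-axis max before exponentiating prevents overflow
without changing the result, since the constant factor cancels in the ratio.
Below is a minimal NumPy sketch of the same computation, for illustration only;
the softmax function name and the NumPy calls are my own stand-ins, not neon's
backend API.

import numpy as np

def softmax(x, axis=0):
    # Subtract the max along `axis` for numerical stability, mirroring
    # x - self.be.max(x, axis=self.axis) in the patched code.
    shifted = x - np.max(x, axis=axis, keepdims=True)
    e = np.exp(shifted)
    # reciprocal(sum(exp(...))) * exp(...) is the same as exp(...) / sum(exp(...)).
    return e / np.sum(e, axis=axis, keepdims=True)

# With the default axis=0 (as in Softmax.__init__ above), each column is
# normalized, so the entries along axis 0 sum to 1.
probs = softmax(np.array([[1.0, 2.0], [3.0, 4.0]]), axis=0)
assert np.allclose(probs.sum(axis=0), 1.0)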