
Commit af0c100
Fix warning about axis argument to softmax (#741)
Jennifer Myers committed May 3, 2017
1 parent fad6fbb
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions neon/transforms/activation.py
@@ -209,7 +209,7 @@ def __init__(self, axis=0, name=None, epsilon=2**-23):
"""
super(Softmax, self).__init__(name)
self.epsilon = epsilon
self.ax = axis
self.axis = axis

def __call__(self, x):
"""
@@ -222,8 +222,8 @@ def __call__(self, x):
             Tensor or optree: Output activation
         """
         return (self.be.reciprocal(self.be.sum(
-            self.be.exp(x - self.be.max(x, axis=self.ax)), axis=self.ax)) *
-            self.be.exp(x - self.be.max(x, axis=self.ax)))
+            self.be.exp(x - self.be.max(x, axis=self.axis)), axis=self.axis)) *
+            self.be.exp(x - self.be.max(x, axis=self.axis)))

     def bprop(self, x):
         """
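
For context, the expression in __call__ is the numerically stable softmax: subtracting the per-axis max before exponentiating prevents overflow in exp(), and because softmax is shift-invariant the result is unchanged. Below is a minimal NumPy sketch of the same computation, for illustration only; it is not neon's be.* backend API, and the softmax helper name and example values are assumptions.

    import numpy as np

    def softmax(x, axis=0):
        # Shift by the max along `axis` so exp() cannot overflow;
        # softmax(x) == softmax(x - c) for any constant c.
        shifted = x - x.max(axis=axis, keepdims=True)
        e = np.exp(shifted)
        # Equivalent to neon's reciprocal(sum(exp(...))) * exp(...) form.
        return e / e.sum(axis=axis, keepdims=True)

    logits = np.array([[1.0, 2.0, 3.0],
                       [0.5, 1.5, 2.5]])
    probs = softmax(logits, axis=0)   # axis=0 matches the diff's default
    print(probs.sum(axis=0))          # -> [1. 1. 1.]

The rename from self.ax to self.axis also makes the attribute name match the axis keyword it is passed to, which is presumably the mismatch behind the warning referenced in the commit title.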
