act_func.py
# Import packages
import numpy as np


# Activation functions (forward)
def linear(W, A_prev, b):
    """Linear (pre-activation) step: Z = W·A_prev + b."""
    Z = np.dot(W, A_prev) + b
    return Z


def sigmoid(Z):
    """Element-wise logistic sigmoid: A = 1 / (1 + exp(-Z))."""
    A = 1 / (1 + np.exp(-Z))
    return A


def relu(Z):
    """Element-wise rectified linear unit: A = max(0, Z)."""
    A = np.maximum(0, Z)
    return A


def softmax(Z):
    """Numerically stable softmax along axis 1 (subtract the max before exponentiating)."""
    Z_max = np.max(Z, axis=1, keepdims=True)
    Z_exp = np.exp(Z - Z_max)
    Z_sum = np.sum(Z_exp, axis=1, keepdims=True)
    A = np.divide(Z_exp, Z_sum)
    return A
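

# Hedged sketch (not part of the original file): a small helper that composes
# the linear step with a chosen activation. The name forward_layer and the
# string-keyed dispatch dict are illustrative assumptions, not the repo's API.
_FORWARD = {"sigmoid": sigmoid, "relu": relu, "softmax": softmax}


def forward_layer(W, A_prev, b, activation="relu"):
    """Compute Z = W·A_prev + b, then apply the named activation; return (A, Z)."""
    Z = linear(W, A_prev, b)
    A = _FORWARD[activation](Z)
    return A, Z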
# Activation functions (backward)
def linear_backward(W, dZ):
    """Propagate the gradient through the linear step: dA_prev = W^T · dZ."""
    dA_prev = np.dot(W.T, dZ)
    return dA_prev


def sigmoid_backward(dA, Z):
    """Sigmoid gradient: dZ = dA * sigmoid(Z) * (1 - sigmoid(Z))."""
    sig = sigmoid(Z)
    dZ = dA * sig * (1 - sig)
    return dZ


def relu_backward(dA, Z):
    """ReLU gradient: pass dA through where Z > 0, zero elsewhere."""
    dZ = np.array(dA, copy=True)
    dZ[Z <= 0] = 0
    return dZ


def softmax_backward(dA, Z):
    """Softmax Jacobian-vector product along axis 1: dZ = s * (dA - sum(dA * s))."""
    sft = softmax(Z)
    sft_sum = np.sum(dA * sft, axis=1, keepdims=True)
    dZ = sft * (dA - sft_sum)
    return dZ
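

# Hedged usage sketch (illustrative, not part of the original file): one
# forward/backward step through a single ReLU layer, assuming the
# column-per-example convention implied by linear(): W is (n_out, n_in),
# A_prev is (n_in, m), b is (n_out, 1). All shapes and values below are
# made up for demonstration only.
if __name__ == "__main__":
    np.random.seed(0)
    m = 4                               # number of examples
    A0 = np.random.randn(3, m)          # input activations: 3 features, m examples
    W1 = np.random.randn(5, 3) * 0.01   # layer weights
    b1 = np.zeros((5, 1))               # layer biases
    # Forward pass
    Z1 = linear(W1, A0, b1)             # pre-activation, shape (5, m)
    A1 = relu(Z1)                       # layer activation
    # Backward pass with a dummy upstream gradient dA1
    dA1 = np.ones_like(A1)
    dZ1 = relu_backward(dA1, Z1)        # gradient w.r.t. Z1
    dA0 = linear_backward(W1, dZ1)      # gradient w.r.t. the layer input, shape (3, m)
    print("A1:", A1.shape, "dZ1:", dZ1.shape, "dA0:", dA0.shape)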