loss.py
import numpy as np
def binary_cross_entropy(Y, P_hat):
    """
    Computes the binary cross-entropy (BCE) cost and its derivative.

    The cost is defined as:
        (1/m) * np.sum(-Y*np.log(P_hat) - (1-Y)*np.log(1-P_hat))

    Args:
        Y: labels of the data, with examples along the second axis
        P_hat: estimated output probabilities from the last (output) layer,
            with examples along the first axis (so it aligns with Y.T)

    Returns:
        cost: the binary cross-entropy cost, a scalar
        dP_hat: gradient of the cost w.r.t. P_hat
    """
    m = Y.shape[1]  # m -> number of examples in the batch
    Y = Y.T  # transpose so Y aligns element-wise with P_hat
    # Clip probabilities away from 0 and 1 so the logs stay finite
    EPSILON = 1e-07
    P_MAX = 1 - EPSILON  # 0.9999999
    P_hat = np.clip(P_hat, a_min=EPSILON, a_max=P_MAX)
    # nansum guards against any residual NaNs, though the clipping above
    # already keeps both log terms well-defined
    cost = (1/m) * np.nansum(-Y*np.log(P_hat) - (1-Y)*np.log(1-P_hat))
    cost = np.squeeze(cost)  # reduce to a plain scalar
    # Element-wise derivative of the cost w.r.t. P_hat
    dP_hat = (1/m) * (-(Y/P_hat) + ((1-Y)/(1-P_hat)))
    return cost, dP_hat
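

# A minimal sanity-check sketch. The shapes below (Y as (1, m) with examples
# along axis 1, P_hat as (m, 1)) are assumptions inferred from the transpose
# inside binary_cross_entropy, not stated by the original file. It evaluates
# the cost on a toy batch and compares dP_hat against a central-difference
# estimate of the gradient.
if __name__ == "__main__":
    Y = np.array([[1, 0, 1, 1]])                    # labels, shape (1, 4)
    P_hat = np.array([[0.9], [0.2], [0.7], [0.6]])  # probabilities, shape (4, 1)

    cost, dP_hat = binary_cross_entropy(Y, P_hat)
    print(cost)    # ~0.299 for this batch
    print(dP_hat)  # shape (4, 1), matching P_hat

    # Central-difference check of dP_hat for the first example
    eps = 1e-5
    P_plus, P_minus = P_hat.copy(), P_hat.copy()
    P_plus[0, 0] += eps
    P_minus[0, 0] -= eps
    numeric = (binary_cross_entropy(Y, P_plus)[0]
               - binary_cross_entropy(Y, P_minus)[0]) / (2 * eps)
    print(numeric, dP_hat[0, 0])  # the two values should closely agree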