MLP.py
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 05 09:28:05 2018
@author: Ashish Jha
"""
# Importing libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import sklearn
from math import exp
# Implementations of different activation functions
def hardlim(x, k):
    """Hard-limit (step) function: 0 below the threshold k, 1 otherwise."""
    return 0 if x < k else 1

def sigmoid(x):
    """Logistic sigmoid, squashing x into (0, 1)."""
    return 1 / (1 + exp(-x))

def tanh(x):
    """Hyperbolic tangent, squashing x into (-1, 1)."""
    return (exp(x) - exp(-x)) / (exp(x) + exp(-x))

def relu(x):
    """Rectified linear unit: 0 for negative inputs, identity otherwise."""
    return 0 if x < 0 else x

def lrelu(x, a):
    """Leaky ReLU: scales negative inputs by a instead of zeroing them."""
    return a * x if x < 0 else x

def softmax(z):
    """Normalized exponentials of a vector z; shifting by max(z) avoids overflow."""
    e = np.exp(z - np.max(z))
    return e / np.sum(e)
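# A quick sanity check (an added illustration, not part of the original
# script): softmax output should be non-negative and sum to 1.
z = np.array([1.0, 2.0, 3.0])
print(softmax(z))          # ~[0.0900, 0.2447, 0.6652]
print(np.sum(softmax(z)))  # sums to 1 (up to float rounding)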
print(sigmoid(1))  # quick check: about 0.7311
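# Minimal forward pass of a single-hidden-layer MLP built from the
# activations above (a sketch added for illustration; the helper name
# mlp_forward and the layer sizes are assumptions, not part of the
# original script). The hidden layer applies the element-wise sigmoid
# defined above; the output layer applies softmax.
def mlp_forward(x, W1, b1, W2, b2):
    h = np.array([sigmoid(v) for v in W1 @ x + b1])  # hidden layer: sigmoid
    return softmax(W2 @ h + b2)                      # output layer: softmax

# Example: 4 inputs -> 5 hidden units -> 3 output classes
rng = np.random.default_rng(0)
x = rng.normal(size=4)
W1, b1 = rng.normal(size=(5, 4)), np.zeros(5)
W2, b2 = rng.normal(size=(3, 5)), np.zeros(3)
print(mlp_forward(x, W1, b1, W2, b2))  # class probabilities summing to 1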