-
Notifications
You must be signed in to change notification settings - Fork 2
/
backProp.m
63 lines (52 loc) · 1.37 KB
/
backProp.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
function [ w, errs ] = backProp( x,yp, w, filters,iters )
% Apply backpropagation to the CNN.
%
% x       : training set (cell array; one sample per cell)
% yp      : expected outcomes (cell array, parallel to x)
% w       : initial weights
% filters : convolution filters, forwarded to classify
% iters   : total number of passes over the training set
%
% w    : learnt weights
% errs : 1-by-(iters*length(x)) vector of the per-step training error
%
% BUG FIX: zeros(n) creates an n-by-n matrix in MATLAB; the error log
% holds one scalar per step, so allocate it explicitly as a row vector.
errs = zeros(1, iters*length(x));
for i=1:iters
    for j=1:length(x)
        % get current classification
        [y,cnn] = classify(x{j},w,filters);
        % apply a backprop step
        [w,err] = backPropStep(cnn{end}.stack,y,yp{j},w);
        errs((i-1)*length(x) + j) = err;
    end
end
end
function [ wp, err ] = backPropStep( x,y,yp,w )
%BACKPROPSTEP Single backpropagation step (gradient step on the weights)
% x  : training data (activations feeding the output unit)
% y  : predicted output
% yp : expected output
% w  : current weights
%
% wp  : updated weights
% err : half sum-of-squares error for this sample
%
ALPHA = 1000; % Learning rate
% BUG FIX: semicolon added — the statement previously echoed err to the
% console on every single training step.
err = error(yp,y);
wsum = dot(x,w);
% Gradient step on weights; wp preallocated instead of growing per iteration
wp = zeros(1, length(w));
for i=1:length(w)
    wp(i) = w(i) + ALPHA*err*dg(wsum)*x(i);
end
end
% Half sum-of-squares error between expected (x) and actual (xp) values.
% NOTE(review): this local function shadows MATLAB's built-in error();
% consider renaming it (together with its callers) to avoid confusion.
function err = error(x,xp)
err = 0;
% BUG FIX: the original loop bound was 1:size(x); size returns a vector
% and the colon operator uses only its first element (1 for a row
% vector), so only the first component contributed to the error.
for i=1:numel(x)
    err = err + ((x(i) - xp(i))^2)/2;
end
end
% Derivative of the sigmoid activation, using the identity
% g'(x) = g(x) * (1 - g(x)); g is evaluated once and reused.
function dgx = dg(x)
s = g(x);
dgx = s*(1 - s);
end
% Logistic sigmoid activation: maps a real x into the interval (0,1).
function gx = g(x)
denom = 1 + exp(-x);
gx = 1/denom;
end