-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathAutomaticGradient.m
59 lines (51 loc) · 1.42 KB
/
AutomaticGradient.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
function [dw,db]=AutomaticGradient(data,label,NN)
%AUTOMATICGRADIENT Backpropagation gradients for a feed-forward network.
%   [dw,db] = AutomaticGradient(data,label,NN) runs one forward pass over
%   the column-wise sample matrix DATA, backpropagates the residual
%   against LABEL, and returns cell arrays DW and DB holding the gradient
%   of the cost with respect to NN.weight{j} and NN.bias{j} for every
%   layer j = 1..NN.depth.
%
%   NOTE(review): assumes NN.depth >= 2 — with a single layer the term
%   Memory.A{NN.depth-1} below would index A{0}. Confirm callers never
%   build a one-layer network.

% Optional affine input normalization using precomputed vectors.
if strcmp(NN.InputAutoScaling,'on')==1
    data=NN.InputScaleVector.*data-NN.InputCenterVector;
end

% ---- Forward pass: cache activations A{j} and derivatives D{j} ----
v=data;
for j=1:NN.depth-1
    z=NN.weight{j}*v+NN.bias{j};
    v=NN.active(z);
    Memory.A{j}=v;
    % The derivative helper's argument list depends on the activation:
    % Gaussian needs both the pre-activation z and the value v, ReLU
    % needs only z, and the remaining activations are expressed through
    % the activation value v alone.
    if strcmp(NN.ActivationFunction,'Gaussian')
        Memory.D{j}=NN.activeDerivate(z,v);
    elseif strcmp(NN.ActivationFunction,'ReLU')
        Memory.D{j}=NN.activeDerivate(z);
    else
        Memory.D{j}=NN.activeDerivate(v);
    end
end
% Output layer: only the activation is needed downstream. (The original
% code also stored the raw pre-activation in Memory.D{NN.depth}, but the
% backward pass never reads that slot, so the dead store is removed.)
z=NN.weight{NN.depth}*v+NN.bias{NN.depth};
Memory.A{NN.depth}=NN.OutActive(z);

% ---- Error at the output ----
% MAE uses the sign of the residual; every other cost uses the residual
% itself, scaled by the precomputed averaging factor NN.MeanFactor.
if strcmp(NN.Cost,'MAE')==1
    ErrorVector=NN.MeanFactor*sign(Memory.A{NN.depth}-label);
else
    ErrorVector=NN.MeanFactor*(Memory.A{NN.depth}-label);
end

% Per-sample weighting: when LABEL spans the full data set use
% NN.Weighted, otherwise (a subset/batch) use NN.SampleWeight.
% DataWeightMatrix is intentionally left undefined when
% NN.WeightedFlag==0 — the unweighted branch below never touches it.
if size(label,2)==NN.numOfData && NN.WeightedFlag==1
    DataWeightMatrix=NN.Weighted;
elseif size(label,2)~=NN.numOfData && NN.WeightedFlag==1
    DataWeightMatrix=NN.SampleWeight;
end

% ---- Backward pass ----
% g is the gradient of the cost w.r.t. the current layer's pre-activation.
if NN.WeightedFlag==0
    g=ErrorVector;
else
    g=DataWeightMatrix.*ErrorVector;
end
dw=NN.weight; db=NN.bias; % preallocate cells with matching shapes
dw{NN.depth}=g*(Memory.A{NN.depth-1}.');
db{NN.depth}=sum(g,2);
% Hidden layers depth-1 .. 2: push g back through W{j+1}, gate it with
% the cached activation derivative, then accumulate layer gradients.
for j=NN.depth-1:-1:2
    g=Memory.D{j}.*((NN.weight{j+1}.')*g);
    A=(Memory.A{j-1}).';
    dw{j}=g*A;
    db{j}=sum(g,2);
end
% First layer uses the (possibly normalized) input as its "activation".
g=Memory.D{1}.*((NN.weight{2}.')*g);
A=data.';
dw{1}=g*A;
db{1}=sum(g,2);
end