LR.m
classdef LR < handle
    % Logistic Regression trained with stochastic gradient descent.
    % Based on "Neural Networks and Deep Learning" by Xipeng Qiu (Section 3).
    properties(SetAccess = private)
        nIter = 0;  % number of training epochs
        theta = 0;  % bias
        W = [];     % weights between the input layer and the output layer
    end
    methods
        % constructor
        function Obj = LR(nIter)
            Obj.nIter = nIter;
        end
        % train: one pass of stochastic gradient descent per epoch
        function train(Obj,X,T)
            [N,D] = size(X);
            if isempty(Obj.W)
                Obj.W = ones(1,D);
            end
            Loss = zeros(1,Obj.nIter);
            eta = 0.01;  % learning rate
            for i = 1 : Obj.nIter
                loss = 0;
                for j = 1 : N
                    [~,Y] = Obj.predict(X(j,:),false);
                    % gradient step: the prediction error (Y - T) drives
                    % both the weight and the bias updates
                    Obj.W = Obj.W - eta*(Y - T(j))*X(j,:);
                    Obj.theta = Obj.theta - eta*(Y - T(j));
                    loss = loss + (Y - T(j))^2;
                end
                Loss(i) = sqrt(loss/N);  % root-mean-square error for this epoch
            end
            figure;
            plot(1:Obj.nIter,Loss,'-r','LineWidth',1.4);
            xlabel('Epoch');
            ylabel('Loss');
            title('LR');
        end
        % predict: returns the pre-activation Z and the sigmoid output Y;
        % if doPlot is true, draws the decision boundary (assumes D == 2)
        function [Z,Y] = predict(Obj,X,doPlot)
            Z = X*Obj.W' + Obj.theta;
            Y = 1./(1+exp(-Z));
            if doPlot
                figure;
                xX = 0:0.1:2;  % plotting range for the first feature
                plot(xX,-(Obj.W(1).*xX + Obj.theta)./Obj.W(2),'-');
                hold on;
            end
        end
    end
end
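
A minimal usage sketch follows. It is not part of LR.m: the toy data, labels, seed, and epoch count are invented for illustration, assuming a 2-D, linearly separable problem with labels in {0,1}, which matches the sigmoid output and the 2-D decision-boundary plot in predict.

% Usage sketch (hypothetical toy data; assumes labels in {0,1} and D == 2)
rng(0);                              % reproducible toy data
X0 = 0.3*randn(50,2) + [0.5 0.5];    % class 0 clustered around (0.5, 0.5)
X1 = 0.3*randn(50,2) + [1.5 1.5];    % class 1 clustered around (1.5, 1.5)
X = [X0; X1];
T = [zeros(50,1); ones(50,1)];

model = LR(100);                     % 100 training epochs
model.train(X,T);                    % also plots the per-epoch loss curve

[~,Y] = model.predict(X,true);       % true: draw the 2-D decision boundary
labels = Y >= 0.5;                   % threshold the sigmoid outputs
fprintf('training accuracy: %.2f%%\n', 100*mean(labels == T));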