最近在看 deep learning 的东西,看到 logistic regression 有些想法,就写下来方便以后查阅。
% Logistic regression on two exam scores, fitted with Newton's method.
% Loads ex4x.dat (m x 2 feature matrix) and ex4y.dat (m x 1 labels in {0,1}),
% runs `num` Newton iterations, plots the data with the decision boundary,
% and plots the cost J(theta) per iteration.
clear all;   % BUG FIX: was a duplicated "close all;" — clear the workspace instead
close all;
clc

x = load('ex4x.dat');   % features: exam 1 / exam 2 scores
y = load('ex4y.dat');   % labels: 1 = admitted, 0 = not admitted
m = size(x, 1);
x = [ones(m, 1), x];    % prepend intercept column
theta = zeros(size(x(1,:)))';   % initial parameters (one per column of x)

num = 7;                % number of Newton iterations (Newton converges fast here)

% Scatter plot of the two classes.
pos = find(y == 1);
neg = find(y == 0);
figure
plot(x(pos,2), x(pos,3), '+');
hold on
plot(x(neg,2), x(neg,3), 'o');  % BUG FIX: y-coordinate was x(pos,3), mixing the classes
hold on
xlabel('Exam 1 score')
ylabel('Exam 2 score')

j_theta = zeros(num, 1);        % cost history, one entry per Newton iteration
g = inline('1.0 ./ (1.0 + exp(-z))');  % sigmoid; NOTE: inline is deprecated, @(z) works too

for j = 1:num
    z = x * theta;
    tmp = g(z);                              % h_theta(x), the model's probabilities
    j_theta_1 = x' * (tmp - y);              % gradient of J(theta)
    j_theta_2 = x' * diag(tmp) * diag(1 - tmp) * x;  % Hessian of J(theta)
    theta = theta - j_theta_2 \ j_theta_1;   % Newton step; "\" is more stable than inv()
    % BUG FIX: record the cost per iteration instead of overwriting a scalar.
    j_theta(j) = 1/m * sum(-y .* log(tmp) - (1 - y) .* log(1 - tmp));
end

% Decision boundary: theta(1) + theta(2)*x2 + theta(3)*x3 = 0, drawn through
% the two extreme x2 values.
plot_x = [min(x(:,2)) max(x(:,2))];
plot_y = -(1/theta(3)) * (theta(2) * plot_x + theta(1));
plot(plot_x, plot_y);
legend('Admitted', 'Not admitted', 'Decision Boundary')
hold off

% Cost per Newton iteration.
figure
% BUG FIX: was plot(0:MAX_ITR-1, J, ...) with MAX_ITR and J both undefined.
plot(0:num-1, j_theta, 'o--', 'MarkerFaceColor', 'r', 'MarkerSize', 8)
xlabel('Iteration'); ylabel('J')
j_theta   % display the cost history (original echoed the undefined variable J)
参考
(原文此处为"点击打开链接"的超链接,复制时链接地址已丢失)