Andrew Ng's Machine Learning course, ex2: Logistic Regression

# plotData.m
function plotData(X, y)
% PLOTDATA plots the positive and negative examples of a 2D dataset.
figure; hold on;

% Indices of positive (y == 1) and negative (y == 0) examples
pos = find(y == 1);
neg = find(y == 0);

% Positive examples as blue crosses, negative examples as filled red circles
plot(X(pos, 1), X(pos, 2), 'b+', 'LineWidth', 2, 'MarkerSize', 7);
plot(X(neg, 1), X(neg, 2), 'ro', 'LineWidth', 1, 'MarkerSize', 7, 'MarkerFaceColor', 'r');

hold off;
end
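A minimal usage sketch of how ex2.m typically calls this function, assuming X holds the two exam scores and y the admission labels; the label and legend text below is illustrative, not part of the file above.

% Assumed: X is m x 2 (exam scores), y is m x 1 (0/1 admission labels)
plotData(X, y);
xlabel('Exam 1 score');
ylabel('Exam 2 score');
legend('Admitted', 'Not admitted');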
# sigmoid.m
function g = sigmoid(z)
% SIGMOID computes the sigmoid of z; z may be a scalar, vector, or matrix.

% Element-wise operations keep the same shape as z
g = 1 ./ (1 + exp(-z));
end
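For reference, the code computes the logistic (sigmoid) function element-wise; its output lies in (0, 1) and is interpreted as the estimated probability that y = 1:

$$g(z) = \frac{1}{1 + e^{-z}}$$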
# costFunction.m
function [J, grad] = costFunction(theta, X, y)
% COSTFUNCTION computes the logistic regression cost and gradient.
m = length(y);              % number of training examples

% Hypothesis h = g(X * theta)
h = sigmoid(X * theta);

% Cross-entropy cost
J = (-1/m) * (y' * log(h) + (1 - y)' * log(1 - h));

% Gradient of the cost with respect to theta
grad = (1/m) * X' * (h - y);
end
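A minimal sketch of how this function is typically handed to an optimizer in ex2.m; the names initial_theta and options are assumptions here, not part of the file above.

% Assumed: X already contains the intercept column, initial_theta = zeros(size(X, 2), 1)
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, cost] = fminunc(@(t) costFunction(t, X, y), initial_theta, options);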
# costFunctionReg.m
function [J, grad] = costFunctionReg(theta, X, y, lambda)
% COSTFUNCTIONREG computes the regularized logistic regression cost and gradient.
m = length(y);              % number of training examples

h = sigmoid(X * theta);

% Cross-entropy cost plus an L2 penalty that skips the bias term theta(1)
J = (-1/m) * (y' * log(h) + (1 - y)' * log(1 - h)) ...
    + (lambda / (2*m)) * (theta(2:end)' * theta(2:end));

% Gradient; theta(1) is not regularized
grad = (1/m) * X' * (h - y);
grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end
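Written out, the quantities computed above are the regularized cost and its gradient, with the bias term $\theta_0$ excluded from the penalty:

$$J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\Big[y^{(i)}\log h_\theta(x^{(i)}) + (1 - y^{(i)})\log\big(1 - h_\theta(x^{(i)})\big)\Big] + \frac{\lambda}{2m}\sum_{j=1}^{n}\theta_j^2$$

$$\frac{\partial J}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\big(h_\theta(x^{(i)}) - y^{(i)}\big)x_j^{(i)} + \frac{\lambda}{m}\theta_j \quad (j \ge 1)$$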
# predict.m
function p = predict(theta, X)
% PREDICT returns 0/1 predictions for logistic regression with parameters theta.
m = size(X, 1);              % number of examples

% Predict 1 whenever the estimated probability is at least 0.5
prob = sigmoid(X * theta);
p = double(prob >= 0.5);
end
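A small usage sketch: comparing the predictions against the training labels gives the training accuracy. The fprintf message is illustrative, not taken from the file above.

p = predict(theta, X);
fprintf('Train accuracy: %.2f%%\n', mean(double(p == y)) * 100);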