machine-learning

function A = warmUpExercise()
% Warm-up exercise: return the 5x5 identity matrix
A = eye(5);
end

% Plotting the data

data = load('ex1data1.txt');   % column 1: population, column 2: profit
X = data(:, 1); y = data(:, 2);
m = length(y);                 % number of training examples

plotData(X,y);

function plotData(X, y)
% Scatter-plot the training data
figure;                              % open the figure window first (the original opened it after plotting, leaving a blank window on top)
plot(X, y, 'rx', 'MarkerSize', 10);  % red crosses, one per training example
end
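The original exercise also labels the axes inside plotData; a minimal addition after the plot call, assuming the standard ex1data1.txt units:

xlabel('Population of City in 10,000s');
ylabel('Profit in $10,000s');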


X = [ones(m, 1), data(:,1)];   % prepend a column of ones for the intercept term

theta = zeros(2, 1);           % initialize the fitting parameters to zero

iterations = 1500;             % number of gradient descent steps
alpha = 0.01;                  % learning rate

computeCost(X, y, theta)       % initial cost with theta = [0; 0] (about 32.07 for this dataset)

function J = computeCost(X, y, theta)
% Vectorized squared-error cost for linear regression
m = length(y);
J = (1 / (2 * m)) * sum((X * theta - y) .^ 2);
end
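For reference, the line above computes the usual least-squares cost:

J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \big( \theta^\top x^{(i)} - y^{(i)} \big)^2 = \frac{1}{2m} (X\theta - y)^\top (X\theta - y)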


[theta, J_history] = gradientDescent(X, y, theta, alpha, iterations);   % keep J_history to check convergence

function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)

m = length(y);
J_history = zeros(num_iters, 1);
for iter = 1:num_iters
    % Vectorized gradient: delta = X' * (X*theta - y)
    delta = ((theta' * X' - y') * X)';
    theta = theta - (alpha / m) * delta;
    J_history(iter) = computeCost(X, y, theta);   % record the cost at each step
    fprintf('%f\n', J_history(iter));
end

end
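Each pass of the loop is the batch gradient descent update

\theta := \theta - \frac{\alpha}{m} X^\top (X\theta - y),

which moves every component of \theta simultaneously along the negative gradient of J.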

fprintf('%f %f \n', theta(1), theta(2));   % theta found by gradient descent
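A quick sanity check, a minimal sketch using the J_history returned above: with a suitable learning rate the cost should decrease monotonically and flatten out.

% Plot the cost over the iterations to confirm convergence
figure;
plot(1:iterations, J_history, '-');
xlabel('Iteration'); ylabel('Cost J');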

hold on;

plot(X(:,2), X*theta, '-')   % overlay the fitted line on the training data

legend('Training data', 'Linear regression')

hold off


predict1 = [1, 3.5] * theta;   % predicted profit for a population of 35,000

predict2 = [1, 7] * theta;     % predicted profit for a population of 70,000
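The dataset's units are 10,000s (both population and profit), so scaling the raw predictions makes them readable; a small sketch, assuming those standard ex1data1.txt units:

fprintf('For population = 35,000, predicted profit = $%.2f\n', predict1 * 10000);
fprintf('For population = 70,000, predicted profit = $%.2f\n', predict2 * 10000);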

theta0_vals = linspace(-10, 10, 100);   % grid of theta_0 values
theta1_vals = linspace(-1, 4, 100);     % grid of theta_1 values

J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Evaluate the cost at every point of the grid
for i = 1:length(theta0_vals)
    for j = 1:length(theta1_vals)
        t = [theta0_vals(i); theta1_vals(j)];
        J_vals(i,j) = computeCost(X, y, t);
    end
end


% Transpose J_vals before plotting: surf/contour index the grid as (y, x),
% so without this the theta_0 and theta_1 axes would be flipped
J_vals = J_vals';

figure;

surf(theta0_vals, theta1_vals, J_vals)   % surface plot of the cost over the grid

xlabel('\theta_0'); ylabel('\theta_1');


figure;

contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))   % 20 log-spaced contour levels
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);   % mark the minimum found by gradient descent
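As an optional cross-check (an addition, not part of the original script), the closed-form least-squares solution should land essentially on the same spot as the gradient descent result:

theta_check = pinv(X' * X) * X' * y;   % normal equation
fprintf('closed-form theta: %f %f\n', theta_check(1), theta_check(2));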

