ml_ex1

Reviewing and tidying up the programming assignments from Andrew Ng's Machine Learning course on Coursera (MATLAB):

ex1 (parts of the code omitted for brevity):

%% Machine Learning Online Class - Exercise 1: Linear Regression

%% Initialization
clear ; close all; clc

%% ==================== Part 1: Basic Function ====================
% (omitted)
%% ======================= Part 2: Plotting =======================
data = load('ex1data1.txt');
X = data(:, 1); y = data(:, 2);
m = length(y); % number of training examples
plotData(X, y);
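
% plotData.m is one of the files the exercise has you fill in yourself. A
% minimal sketch of what it typically contains follows; the marker style and
% axis labels are taken from the exercise's plotting instructions, so treat
% the details as assumptions rather than the graded solution.
function plotData(x, y)
%PLOTDATA Plots the data points x and y into a new figure
figure;                                  % open a new figure window
plot(x, y, 'rx', 'MarkerSize', 10);      % red crosses for the training data
ylabel('Profit in $10,000s');            % y-axis: profit
xlabel('Population of City in 10,000s'); % x-axis: city population
end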
%% =================== Part 3: Cost and Gradient descent ===================

X = [ones(m, 1), data(:,1)]; % Add a column of ones to x
theta = zeros(2, 1); % initialize fitting parameters

% Some gradient descent settings
iterations = 1500;
alpha = 0.01;
% compute and display initial cost
J = computeCost(X, y, theta);

% further testing of the cost function
J = computeCost(X, y, [-1 ; 2]);

% run gradient descent
theta = gradientDescent(X, y, theta, alpha, iterations);

% Predict values for population sizes of 35,000 and 70,000
predict1 = [1, 3.5] * theta;
predict2 = [1, 7] * theta;
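
% The y values are in units of $10,000, so multiplying by 10000 converts a
% prediction to dollars. A quick way to print the two results (the format
% strings here are illustrative, not copied from the assignment):
fprintf('For population = 35,000, we predict a profit of %f\n', predict1*10000);
fprintf('For population = 70,000, we predict a profit of %f\n', predict2*10000);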

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
% (omitted)
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression
%   J = COMPUTECOST(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples
    temp = 0;
    for i = 1:m
        % accumulate the squared error of each training example
        temp = temp + (theta'*X(i,:)' - y(i))^2;
    end
    J = (1/(2*m)) * temp; % average over m examples, with the conventional 1/2 factor
end
function J = computeCostMulti(X, y, theta)
%COMPUTECOSTMULTI Compute cost for linear regression with multiple variables
%   J = COMPUTECOSTMULTI(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples
    temp = 0;
    for i = 1:m
        % identical to computeCost: theta'*X(i,:)' is an inner product,
        % so it already handles any number of features
        temp = temp + (theta'*X(i,:)' - y(i))^2;
    end
    J = (1/(2*m)) * temp;
end
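
% Since computeCost and computeCostMulti are identical, both loops can be
% collapsed into a single vectorized expression that works for any number of
% features. A sketch that could serve as the body of either function:
err = X*theta - y;        % m x 1 vector of residuals, all examples at once
J = (err' * err) / (2*m); % sum of squared residuals, divided by 2m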
function [X_norm, mu, sigma] = featureNormalize(X)
%FEATURENORMALIZE Normalizes the features in X 
%   FEATURENORMALIZE(X) returns a normalized version of X where
%   the mean value of each feature is 0 and the standard deviation
%   is 1. This is often a good preprocessing step to do when
%   working with learning algorithms.

% You need to set these values correctly
X_norm = X;
mu = zeros(1, size(X, 2));
sigma = zeros(1, size(X, 2));
n = size(X, 2); % number of features (columns), not training examples

for i = 1:n
    mu(i) = mean(X(:,i));                      % per-feature mean
    sigma(i) = std(X(:,i));                    % per-feature standard deviation
    X_norm(:,i) = (X(:,i) - mu(i)) / sigma(i); % zero mean, unit standard deviation
end

end
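
% mean and std already operate column-wise on a matrix, so the loop above is
% not strictly needed. A vectorized sketch; the elementwise form relies on
% implicit expansion (available since R2016b), older versions need bsxfun:
mu = mean(X);               % 1 x n row vector of per-feature means
sigma = std(X);             % 1 x n row vector of per-feature std deviations
X_norm = (X - mu) ./ sigma; % implicit expansion applies mu/sigma column-wise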
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by 
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

for k = 1:num_iters
    temp1 = 0;
    temp2 = 0;
    for i = 1:m
        % accumulate the gradient for each of the two parameters
        temp1 = temp1 + (theta'*X(i,:)' - y(i)) * X(i,1);
        temp2 = temp2 + (theta'*X(i,:)' - y(i)) * X(i,2);
    end
    % simultaneous update: both temps were computed with the old theta
    theta(1) = theta(1) - alpha*(1/m)*temp1;
    theta(2) = theta(2) - alpha*(1/m)*temp2;
    J_history(k) = computeCost(X, y, theta); % record cost to check convergence
end

end
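
% The temp1/temp2 bookkeeping is the manual way of making the update
% simultaneous. The whole inner loop collapses into one vectorized line,
% since the gradient is (1/m)*X'*(X*theta - y). A sketch of the loop body
% under that formulation, assuming X already carries the bias column:
for k = 1:num_iters
    theta = theta - (alpha/m) * X' * (X*theta - y); % update all parameters at once
    J_history(k) = computeCost(X, y, theta);        % track convergence
end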
function [theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters)
%GRADIENTDESCENTMULTI Performs gradient descent to learn theta
%   theta = GRADIENTDESCENTMULTI(x, y, theta, alpha, num_iters) updates theta by
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

b = size(theta, 1); % number of parameters (features + intercept)
for k = 1:num_iters
    temp = zeros(1, b);
    for i = 1:m
        for j = 1:b
            % accumulate the partial derivative with respect to theta(j)
            temp(j) = temp(j) + (theta'*X(i,:)' - y(i)) * X(i,j);
        end
    end
    % simultaneous update: every temp(j) was computed with the old theta
    for j = 1:b
        theta(j) = theta(j) - alpha*(1/m)*temp(j);
    end
    J_history(k) = computeCostMulti(X, y, theta);
end

end
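
% A typical call sequence for the multi-variable part, tying featureNormalize
% and gradientDescentMulti together. ex1data2.txt and the hyperparameter
% values below are the ones the exercise script starts from (adjust as
% needed). Note the order: normalize first, then add the bias column, so the
% column of ones is left untouched.
data = load('ex1data2.txt');          % house size, bedrooms, price
X = data(:, 1:2); y = data(:, 3);
m = length(y);
[X, mu, sigma] = featureNormalize(X); % scale features before gradient descent
X = [ones(m, 1) X];                   % add intercept term after normalizing
alpha = 0.01; num_iters = 400;
theta = zeros(3, 1);
[theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters);
plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2); % should decrease monotonically
xlabel('Number of iterations'); ylabel('Cost J');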
function [theta] = normalEqn(X, y)
%NORMALEQN Computes the closed-form solution to linear regression 
%   NORMALEQN(X,y) computes the closed-form solution to linear 
%   regression using the normal equations.

% pinv is more robust than inv when X'*X is singular or near-singular
% (e.g. redundant features); for well-conditioned problems the two agree
theta = pinv(X'*X) * X' * y;
end
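
% With the normal equation there is no feature scaling and no alpha to tune.
% Continuing from the snippet above, predicting the 1650 sq-ft, 3-bedroom
% house the exercise asks about looks like this. (A theta learned on
% normalized features would instead need the new example normalized with the
% training mu and sigma first.)
X = [ones(m, 1) data(:, 1:2)]; % raw features plus bias column
theta = normalEqn(X, y);
price = [1 1650 3] * theta;    % predicted price of a 1650 sq-ft, 3-bedroom house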