%cost function
function J = computeCost(X, y, theta)
%COMPUTECOST Compute cost for linear regression.
%   J = COMPUTECOST(X, y, theta) computes the mean-squared-error cost of
%   using theta as the parameter vector for linear regression to fit the
%   data points in X and y.
%
%   X     - m-by-(n+1) design matrix (first column assumed to be all ones)
%   y     - m-by-1 vector of target values
%   theta - (n+1)-by-1 parameter vector
%
%   Returns the scalar cost J = (1/(2m)) * sum((X*theta - y).^2).

  m = length(y);  % number of training examples

  % Vectorized form: generalizes the original element-wise loop, which
  % hard-coded theta(1) and theta(2) and therefore only handled a single
  % feature. Identical result when X(:,1) is all ones.
  errors = X * theta - y;          % m-by-1 residuals
  J = (errors' * errors) / (2 * m);
endfunction
%gradient descent algorithm
function [theta, J_history] = gradientDescent(X, y, theta, alpha, num_iters)
%GRADIENTDESCENT Perform gradient descent to learn theta.
%   [theta, J_history] = GRADIENTDESCENT(X, y, theta, alpha, num_iters)
%   updates theta by taking num_iters gradient steps with learning rate
%   alpha, recording the cost after each step in J_history.
%
%   X         - m-by-(n+1) design matrix (first column assumed all ones)
%   y         - m-by-1 vector of target values
%   theta     - (n+1)-by-1 initial parameter vector
%   alpha     - scalar learning rate
%   num_iters - number of gradient steps to take

  m = length(y);                     % number of training examples
  J_history = zeros(num_iters, 1);   % cost recorded after every iteration

  for iter = 1:num_iters
    % Simultaneous update of all parameters (vectorized). Generalizes the
    % original two-parameter version, which hard-coded theta(1)/theta(2),
    % to any number of features; results are identical for the 2-param case.
    errors = X * theta - y;
    theta = theta - (alpha / m) * (X' * errors);

    % Record the cost after this step. Computed inline (equivalent to
    % computeCost(X, y, theta)) so this function is self-contained.
    errors = X * theta - y;
    J_history(iter) = (errors' * errors) / (2 * m);
  endfor
endfunction
%cost function
function J = computeCostMulti(X, y, theta)
%COMPUTECOSTMULTI Compute cost for linear regression with multiple variables.
%   J = COMPUTECOSTMULTI(X, y, theta) computes the mean-squared-error cost
%   of using theta as the parameter vector for linear regression to fit the
%   data points in X and y.
%
%   X     - m-by-(n+1) design matrix (first column assumed to be all ones)
%   y     - m-by-1 vector of target values
%   theta - (n+1)-by-1 parameter vector
%
%   Returns the scalar cost J = (1/(2m)) * sum((X*theta - y).^2).

  m = length(y);  % number of training examples

  % Vectorized form: the original loop hard-coded exactly three parameters
  % (theta(1..3)), defeating the purpose of the multi-variable version.
  % This handles any number of features and matches the original when n = 2.
  errors = X * theta - y;          % m-by-1 residuals
  J = (errors' * errors) / (2 * m);
endfunction
% gradientDescent
function [theta, J_history] = gradientDescentMulti(X, y, theta, alpha, num_iters)
%GRADIENTDESCENTMULTI Perform gradient descent to learn theta (multivariate).
%   [theta, J_history] = GRADIENTDESCENTMULTI(X, y, theta, alpha, num_iters)
%   updates theta by taking num_iters gradient steps with learning rate
%   alpha, recording the cost after each step in J_history.
%
%   X         - m-by-(n+1) design matrix (first column assumed all ones)
%   y         - m-by-1 vector of target values
%   theta     - (n+1)-by-1 initial parameter vector
%   alpha     - scalar learning rate
%   num_iters - number of gradient steps to take

  m = length(y);                     % number of training examples
  J_history = zeros(num_iters, 1);   % cost recorded after every iteration

  for iter = 1:num_iters
    % Simultaneous update of all parameters (vectorized). Generalizes the
    % original, which hard-coded exactly three parameters (theta(1..3)),
    % to any number of features; results are identical when n = 2.
    errors = X * theta - y;
    theta = theta - (alpha / m) * (X' * errors);

    % Record the cost after this step. Computed inline (equivalent to
    % computeCostMulti(X, y, theta)) so this function is self-contained.
    errors = X * theta - y;
    J_history(iter) = (errors' * errors) / (2 * m);
  endfor
endfunction
% Normal equation: closed-form solution for theta (no iteration or feature scaling needed)
function [theta] = normalEqn(X, y)
%NORMALEQN Closed-form solution to linear regression.
%   theta = NORMALEQN(X, y) solves the least-squares problem directly via
%   the normal equations: theta = (X'X)^(-1) X' y.
%
%   X - m-by-(n+1) design matrix
%   y - m-by-1 vector of target values
%
%   Returns the (n+1)-by-1 parameter vector theta.

  % pinv (Moore-Penrose pseudoinverse) rather than inv keeps the result
  % well-defined even when X'X is singular or ill-conditioned.
  theta = pinv(X' * X) * (X' * y);
endfunction
% Normal equation method (duplicate definition below)
% NOTE(review): this is a byte-for-byte duplicate of the normalEqn defined
% earlier in this file. Defining the same function name twice in one file is
% at best redundant and likely rejected when Octave loads the file — confirm
% and remove one of the two copies.
function [theta]=normalEqn(X, y)%NORMALEQN Computes the closed-form solution to linear regression
%NORMALEQN(X,y) computes the closed-form solution to linear
% regression using the normal equations.% i.e. solve for theta directly: theta = (X'X)^(-1) X' y
theta =zeros(size(X,2),1);% placeholder initialization; immediately overwritten below
% ---------------------- Sample Solution ----------------------
theta=pinv(X'*X)*X'*y;% pinv keeps this well-defined even if X'X is singular
endfunction