1. MAIN SCRIPT:
% Load the two-feature microchip QA dataset: columns 1-2 are the two test
% results, column 3 is the 0/1 accept label.
raw = load('ex2data2.txt');
X = raw(:, [1 2]);
y = raw(:, 3);

% Visualize the raw data.
plotData(X, y);
xlabel('Microchip Test 1');
ylabel('Microchip Test 2');
legend('y=1','y=0');

% Expand the two features into all polynomial terms up to degree 6
% (28 columns, including the leading intercept column of ones).
X28 = mapFeature(X(:,1), X(:,2));

% Fit regularized logistic regression with fminunc, supplying the
% analytic gradient from costFunctionReg.
initial_theta = zeros(size(X28, 2), 1);
lambda = 1;
options = optimset('display','iter','Gradobj','on','maxiter',400,'Algorithm','quasi-newton');
[theta, J, exit_flag] = fminunc(@(t)(costFunctionReg(t, X28, y, lambda)), initial_theta, options);

% Draw the learned decision boundary over the data.
plotDecisionBoundary(theta, X28, y);
hold on;
title(sprintf('lambda = %g', lambda))
% Labels and Legend
xlabel('Microchip Test 1')
ylabel('Microchip Test 2')
legend('y = 1', 'y = 0', 'Decision boundary')
hold off;
2. HELPER FUNCTIONS
(1) DECISION BOUNDARY PLOTTING FUNCTION
function plotDecisionBoundary(theta, X, y)
%PLOTDECISIONBOUNDARY Plots the data points X and y into a new figure with
%the decision boundary defined by theta
%   PLOTDECISIONBOUNDARY(theta, X, y) plots the data points with + for the
%   positive examples and o for the negative examples. X is assumed to be
%   either
%   1) an Mx3 matrix, where the first column is an all-ones column for the
%      intercept.
%   2) an MxN matrix, N > 3, where the first column is all-ones
%
%   In the Mx3 case the boundary theta'*x = 0 is drawn as an explicit
%   line; otherwise it is drawn as the zero contour of
%   mapFeature(u, v)*theta over a grid.

% Plot the (non-intercept) data columns
plotData(X(:,2:3), y);
hold on
if size(X, 2) <= 3
    % Only need 2 points to define a line, so choose two endpoints just
    % outside the observed range of the first feature
    plot_x = [min(X(:,2))-2, max(X(:,2))+2];
    % Solve theta(1) + theta(2)*x + theta(3)*y = 0 for y
    plot_y = (-1./theta(3)).*(theta(2).*plot_x + theta(1));
    % Plot, and adjust axes for better viewing.
    % BUG FIX: LineSpec color codes must be lowercase ('g'); 'G' is not a
    % valid color specifier.
    plot(plot_x, plot_y, 'g', 'linewidth', 1)
    % Legend, specific for the exercise
    legend('Admitted', 'Not admitted', 'Decision Boundary')
    axis([30, 100, 30, 100])
else
    % Here is the grid range
    u = linspace(-1, 1.5, 50);
    v = linspace(-1, 1.5, 50);
    z = zeros(length(u), length(v));
    % Evaluate z = theta*x over the grid
    for i = 1:length(u)
        for j = 1:length(v)
            z(i,j) = mapFeature(u(i), v(j))*theta;
        end
    end
    z = z'; % important to transpose z before calling contour
    % Plot z = 0
    % Notice you need to specify the level list [0, 0] to get the single
    % contour at zero
    contour(u, v, z, [0, 0], 'g', 'LineWidth', 1)
end
hold off
end % BUG FIX: keyword was written 'enD' — MATLAB keywords are lowercase-only
(2) Regularized Cost Function and Gradient
function [J, grad] = costFunctionReg(theta, X, y, lambda)
%COSTFUNCTIONREG Regularized logistic regression cost and gradient.
%   [J, grad] = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of
%   using theta as the parameter vector for regularized logistic
%   regression on the design matrix X (m x n, first column assumed to be
%   the all-ones intercept column) with labels y (m x 1, values 0/1), and
%   the gradient of the cost with respect to theta.
%   The intercept term theta(1) is not regularized.
%
%   Replaces the original per-element loops with the standard vectorized
%   form; the original also iterated 'for i=2:size(theta)', which applies
%   the colon operator to the size *vector* [n 1] and only works because
%   MATLAB silently uses its first element.

m = length(y); % number of training examples

% Hypothesis h = g(X*theta), g being the logistic sigmoid
h = sigmoid(X * theta);

% Unregularized cross-entropy cost (vectorized)
J = (-y' * log(h) - (1 - y)' * log(1 - h)) / m;

% Add the regularization term, skipping the intercept theta(1)
J = J + (lambda / (2 * m)) * sum(theta(2:end) .^ 2);

% Gradient: unregularized part for every component ...
grad = (X' * (h - y)) / m;

% ... plus the regularization term for all components except the intercept
grad(2:end) = grad(2:end) + (lambda / m) * theta(2:end);
end
(3) MapFeature: maps two input features to polynomial features (e.g. degree = 6)
function out = mapFeature(X1, X2, degree)
%MAPFEATURE Maps the two input features to polynomial features.
%   out = MAPFEATURE(X1, X2) maps column vectors X1 and X2 (same size) to
%   all polynomial terms X1.^p .* X2.^q with 0 < p + q <= 6, preceded by
%   a column of ones for the intercept:
%   [1, X1, X2, X1.^2, X1.*X2, X2.^2, X1.^3, ...]
%   out = MAPFEATURE(X1, X2, degree) uses the given maximum degree
%   instead (default 6, preserving the original behavior).

if nargin < 3
    degree = 6; % default keeps backward compatibility with 2-arg callers
end

% Preallocate: total number of columns is (degree+1)(degree+2)/2
% (the original grew out(:, end+1) inside the loop).
m = size(X1(:,1), 1);
out = ones(m, (degree + 1) * (degree + 2) / 2);

col = 1; % column 1 is the intercept; fill the rest in the original order
for i = 1:degree
    for j = 0:i
        col = col + 1;
        out(:, col) = (X1 .^ (i - j)) .* (X2 .^ j);
    end
end
end
(4) PlotData: plots the examples of the data [X, y] by class.
function plotData(X, y)
%PLOTDATA Plots the data points X and y.
%   PLOTDATA(X, y) plots the data points with k+ markers for the positive
%   examples (y == 1) and yellow-filled ko markers for the negative
%   examples (y == 0). X is assumed to be an Mx2 matrix.

pos = find(y == 1); % row indices of positive examples
neg = find(y == 0); % row indices of negative examples

plot(X(pos,1), X(pos,2), 'k+', 'linewidth', 2);
% BUG FIX: without 'hold on' the second plot call replaced the first, so
% the positive examples were never visible in the figure.
hold on;
plot(X(neg,1), X(neg,2), 'ko', 'markersize', 7, 'markerfacecolor', 'y');
hold off;
end