data = load('ex2data1.txt');
X = data(:, [1, 2]); y = data(:, 3); % features: the two exam scores; label: admitted (1) or not (0)
Visualizing the data
function plotData(X, y)
  figure; hold on;
  % Find indices of positive and negative examples
  pos = find(y == 1); neg = find(y == 0);
  % Plot examples: positives as black crosses, negatives as filled yellow circles
  plot(X(pos, 1), X(pos, 2), 'k+', 'LineWidth', 2, 'MarkerSize', 7);
  plot(X(neg, 1), X(neg, 2), 'ko', 'MarkerFaceColor', 'y', 'MarkerSize', 7);
  hold off;
end
plotData(X, y);
hold on;
xlabel('Exam 1 score')
ylabel('Exam 2 score')
legend('Admitted', 'Not admitted')
hold off;
fprintf('\nProgram paused. Press enter to continue.\n');
pause;
Sigmoid function
function g = sigmoid(z)
  % Compute the sigmoid of z; works elementwise on scalars, vectors, and matrices
  g = 1 ./ (1 + exp(-z));
end
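A quick sanity check, assuming the function above is saved as sigmoid.m: the sigmoid of 0 is exactly 0.5, and large inputs saturate toward 1 and 0.
sigmoid(0)            % ans = 0.5000
sigmoid(100)          % ans = 1.0000 (saturates near 1)
sigmoid(-100)         % effectively 0
sigmoid([0 1; -1 0])  % applies elementwise to matrices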
Cost function and gradient
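For reference, the unregularized cost being implemented and its gradient are

J(\theta) = \frac{1}{m} \sum_{i=1}^{m} \left[ -y^{(i)} \log(h_\theta(x^{(i)})) - (1 - y^{(i)}) \log(1 - h_\theta(x^{(i)})) \right]

\frac{\partial J(\theta)}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \left( h_\theta(x^{(i)}) - y^{(i)} \right) x_j^{(i)}

where h_\theta(x) = \mathrm{sigmoid}(\theta^T x). The vectorized code below computes both in a few lines.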
function [J, grad] = costFunction(theta, X, y)
  %COSTFUNCTION Compute cost and gradient for logistic regression
  %   J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
  %   parameter for logistic regression and the gradient of the cost
  %   w.r.t. the parameters. grad has the same dimensions as theta.
  m = length(y); % number of training examples
  h = sigmoid(X * theta);
  J = (1/m) * (-y' * log(h) - (1 - y)' * log(1 - h));
  grad = (1/m) * X' * (h - y); % column vector, same shape as theta
end
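With theta set to all zeros, every prediction is sigmoid(0) = 0.5, so the cost should be -log(0.5) = log(2) ≈ 0.693 regardless of the data. A quick check, assuming X has already been given its intercept column of ones (see the setup in the next section):
[cost, grad] = costFunction(zeros(size(X, 2), 1), X, y);
fprintf('Cost at initial theta (zeros): %f\n', cost); % expect approx 0.693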
Learning parameters using fminunc
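Before calling the optimizer, the design matrix needs its intercept column and the parameters need a starting point. A minimal setup sketch, mirroring what the exercise script does:
[m, n] = size(X);
X = [ones(m, 1) X];              % prepend the intercept term
initial_theta = zeros(n + 1, 1); % start the search at all zeros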
% Set options for fminunc
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Run fminunc to obtain the optimal theta
% This function will return theta and the cost
[theta, cost] = ...
    fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
By using fminunc, you did not have to write any loops yourself or pick a learning rate as you did for gradient descent; you only needed to supply a function that computes the cost and the gradient.
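To inspect the result, the returned cost and parameters can be printed, for example:
fprintf('Cost at theta found by fminunc: %f\n', cost);
fprintf('theta:\n');
fprintf(' %f\n', theta);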
Plotting the decision boundary
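With two features, the boundary is the straight line where theta(1) + theta(2)*x1 + theta(3)*x2 = 0, i.e. where the sigmoid is exactly 0.5. The assignment ships a more general plotDecisionBoundary helper; a minimal sketch of the linear case, assuming X carries the intercept column in column 1:
plotData(X(:, 2:3), y); % plot the raw exam scores
hold on;
% Two endpoints are enough to draw a straight line
plot_x = [min(X(:, 2)) - 2, max(X(:, 2)) + 2];
% Solve theta(1) + theta(2)*x1 + theta(3)*x2 = 0 for x2
plot_y = (-1 / theta(3)) * (theta(2) * plot_x + theta(1));
plot(plot_x, plot_y, 'b-');
legend('Admitted', 'Not admitted', 'Decision boundary');
hold off;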
Regularized logistic regression
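The regularized cost penalizes every parameter except the intercept term theta_1 (index 1 in MATLAB/Octave):

J(\theta) = \frac{1}{m} \sum_{i=1}^{m} \left[ -y^{(i)} \log(h_\theta(x^{(i)})) - (1 - y^{(i)}) \log(1 - h_\theta(x^{(i)})) \right] + \frac{\lambda}{2m} \sum_{j=2}^{n} \theta_j^2

The gradient likewise gains a \frac{\lambda}{m}\theta_j term for j \ge 2 only, which is why the code below pads the regularization vector with a leading zero.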
% Body of costFunctionReg(theta, X, y, lambda); note theta(1) is not regularized
h = sigmoid(X * theta);
J = (1/m) * (-y' * log(h) - (1 - y)' * log(1 - h)) + (lambda/(2*m)) * sum(theta(2:end).^2);
grad = (1/m) * X' * (h - y) + (lambda/m) * [0; theta(2:end)];
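The regularized part of the exercise uses a second dataset that is not linearly separable, so the two raw features are first expanded into polynomial terms. A sketch following the assignment's mapFeature helper (degree 6, giving 28 features including the constant term):
function out = mapFeature(X1, X2)
  % Map two features to all monomials of total degree <= 6, plus a ones column
  degree = 6;
  out = ones(size(X1(:, 1))); % first column is the intercept term
  for i = 1:degree
    for j = 0:i
      out(:, end + 1) = (X1.^(i - j)) .* (X2.^j);
    end
  end
end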
Using fminunc
% Initialize fitting parameters
initial_theta = zeros(size(X, 2), 1);
% Set regularization parameter lambda to 1 (you should vary this)
lambda = 1;
% Set Options
options = optimset('GradObj', 'on', 'MaxIter', 400);
% Optimize
[theta, J, exit_flag] = ...
fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);
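Once theta is learned, predictions threshold the sigmoid at 0.5, and training accuracy is the fraction of correct labels; a short sketch, assuming X is the same (mapped) design matrix used for training:
p = sigmoid(X * theta) >= 0.5; % predicted labels, 0 or 1
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);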