% Part 1: Regularized Logistic Regression -- cost and gradient
% Regularized logistic regression cost and gradient.
% Expects in scope: X (m x n+1 design matrix), y (m x 1 labels in {0,1}),
% theta (n+1 x 1 parameters), m (number of examples), lambda (reg. strength).
% Produces: J (scalar cost), grad ((n+1) x 1 gradient).
h = sigmoid(X * theta);                       % hypothesis, m x 1

% Cross-entropy term plus L2 penalty; theta(1) (bias) is NOT regularized.
unreg_cost = sum(-y .* log(h) - (1 - y) .* log(1 - h)) / m;
reg_cost   = (lambda / (2 * m)) * sum(theta(2:end) .^ 2);
J = unreg_cost + reg_cost;

% Gradient: add the regularization term to every component except the
% first by zero-padding the penalty vector (equivalent to save/restore
% of grad(1) around a blanket update).
grad = X' * (h - y) / m + (lambda / m) * [0; theta(2:end)];
% Part 2: One-vs-all training.
% Fits one regularized logistic-regression classifier per label; row k of
% all_theta holds the learned parameters for the "is label k?" classifier.
% Expects in scope: num_labels, n (feature count), X, y, lambda, all_theta,
% plus project helpers fmincg and lrCostFunction.
for k = 1:num_labels
    % Fresh start for each binary classifier.
    initial_theta = zeros(n + 1, 1);

    % Supply the gradient to the optimizer and cap the iterations.
    options = optimset('GradObj', 'on', 'MaxIter', 50);

    % Binary relabeling: examples of class k become 1, all others 0.
    [theta] = fmincg(@(t)(lrCostFunction(t, X, (y == k), lambda)), ...
                     initial_theta, options);

    all_theta(k, :) = theta';
end
% Part 3: One-vs-all classifier prediction
% Predict the label for each example as the classifier with the highest
% sigmoid score; max over dim 2 returns the column index, i.e. the label.
% Expects in scope: X (m x n+1), all_theta (num_labels x n+1). Produces p (m x 1).
scores = sigmoid(X * all_theta');   % m x num_labels probability-like scores
[~, p] = max(scores, [], 2);        % index of best classifier per row
% Part 4: Feed-forward prediction with a trained 3-layer neural network.
% Expects in scope: X (m x n), m, Theta1 (h x n+1), Theta2 (K x h+1).
% Produces p (m x 1), the predicted label per example.
% Row-oriented formulation: each example stays a row throughout, so the
% bias column is prepended once per layer (equivalent to the transposed
% column-oriented form -- same dot products, same results).
a1 = [ones(m, 1) X];           % input layer with bias column
a2 = sigmoid(a1 * Theta1');    % hidden layer activations, m x h
a2 = [ones(m, 1) a2];          % add bias unit to hidden layer
a3 = sigmoid(a2 * Theta2');    % output layer, m x K
output = a3;

% Predicted class = index of the largest output unit per example.
[~, p] = max(output, [], 2);