- 逻辑回归—多分类
% Driver script: train a multi-class (one-vs-all) regularized logistic
% regression classifier on the ex3 digit data and report training accuracy.

% Load the data; the .mat file populates X (examples) and y (labels).
load('ex3data1.mat');
m = size(X, 1);            % number of training examples

% BUG FIX: num_labels was used below but never defined, so the script errored.
% 10 classes for the handwritten-digit data (label 10 stands for digit "0")
% — NOTE(review): confirm against the dataset's label encoding.
num_labels = 10;
lambda = 0.1;              % regularization strength

% Train one classifier per class.
% all_theta is num_labels x (n+1): one parameter row per class
% (the two-class case would be a single (n+1) x 1 theta vector).
[all_theta] = oneVsAll(X, y, num_labels, lambda);

% Training-set accuracy of the one-vs-all predictor.
pred = predictOneVsAll(all_theta, X);
fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
% Regularized logistic-regression cost J and gradient grad — same form as the
% two-class case, with an L2 penalty that excludes the bias term theta(1).
h = sigmoid(X * theta);              % hypothesis for every training example
reg_theta = [0; theta(2:end)];       % theta with the bias entry zeroed, so it is not penalized
J = (1 / m) * (-(y') * log(h) - (1 - y)' * log(1 - h)) ...
    + (lambda / (2 * m)) * (reg_theta' * reg_theta);
grad = (1 / m) * (X' * (h - y)) + (lambda / m) * reg_theta;
% One-vs-all training: fit one regularized logistic-regression classifier
% per class; row c of all_theta holds the parameters for class c.
n = size(X, 2);                           % number of input features
all_theta = zeros(num_labels, n + 1);     % one (n+1)-vector of parameters per class
initial_theta = zeros(n + 1, 1);          % common starting point for every classifier
options = optimset('GradObj', 'on', 'MaxIter', 50);
for c = 1:num_labels
    % Binary sub-problem: class c versus the rest ((y == c) yields 0/1 labels).
    all_theta(c, :) = fmincg(@(t) lrCostFunction(t, X, (y == c), lambda), ...
                             initial_theta, options);
end
% Prediction: score every example against every class, then take the
% per-row maximum — its column index p is the predicted class label.
scores = sigmoid(X * (all_theta'));    % m x num_labels matrix of class probabilities
[p_value, p] = max(scores, [], 2);     % max along dim 2: best score and its class index
- 神经网络
% Driver script: predict with a pre-trained 3-layer neural network and
% report training-set accuracy on the ex3 digit data.

% Load the data; the .mat file populates X (examples) and y (labels).
load('ex3data1.mat');
m = size(X, 1);           % number of training examples

% Load the pre-trained weight matrices Theta1 and Theta2.
load('ex3weights.mat');

% Feed-forward prediction and training-set accuracy.
pred = predict(Theta1, Theta2, X);
fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
% Feed-forward prediction through a 3-layer network: input -> hidden -> output.
% Theta1 is 25 x 401 (25 hidden units, 400 input units + bias);
% Theta2 is 10 x 26  (10 output units, 25 hidden units + bias).
% NOTE(review): m is assumed to be set earlier in the function (m = size(X,1)) — confirm.
X = [ones(m, 1) X];                          % prepend the bias column to the inputs
hidden = sigmoid(X * Theta1');               % hidden-layer activations
rows_hidden = size(hidden, 1);
hidden = [ones(rows_hidden, 1) hidden];      % prepend the bias unit a_0 = 1
h_theta = sigmoid(hidden * Theta2');         % output-layer activations
[p_value, p] = max(h_theta, [], 2);          % predicted class = column index of the row max
ex3练习
最新推荐文章于 2022-07-28 18:39:46 发布