Reference book: Pattern Recognition and Machine Learning (C. M. Bishop).
This post records the experiments I ran while working through Chapter 4 of the book.
4.1.4 Fisher's Linear Discriminant
function [w, y1, y2, Jw] = FisherLinearDiscriminat(data, label)
% FLD Fisher Linear Discriminant.
%   data  : D*N data matrix (one column per sample)
%   label : 1*N vector of labels in {+1,-1}
%   w     : projection direction
%   y1,y2 : projected class means
%   Jw    : value of the Fisher criterion
% Reference: C. M. Bishop, Pattern Recognition and Machine Learning, pp.186-189

% compute class means and within-class scatter matrix
%-------------------------------
inx1 = find(label == 1);
inx2 = find(label == -1);
n1 = length(inx1);
n2 = length(inx2);
m1 = mean(data(:,inx1), 2);
m2 = mean(data(:,inx2), 2);
S1 = (data(:,inx1) - m1*ones(1,n1))*(data(:,inx1) - m1*ones(1,n1))';
S2 = (data(:,inx2) - m2*ones(1,n2))*(data(:,inx2) - m2*ones(1,n2))';
Sw = S1 + S2;

% compute the FLD direction: w is proportional to Sw^(-1)*(m1-m2)
%-------------------------------
W = Sw\(m1-m2);        % solve Sw*W = m1-m2 (more stable than inv(Sw)*(m1-m2))
y1 = W'*m1;            % projected mean of class +1
y2 = W'*m2;            % projected mean of class -1
w = W;
Jw = (y1-y2)^2/(W'*Sw*W);   % Fisher criterion: between-class over within-class scatter
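To try the function out, here is a minimal test sketch. The two Gaussian classes, their means, and the midpoint threshold are arbitrary illustrative choices, not settings from the book: it generates 2-D data, computes the FLD direction, and classifies the training points by thresholding the 1-D projections halfway between the projected class means.

% Minimal usage sketch (illustrative data, not from the book)
N = 100;
X1 = randn(2, N) + [2; 2]*ones(1, N);    % class +1 samples
X2 = randn(2, N) - [2; 2]*ones(1, N);    % class -1 samples
data  = [X1 X2];
label = [ones(1, N) -ones(1, N)];
[w, y1, y2, Jw] = FisherLinearDiscriminat(data, label);
y = w'*data;                 % 1-D projections of all samples
thresh = (y1 + y2)/2;        % midpoint threshold between projected class means
pred = ones(1, 2*N);         % y1 > y2 always holds here, since w = Sw\(m1-m2) and Sw is positive definite
pred(y < thresh) = -1;
fprintf('Fisher criterion J(w) = %.3f, training accuracy = %.3f\n', ...
        Jw, mean(pred == label));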
4.1.7 The perceptron algorithm
function [w, mis_class] = perceptron(X, t)
% The perceptron algorithm
% by LiFeiteng  email:lifeiteng0422@gmail.com
%   X : D*N input data (one column per sample)
%   t : 1*N vector of labels in {+1,-1}
%
%   w         : [w0 w1 ... wD]' learned weights (bias w0 first)
%   mis_class : number of misclassified points at termination

% basic sanity check on the labels t
if size(unique(t),2) ~= 2
    return
elseif max(t) ~= 1
    return
elseif min(t) ~= -1
    return
end

[dim, num_data] = size(X);
w = ones(dim+1, 1);              % w = [w0 w1 ... wD]'
X = [ones(1, num_data); X];      % prepend a constant feature for the bias w0
maxiter = 100000;
mis_class = 0;
iter = 0;
while iter < maxiter
    iter = iter + 1;
    y = w'*X;
    label = ones(1, num_data);   % predicted labels in {+1,-1}
    label(y <= 0) = -1;
    index = find(label ~= t);    % misclassified points
    mis_class = numel(index);    % number of misclassified points
    if mis_class == 0
        break
    end
    % perceptron update: add each misclassified pattern times its target
    for i = 1:mis_class
        w = w + X(:,index(i))*t(index(i));
    end
end
if iter == maxiter
    disp(['Reached the maximum number of iterations: ' num2str(maxiter)])
end