% These image-classification routines have been debugged and work as-is;
% only the parameters need adjusting.
% The SVM here uses MATLAB's built-in multi-class tool, the fitcecoc function.
function [classifer,predictIndex,accuracy] = SVM_Classify(trainFeatures,trainLabels,testFeatures,testLabels)
% SVM_Classify  Multi-class SVM classification using MATLAB's built-in fitcecoc.
%   classifer    - trained ECOC multi-class SVM model
%   predictIndex - predicted labels for testFeatures
%   accuracy     - fraction of correctly classified test samples (0..1)
classifer = fitcecoc(trainFeatures,trainLabels);
[predictIndex,~] = predict(classifer,testFeatures);
% Flatten both label vectors so a row/column orientation mismatch cannot
% break the element-wise comparison; mean of the logical vector is the
% fraction of correct predictions.
accuracy = mean(predictIndex(:) == testLabels(:));
end
%GS_LIBSVM: remember to download the LIBSVM toolbox first (easy to find online);
%place it in the folder of the program you want to run and it can be used directly.
function [ predict_label,accuracy ] = GS_LIBSVM_Classify(trainFeatures,trainLabels,testFeatures,testLabels)
% GS_LIBSVM_Classify  LIBSVM classification with grid search over (c,g).
%   predict_label - predicted labels for the test set
%   accuracy      - LIBSVM accuracy output (3-element vector:
%                   [accuracy%, MSE, squared correlation])
%   Requires LIBSVM (libsvmtrain/libsvmpredict) and SVMcgForClass on the
%   MATLAB path.
train = trainFeatures;        % training data
train_group = trainLabels;    % training labels
test = testFeatures;          % test data
test_group = testLabels;      % test labels
% Search for the optimal c and g.
% Coarse search: c & g range over 2^(-10),2^(-9),...,2^(10)
% [bestacc,bestc,bestg] = SVMcgForClass(train_group,train,-10,10);
% Fine search: c ranges over 2^(-2),2^(-1.5),...,2^(4);
% g ranges over 2^(-4),2^(-3.5),...,2^(4)
[bestacc,bestc,bestg] = SVMcgForClass(train_group,train,-2,4,-4,4,3,0.5,0.5,0.9);
% Train the model with the best (c,g) found.
cmd = ['-c ',num2str(bestc),' -g ',num2str(bestg)];
model = libsvmtrain(train_group,train,cmd);
disp(cmd);
% Classify the test set; decision values are not needed.
[predict_label, accuracy, ~] = libsvmpredict(test_group,test,model);
% Plot actual vs. predicted test-set labels.
figure;
hold on;
plot(test_group,'o');
plot(predict_label,'r*');
legend('实际测试集分类','预测测试集分类');
title('测试集的实际分类和预测分类图','FontSize',10);
end
%GA_LIBSVM
%% type == 3 for regression
%% type == 1 for classification
function [BestMSE,Bestc,Bestg,ga_option] = GA_LIBSVM_Classify(train_label,train_data,type,ga_option)
% GA_LIBSVM_Classify  Optimize LIBSVM parameters c and g with a genetic algorithm.
%   train_label, train_data - training set passed to libsvmtrain
%   type      - LIBSVM '-s' option (3 for regression, 1 for classification)
%   ga_option - optional struct with fields:
%       maxgen  : maximum generations (default 100, usual range [100,500])
%       sizepop : population size      (default 50, usual range [20,100])
%       ggap    : generation gap       (default 0.9)
%       cbound  : [cmin,cmax] search range for c (default [0,100])
%       gbound  : [gmin,gmax] search range for g (default [0,100])
%       v       : cross-validation folds (default 5)
%   Requires the Sheffield GA toolbox (rep, crtbp, bs2rv, ranking, select,
%   recombin, mut, reins) and LIBSVM's libsvmtrain.
%   NOTE(review): libsvmtrain with '-v' returns MSE for regression (lower is
%   better) but accuracy for classification (higher is better); the min()
%   selection below is only correct for regression-style objectives — verify
%   before using with type == 1.

%% Parameter initialization
if nargin == 3
    ga_option = struct('maxgen',100,'sizepop',50,'ggap',0.9,...
        'cbound',[0,100],'gbound',[0,100],'v',5);
end

MAXGEN = ga_option.maxgen;
NIND = ga_option.sizepop;
NVAR = 2;                 % two decision variables: c and g
PRECI = 20;               % bits per variable in the binary chromosome
GGAP = ga_option.ggap;
trace = zeros(MAXGEN,2);  % per-generation [best objective, mean objective]

% FieldID = [len; lb; ub; code; scale; lbin; ubin] for bs2rv:
%   len    - binary code length of each variable
%   lb/ub  - lower/upper bound of each variable
%   code   - 1: standard binary coding, 0: Gray coding
%   scale  - 1: log scale, 0: arithmetic scale
%   lbin/ubin - 1: boundary value included, 0: excluded
% Strategy: standard binary code, arithmetic scale; the first variable
% includes only its lower boundary, the second includes both.
FieldID = ...
[rep([PRECI],[1,NVAR]);[ga_option.cbound(1),ga_option.gbound(1);ga_option.cbound(2),ga_option.gbound(2)];...
[1,1;0,0;0,1;1,1]];

% crtbp(nind,lind) creates a random binary population of NIND chromosomes,
% each NVAR*PRECI bits long.
Chrom = crtbp(NIND,NVAR*PRECI);
gen = 1;
v = ga_option.v;
BestMSE = inf;
Bestc = 0;
Bestg = 0;

%% Evaluate the initial population
cg = bs2rv(Chrom,FieldID);   % decode chromosomes to real-valued (c,g) pairs
ObjV = zeros(NIND,1);        % preallocate the objective vector
for nind = 1:NIND
    cmd = ['-v ',num2str(v),' -c ',num2str(cg(nind,1)),' -g ',num2str(cg(nind,2)),' -s ',num2str(type),' -p 0.01'];
    ObjV(nind,1) = libsvmtrain(train_label,train_data,cmd);  % cross-validation objective
end
[BestMSE,I] = min(ObjV);  % best objective value and its row index
Bestc = cg(I,1);
Bestg = cg(I,2);

%% Evolution loop
while 1
    FitnV = ranking(ObjV);                    % rank-based fitness (lower ObjV -> higher fitness)
    SelCh = select('sus',Chrom,FitnV,GGAP);   % stochastic universal sampling
    SelCh = recombin('xovsp',SelCh,0.7);      % single-point crossover, rate 0.7
    SelCh = mut(SelCh);                       % bit-flip mutation (toolbox default rate)
    cg = bs2rv(SelCh,FieldID);
    % Preallocate per generation so no stale values survive if the
    % offspring count ever differs from a previous iteration.
    ObjVSel = zeros(size(SelCh,1),1);
    for nind = 1:size(SelCh,1)
        cmd = ['-v ',num2str(v),' -c ',num2str(cg(nind,1)),' -g ',num2str(cg(nind,2)),' -s ', num2str(type),' -p 0.01'];
        ObjVSel(nind,1) = libsvmtrain(train_label,train_data,cmd);
    end
    % Reinsert offspring into the population, keeping the fittest.
    [Chrom,ObjV] = reins(Chrom,SelCh,1,1,ObjV,ObjVSel);
    [NewBestCVaccuracy,I] = min(ObjV);
    cg_temp = bs2rv(Chrom,FieldID);
    temp_NewBestCVaccuracy = NewBestCVaccuracy;
    if NewBestCVaccuracy < BestMSE
        BestMSE = NewBestCVaccuracy;
        Bestc = cg_temp(I,1);
        Bestg = cg_temp(I,2);
    end
    % Among near-equal objectives, prefer the smaller c (simpler model).
    if abs( NewBestCVaccuracy-BestMSE ) <= 10^(-2) && ...
            cg_temp(I,1) < Bestc
        BestMSE = NewBestCVaccuracy;
        Bestc = cg_temp(I,1);
        Bestg = cg_temp(I,2);
    end
    trace(gen,1) = min(ObjV);
    trace(gen,2) = sum(ObjV)/length(ObjV);
    % Early stop: past half the budget with no meaningful improvement.
    if gen >= MAXGEN/2 && ...
            ( temp_NewBestCVaccuracy-BestMSE ) <= 10^(-2)
        break;
    end
    if gen == MAXGEN
        break;
    end
    gen = gen + 1;
end

%% Plot the convergence curves
figure;
hold on;
trace = round(trace*10000)/10000;  % round for a cleaner plot
plot(trace(1:gen,1),'r*-','LineWidth',1);
plot(trace(1:gen,2),'o-','LineWidth',1);
legend('Best','Average');
xlabel('Generation','FontSize',10);
ylabel('Fitness Value','FontSize',10);
grid on;
axis auto;
line1 = 'Genetic Algorithm';
line2 = ['Minimum MSE=',num2str(BestMSE)];
line3 = ['Optimized c=',num2str(Bestc),' Optimized g=',num2str(Bestg)];
title({line1;line2;line3},'FontSize',11);
hold off;
end
% MATLAB code for image classification with support vector machines (SVM),
% including GS_LIBSVM and GA_LIBSVM variants.
% (Blog metadata: latest recommended article published 2024-07-18 16:15:47)