SVR (based on the article "Libsvm使用笔记【matlab】", Libsvm usage notes for MATLAB)
close all;
clc
clear
%% Load data
load('p_train.mat');
load('p_test.mat');
load('t_train.mat');
load('t_test.mat');
%% Data normalization
% Normalize the input samples
[pn_train,ps1] = mapminmax(p_train');
pn_train = pn_train';
pn_test = mapminmax('apply',p_test',ps1);
pn_test = pn_test';
% Normalize the output samples
[tn_train,ps2] = mapminmax(t_train');
tn_train = tn_train';
tn_test = mapminmax('apply',t_test',ps2);
tn_test = tn_test';
%% SVR model creation/training
% Grid-search the best c/g parameters via cross-validation.
% An SVM has two crucial parameters, C and gamma.
% C is the penalty coefficient, i.e. the tolerance for training errors:
% the larger C is, the less error is tolerated and the easier it is to overfit;
% the smaller C is, the easier it is to underfit. Either extreme hurts generalization.
% gamma is the parameter that comes with the RBF kernel; it implicitly determines
% how the data are distributed after being mapped into the new feature space.
% The larger gamma, the fewer support vectors; the smaller gamma, the more support
% vectors. The number of support vectors affects training and prediction speed.
[c,g] = meshgrid(-10:0.5:10,-10:0.5:10);   % grids of log2 exponents for C and gamma
[m,n] = size(c);
cg = zeros(m,n);    % cross-validation MSE for each (C,gamma) pair
eps = 10^(-4);      % tolerance for treating two CV errors as equal (shadows the built-in eps)
v = 10;             % 10-fold cross-validation
bestc = 0;
bestg = 0;
error = Inf;        % lowest CV error found so far
for i = 1:m
    for j = 1:n
        % '-s 3' selects epsilon-SVR, '-t 2' the RBF kernel; with '-v',
        % libsvmtrain returns the cross-validation MSE instead of a model
        cmd = ['-v ',num2str(v),' -t 2',' -c ',num2str(2^c(i,j)),' -g ',num2str(2^g(i,j)),' -s 3 -p 0.1'];
        cg(i,j) = libsvmtrain(tn_train,pn_train,cmd);
        if cg(i,j) < error
            error = cg(i,j);
            bestc = 2^c(i,j);
            bestg = 2^g(i,j);
        end
        % On a (near-)tie, prefer the smaller C for better generalization
        if abs(cg(i,j) - error) <= eps && bestc > 2^c(i,j)
            error = cg(i,j);
            bestc = 2^c(i,j);
            bestg = 2^g(i,j);
        end
    end
end
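% Optional sketch (not in the source article): visualize the CV MSE surface
% to check that (bestc,bestg) sits in a broad low-error region rather than
% on a noisy spike; uses c, g and cg built by the grid search above.
figure
contourf(c,g,cg,20)
colorbar
xlabel('log2(C)')
ylabel('log2(gamma)')
title(['CV MSE, best log2(C)=',num2str(log2(bestc)),', log2(gamma)=',num2str(log2(bestg))])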
% Create/train the final SVR with the best parameters
% ('-p' kept at 0.1, the same epsilon-tube used during the grid search)
cmd = ['-t 2',' -c ',num2str(bestc),' -g ',num2str(bestg),' -s 3 -p 0.1'];
model = libsvmtrain(tn_train,pn_train,cmd);
%% SVR simulation/prediction
[Predict_1,error_1,dec_values_1] = libsvmpredict(tn_train,pn_train,model);
[Predict_2,error_2,dec_values_2] = libsvmpredict(tn_test,pn_test,model);
% Denormalize back to the original scale
predict_1 = mapminmax('reverse',Predict_1,ps2);
predict_2 = mapminmax('reverse',Predict_2,ps2);
%% Error metrics
[len,~] = size(predict_2);
[len2,~] = size(predict_1);
error = t_test - predict_2;     % test-set residuals
error2 = t_train - predict_1;   % training-set residuals
error2 = error2';
error = error';
MAPE1 = sum(abs(error./t_test'))/len;   % mean absolute percentage error (test set)
MSE1 = error*error'/len;                % mean squared error (test set)
MSE2 = error2*error2'/len2;             % mean squared error (training set)
RMSE2 = MSE2^(1/2);
RMSE1 = MSE1^(1/2);
R1 = corrcoef(t_test,predict_2);
R2 = corrcoef(t_train,predict_1);
r1 = R1(1,2);   % test-set correlation coefficient
r2 = R2(1,2);   % training-set correlation coefficient
disp('........ SVR error metrics ................')
disp(['MAPE: ',num2str(MAPE1)])
disp(['MSE (test): ',num2str(MSE1)])
disp(['RMSE (test): ',num2str(RMSE1)])
disp(['Correlation coefficient r1 (test): ',num2str(r1)])
disp(['Correlation coefficient r2 (train): ',num2str(r2)])
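% Optional sketch (not in the source article): R^2 computed as 1 - SSE/SST,
% the same formula used in the RBF section below, so the three models can be
% compared on the same metric. R2_test/R2_train are names introduced here.
R2_test = 1 - norm(t_test - predict_2)^2 / norm(t_test - mean(t_test))^2;
R2_train = 1 - norm(t_train - predict_1)^2 / norm(t_train - mean(t_train))^2;
disp(['Test-set R2: ',num2str(R2_test)])
disp(['Training-set R2: ',num2str(R2_train)])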
figure(1)
plot(1:length(t_train),t_train,'r-*',1:length(t_train),predict_1,'b:o')
grid on
legend('Actual','Predicted')
xlabel('Sample index')
ylabel('Value')
string_1 = {'Training set: prediction vs. actual';
    ['mse = ' num2str(error_1(2)) ' R^2 = ' num2str(error_1(3))]};
title(string_1)
figure(2)
plot(1:length(t_test),t_test,'r-*',1:length(t_test),predict_2,'b:o')
grid on
legend('Actual','Predicted')
xlabel('Sample index')
ylabel('Value')
string_2 = {'Test set: prediction vs. actual';
    ['mse = ' num2str(error_2(2)) ' R^2 = ' num2str(error_2(3))]};
title(string_2)
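% Optional sketch (not in the source article): persist the trained model
% together with both normalization settings so new data can be transformed
% identically at prediction time; 'svr_model.mat' is an arbitrary file name.
save('svr_model.mat','model','ps1','ps2');
% Reuse later, with x_new as a placeholder for new input rows:
% load('svr_model.mat');
% xn = mapminmax('apply',x_new',ps1)';
% yn = libsvmpredict(zeros(size(xn,1),1),xn,model);   % dummy labels
% y  = mapminmax('reverse',yn,ps2);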
RF random forest (based on the blog post "随机森林 matlab", Random Forest in MATLAB)
close all
clear
clc
%% Load data
% load works for purely numeric CSV files; in recent MATLAB releases xlsread
% is discouraged and readmatrix is the documented replacement
data=load('训练集整合2.csv');    % training set
data2=load('测试集整合2.csv');   % test set
% Inputs and outputs
input=data(:,2:end);    % training inputs: columns 2 to end
output=data(:,1);       % training target: column 1
input2=data2(:,2:end);  % test inputs: columns 2 to end
output2=data2(:,1);     % test target: column 1
%% Number of Leaves and Trees Optimization
for RFOptimizationNum=1:2   % repeat to check that the OOB curves are stable across runs
    RFLeaf=[5,10,20,50,100,200,500];   % candidate MinLeafSize values
    col='rgbcmyk';
    figure('Name','RF Leaves and Trees');
    for i=1:length(RFLeaf)
        RFModel=TreeBagger(2000,input,output,'Method','R','OOBPrediction','On','MinLeafSize',RFLeaf(i));
        plot(oobError(RFModel),col(i));
        hold on
    end
    xlabel('Number of Grown Trees');
    ylabel('Mean Squared Error');
    LeafTreelgd=legend({'5' '10' '20' '50' '100' '200' '500'},'Location','NorthEast');
    title(LeafTreelgd,'Number of Leaves');
    hold off;
    disp(RFOptimizationNum);
end
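% Optional sketch (not in the source blog): pick MinLeafSize programmatically
% as the leaf size whose final OOB error is lowest; 200 trees per candidate
% keeps this quick and is an illustrative choice. mdl/e/bestOOB/bestLeaf are
% names introduced here.
bestOOB=Inf; bestLeaf=RFLeaf(1);
for i=1:length(RFLeaf)
    mdl=TreeBagger(200,input,output,'Method','R','OOBPrediction','On','MinLeafSize',RFLeaf(i));
    e=oobError(mdl);
    if e(end)<bestOOB
        bestOOB=e(end);
        bestLeaf=RFLeaf(i);
    end
end
disp(['Suggested MinLeafSize: ',num2str(bestLeaf)]);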
%% Cycle Preparation
RFScheduleBar=waitbar(0,'Random Forest is Solving...');
RFRMSEMatrix=[];
RFrAllMatrix=[];
RFRunNumSet=50000;
for RFCycleRun=1:RFRunNumSet
    %% Training Set and Test Set Division
    % RandomNumber is a leftover from the source blog's random 20% test split;
    % here the train/test sets come from the two CSV files, so it is unused below
    RandomNumber=(randperm(length(output),floor(length(output)*0.2)))';
    TrainYield=output;    % training-set dependent variable
    TestYield=output2;    % test-set dependent variable
    TrainVARI=input;      % training-set independent variables
    TestVARI=input2;      % test-set independent variables
    %% RF
    nTree=80;
    nLeaf=5;
    RFModel=TreeBagger(nTree,TrainVARI,TrainYield,...
        'Method','regression','OOBPredictorImportance','on','MinLeafSize',nLeaf);
    % Second output is the per-tree standard deviation of the predictions,
    % not a true confidence interval
    [RFPredictYield,RFPredictConfidenceInterval]=predict(RFModel,TestVARI);
    trainPredict=predict(RFModel,TrainVARI);
    %% Accuracy of RF
    RFRMSE=sqrt((sum((RFPredictYield-TestYield).^2))/size(TestYield,1));
    RFrMatrix=corrcoef(RFPredictYield,TestYield);
    RFr=RFrMatrix(1,2);
    RFRMSEMatrix=[RFRMSEMatrix,RFRMSE];
    RFrAllMatrix=[RFrAllMatrix,RFr];
    if RFRMSE<1000   % accept the first forest that beats this RMSE threshold
        disp(RFRMSE);
        break;
    end
    disp(RFCycleRun);
    str=['Random Forest is Solving...',num2str(100*RFCycleRun/RFRunNumSet),'%'];
    waitbar(RFCycleRun/RFRunNumSet,RFScheduleBar,str);
end
close(RFScheduleBar);
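% Optional sketch (not in the source blog): summarize the recorded runs;
% useful when the RMSE threshold is not met on the first pass.
if ~isempty(RFRMSEMatrix)
    fprintf('RF runs: %d, best RMSE: %.4f, mean RMSE: %.4f\n',...
        length(RFRMSEMatrix),min(RFRMSEMatrix),mean(RFRMSEMatrix));
end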
%% Variable Importance Contrast
VariableImportanceX=cell(1,size(input,2));   % predictor labels for the importance plot
for i=1:size(input,2)
    VariableImportanceX{i}=num2str(i);   % label each predictor by its column index (no eval needed)
end
figure('Name','Variable Importance Contrast');
VariableImportanceX=categorical(VariableImportanceX);
bar(VariableImportanceX,RFModel.OOBPermutedPredictorDeltaError)
xtickangle(45);
set(gca, 'XDir','normal')
xlabel('Factor');
ylabel('Importance');
%% RF Model Storage
RFModelSavePath='D:\Program Files\MATLAB\R2022b\toolbox\libsvm-3.31\windows';
% fullfile supplies the file separator that sprintf('%sRF1.mat',...) omitted
save(fullfile(RFModelSavePath,'RF1.mat'),'nLeaf','nTree',...
    'RandomNumber','RFModel','RFPredictConfidenceInterval','RFPredictYield','RFr','RFRMSE',...
    'TestVARI','TestYield','TrainVARI','TrainYield');
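% Optional sketch (not in the source blog): reload the stored model and
% predict on new data; newData is a placeholder for a matrix with the same
% columns as input.
% S=load(fullfile(RFModelSavePath,'RF1.mat'));
% yNew=predict(S.RFModel,newData);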
RBF radial basis function neural network
clear
clc
pp=load('训练集整合2.csv');    % training set
ppp=load('测试集整合2.csv');   % test set
p_train=pp(:,2:end)';   % training inputs (features)
T_train=pp(:,1)';       % training targets
p_test=ppp(:,2:end)';   % test inputs
T_test=ppp(:,1)';       % test targets
M=size(p_train,2);      % number of training samples
N=size(p_test,2);       % number of test samples
%% Data normalization
[p_train,ps_input]=mapminmax(p_train,0,1);
p_test=mapminmax('apply',p_test,ps_input);
[t_train,ps_output]=mapminmax(T_train,0,1);
t_test=mapminmax('apply',T_test,ps_output);
%% Create the network
rbf_spread=2000;   % spread of the radial basis functions
net=newrbe(p_train,t_train,rbf_spread);   % exact-design RBF network: one neuron per training sample
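% Alternative sketch (not in the source): newrb grows the network one neuron
% at a time until the error goal is met, which usually yields a smaller and
% less overfit network than newrbe; goal/maxNeurons are illustrative values.
% goal=1e-3; maxNeurons=100;
% net=newrb(p_train,t_train,goal,rbf_spread,maxNeurons);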
%% Simulate on the training and test sets
t_sim1=sim(net,p_train);
t_sim2=sim(net,p_test);
%% Denormalize back to the original scale
T_sim1=mapminmax('reverse',t_sim1,ps_output);
T_sim2=mapminmax('reverse',t_sim2,ps_output);
%% Root mean square error
error1=sqrt(sum((T_sim1-T_train).^2)./M);
error2=sqrt(sum((T_sim2-T_test).^2)./N);
%% View the network structure
view(net);
%% Plot
figure
plot(1:M,T_train,'r-*',1:M,T_sim1,'b-o','LineWidth',1)
legend('Actual','Predicted')
xlabel('Training sample')
ylabel('Prediction')
string={'Training set: prediction vs. actual';['RMSE=' num2str(error1)]};
title(string)
xlim([1,M])   % call xlim as a function; assigning to it would shadow the built-in
grid
figure
plot(1:N,T_test,'r-*',1:N,T_sim2,'b-o','LineWidth',1)
legend('Actual','Predicted')
xlabel('Test sample')
ylabel('Prediction')
string={'Test set: prediction vs. actual';['RMSE=' num2str(error2)]};
title(string)
xlim([1,N])
grid
%% Metrics
% R2
R1 = 1 - norm(T_train - T_sim1)^2 / norm(T_train - mean(T_train))^2;
R2 = 1 - norm(T_test - T_sim2)^2 / norm(T_test - mean(T_test))^2;
disp(['Training-set R2: ',num2str(R1)])
disp(['Test-set R2: ',num2str(R2)])
% MAE
mae1 = sum(abs(T_sim1 - T_train)) ./ M;
mae2 = sum(abs(T_sim2 - T_test)) ./ N;
disp(['Training-set MAE: ',num2str(mae1)])
disp(['Test-set MAE: ',num2str(mae2)])
% MBE
mbe1 = sum(T_sim1 - T_train) ./ M;
mbe2 = sum(T_sim2 - T_test) ./ N;
disp(['Training-set MBE: ',num2str(mbe1)])
disp(['Test-set MBE: ',num2str(mbe2)])
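% RMSE (error1/error2 were computed above but never printed)
disp(['Training-set RMSE: ',num2str(error1)])
disp(['Test-set RMSE: ',num2str(error2)])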