基于SVM的信息粒化时序回归预测——上证指数开盘指数变化趋势和变化空间预测

信息粒化这一概念最早是由 Lotfi A. Zadeh(L.A. Zadeh)教授提出的。信息粒化就是将一个整体分解为一个个的部分进行研究,每个部分为一个信息粒。Zadeh 教授指出:信息粒就是一些元素的集合,这些元素由于难以区别、或相似、或接近、或具有某种功能而结合在一起。

数据链接:https://pan.baidu.com/s/1z-hIOjmBkBAiEkk6OrxZaQ?pwd=wzpe 
提取码:wzpe

子函数SVMcgForClass.m链接:http://t.csdnimg.cn/WeaZK

模糊信息粒化子函数FIG_D: 

function [low,R,up] = FIG_D(XX,MFkind,win_num)
% FIG_D  Fuzzy information granulation (FIG) of a 1-D time series.
%
%   [low,R,up] = FIG_D(XX,MFkind,win_num)
%
% Inputs:
%   XX      - row vector: the time series to be granulated
%   MFkind  - membership function kind: 'trapezoid' (default), 'triangle',
%             'asygauss' or 'asyparabola'
%   win_num - number of granulation windows (default 10)
%
% Outputs:
%   low - lower bound of each information granule (1 x win_num)
%   R   - representative value(s) of each granule: 1 x win_num for
%         'triangle'/'asygauss'/'asyparabola', 2 x win_num ([m;n] core)
%         for 'trapezoid'
%   up  - upper bound of each information granule (1 x win_num)
%
% by Li Yang BNU MATH 05  Email: farutoliyang@gmail.com
% last modified 2009.2.25
% modified IG based on Pedrycz by Keqiang Dong
%%

if nargin < 3
    win_num = 10;
end
if nargin < 2
    MFkind = 'trapezoid';
end

d2 = size(XX,2);

% Multi-window case: cut the series into win_num segments of (roughly)
% equal length and granulate each segment independently with a recursive
% single-window call.  The last window absorbs the remainder when d2 is
% not divisible by win_num.
% BUG FIX: the original code recursed into FIG_P (the Pedrycz-based
% variant) for the 'asygauss'/'asyparabola' cases, which is inconsistent
% with the single-window computation below and with the other two
% membership kinds; all kinds now recurse into FIG_D itself.
if win_num > 1
    low = [];
    R = [];
    up = [];
    k = floor(d2/win_num);
    for i = 1:(win_num-1)
        [l,r,u] = FIG_D(XX((1+(i-1)*k):(i*k)),MFkind,1);
        low = [low,l]; %#ok<AGROW>
        R   = [R,r];   %#ok<AGROW>
        up  = [up,u];  %#ok<AGROW>
    end
    [l,r,u] = FIG_D(XX((1+(win_num-1)*k):d2),MFkind,1);
    low = [low,l];
    R   = [R,r];
    up  = [up,u];
    return;
end

% Single-window case: build one granule from the sorted data.
X = sort(XX);
switch MFkind
%% trapezoid
    case('trapezoid')
        % Granule core [m,n] is the median pair; low/up are placed so the
        % mean of each tail equals the midpoint of the ramp.
        if mod(d2,2) ~= 0
            m = X( (d2+1)/2 );
            n = X( (d2+1)/2 );
            mflag = (d2+1)/2;
            nflag = (d2+1)/2;
        else
            m = X( d2/2 );
            n = X( (d2+2)/2 );
            mflag = d2/2;
            nflag = (d2+2)/2;
        end

        R(1,1) = m;
        R(2,1) = n;

        k1 = mflag;
        k2 = d2 - nflag + 1;
        c1 = ( sum(X(1:k1)) )/k1;
        c2 = ( sum(X(nflag:d2)) )/k2;

        low = 2*c1 - m;
        up = 2*c2 - n;
%% triangle
    case('triangle')
        % Core is the median; low/up mirror the lower/upper tail means
        % around it.
        R = median(X);
        m = median(X);
        n = median(X);

        mflag = floor(d2/2);
        nflag = ceil(d2/2);
        k1 = mflag;
        k2 = d2 - nflag + 1;
        c1 = ( sum(X(1:k1)) )/k1;
        c2 = ( sum(X(nflag:d2)) )/k2;

        low = 2*c1 - m;
        up = 2*c2 - n;
%% asygauss
    case('asygauss')
        % Same as the Pedrycz-based granulation (the Gaussian kernel
        % shape cannot be modified): low/up are chosen from the data
        % points to maximize Q = membership coverage / granule width.
        R = median(X);
        m = median(X);
        n = median(X);

        mflag = floor(d2/2);
        nflag = ceil(d2/2);

        a_final = 0;
        Qa_final = 0;
        for index = 1:( mflag-1 )
            a = X(index);
            x = X( 1:(mflag-1) );
            y = (x<=m).*(exp(-(x-m).^2/a^2) );
            Qa = sum(y)/(m-a);
            if Qa >= Qa_final
                Qa_final = Qa;
                a_final = a;
            end
        end
        low = a_final;

        b_final = 0;
        Qb_final = 0;
        for index = ( nflag+1 ):d2
            b = X(index);
            x = X( (nflag+1):d2 );
            % NOTE(review): m and n are mixed below; harmless since both
            % equal the median, but kept exactly as in the original.
            y = (x>=m).*(exp(-(x-m).^2/b^2) );
            Qb = sum(y)/(b-n);
            if Qb >= Qb_final
                Qb_final = Qb;
                b_final = b;
            end
        end
        up = b_final;
%% asyparabola
    case('asyparabola')
        % Asymmetric parabolic membership; same Q-maximization scheme as
        % 'asygauss' but with a parabolic kernel.
        R = median(X);
        m = median(X);
        n = median(X);

        mflag = floor(d2/2);
        nflag = ceil(d2/2);

        a_final = 0;
        Qa_final = 0;
        for index = 1:( mflag-1 )
            a = X(index);
            x = X( 1:( mflag-1) );
            y = (x<=m).*(1-(m-x).^2/(m-a)^2);
            Qa = sum(y)/(m-a);
            if Qa >= Qa_final
                Qa_final = Qa;
                a_final = a;
            end
        end
        low = a_final;

        b_final = 0;
        Qb_final = 0;
        for index = ( nflag+1 ):d2
            b = X(index);
            x = X( (nflag+1):d2 );
            % NOTE(review): m/n mixed as in the original; m == n here.
            y = (x>=m).*(1-(m-x).^2/(m-b)^2);
            Qb = sum(y)/(b-n);
            if Qb >= Qb_final
                Qb_final = Qb;
                b_final = b;
            end
        end
        up = b_final;
end

% SVM + fuzzy-information-granulation time-series regression:
% forecasting the trend and range of the SSE Composite opening index.

%% Clear the environment

tic;
close all;
clear;
clc;
format compact;


%% Load the raw data

% Load the SSE Composite index test data (1990.12.19 - 2009.08.19).
% The data set is a 4579x6 double matrix; each row is one trading day and
% the 6 columns are: opening index, daily high, daily low, closing index,
% daily volume and daily turnover.
load chapter_sh.mat;

% Extract the daily opening index series.

ts = sh_open;
time = length(ts);
% Plot the raw daily opening index.
figure;
plot(ts,'LineWidth',2);
title('上证指数的每日开盘数(1990.12.20-2009.08.19)','FontSize',12);
xlabel('交易日天数(1990.12.19-2009.08.19)','FontSize',12);
ylabel('开盘数','FontSize',12);
grid on;
% print -dtiff -r600 original;
snapnow;

%% Fuzzy information granulation of the raw series

% One granulation window per five trading days; the window index (tsx)
% serves as the regressor for the SVM models below.
win_num = floor(time/5);
tsx = (1:win_num)';
[Low,R,Up] = FIG_D(ts','triangle',win_num);

% Visualize the granules: lower bound, representative, upper bound.
figure;
hold on;
plot(Low,'b+');
plot(R,'r*');
plot(Up,'gx');
hold off;
legend('Low','R','Up','Location','NorthWest');
title('模糊信息粒化可视化图','FontSize',12);
xlabel('粒化窗口数目','FontSize',12);
ylabel('粒化值','FontSize',12);
grid on;
% print -dtiff -r600 FIGpic;
snapnow;


%% SVM regression on the Low sequence

% Normalize Low with mapminmax (MATLAB built-in min-max mapping).
[low,low_ps] = mapminmax(Low);
low_ps.ymin = 100;
low_ps.ymax = 500;
% Re-apply the mapping so Low is scaled with the adjusted settings.
[low,low_ps] = mapminmax(Low,low_ps);

% Plot the normalized Low series.
figure;
plot(low,'b+');
title('Low归一化后的图像','FontSize',12);
xlabel('粒化窗口数目','FontSize',12);
ylabel('归一化后的粒化值','FontSize',12);
grid on;
% print -dtiff -r600 lowscale;
% Transpose to a column vector, as required by the libsvm toolbox.
low = low';
snapnow;

% Select the best SVM parameters c & g for the regression.

% Coarse grid search first.
[bestmse,bestc,bestg] = SVMcgForRegress(low,tsx,-10,10,-10,10,3,1,1,0.1,1);
disp('打印粗略选择结果');
disp(sprintf( 'SVM parameters for Low:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg));

% Refine the search around the coarse optimum.
[bestmse,bestc,bestg] = SVMcgForRegress(low,tsx,-4,8,-10,10,3,0.5,0.5,0.05,1);
disp('打印精细选择结果');
disp(sprintf( 'SVM parameters for Low:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg));

% Train the epsilon-SVR model (-s 3) on the normalized Low sequence.
cmd = ['-c ', num2str(bestc), ' -g ', num2str(bestg) , ' -s 3 -p 0.1'];
low_model = svmtrain(low, tsx, cmd);

% In-sample prediction plus a one-step-ahead forecast for window win_num+1.
[low_predict,low_mse,low_values] = svmpredict(low,tsx,low_model);
low_predict = mapminmax('reverse',low_predict,low_ps);
predict_low = svmpredict(1,win_num+1,low_model);
predict_low = mapminmax('reverse',predict_low,low_ps);
predict_low

%% Result analysis for Low

figure;
hold on;
plot(Low,'b+');
plot(low_predict,'r*');
legend('original low','predict low','Location','NorthWest');
title('original vs predict','FontSize',12);
xlabel('粒化窗口数目','FontSize',12);
ylabel('粒化值','FontSize',12);
grid on;
% print -dtiff -r600 lowresult;
figure;
low_err = low_predict - Low';   % renamed from "error" to avoid shadowing the builtin
plot(low_err,'ro');
title('误差(predicted data-original data)','FontSize',12);
xlabel('粒化窗口数目','FontSize',12);
ylabel('误差量','FontSize',12);
grid on;
% print -dtiff -r600 lowresulterror;
% snapnow;

%% SVM regression on the R (representative) sequence

% Normalize R with mapminmax (MATLAB built-in min-max mapping).
[r,r_ps] = mapminmax(R);
r_ps.ymin = 100;
r_ps.ymax = 500;
% Re-apply the mapping so R is scaled with the adjusted settings.
[r,r_ps] = mapminmax(R,r_ps);
% Plot the normalized R series.
figure;
plot(r,'r*');
title('r归一化后的图像','FontSize',12);
grid on;
% Transpose to a column vector, as required by the libsvm toolbox.
r = r';
% snapnow;

% Select the best SVM parameters c & g for the regression.

% Coarse grid search first.
[bestmse,bestc,bestg] = SVMcgForRegress(r,tsx,-10,10,-10,10,3,1,1,0.1);
disp('打印粗略选择结果');
str = sprintf( 'SVM parameters for R:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg);
disp(str);

% Refine the search around the coarse optimum.
[bestmse,bestc,bestg] = SVMcgForRegress(r,tsx,-4,8,-10,10,3,0.5,0.5,0.05);
disp('打印精细选择结果');
str = sprintf( 'SVM parameters for R:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg);
disp(str);

% Train the epsilon-SVR model (-s 3) on the normalized R sequence.
cmd = ['-c ', num2str(bestc), ' -g ', num2str(bestg) , ' -s 3 -p 0.1'];
r_model = svmtrain(r, tsx, cmd);

% In-sample prediction plus a one-step-ahead forecast.
% BUG FIX: the original predicted with low_model (copy-paste from the
% Low section); the R sequence must be predicted with r_model.
[r_predict,r_mse,r_values] = svmpredict(r,tsx,r_model);
r_predict = mapminmax('reverse',r_predict,r_ps);
predict_r = svmpredict(1,win_num+1,r_model);
predict_r = mapminmax('reverse',predict_r,r_ps);
predict_r

%% Result analysis for R

figure;
hold on;
plot(R,'b+');
plot(r_predict,'r*');
legend('original r','predict r','Location','NorthWest');
title('original vs predict','FontSize',12);
grid on;
figure;
% Renamed from "error" to avoid shadowing the MATLAB builtin.
r_err = r_predict - R';
plot(r_err,'ro');
title('误差(predicted data-original data)','FontSize',12);
grid on;
% snapnow;

%% SVM regression on the Up sequence

% Normalize Up with mapminmax (MATLAB built-in min-max mapping).
[up,up_ps] = mapminmax(Up);
up_ps.ymin = 100;
up_ps.ymax = 500;
% Re-apply the mapping so Up is scaled with the adjusted settings.
[up,up_ps] = mapminmax(Up,up_ps);
% Plot the normalized Up series.
figure;
plot(up,'gx');
title('Up归一化后的图像','FontSize',12);
grid on;
% Transpose to a column vector, as required by the libsvm toolbox.
up = up';
snapnow;

% Select the best SVM parameters c & g for the regression.

% Coarse grid search first.
[bestmse,bestc,bestg] = SVMcgForRegress(up,tsx,-10,10,-10,10,3,1,1,0.5);
disp('打印粗略选择结果');
disp(sprintf( 'SVM parameters for Up:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg));

% Refine the search around the coarse optimum.
[bestmse,bestc,bestg] = SVMcgForRegress(up,tsx,-4,8,-10,10,3,0.5,0.5,0.2);
disp('打印精细选择结果');
disp(sprintf( 'SVM parameters for Up:Best Cross Validation MSE = %g Best c = %g Best g = %g',bestmse,bestc,bestg));

% Train the epsilon-SVR model (-s 3) on the normalized Up sequence.
cmd = ['-c ', num2str(bestc), ' -g ', num2str(bestg) , ' -s 3 -p 0.1'];
up_model = svmtrain(up, tsx, cmd);

% In-sample prediction plus a one-step-ahead forecast for window win_num+1.
[up_predict,up_mse,up_value] = svmpredict(up,tsx,up_model);
up_predict = mapminmax('reverse',up_predict,up_ps);
predict_up = svmpredict(1,win_num+1,up_model);
predict_up = mapminmax('reverse',predict_up,up_ps);
predict_up

%% Result analysis for Up

figure;
hold on;
plot(Up,'b+');
plot(up_predict,'r*');
legend('original up','predict up','Location','NorthWest');
title('original vs predict','FontSize',12);
grid on;
figure;
up_err = up_predict - Up';   % renamed from "error" to avoid shadowing the builtin
plot(up_err,'ro');
title('误差(predicted data-original data)','FontSize',12);
grid on;
toc;
% snapnow;

源自:《Matlab神经网络43个案例分析》。 

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值