% MSP estimation algorithm (MSP估计算法)


% Reset the workspace and close any open figures before the run.
clc;
clear;
close all;
warning off;

%%
%--------------------------------------------------------------------------
%            Different information measures comparison
%--------------------------------------------------------------------------
% Generates noise-level estimates for Rayleigh-distributed background data
% using several information measures (Kullback-Leibler divergence,
% J-divergence, Renyi's divergence, Vajda's measure) and plots the
% estimators' bias, variance and mean square error (MSE).
%
% (!!!) Before running this script:
%   - set an appropriate 'iterations' value, e.g., iterations = 1000;
%   - connect your own data below (see ReadData_Transverse)
%--------------------------------------------------------------------------

%-----------------------------  PATHS  ------------------------------------
%addpath('plot2svg');   % optional: SVG export of the figures
%addpath('DataIO');     % optional: real-data readers
addpath('MSP');
addpath('QuantitativeMeasures');
addpath('ReferenceMethods');

%---------------------  SIMULATION PARAMETERS  ----------------------------
iterations   = 100;     % Monte-Carlo repetitions per noise level
sigma_vector = 5:5:50;  % true noise levels to simulate

% Numerical optimization settings (gradient descent method)
sigma0        = 10;     % initial sigma estimate
step_size     = 5;      % descent step size
max_iter      = 100;    % iteration cap
max_tolerance = 1e-3;   % convergence tolerance
delta         = 0;      % offset passed through to the MSP objectives

% Read real raw data
%ReadData_Transverse    %  (!!!)  CONNECT TO YOUR OWN DATA

% Temporary artificial data placeholder - comment this out for real data.
img_rows = 217;
img_cols = 181;
dataset_re = zeros(img_rows, img_cols);  % real part of the MRI image
dataset_im = zeros(img_rows, img_cols);  % imaginary part of the MRI image

mask_T1_1mm_bg = zeros(img_rows, img_cols); % background mask - PUT YOUR MASK HERE
mask_T1_1mm_bg(50:150, 50:100) = 1;         % artificial rectangular background mask

%% Rayleigh distribution (background data)
% One result matrix per information measure:
% rows = Monte-Carlo iterations, columns = noise levels in sigma_vector.
results_size = [iterations, length(sigma_vector)];

Rayleigh_MSP_results_GD_KL1 = zeros(results_size);       % Kullback-Leibler divergence, k = 1
Rayleigh_MSP_results_GD_KL2 = zeros(results_size);       % Kullback-Leibler divergence, k = 4

Rayleigh_MSP_results_GD_Jeffreys1 = zeros(results_size); % J-divergence, k = 1
Rayleigh_MSP_results_GD_Jeffreys2 = zeros(results_size); % J-divergence, k = 4

Rayleigh_MSP_results_GD_Renyi1 = zeros(results_size);    % Renyi, k = 1, p = 1/2
%Rayleigh_MSP_results_GD_Renyi2 = zeros(results_size);   % Renyi, k = 1, p = 2 (unused)
Rayleigh_MSP_results_GD_Renyi3 = zeros(results_size);    % Renyi, k = 4, p = 2

%Rayleigh_MSP_results_NM_Vajda1 = zeros(results_size);   % Vajda, k = 1, p = 1 (unused)
Rayleigh_MSP_results_NM_Vajda2 = zeros(results_size);    % Vajda, k = 1, p = 2
%Rayleigh_MSP_results_NM_Vajda3 = zeros(results_size);   % Vajda, k = 4, p = 2 (unused)

% Objective-function handles passed to the gradient-descent optimizer.
% All wrappers expose the same five-argument shape (sigma, data, k, delta, weights).
FUNCTION_HANDLE_MSP_RAYLEIGH_KL = @(sigma_, m_, k_, delta_, weights_)MSP_Rayleigh_distribution(sigma_, m_, k_, delta_, weights_);
FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS = @(sigma_, m_, k_, delta_, weights_)MSP_Rayleigh_distribution_Jeffreys_Divergence(sigma_, m_, k_, delta_, weights_);
FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI = @(sigma_, m_, k_, delta_, weights_)MSP_Rayleigh_distribution_Renyi_Divergence(sigma_, m_, k_, delta_, weights_);
%-----------------------------------------------------------------------------                                            

% Monte-Carlo simulation: for every (iteration, noise level) pair, build a
% noisy magnitude image, extract the background sample, and estimate sigma
% with each information measure.
for iter=1:iterations
     for noise_level=1:length(sigma_vector)

        sigma = sigma_vector(noise_level);

        % Noise in magnitude
        % Independent zero-mean Gaussian noise on the real and imaginary
        % channels; the magnitude of a pure-noise complex pixel is then
        % Rayleigh(sigma)-distributed.
        dataset_noise1 = random('norm', 0, sigma, size(dataset_re, 1), size(dataset_re, 2));
        dataset_noise2 = random('norm', 0, sigma, size(dataset_im, 1), size(dataset_im, 2));
        
        dataset_T1_1mm_noisy = sqrt( (dataset_re + dataset_noise1).^2 + (dataset_im  + dataset_noise2).^2 );

        % Keep only the background pixels, flatten, drop zeros (pixels
        % outside the mask), and sort ascending - the MSP objectives below
        % assume an ordered sample.
        data = dataset_T1_1mm_noisy .* mask_T1_1mm_bg;
        data = data(:);
        data = data(data > 0);
        data = sort(data);
             
        
        % ------------------------------- Individual information measures ------------------------------         
        % Kullback-Leibler (gradient descent optimization method) - k = 1  - KULLBACK-LEIBLER DIVERGENCE
        [sigma_MSP_GD_KL1, function_value_MSP_GD_KL1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_KL, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0);                  

        % Kullback-Leibler (gradient descent optimization method) - k = 4  - KULLBACK-LEIBLER DIVERGENCE
        [sigma_MSP_GD_KL2, function_value_MSP_GD_KL2] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_KL, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 0);        
        
        %-----------------------------------------------------------------------------------
        
        % J-divergence (gradient descent optimization method) - k = 1  - J-DIVERGENCE
        [sigma_MSP_GD_Jeffreys1, function_value_MSP_GD_Jeffreys1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0);

        % J-divergence (gradient descent optimization method) - k = 4  - J-DIVERGENCE
        [sigma_MSP_GD_Jeffreys2, function_value_MSP_GD_Jeffreys2] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 0);
        %-----------------------------------------------------------------------------------                
                
             
        %-----------------------------------------------------------------------------------                
        % Renyi divergence (Nelder-Mead optimization method) - k = 1, p = 1/2  - RENYI'S DIVERGENCE
        %[sigma_MSP_GD_Renyi1, function_value_MSP_GD_Renyi1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0.5);     
        % The exp(-(x.^2)/(2*sigma^2)) terms are 1 - F(x) for the Rayleigh
        % CDF F(x) = 1 - exp(-x^2/(2*sigma^2)); the three summands are the
        % first, interior, and last k-spacings of the ordered sample raised
        % to the power p. NOTE(review): n, k, p, beta defined here shadow
        % any earlier use and persist after the loop - confirm intended.
        n = length(data);     k = 1;      p = 1/2;        
        beta = (n+1)/k;
        RENYI1 = @(sigma) ( -((sign(1-p) * beta^p)/(n-k+2)) .* ( (1 -  exp(-(data(k).^2) ./ (2*sigma^2)))^p    +      sum((exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2))).^p)       +      exp(-(data(n-k+1).^2) ./ (2*sigma^2))^p));
        sigma_MSP_GD_Renyi1 = fminsearch(RENYI1, sigma0);
        %-----------------------------------------------------------------------------------                        
        
        
        %-----------------------------------------------------------------------------------                
        % Renyi divergence (Nelder-Mead optimization method) - k = 4, p = 2  - RENYI'S DIVERGENCE
        %[sigma_MSP_GD_Renyi3, function_value_MSP_GD_Renyi3] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 2);            
        % Same spacing construction as RENYI1, with k = 4 and p = 2; note
        % sign(1-p) = -1 here, which flips the sign of the objective.
        n = length(data);       k = 4;        p = 2;
        beta = (n+1)/k;        
        RENYI3 = @(sigma) ( -((sign(1-p) * beta^p)/(n-k+2)) .* ( (1 -  exp(-(data(k).^2) ./ (2*sigma^2)))^p    +      sum((exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2))).^p)       +      exp(-(data(n-k+1).^2) ./ (2*sigma^2))^p));
        sigma_MSP_GD_Renyi3 = fminsearch(RENYI3, sigma0);
        %-----------------------------------------------------------------------------------                
        
                
        %-----------------------------------------------------------------------------------                
        % Vajda's measure (Nelder-Mead optimization method) - k = 1, p = 2
        % Vajda's chi^p-type measure: |1 - beta * spacing|^p averaged over
        % the same three spacing terms as above.
        n = length(data);  k = 1;   p = 2;
        beta = (n+1)/k;             
        VAJDA2 = @(sigma) ( (1/(n-k+2)) .* (  abs( 1 - beta*(1 - exp(-(data(k).^2) ./ (2*sigma^2)) ) )^p   +  sum(  ( abs(1 - beta*(exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2)))  ) ).^p  )   +   (abs(1 - beta*exp(-(data(n-k+1).^2) ./ (2*sigma^2))))^p  ) );        
        sigma_MSP_NM_Vajda2 = fminsearch(VAJDA2, sigma0);        
        %-----------------------------------------------------------------------------------                        
        
        
        % store results in arrays
        Rayleigh_MSP_results_GD_KL1(iter, noise_level) = sigma_MSP_GD_KL1;  
        Rayleigh_MSP_results_GD_KL2(iter, noise_level) = sigma_MSP_GD_KL2;                
        Rayleigh_MSP_results_GD_Jeffreys1(iter, noise_level) = sigma_MSP_GD_Jeffreys1;  
        Rayleigh_MSP_results_GD_Jeffreys2(iter, noise_level) = sigma_MSP_GD_Jeffreys2;                
        Rayleigh_MSP_results_GD_Renyi1(iter, noise_level) = sigma_MSP_GD_Renyi1;
        Rayleigh_MSP_results_GD_Renyi3(iter, noise_level) = sigma_MSP_GD_Renyi3;                 
        Rayleigh_MSP_results_NM_Vajda2(iter, noise_level) = sigma_MSP_NM_Vajda2;                

    end
    
    % Progress indicator (percent of iterations completed).
    fprintf('%.1f %%\n', (iter/iterations)*100);    
end


% Preallocate per-noise-level summary statistics for every estimator.
num_levels = length(sigma_vector);

% bias(sigma_hat)
MSP_GD_vector_mean_bias_KL1       = zeros(1, num_levels);
MSP_GD_vector_mean_bias_KL2       = zeros(1, num_levels);
MSP_GD_vector_mean_bias_Jeffreys1 = zeros(1, num_levels);
MSP_GD_vector_mean_bias_Jeffreys2 = zeros(1, num_levels);
MSP_GD_vector_mean_bias_Renyi1    = zeros(1, num_levels);
MSP_GD_vector_mean_bias_Renyi3    = zeros(1, num_levels);
MSP_NM_vector_mean_bias_Vajda2    = zeros(1, num_levels);

% variance(sigma_hat)
MSP_GD_vector_mean_var_KL1       = zeros(1, num_levels);
MSP_GD_vector_mean_var_KL2       = zeros(1, num_levels);
MSP_GD_vector_mean_var_Jeffreys1 = zeros(1, num_levels);
MSP_GD_vector_mean_var_Jeffreys2 = zeros(1, num_levels);
MSP_GD_vector_mean_var_Renyi1    = zeros(1, num_levels);
MSP_GD_vector_mean_var_Renyi3    = zeros(1, num_levels);
MSP_NM_vector_mean_var_Vajda2    = zeros(1, num_levels);

% mse(sigma_hat)
MSP_GD_vector_mean_mse_KL1       = zeros(1, num_levels);
MSP_GD_vector_mean_mse_KL2       = zeros(1, num_levels);
MSP_GD_vector_mean_mse_Jeffreys1 = zeros(1, num_levels);
MSP_GD_vector_mean_mse_Jeffreys2 = zeros(1, num_levels);
MSP_GD_vector_mean_mse_Renyi1    = zeros(1, num_levels);
MSP_GD_vector_mean_mse_Renyi3    = zeros(1, num_levels);
MSP_NM_vector_mean_mse_Vajda2    = zeros(1, num_levels);


% Summarize the Monte-Carlo runs per noise level: bias, variance and MSE
% of each estimator against the true sigma.
for k=1:length(sigma_vector) 
    sigma_true = sigma_vector(k);

    % bias(sigma) = E(sigma_hat) - sigma
    MSP_GD_vector_mean_bias_KL1(k) = mean(Rayleigh_MSP_results_GD_KL1(:,k)) - sigma_true;    
    MSP_GD_vector_mean_bias_KL2(k) = mean(Rayleigh_MSP_results_GD_KL2(:,k)) - sigma_true;
    MSP_GD_vector_mean_bias_Jeffreys1(k) = mean(Rayleigh_MSP_results_GD_Jeffreys1(:,k)) - sigma_true;    
    MSP_GD_vector_mean_bias_Jeffreys2(k) = mean(Rayleigh_MSP_results_GD_Jeffreys2(:,k)) - sigma_true;    
    MSP_GD_vector_mean_bias_Renyi1(k) = mean(Rayleigh_MSP_results_GD_Renyi1(:,k)) - sigma_true;        
    MSP_GD_vector_mean_bias_Renyi3(k) = mean(Rayleigh_MSP_results_GD_Renyi3(:,k)) - sigma_true;            
    MSP_NM_vector_mean_bias_Vajda2(k) = mean(Rayleigh_MSP_results_NM_Vajda2(:,k)) - sigma_true;                              
    
    % variance(sigma_hat) = E(sigma_hat^2) - E^2(sigma_hat)
    % var(X, 1) is the population variance - mathematically identical to
    % mean(X.^2) - mean(X)^2, but avoids the cancellation error of
    % subtracting two large nearly-equal quantities.
    MSP_GD_vector_mean_var_KL1(k) = var(Rayleigh_MSP_results_GD_KL1(:,k), 1);
    MSP_GD_vector_mean_var_KL2(k) = var(Rayleigh_MSP_results_GD_KL2(:,k), 1);
    MSP_GD_vector_mean_var_Jeffreys1(k) = var(Rayleigh_MSP_results_GD_Jeffreys1(:,k), 1);
    MSP_GD_vector_mean_var_Jeffreys2(k) = var(Rayleigh_MSP_results_GD_Jeffreys2(:,k), 1);
    MSP_GD_vector_mean_var_Renyi1(k) = var(Rayleigh_MSP_results_GD_Renyi1(:,k), 1);
    MSP_GD_vector_mean_var_Renyi3(k) = var(Rayleigh_MSP_results_GD_Renyi3(:,k), 1);
    MSP_NM_vector_mean_var_Vajda2(k) = var(Rayleigh_MSP_results_NM_Vajda2(:,k), 1);
    
    % MSE(sigma) = bias(sigma)^2 + variance(sigma_hat)
    % NOTE(review): the helper is named ImageRMSE, so these values may be
    % the ROOT mean square error rather than MSE - confirm its definition.
    reference = sigma_true .* ones(iterations, 1);  % constant true-sigma column
    weights   = ones(iterations, 1);                % uniform weights
    MSP_GD_vector_mean_mse_KL1(k) = ImageRMSE(Rayleigh_MSP_results_GD_KL1(:,k), reference, weights);    
    MSP_GD_vector_mean_mse_KL2(k) = ImageRMSE(Rayleigh_MSP_results_GD_KL2(:,k), reference, weights);
    MSP_GD_vector_mean_mse_Jeffreys1(k) = ImageRMSE(Rayleigh_MSP_results_GD_Jeffreys1(:,k), reference, weights);    
    MSP_GD_vector_mean_mse_Jeffreys2(k) = ImageRMSE(Rayleigh_MSP_results_GD_Jeffreys2(:,k), reference, weights);
    MSP_GD_vector_mean_mse_Renyi1(k) = ImageRMSE(Rayleigh_MSP_results_GD_Renyi1(:,k), reference, weights);
    MSP_GD_vector_mean_mse_Renyi3(k) = ImageRMSE(Rayleigh_MSP_results_GD_Renyi3(:,k), reference, weights);    
    MSP_NM_vector_mean_mse_Vajda2(k) = ImageRMSE(Rayleigh_MSP_results_NM_Vajda2(:,k), reference, weights);    
end  

%---------------------------- mean bias(sigma_hat) ------------------------
% Table-driven plot: {series, marker style, color, marker size} per row.
bias_series = { ...
    MSP_GD_vector_mean_bias_KL1,       '^-', [49 57 174]/255,   11; ...
    MSP_GD_vector_mean_bias_KL2,       '>-', [221 42 43]/255,   11; ...
    MSP_GD_vector_mean_bias_Jeffreys1, 'd-', [0 0.6 0.2],       12; ...
    MSP_GD_vector_mean_bias_Renyi1,    's-', [0 0 0],            9; ...
    MSP_GD_vector_mean_bias_Renyi3,    'x-', [115 130 179]/255, 13; ...
    MSP_NM_vector_mean_bias_Vajda2,    'o-', [255 165 0]/255,   11};

figure,
hold on  
for s = 1:size(bias_series, 1)
    plot(sigma_vector, bias_series{s, 1}, bias_series{s, 2}, ...
         'Color', bias_series{s, 3}, 'LineWidth', 1.5, ...
         'MarkerSize', bias_series{s, 4});
end
legend('Kullback-Leibler divergence (k=1)', 'Kullback-Leibler divergence (k=4)', ...
       'J-divergence (k=1)', 'Renyi divergence (k=1, p=0.5)', ...
       'Renyi divergence (k=4, p=2)', 'Vajda''s measure (k=1, p=2)', ...
       'location', 'NorthEast'); 
grid on
hold off
xlabel('Noise level \sigma');
ylabel('Bias(\sigma)');
text_size = 16;
set(gca, 'FontSize', text_size);
set(get(gca,'xlabel'), 'FontSize', text_size);
set(get(gca,'ylabel'), 'FontSize', text_size);
%set(gca,'YTick', -0.1:0.1:0.9);
%axis([5, 50, -0.1, 0.9]);
%plot2svg('bias_different_measures.svg', 1);

%------------------------ variance(sigma) ----------------------------------------------
% Cramer-Rao lower bound for the Rayleigh sigma estimator: sigma^2 / (4*n).
% NOTE(review): length(data) is the masked sample size from the LAST
% simulation iteration - confirm this is the intended n for the bound.
sigma_CRLB = linspace(sigma_vector(1), sigma_vector(end), 1000);
CRLB = sigma_CRLB.^2 / (4 * length(data));
figure,
hold on
plot(sigma_CRLB, CRLB, '--', 'Color', [0 0 0], 'LineWidth', 2.5);
plot(sigma_vector, MSP_GD_vector_mean_var_KL1, '^-', 'Color', [49/255, 57/255, 174/255], 'LineWidth', 1.5, 'MarkerSize', 11);
plot(sigma_vector, MSP_GD_vector_mean_var_KL2, '>-', 'Color', [221/255 42/255 43/255], 'LineWidth', 1.5, 'MarkerSize', 11)
plot(sigma_vector, MSP_GD_vector_mean_var_Jeffreys1, 'd-', 'Color', [0 0.6 0.2], 'LineWidth', 1.5, 'MarkerSize', 12);
plot(sigma_vector, MSP_GD_vector_mean_var_Renyi1, 's-', 'Color', [0 0 0], 'LineWidth', 1.5, 'MarkerSize', 9);
plot(sigma_vector, MSP_GD_vector_mean_var_Renyi3, 'x-', 'Color', [115/255 130/255 179/255], 'LineWidth', 1.5, 'MarkerSize', 13);
plot(sigma_vector, MSP_NM_vector_mean_var_Vajda2, 'o-', 'Color', [255/255 165/255 0], 'LineWidth', 1.5, 'MarkerSize', 11);
hold off
grid on
xlabel('Noise level \sigma');
ylabel('Variance(\sigma)');
% Label every plotted curve; previously only the CRLB carried a legend
% entry, inconsistent with the bias and MSE figures.
legend('CRLB_\sigma(\sigma)', 'Kullback-Leibler divergence (k=1)', ...
       'Kullback-Leibler divergence (k=4)', 'J-divergence (k=1)', ...
       'Renyi divergence (k=1, p=0.5)', 'Renyi divergence (k=4, p=2)', ...
       'Vajda''s measure (k=1, p=2)', 'location', 'NorthEast');
text_size = 16;
set(gca, 'FontSize', text_size);
set(get(gca,'xlabel'), 'FontSize', text_size);
set(get(gca,'ylabel'), 'FontSize', text_size);
%axis([5, 50, 0, 0.11]);
%set(gca,'YTick', 0:0.01:0.11);
%plot2svg('variance_different_measures.svg', 2);

%----------------------- MSE(sigma) -----------------------------------------------------
% Table-driven plot: {series, marker style, color, marker size} per row.
mse_series = { ...
    MSP_GD_vector_mean_mse_KL1,       '^-', [49 57 174]/255,   11; ...
    MSP_GD_vector_mean_mse_KL2,       '>-', [221 42 43]/255,   11; ...
    MSP_GD_vector_mean_mse_Jeffreys1, 'd-', [0 0.6 0.2],       12; ...
    MSP_GD_vector_mean_mse_Renyi1,    's-', [0 0 0],            9; ...
    MSP_GD_vector_mean_mse_Renyi3,    'x-', [115 130 179]/255, 13; ...
    MSP_NM_vector_mean_mse_Vajda2,    'o-', [255 165 0]/255,   11};

figure,
hold on
for s = 1:size(mse_series, 1)
    plot(sigma_vector, mse_series{s, 1}, mse_series{s, 2}, ...
         'Color', mse_series{s, 3}, 'LineWidth', 1.5, ...
         'MarkerSize', mse_series{s, 4});
end
legend('Kullback-Leibler divergence (k=1)', 'Kullback-Leibler divergence (k=4)', ...
       'J-divergence (k=1)', 'Renyi divergence (k=1, p=0.5)', ...
       'Renyi divergence (k=4, p=2)', 'Vajda''s measure (k=1, p=2)', ...
       'location', 'NorthEast'); 
grid on 
hold off
xlabel('Noise level \sigma');
ylabel('MSE(\sigma)');
text_size = 16;
set(gca, 'FontSize', text_size);
set(get(gca,'xlabel'), 'FontSize', text_size);
set(get(gca,'ylabel'), 'FontSize', text_size);
%axis([5, 50, 0.0, 0.55]);
%plot2svg('mse_different_measures.svg', 3);


% (Removed: CSDN blog-platform boilerplate - like/bookmark counters and a
% payment/reward widget - accidentally captured with the source. It was not
% part of the script and would be a syntax error in MATLAB.)