【Object Tracking】SLAM map object tracking based on the Kalman filter, with MATLAB code

✅ About the author: a MATLAB simulation developer passionate about research, cultivating the mind and technical skill in equal measure; private-message me for MATLAB project collaboration.

🍎 Homepage: Matlab科研工作室

🍊 Personal motto: seek knowledge by investigating things (格物致知).

⛄ Introduction

The main steps are:

1) Generate building masks: apply a height threshold to split the point cloud into ground and non-ground points. These two point sets are used to generate two building masks, a primary and a secondary building mask (a minimal thresholding sketch is given after this list).

2) Line extraction: extract the lines around buildings from the primary building mask.

3) Form initial/candidate buildings and extend them: the extracted lines are used to form initial buildings, which are then (depending on the technique) extended using NDVI and/or entropy derived from the multispectral imagery. An auxiliary mask is also used during the extension.

4) Remove trees: apply a set of rules to remove trees that are as tall as the buildings and may also have a similar shape and colour.
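As a rough illustration of step 1, the sketch below splits a LIDAR point cloud into ground and non-ground points with a height threshold and rasterises each set into a binary mask. All names (pts, hThresh, cellSize) and the threshold/resolution values are assumptions chosen for illustration, not the implementation of the referenced papers; which mask is "primary" and which "secondary" follows the cited papers.

% Minimal sketch of the height thresholding in step 1 (assumed names and values)
% pts is an N-by-3 array [x y z] of LIDAR points
groundLevel = min(pts(:,3));                    % crude ground-level estimate (assumption)
hThresh     = 2.5;                              % height threshold in metres (assumption)
isGround    = pts(:,3) < groundLevel + hThresh; % ground points
isAbove     = ~isGround;                        % non-ground points (building/tree candidates)

% rasterise each point set into a binary mask on a common grid
cellSize = 0.5;                                 % mask resolution in metres (assumption)
xi = floor((pts(:,1) - min(pts(:,1)))/cellSize) + 1;
yi = floor((pts(:,2) - min(pts(:,2)))/cellSize) + 1;
sz = [max(yi), max(xi)];
maskNonGround = accumarray([yi(isAbove),  xi(isAbove)],  1, sz) > 0; % mask from non-ground points
maskGround    = accumarray([yi(isGround), xi(isGround)], 1, sz) > 0; % mask from ground points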

⛄ Code Excerpt

function ekfSlam
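% EKF-SLAM demo: a simulated vehicle drives inside a square walled world,
% a simulated 2-D laser scanner observes the walls, and corner features
% extracted from each scan are maintained as landmarks. The filter state is
% [vehicle pose x, y, theta; landmark positions x1, y1, x2, y2, ...].
% Helper functions such as moveRobot, readLidar, getCornerFeatures and
% doObservationModel are not included in this excerpt.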

%warning('off');

close all; clear all;

% global stuff

global xVehicleTrue;

global xVehicleEst;

global P;

global landFeatures;

global visionSensorSettings;

global xOdomLast;

global nSteps;

global uTrue;

global B;

global laserSensorSettings

% robot settings

B = 1; % axle distance

% sensor settings

laserSensorSettings.maxBearing =25;

laserSensorSettings.maxRange = 100;

laserSensorSettings.deltaBearing =1;

% simulation parameters

nSteps =250;

worldSize =200;

nCornersLandFeatures =4;

cornersLandFeatures = zeros(2,1,nCornersLandFeatures); % make some space

cornersLandFeatures(:,:,1) = [0 0]';

cornersLandFeatures(:,:,2) = [0 120]';

cornersLandFeatures(:,:,3) = [120 120]';

cornersLandFeatures(:,:,4) = [120 0]';

wallPerimeter(:,:) =[ cornersLandFeatures(1,1,1) cornersLandFeatures(1,1,2) cornersLandFeatures(2,1,1) cornersLandFeatures(2,1,2);

                      cornersLandFeatures(1,1,2) cornersLandFeatures(1,1,3) cornersLandFeatures(2,1,2) cornersLandFeatures(2,1,3);

                      cornersLandFeatures(1,1,3) cornersLandFeatures(1,1,4) cornersLandFeatures(2,1,3) cornersLandFeatures(2,1,4);

                      cornersLandFeatures(1,1,4) cornersLandFeatures(1,1,1) cornersLandFeatures(2,1,4) cornersLandFeatures(2,1,1)];

                  

                  

disp('Setup Complete');

figure(1); hold on; %grid off; axis off;

xWall = [wallPerimeter(:,1), wallPerimeter(:,2)];

yWall = [wallPerimeter(:,3), wallPerimeter(:,4)];

plot (cornersLandFeatures(1,:), cornersLandFeatures(2,:), 'r+'); hold on;

plot(xWall, yWall, '-b'); hold on;

axis([-worldSize worldSize -worldSize worldSize]);

xVehicleTrue =[ 40 60 0]';

% filter states

xTrue=[xVehicleTrue]; 

xEst =[xVehicleTrue];

P = diag([3, 3 ,0.08]);

% Observation noise covariance (squared standard deviations of range and bearing)

RTrue = diag([0.1, 0.5*pi/180]).^2;

REst = 2*RTrue;

% control input movement

deltaS =[0.28 0.30];

kr=0.0001;

% Gate value for measurement acceptance

gateAccept =10;

F(nSteps) = struct('cdata',[],'colormap',[]);

% loop

for k =1:nSteps

   

   

   % get true states

   xVehicleTrue = xTrue;

   xMapTrue =cornersLandFeatures;

   

   % get estimated states

   xVehicleEst = xEst(1:3); % get estimated state vehicle

   xMapEst = xEst(4:end); % rest are the mapped corners estimated

   

   

   

   % generate controls true value

   u = getControl(xVehicleTrue,deltaS,0,0);

   xVehicleTrue = moveRobot(xVehicleTrue,u);

   

   

   % generate controls estimated value value

   q = generateNoise(-0.005, 0.005, [3,1]); % noise 

   u = getControl(xVehicleEst,deltaS, 1, q);

   

   xVehicleEst = moveRobot(xVehicleEst,u);

   

  

   

   ds = (deltaS(1) + deltaS(2))/2;

   Q = diag([kr*abs(deltaS(1)), kr*abs(deltaS(2))]);

   

   %compute covariances
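   % (prediction step: the pose covariance is propagated through the motion
   %  Jacobians J1 and J2 together with the odometry noise Q; the
   %  vehicle-landmark cross covariance is rotated by J1 and the landmark
   %  block is left unchanged)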

   P_vv = J1(u)*P(1:3,1:3)*J1(u)' + J2(xVehicleEst,u,ds)*Q*J2(xVehicleEst,u,ds)';

   P_vc = J1(u)*P(1:3,4:end); % vehicle to corners

   P_cc = P(4:end,4:end); % corners to corners

   

   

   % aggregate the true state

   xTrue = xVehicleTrue;

   

   % aggregate the estimated state again

   xEst = [xVehicleEst; xMapEst];

   P = [P_vv P_vc;

           P_vc' P_cc];

       

   

   % get observations

   obsSigma2 = RTrue(2,2); % bearing noise variance used by the simulated lidar

   

   [obsTh, obsRho] =readLidar(xVehicleTrue,wallPerimeter,obsSigma2);

   

   

   % extract the corners split merge

   [zFeat] = getCornerFeatures(xVehicleEst,obsTh, obsRho, 9, 2);

     

   

   

   xCorner=[];

   yCorner=[];

   z=[];

   if(~isempty(zFeat))

        [xCorner, yCorner] = transformPointToWorld(xVehicleEst, zFeat(2), zFeat(1), 0);

        z =zFeat;       

   end

   

     

   

   

   

   newObs=0;

   % if valid observations is found

   if(~isempty(z))

       

       R=diag([1,0.1]);

       

       gateLastAccept=gateAccept;

       

       associateIDX =-1;

       

       % get number feature in track

       [nFeatures, c] = size(xMapEst);

       % loop all features in track

       

       nFeatures =nFeatures/2;
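       % nearest-neighbour gating: keep the mapped feature with the smallest
       % gate (normalised innovation) value, accepted only if it is below gateAccept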

       for f=1: nFeatures

            % extract the feature value

            xFeature = xMapEst(2*f-1:2*f); % coordinates of the f-th mapped feature

            %zEst= doObservationModel(xVehicleEst,xFeature);         

            

            % check if is associated

            gateVal =nearestNeighbour(f,xEst,xVehicleEst,xFeature, z, P, R);

            

            %keyboard

            if(gateVal<gateLastAccept)

                % valid association

                associateIDX = f;

                gateLastAccept =gateVal;

                disp('Observation in gate with Mapped Feature ');

                

            end 

       end %for

       

       if(associateIDX >-1) % valid association found, update

           xFeature = xMapEst(2*associateIDX-1:2*associateIDX);

           zEst= doObservationModel(xVehicleEst,xFeature);

           

           % get obs jacobians

           [jHxv, jHxf] = getObsJacs(xVehicleEst, xFeature);

           jH = zeros(2,length(xEst));

           jH(:, 3+2*associateIDX-1:3+2*associateIDX) = jHxf; % columns of the associated landmark in xEst

           jH(:,1:3) = jHxv;

           

           % do kalman update
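           % innovation = measurement minus predicted measurement; the bearing
           % component is angle-wrapped to avoid 2*pi jumps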

           INNOV = z-zEst;

           INNOV(2) = angleWrapping(INNOV(2));

           

           % compute covariance innovation

           S=jH*P*jH' + REst;

           % compute kalman gain

           W = (P*jH') / S;

           

          

           % update state

           xEst = xEst + W*INNOV;

           

           % update Covariance

           P = P - W*S*W';

           

           % ensure remains symmetric

           P = 0.5*(P+P');

       

           disp('Mapped feature associated with Observation');

       else % not associated, add new

           n = length(xEst);

           

           % compute the predicted feature position from the estimated
           % vehicle state and the sensor reading

           xFeature = xVehicleEst(1:2) + [z(1)*cos(angleWrapping(z(2) + xVehicleEst(3))); z(1)*sin(angleWrapping(z(2) + xVehicleEst(3)))];

           % add to state vector

           xEst =[xEst; xFeature];

           

           % get the new jacobian feature

           [jGxv, jGz] = getNewFeatureJacs(xVehicleEst,zFeat);
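           % covariance augmentation: M maps [old state; measurement noise]
           % into the enlarged state, giving P <- M*blkdiag(P,REst)*M'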

           

           

           

           M = [eye(n),  zeros(n,2);
                jGxv,    zeros(2,n-3), jGz];

           

           P = M*blkdiag(P,REst)*M';

           

           disp('New feature Added');

           

      end

          

   end

             

       

   

   

   % Plotting zone

   a = axis;

   clf;

   axis(a); hold on;

   nStates = length(xEst); % get the total length

   nLandMarks = (nStates-3)/2; % number landmarks in track

   

   % plot world

   plot (cornersLandFeatures(1,:), cornersLandFeatures(2,:), 'r+'); hold on;

   plot(xWall, yWall, '-b'); hold on;

   

   % plot vehicle 

   doVehicleGraphics(xEst(1:3), P(1:3,1:3), 3, [pi/2 0]);

    

   % plot laser

   plotPointCloud(xVehicleEst, obsTh, obsRho, 0); hold on;

   

   % plot calculated corner

   plot (xCorner, yCorner, 'g+','MarkerSize',10); hold on

   

   %plot the projected line to the corner

   if(~isempty(z))

       

      h = line([xEst(1), xCorner], [xEst(2), yCorner]);

      set(h, 'linestyle',':');

      legend(sprintf('Estimated Range: %3.2f; Angle: %3.2f',z(1), z(2)*180/pi));

   end

   

   % plot landmarks

   for ii = 1:nLandMarks
       iF = 3 + 2*ii - 1;  % index of the x-coordinate of landmark ii in xEst
       plot(xEst(iF), xEst(iF+1), 'b*'); hold on;
       plotEllipse(xEst(iF:iF+1), P(iF:iF+1,iF:iF+1), 3); hold on;
   end

   

   drawnow;

   F(k) = getframe(gcf);

   

   %save stuff 

   %xTrueSaved(k,:) = xTrue;

   %xEstSaved(k,:)  = xEst;

   % iteration counter
   fprintf('Iteration %d\n', k);

   

   

end

   % movie2avi has been removed from recent MATLAB releases; use VideoWriter instead
   v = VideoWriter('ekfLidar.avi', 'Uncompressed AVI');
   open(v);
   writeVideo(v, F);
   close(v);

end
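The listing above is only an excerpt, so the helper functions it calls are not shown. Below is a minimal sketch of two of them, doObservationModel and getObsJacs (plus the angle wrapper), written to be consistent with how they are called above; it is an assumption about the missing code, not the original implementation.

function z = doObservationModel(xVeh, xFeature)
% Range-bearing observation of a 2-D point feature from the vehicle pose.
dx = xFeature(1) - xVeh(1);
dy = xFeature(2) - xVeh(2);
z  = [sqrt(dx^2 + dy^2);
      angleWrapping(atan2(dy, dx) - xVeh(3))];
end

function [jHxv, jHxf] = getObsJacs(xVeh, xFeature)
% Jacobians of the range-bearing model w.r.t. the vehicle pose and the feature.
dx = xFeature(1) - xVeh(1);
dy = xFeature(2) - xVeh(2);
r2 = dx^2 + dy^2;
r  = sqrt(r2);
jHxv = [-dx/r,  -dy/r,   0;
         dy/r2, -dx/r2, -1];
jHxf = [ dx/r,   dy/r;
        -dy/r2,  dx/r2];
end

function a = angleWrapping(a)
% Wrap an angle into [-pi, pi].
a = atan2(sin(a), cos(a));
end

With the remaining helpers (moveRobot, readLidar, getCornerFeatures, nearestNeighbour and the plotting utilities) implemented along the same lines, the file can be saved as ekfSlam.m and run directly.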

⛄ Results

⛄ References

1. M. Awrangjeb, M. Ravanbakhsh and C. S. Fraser, "Automatic detection of residential buildings using LIDAR data and multispectral imagery", ISPRS Journal of Photogrammetry and Remote Sensing, 65(5), 457-467, September 2010.

2. M. Awrangjeb, C. Zhang and C. S. Fraser, "Building detection in complex scenes through effectively separating buildings from trees", Photogrammetric Engineering & Remote Sensing (PE&RS), 78(7), 729-745, July 2012.

⛄ Follow for Matlab Code

❤️ Some of the theory cited here comes from online literature; please contact the author for removal in case of infringement.

❤️ Follow me to receive a large collection of MATLAB e-books and mathematical-modelling materials.
