Video Saliency Detection with Optical Flow (MATLAB) (Part 1)

## Since the author is not very skilled, this post takes other people's code and does a bit of improvement and experimentation on it

Original video (BMP sequence): after the monkey and the dog exchange a friendly handshake, the monkey waves goodbye and the dog watches it leave.

Difficulties: 1. poor image quality; 2. camera shake; 3. complex background.

First, I used the HSoptflow.m function from https://www.cnblogs.com/tiandsp/archive/2012/07/16/2593883.html:

function [us,vs] = HSoptflow(Xrgb,n)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Author: Gregory Power gregory.power@wpafb.af.mil
% This MATLAB code shows a Motion Estimation map created by
% using a Horn and Schunck motion estimation technique on two
% consecutive frames.  Inputs:
%     Xrgb(h,w,d,N) where X is a frame sequence of a certain
%                height(h), width (w), depth (d=3 for color), 
%                and number of frames (N). 
%     n= is the starting frame number which is less than N 
%     V= the output variable which is a 2D velocity array
%
% Sample Call: V=HSoptflow(X,3);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
[h,w,d,N]=size(Xrgb);
if n>N-1
   error('requested frame number exceeds the number of available frames');
end
%get two image frames
if d==1
    Xn=double(Xrgb(:,:,1,n));
    Xnp1=double(Xrgb(:,:,1,n+1));
elseif d==3
    Xn=double(Xrgb(:,:,1,n)*0.299+Xrgb(:,:,2,n)*0.587+Xrgb(:,:,3,n)*0.114);
    Xnp1=double(Xrgb(:,:,1,n+1)*0.299+Xrgb(:,:,2,n+1)*0.587+Xrgb(:,:,3,n+1)*0.114);
else
    error('not an RGB or monochrome image file');
end

%get image size and adjust for border
size(Xn);
hm5=h-5; wm5=w-5;
z=zeros(h,w); v1=z; v2=z;

%initialize
dsx2=v1; dsx1=v1; dst=v1;
alpha2=625;
imax=20;

%Calculate gradients
dst(5:hm5,5:wm5) = ( Xnp1(6:hm5+1,6:wm5+1)-Xn(6:hm5+1,6:wm5+1) + Xnp1(6:hm5+1,5:wm5)-Xn(6:hm5+1,5:wm5)+ Xnp1(5:hm5,6:wm5+1)-Xn(5:hm5,6:wm5+1) +Xnp1(5:hm5,5:wm5)-Xn(5:hm5,5:wm5))/4;
dsx2(5:hm5,5:wm5) = ( Xnp1(6:hm5+1,6:wm5+1)-Xnp1(5:hm5,6:wm5+1) + Xnp1(6:hm5+1,5:wm5)-Xnp1(5:hm5,5:wm5)+ Xn(6:hm5+1,6:wm5+1)-Xn(5:hm5,6:wm5+1) +Xn(6:hm5+1,5:wm5)-Xn(5:hm5,5:wm5))/4;
dsx1(5:hm5,5:wm5) = ( Xnp1(6:hm5+1,6:wm5+1)-Xnp1(6:hm5+1,5:wm5) + Xnp1(5:hm5,6:wm5+1)-Xnp1(5:hm5,5:wm5)+ Xn(6:hm5+1,6:wm5+1)-Xn(6:hm5+1,5:wm5) +Xn(5:hm5,6:wm5+1)-Xn(5:hm5,5:wm5))/4;

for i=1:imax
   delta=(dsx1.*v1+dsx2.*v2+dst)./(alpha2+dsx1.^2+dsx2.^2);
   v1=v1-dsx1.*delta;
   v2=v2-dsx2.*delta;
end
u=z; u(5:hm5,5:wm5)=v1(5:hm5,5:wm5);
v=z; v(5:hm5,5:wm5)=v2(5:hm5,5:wm5);

xskip=round(h/64);
[hs,ws]=size(u(1:xskip:h,1:xskip:w));
us=zeros(hs,ws); vs=us;

N=xskip^2;
for i=1:hs-1
  for j=1:ws-1
     hk=i*xskip-xskip+1;
     hl=i*xskip;
     wk=j*xskip-xskip+1;
     wl=j*xskip;
     us(i,j)=sum(sum(u(hk:hl,wk:wl)))/N;
     vs(i,j)=sum(sum(v(hk:hl,wk:wl)))/N;
  end
end

figure(1);
quiver(us,vs);
colormap('default');
axis ij;
axis tight;
axis equal;

The result of running it is as follows:


Driver code:

close all;
clear all;
clc;
img_dir = dir('./monkeydog/*.bmp');
[m n]=size(img_dir);
raw=zeros(240,320,1,m);
for i=1:m
    name=strcat('./monkeydog/',img_dir(i).name);
    I=imread(name);
    raw(:,:,1,i)=rgb2gray(I);
end

for i=1:m-1
    V=HSoptflow(raw,i);
    imshow(V)
    imgname=strcat('C:\Users\71405\Desktop\回收站\计算机视觉\2018.6.12\output2\img_',num2str(i),'.jpg');
    saveas(gcf,imgname); % save the figure
end


The output was chaotic and tiny (HSoptflow block-averages the flow over xskip-by-xskip blocks before plotting), and I did not find a good way to work with it, so I moved on to a second approach, linked below.
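As an aside, here is a minimal sketch (my own addition, not used in what follows, assuming the block averaging inside HSoptflow is the only downsampling step) of how the small us/vs arrays could at least be resized back to frame size and inspected as a motion-magnitude map:

% Sketch: upsample the block-averaged flow to frame size and view its magnitude
[us,vs] = HSoptflow(raw,1);                 % flow between frames 1 and 2
U = imresize(us,[240 320],'bilinear');      % back to the original frame size
V = imresize(vs,[240 320],'bilinear');
mag = sqrt(U.^2 + V.^2);                    % per-pixel motion magnitude
figure; imshow(mat2gray(mag));              % normalize to [0,1] for display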

Original article for the second approach: https://blog.csdn.net/garfielder007/article/details/50441756

Link for the missing computeColor.m: http://www.codeforge.cn/read/215288/computeColor.m__html

function img = computeColor(u,v)
 
%   computeColor color codes flow field U, V
 
%   According to the c++ source code of Daniel Scharstein 
%   Contact: schar@middlebury.edu
 
%   Author: Deqing Sun, Department of Computer Science, Brown University
%   Contact: dqsun@cs.brown.edu
%   $Date: 2007-10-31 21:20:30 (Wed, 31 Oct 2006) $
 
% Copyright 2007, Deqing Sun.
%
%                         All Rights Reserved
%
% Permission to use, copy, modify, and distribute this software and its
% documentation for any purpose other than its incorporation into a
% commercial product is hereby granted without fee, provided that the
% above copyright notice appear in all copies and that both that
% copyright notice and this permission notice appear in supporting
% documentation, and that the name of the author and Brown University not be used in
% advertising or publicity pertaining to distribution of the software
% without specific, written prior permission.
%
% THE AUTHOR AND BROWN UNIVERSITY DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
% INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ANY
% PARTICULAR PURPOSE.  IN NO EVENT SHALL THE AUTHOR OR BROWN UNIVERSITY BE LIABLE FOR
% ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
 
nanIdx = isnan(u) | isnan(v);
u(nanIdx) = 0;
v(nanIdx) = 0;
 
colorwheel = makeColorwheel();
ncols = size(colorwheel, 1);
 
rad = sqrt(u.^2+v.^2);          
 
a = atan2(-v, -u)/pi;
 
fk = (a+1) /2 * (ncols-1) + 1;  % -1~1 mapped to 1~ncols
    
k0 = floor(fk);                 % 1, 2, ..., ncols
 
k1 = k0+1;
k1(k1==ncols+1) = 1;
 
f = fk - k0;
 
for i = 1:size(colorwheel,2)
    tmp = colorwheel(:,i);
    col0 = tmp(k0)/255;
    col1 = tmp(k1)/255;
    col = (1-f).*col0 + f.*col1;   
    
    idx = rad <= 1;   
    col(idx) = 1-rad(idx).*(1-col(idx));    % increase saturation with radius
     
    col(~idx) = col(~idx)*0.75;             % out of range
     
    img(:,:, i) = uint8(floor(255*col.*(1-nanIdx)));         
end;    
 
%%
function colorwheel = makeColorwheel()
 
%   color encoding scheme
 
%   adapted from the color circle idea described at
%   http://members.shaw.ca/quadibloc/other/colint.htm
 
 
RY = 15;
YG = 6;
GC = 4;
CB = 11;
BM = 13;
MR = 6;
 
ncols = RY + YG + GC + CB + BM + MR;
 
colorwheel = zeros(ncols, 3); % r g b
 
col = 0;
%RY
colorwheel(1:RY, 1) = 255;
colorwheel(1:RY, 2) = floor(255*(0:RY-1)/RY)';
col = col+RY;
 
%YG
colorwheel(col+(1:YG), 1) = 255 - floor(255*(0:YG-1)/YG)';
colorwheel(col+(1:YG), 2) = 255;
col = col+YG;
 
%GC
colorwheel(col+(1:GC), 2) = 255;
colorwheel(col+(1:GC), 3) = floor(255*(0:GC-1)/GC)';
col = col+GC;
 
%CB
colorwheel(col+(1:CB), 2) = 255 - floor(255*(0:CB-1)/CB)';
colorwheel(col+(1:CB), 3) = 255;
col = col+CB;
 
%BM
colorwheel(col+(1:BM), 3) = 255;
colorwheel(col+(1:BM), 1) = floor(255*(0:BM-1)/BM)';
col = col+BM;
 
%MR
colorwheel(col+(1:MR), 3) = 255 - floor(255*(0:MR-1)/MR)';
colorwheel(col+(1:MR), 1) = 255;

In addition, an image saliency map based on local distances is used as a mask: locations with high image saliency are set to 1 and locations with low saliency to 0. The optical-flow saliency is then masked with it, keeping only the locations where the image saliency is 1.
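The masking step itself is just a binary threshold followed by an element-wise product. A minimal sketch of the idea (the 0.55 threshold and the variable roles mirror the full code below; the names sal and flowColor are purely illustrative):

% sal:       grayscale image-saliency map (uint8), same height/width as the frame
% flowColor: color-coded optical-flow image (uint8, 3 channels)
mask = im2bw(sal, 0.55);                            % high saliency -> 1, low -> 0
masked = flowColor .* uint8(repmat(mask,[1 1 3]));  % keep only high-saliency regions
imshow(masked);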

The final result is as follows:


Implementation code:

img_dir = dir('./monkeydog/*.bmp');

[m n]=size(img_dir);

for q=1:m-1
    name1=strcat('./monkeydog/',img_dir(q).name);
    name2=strcat('./monkeydog/',img_dir(q+1).name);
    Iref = imread(name1);
    Iinput = imread(name2);
    I=Iinput;
     
    
    %% Compute the saliency of the later frame
     [m n d]=size(I);
    Iq=imresize(uint8(I),[240,240*n/m]);
    Iref=imresize(uint8(Iref),[240,240*n/m]);
    Iinput=imresize(uint8(Iinput),[240,240*n/m]);
    mode=fspecial('gaussian', 1, 1);
    %IS=imfilter(Iq,mode,'replicate');  
    IS=Iq;
    [label,num]=superpixels(IS,300);
    BW = boundarymask(label);
     %imshow(imoverlay(IS,BW,'cyan'),'InitialMagnification',100);
     %figure
    [m n]=size(label);
    supmean=uint64(zeros(num,5)); 
    labelnum=zeros(num);

    for i=1:m
        for j=1:n
            supmean(label(i,j),1)=supmean(label(i,j),1)+i;
            supmean(label(i,j),2)=supmean(label(i,j),2)+j;
            supmean(label(i,j),3)=(supmean(label(i,j),3)+uint64(IS(i,j,1)));
            supmean(label(i,j),4)=(supmean(label(i,j),4)+uint64(IS(i,j,2)));
            supmean(label(i,j),5)=(supmean(label(i,j),5)+uint64(IS(i,j,3)));
            labelnum(label(i,j))=labelnum(label(i,j))+1;
        end
    end
    for i=1:num
        supmean(i,1)=uint16(supmean(i,1)./labelnum(i));
        supmean(i,2)=uint16(supmean(i,2)./labelnum(i));
        supmean(i,3)=uint16(supmean(i,3)./labelnum(i));
        supmean(i,4)=uint16(supmean(i,4)./labelnum(i));
        supmean(i,5)=uint16(supmean(i,5)./labelnum(i));
    end

    IM=zeros(m,n,3);
    for i=1:m
        for j=1:n
            IM(i,j,1)=uint8(supmean(label(i,j),3));
            IM(i,j,2)=uint8(supmean(label(i,j),4));
            IM(i,j,3)=uint8(supmean(label(i,j),5));
        end
    end
    supim=uint8(cat(3,IM(:,:,1),IM(:,:,2),IM(:,:,3)));
    %imshow(supim);


    % Global distance-based saliency
    supmean=double(supmean);
    dist_all=zeros(num,1);
    w0=0.0;
    for i=1:num
       for j=1:num
           if ((supmean(i,1)-supmean(j,1))^2+(supmean(i,2)-supmean(j,2))^2)<500
                dist_all(i)=dist_all(i)+(w0*(supmean(i,1)-supmean(j,1))^2+w0*(supmean(i,2)-supmean(j,2))^2+(supmean(i,3)-supmean(j,3))^2+(supmean(i,4)-supmean(j,4))^2+(supmean(i,5)-supmean(j,5))^2)^0.5;
           end
       end
    end
    % Normalize
    dist_min=min(dist_all);
    dist_max=max(dist_all);
    for i=1:num
        dist_all(i)=(dist_all(i)-dist_min)*255/(dist_max-dist_min);
    end
    dist_all=uint8(dist_all);
    im_all=zeros(240,240*n/m);
    for i=1:m
        for j=1:n
            im_all(i,j)=dist_all(label(i,j));
        end
    end
    im_all=uint8(im_all);
    im_all_bw=im2bw(im_all,0.55);
    %im_all=im2bw(im_all,0.05);
    
    %% Optical flow
    if size(Iref, 3) == 3
        Irefg = rgb2gray(Iref);
        Iinputg = rgb2gray(Iinput);
    else
        Irefg = Iref;
        Iinputg = Iinput;
    end
% Create the optical flow object and an image data type converter
    opticalFlow = vision.OpticalFlow('ReferenceFrameDelay',1);
    converter = vision.ImageDataTypeConverter;
% Configure the optical flow object
    opticalFlow.OutputValue = 'Horizontal and vertical components in complex form'; % return the flow as a complex-valued image
    opticalFlow.ReferenceFrameSource = 'Input port'; % compare two images instead of a video stream
    if 1 % which algorithm to use
        opticalFlow.Method = 'Lucas-Kanade';
        opticalFlow.NoiseReductionThreshold = 0.001; % default is 0.0039
    else
        opticalFlow.Method = 'Horn-Schunck';
        opticalFlow.Smoothness = 0.5; % default is 1
    end
% Run the optical flow object on the two images
    Iinputg_c = step(converter, Iinputg);
    Irefg_c = step(converter, Irefg);
    of = step(opticalFlow, Iinputg_c, Irefg_c);
    ofH = real(of);
    ofV = imag(of);
    
    [m n]=size(ofV);
    for j=1:m
        for k=1:n
            if abs(double(ofV(j,k)))<0.5
                ofV(j,k)=1;
            else
                ofV(j,k)=0;
            end
        end
    end
    %im_all=1-im_all;
% Convert the flow to a color image for display
   % B=strel('disk',1);
    %ofV=imdilate(ofV,B);
    %ofV=imerode(ofV,B); 
    ofI = computeColor(ofH, ofV);
    ofI(:,:,1)=ofI(:,:,1).*uint8(im_all_bw);
    ofI(:,:,2)=ofI(:,:,2).*uint8(im_all_bw);
    ofI(:,:,3)=ofI(:,:,3).*uint8(im_all_bw);
    ofI=cat(3,ofI(:,:,1),ofI(:,:,2),ofI(:,:,3));
% Display the results
%     ofV=uint8(ofV);
%     ofV=ofV.*im_all;
    for qa=1:m
        for qb=1:n
            if ofI(qa,qb,1)+ofI(qa,qb,2)+ofI(qa,qb,3)==0
                ofI(qa,qb,1)=255;
                ofI(qa,qb,2)=255;
                ofI(qa,qb,3)=255;
            end
        end
    end
    subplot(121),imshow(I);
    subplot(122),
    imshow(ofI)
    pause(0.01);
    imgname=strcat('C:\Users\71405\Desktop\回收站\计算机视觉\2018.6.12\output3\img_',num2str(q),'.jpg');
    saveas(gcf,imgname);
end

This still did not work very well, so I switched to the code written by Ce Liu:

Original page: http://people.csail.mit.edu/celiu/OpticalFlow/

Result images:

(Left: the distance-based saliency masked by the optical flow; right: the optical-flow magnitude masked by the distance-based saliency.)

The result is passable, but the target is sometimes lost, so the output flickers from frame to frame.
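One plausible way to suppress the flicker (my own suggestion, not tried in this post) would be to smooth the binary masks over time instead of thresholding every frame independently. A minimal sketch, where the 0.6 blending weight and the variable prevMask are assumptions, and v1/img are the per-frame flow mask and saliency map from the code below:

% Blend the current binary flow mask with the previous frame's mask so that a
% target missed in a single frame does not disappear completely.
if i == 1
    prevMask = double(v1);
end
smoothMask = 0.6*double(v1) + 0.4*prevMask;
prevMask = smoothMask;                              % carry over to the next frame
img2 = 0.8*img.*uint8(smoothMask > 0.5) + 0.2*img;  % masked saliency, as in Method 1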

Code:

close all;
clear all;
clc;
img_dir = dir('./monkeydog/*.bmp');
[m n]=size(img_dir);
for i=1:m-1
    name1=strcat('./monkeydog/',img_dir(i).name);
    name2=strcat('./monkeydog/',img_dir(i+1).name);
    I1=imread(name1);
    I2=imread(name2);
    I1=imresize(I1,[120,160]);
    I2=imresize(I2,[120,160]);
    alpha = 0.012;
    ratio = 0.75;
    minWidth = 20;
    nOuterFPIterations = 7;
    nInnerFPIterations = 1;
    nSORIterations = 30;
    para = [alpha,ratio,minWidth,nOuterFPIterations,nInnerFPIterations,nSORIterations];
    [vx,vy,warpI2] = Coarse2FineTwoFrames(I1,I2,para);
    vx=vx-mean(mean(vx)); % subtract the mean (de-center the flow)
    vy=vy-mean(mean(vy));
    v=sqrt(vx.^2+vy.^2);
    v1=v;
    [l,d]=size(v);

    %v=log(double(v));
    v=imadjust(v,[],[],1.1); % increase contrast: bright areas get brighter, dark areas darker
    maxi=max(max(v));
    mini=min(min(v));
    for k=1:l
        for o=1:d
            v(k,o)=(v(k,o)-mini)*255/(maxi-mini); % normalize to [0, 255]
        end
    end
    v=uint8(v);
    img=suptype(I2);
    % Method 1: v as the mask
    v1=im2bw(v,0.85);
    img2=0.8*img.*uint8(v1)+0.2*img;
    % Method 2: img as the mask
    img1=im2bw(img,0.65);
    v2=0.8*v.*uint8(img1)+0.2*img;
    
    
    img2=imadjust(img2,[],[],2);
    v2=imadjust(v2,[],[],2);
    subplot(121),imshow(img2);
    subplot(122),imshow(v2);
    pause(0.01)
    imgname=strcat('C:\Users\71405\Desktop\回收站\计算机视觉\2018.6.12\output\img_',num2str(i),'.jpg');
    saveas(gcf,imgname);
end

The code that computes the saliency image is suptype.m:

function img = suptype(I)
    [m n d]=size(I);
    %I=imresize(uint8(I),[120,120*n/m]);
    %mode=fspecial('gaussian', 1, 1);
    %IS=imfilter(I,mode,'replicate');  
    IS=I;
    [label,num]=superpixels(IS,300);
    BW = boundarymask(label);
     %imshow(imoverlay(IS,BW,'cyan'),'InitialMagnification',100);
     %figure
    [m n]=size(label);
    supmean=uint64(zeros(num,5)); 
    labelnum=zeros(num);

    for i=1:m
        for j=1:n
            supmean(label(i,j),1)=supmean(label(i,j),1)+i;
            supmean(label(i,j),2)=supmean(label(i,j),2)+j;
            supmean(label(i,j),3)=(supmean(label(i,j),3)+uint64(IS(i,j,1)));
            supmean(label(i,j),4)=(supmean(label(i,j),4)+uint64(IS(i,j,2)));
            supmean(label(i,j),5)=(supmean(label(i,j),5)+uint64(IS(i,j,3)));
            labelnum(label(i,j))=labelnum(label(i,j))+1;
        end
    end
    for i=1:num
        supmean(i,1)=uint16(supmean(i,1)./labelnum(i));
        supmean(i,2)=uint16(supmean(i,2)./labelnum(i));
        supmean(i,3)=uint16(supmean(i,3)./labelnum(i));
        supmean(i,4)=uint16(supmean(i,4)./labelnum(i));
        supmean(i,5)=uint16(supmean(i,5)./labelnum(i));
    end

    IM=zeros(m,n,3);
    for i=1:m
        for j=1:n
            IM(i,j,1)=uint8(supmean(label(i,j),3));
            IM(i,j,2)=uint8(supmean(label(i,j),4));
            IM(i,j,3)=uint8(supmean(label(i,j),5));
        end
    end
    supim=uint8(cat(3,IM(:,:,1),IM(:,:,2),IM(:,:,3)));
    %imshow(supim);


    % Global distance-based saliency
    supmean=double(supmean);
    dist_all=zeros(num,1);
    w0=0.0;
    for i=1:num
       for j=1:num
           if ((supmean(i,1)-supmean(j,1))^2+(supmean(i,2)-supmean(j,2))^2)<500
                dist_all(i)=dist_all(i)+(w0*(supmean(i,1)-supmean(j,1))^2+w0*(supmean(i,2)-supmean(j,2))^2+(supmean(i,3)-supmean(j,3))^2+(supmean(i,4)-supmean(j,4))^2+(supmean(i,5)-supmean(j,5))^2)^0.5;
           end
       end
    end
    % Normalize
    dist_min=min(dist_all);
    dist_max=max(dist_all);
    for i=1:num
        dist_all(i)=(dist_all(i)-dist_min)*255/(dist_max-dist_min);
    end
    dist_all=uint8(dist_all);
    im_all=zeros(120,120*n/m);
    for i=1:m
        for j=1:n
            im_all(i,j)=dist_all(label(i,j));
        end
    end
    im_all=uint8(im_all);
    %im_all=im2bw(im_all,0.55);
    %im_all=uint8(im2bw(im_all,0.55)).*I(:,:,1);
    
    % Output image
    img=im_all;

It also uses some of the functions from that paper:


Download link for the paper's code: http://people.csail.mit.edu/celiu/OpticalFlow/OpticalFlow.zip


Finally, I used AE (After Effects) to fake a result image (yes, this is actually a blog post about how to fake results with AE):

Step 1: import the BMP sequence:


Step 2: add the sequence to a composition, then apply Effect -> Keying -> Color Difference Key.


Step 3: in the Effect Controls panel, set View to Matte Corrected and use the three eyedroppers in the preview thumbnails (the second to sample the background, the third the foreground). The result looks like this:


Sample a few more times until the monkey is mostly black and the background mostly white.

Step 4: use the rectangle tool to create a mask roughly the size shown below.


Step 5: keyframe the mask so that it "tracks" the monkey's movement.


A few simple keyframes are enough, as long as the monkey's whole body stays roughly inside the box. Then just render it out.

Result:

(Isn't this so much better than the previous results? [smirk])
