Optical flow based tracking of a moving target: an improved version of the algorithm from the previous blog post.

```cpp
// Farneback dense optical flow calculate and show in Munsell system of colors
// Author : Zouxy
// Date   : 2013-3-15
// HomePage : http://blog.csdn.net/zouxy09
// Email  : zouxy09@qq.com

// The API calcOpticalFlowFarneback() comes from OpenCV, and this
// 2D dense optical flow algorithm is from the following paper:
// Gunnar Farneback. "Two-Frame Motion Estimation Based on Polynomial Expansion".
// The OpenCV source code is located in ..\opencv2.4.3\modules\video\src\optflowgf.cpp
#define _CRT_SECURE_NO_WARNINGS
#include <iostream>
#include "opencv2/opencv.hpp"

using namespace cv;
using namespace std;

#define UNKNOWN_FLOW_THRESH 1e9

// Color encoding of flow vectors from:
// http://members.shaw.ca/quadibloc/other/colint.htm
// This code is modified from:
// http://vision.middlebury.edu/flow/data/
void makecolorwheel(vector<Scalar> &colorwheel)
{
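	// Segment lengths around the hue circle; RY + YG + GC + CB + BM + MR = 55 colors in total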
	int RY = 15;
	int YG = 6;
	int GC = 4;
	int CB = 11;
	int BM = 13;
	int MR = 6;

	int i;

	for (i = 0; i < RY; i++) colorwheel.push_back(Scalar(255, 255 * i / RY, 0));
	for (i = 0; i < YG; i++) colorwheel.push_back(Scalar(255 - 255 * i / YG, 255, 0));
	for (i = 0; i < GC; i++) colorwheel.push_back(Scalar(0, 255, 255 * i / GC));
	for (i = 0; i < CB; i++) colorwheel.push_back(Scalar(0, 255 - 255 * i / CB, 255));
	for (i = 0; i < BM; i++) colorwheel.push_back(Scalar(255 * i / BM, 0, 255));
	for (i = 0; i < MR; i++) colorwheel.push_back(Scalar(255, 0, 255 - 255 * i / MR));
}

void motionToColor(Mat flow, Mat &color)
{
	if (color.empty())
		color.create(flow.rows, flow.cols, CV_8UC3);

	static vector<Scalar> colorwheel; //Scalar r,g,b
	if (colorwheel.empty())
		makecolorwheel(colorwheel);

	// determine motion range:
	float maxrad = -1;

	// Find max flow to normalize fx and fy
	for (int i = 0; i < flow.rows; ++i)
	{
		for (int j = 0; j < flow.cols; ++j)
		{
			Vec2f flow_at_point = flow.at<Vec2f>(i, j);
			float fx = flow_at_point[0];
			float fy = flow_at_point[1];
			if ((fabs(fx) >  UNKNOWN_FLOW_THRESH) || (fabs(fy) >  UNKNOWN_FLOW_THRESH))
				continue;
			float rad = sqrt(fx * fx + fy * fy);
			maxrad = maxrad > rad ? maxrad : rad;
		}
	}
	// Avoid division by zero when no valid motion was found
	if (maxrad <= 0)
		maxrad = 1;

	for (int i = 0; i < flow.rows; ++i)
	{
		for (int j = 0; j < flow.cols; ++j)
		{
			uchar *data = color.data + color.step[0] * i + color.step[1] * j;
			Vec2f flow_at_point = flow.at<Vec2f>(i, j);

			// Reject unknown flow using the raw values; after dividing by maxrad the
			// UNKNOWN_FLOW_THRESH test could never trigger
			if ((fabs(flow_at_point[0]) > UNKNOWN_FLOW_THRESH) || (fabs(flow_at_point[1]) > UNKNOWN_FLOW_THRESH))
			{
				data[0] = data[1] = data[2] = 0;
				continue;
			}
			float fx = flow_at_point[0] / maxrad;
			float fy = flow_at_point[1] / maxrad;
			float rad = sqrt(fx * fx + fy * fy);

			// Map the flow direction onto a fractional index of the color wheel
			float angle = atan2(-fy, -fx) / CV_PI;
			float fk = (angle + 1.0) / 2.0 * (colorwheel.size() - 1);
			int k0 = (int)fk;
			int k1 = (k0 + 1) % colorwheel.size();
			float f = fk - k0;
			//f = 0; // uncomment to see original color wheel

			for (int b = 0; b < 3; b++)
			{
				float col0 = colorwheel[k0][b] / 255.0;
				float col1 = colorwheel[k1][b] / 255.0;
				float col = (1 - f) * col0 + f * col1;
				if (rad <= 1)
					col = 1 - rad * (1 - col); // increase saturation with radius
				else
					col *= .75; // out of range
				data[2 - b] = (int)(255.0 * col);
			}
		}
	}
}

int main(int, char**)
{
	VideoCapture cap;
	//cap.open(0);
	cap.open("D:\\car\\girl2.mp4");

	if (!cap.isOpened())
		return -1;

	Mat prevgray, gray, flow, cflow, frame;
	namedWindow("flow", 1);

	Mat motion2color;

	while (1) // modified from the original source code
	{
		double t = (double)cvGetTickCount();

		cap >> frame;
		if (frame.empty()) // stop when the video ends instead of crashing in cvtColor
			break;
		cvtColor(frame, gray, CV_BGR2GRAY);
		imshow("original", frame);

		if (prevgray.data)
		{
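			// Farneback parameters: pyramid scale 0.5, 3 levels, window size 15,
			// 3 iterations, poly_n = 5, poly_sigma = 1.2, no extra flags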
			calcOpticalFlowFarneback(prevgray, gray, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
			motionToColor(flow, motion2color);
			imshow("flow", motion2color); 
			imwrite("D:\\car\\1.png", motion2color);
		}
		waitKey(10); // this line is modified; the original code would crash and close here
			//break;
		std::swap(prevgray, gray);

		t = (double)cvGetTickCount() - t;
		cout << "cost time: " << t / ((double)cvGetTickFrequency() * 1000.) << " ms" << endl;
	}
	return 0;
}
```

The code above is an improved version of the one in the article 《光流Optical Flow介绍与OpenCV实现》 (an introduction to optical flow and its OpenCV implementation); see that post for the theory. The code in that post has bugs, however; after my modifications it runs in Visual Studio, but the algorithm's real-time performance is still poor.
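On the real-time issue, one mitigation that is commonly used with dense flow (not something from the referenced post) is to compute the flow on downscaled frames and upsample the result. Below is a minimal sketch of that idea, assuming the same headers and namespace as the listing above; the `fastFlow` helper name and the fixed 0.5 scale factor are my own assumptions, and the Farneback parameters are simply reused from the code above.

```cpp
// Sketch only: compute Farneback flow at half resolution and upsample the result.
Mat fastFlow(const Mat &prevgray, const Mat &gray)
{
	Mat prevSmall, graySmall, flowSmall, flow;
	resize(prevgray, prevSmall, Size(), 0.5, 0.5, INTER_AREA); // assumed 0.5 downscale
	resize(gray, graySmall, Size(), 0.5, 0.5, INTER_AREA);

	// Same parameters as in the full-resolution call above
	calcOpticalFlowFarneback(prevSmall, graySmall, flowSmall, 0.5, 3, 15, 3, 5, 1.2, 0);

	// Bring the flow field back to full resolution and rescale the vectors
	// so they are expressed in full-resolution pixels again
	resize(flowSmall, flow, prevgray.size(), 0, 0, INTER_LINEAR);
	flow *= 2.0;
	return flow;
}
```

In `main()` the direct `calcOpticalFlowFarneback` call would then be replaced by `flow = fastFlow(prevgray, gray);`; whether the loss in flow resolution is acceptable depends on the video.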

Below is a MATLAB code sketch that uses optical flow to track a weak, small target:

```matlab
% Read the video file
video = VideoReader('video.mp4');

% Detect initial feature points and set up the KLT point tracker
firstFrame = rgb2gray(readFrame(video));
points = detectMinEigenFeatures(firstFrame, 'MinQuality', 0.001);
oldPoints = points.Location;
tracker = vision.PointTracker('MaxBidirectionalError', 1);
initialize(tracker, oldPoints, firstFrame);

% Start the target at the centroid of the initial points
targetPosition = mean(oldPoints, 1);

% Track frame by frame
while hasFrame(video)
    frame = readFrame(video);
    grayFrame = rgb2gray(frame);

    % Track the points with KLT optical flow
    [points, validity] = tracker(grayFrame);
    newPoints = points(validity, :);
    oldPoints = oldPoints(validity, :);

    % Mean motion vector of the surviving points
    if size(newPoints, 1) >= 2
        meanFlow = mean(newPoints - oldPoints, 1);
    else
        meanFlow = [0, 0];
    end

    % If too few points survive, re-detect features; otherwise keep tracking
    if size(newPoints, 1) < 10
        points = detectMinEigenFeatures(grayFrame, 'MinQuality', 0.001);
        oldPoints = points.Location;
        setPoints(tracker, oldPoints);
    else
        setPoints(tracker, newPoints);
        oldPoints = newPoints;
    end

    % Shift the target position by the mean motion vector
    targetPosition = targetPosition + meanFlow;
end
```

This code detects feature points with `detectMinEigenFeatures` and tracks them with `vision.PointTracker`, which implements the Kanade-Lucas-Tomasi (KLT) optical flow algorithm. The mean displacement of the successfully tracked points serves as the motion vector of the weak, small target, and the target position is updated with it each frame. If too few points survive, the feature points are re-detected.
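For reference in the OpenCV/C++ setting used in the rest of this post, the same sparse idea can be sketched roughly as follows: track Shi-Tomasi corners with pyramidal Lucas-Kanade optical flow and shift a target position by the mean displacement of the points that survive. This is only an illustration of the approach; the input path, the re-detection threshold, the detector parameters, and the way the target position is initialized are all assumptions.

```cpp
#include <iostream>
#include "opencv2/opencv.hpp"
using namespace cv;
using namespace std;

int main()
{
	VideoCapture cap("video.mp4");          // assumed input path
	if (!cap.isOpened()) return -1;

	Mat frame, gray, prevGray;
	vector<Point2f> prevPts, nextPts;
	Point2f target(0, 0);                   // assumed: accumulated target offset

	while (cap.read(frame))
	{
		cvtColor(frame, gray, CV_BGR2GRAY);

		// (Re)detect features when none or too few are being tracked
		if (prevPts.size() < 10)
		{
			goodFeaturesToTrack(gray, prevPts, 200, 0.001, 5);
			gray.copyTo(prevGray);
			continue;
		}

		// Pyramidal Lucas-Kanade optical flow for the sparse points
		vector<uchar> status;
		vector<float> err;
		calcOpticalFlowPyrLK(prevGray, gray, prevPts, nextPts, status, err);

		// Mean displacement of the successfully tracked points
		Point2f meanFlow(0, 0);
		vector<Point2f> kept;
		for (size_t i = 0; i < nextPts.size(); i++)
		{
			if (!status[i]) continue;
			meanFlow += nextPts[i] - prevPts[i];
			kept.push_back(nextPts[i]);
		}
		if (!kept.empty()) meanFlow *= (1.0f / kept.size());

		target += meanFlow;                 // shift the target by the mean motion
		prevPts = kept;
		gray.copyTo(prevGray);
	}
	cout << "final target offset: " << target << endl;
	return 0;
}
```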
