[OpenCV+VS2015] Meter Reading Recognition (2): SURF-Based Meter Correction


This article detects meter readings using traditional video and image processing methods. The author is still inexperienced, so corrections and suggestions are very welcome, thank you!

1 Approach and Code

Previous chapter: [OpenCV+VS2015] Meter Reading Recognition (1): Meter Position Detection (What If the Meter Is Tilted!?)

Through meter position detection we obtained the cropped meter image:
[Image: the cropped meter region]
We need to straighten the image, so SURF comes in. I use SURF-based template matching to obtain three point correspondences, compute the affine transform matrix from them, and then apply the warp.
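The snippet below is a fragment from inside a larger function: srcImage_1 (the template) and Image_2 (the meter image from the previous step) are assumed to be loaded before this point. As a minimal sketch of the setup the fragment presumably relies on, note that SURF lives in the opencv_contrib module xfeatures2d:

	#include <opencv2/core.hpp>
	#include <opencv2/imgproc.hpp>
	#include <opencv2/highgui.hpp>
	#include <opencv2/features2d.hpp>
	#include <opencv2/xfeatures2d.hpp>   // SURF lives here (requires opencv_contrib)
	using namespace cv;
	using namespace cv::xfeatures2d;
	using namespace std;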

//【1】Load the source images
	//Mat srcImage_1 = imread("E:\\vs_work\\project-3\\10表计1.jpg");  // meter image 10.1 serves as the template
	if (!srcImage_1.data || !Image_2.data)  // check that both images were loaded successfully
	{
		printf("Error reading image: please make sure the file named in imread exists in that directory!\n"); return false;
	}
	Mat srcImage_2;
	resize(Image_2, srcImage_2, srcImage_1.size());  // scale the detected meter image to the template size


	//【2】Detect keypoints with the SURF detector
	int minHessian = 3000;
	Ptr<SURF> detector = SURF::create(minHessian);
	std::vector<KeyPoint> keypoints_1, keypoints_2;
	detector->detect(srcImage_1, keypoints_1);
	detector->detect(srcImage_2, keypoints_2);

	//【3】Extract features with the SURF descriptor (compute the feature vectors)
	Ptr<SURF> extractor = SurfDescriptorExtractor::create();
	Mat descriptors_1, descriptors_2;
	extractor->compute(srcImage_1, keypoints_1, descriptors_1);
	extractor->compute(srcImage_2, keypoints_2, descriptors_2);
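	// Side note (added, not from the original post): in OpenCV 3/4 the detect
	// and compute steps can also be merged into a single call, e.g.:
	//   detector->detectAndCompute(srcImage_1, noArray(), keypoints_1, descriptors_1);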

	//【4】Match the descriptor vectors with the FLANN matcher
	Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("FlannBased");
	vector< DMatch > matches;
	matcher->match(descriptors_1, descriptors_2, matches);

	//【5】Quickly compute the maximum and minimum distances among the matches
	double max_dist = 0; double min_dist = 100; int i = 0;
	for (i = 0; i < descriptors_1.rows; i++)
	{
		double dist = matches[i].distance;
		if (dist < min_dist) min_dist = dist;
		if (dist > max_dist) max_dist = dist;
	}
	if (max_dist == 0) { max_dist = 50; }
	if (min_dist == 0) { min_dist = 10; }
	printf("> Max dist : %f \n", max_dist);
	printf("> Min dist : %f \n", min_dist);

	//【6】Keep only the qualifying matches (originally those with distance < 2 * min_dist); radiusMatch would work as well
	vector< DMatch > good_matches;
	float coefficient = 1.5;
	float eps = 0.01;
	int field1 = 20, field2 = 60;    // field1 judges whether a match is plausible; field2 keeps selected matches from clustering together
									 //for (i = 0; i < descriptors_1.rows; i++)
									 //{
									 //	if (matches[i].distance < 2 * min_dist)
									 //	{
									 //		good_matches.push_back(matches[i]);
									 //	}
									 //}

	int test_image1_x, test_image1_y, test_image2_x, test_image2_y;
	int pre1_test_image1_x = 0, pre1_test_image1_y = 0, pre2_test_image1_x = 0, pre2_test_image1_y = 0;
	int good_match_number;
	int iter = 0, Max_iter = 3000;
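	// Added explanation: the loop below searches for a distance threshold
	// coefficient * min_dist that yields exactly three well-separated,
	// position-consistent matches. If fewer than three pass, the threshold
	// is loosened by eps; if more than three pass, it is tightened. The
	// search gives up after Max_iter iterations.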

	while ((good_matches.size() != 3) && (iter<Max_iter)) {
		iter++;
		if (good_matches.size() < 3)
		{
			good_matches.clear();
			coefficient = coefficient + eps;
			for (i = 0; i < descriptors_1.rows; i++)
			{
				good_match_number = good_matches.size();
				if (matches[i].distance < coefficient * min_dist)     // use the descriptor distance to select candidate matches
				{
					test_image1_x = keypoints_1[matches[i].queryIdx].pt.x;
					test_image1_y = keypoints_1[matches[i].queryIdx].pt.y;
					test_image2_x = keypoints_2[matches[i].trainIdx].pt.x;
					test_image2_y = keypoints_2[matches[i].trainIdx].pt.y;
					if ((test_image2_x >(test_image1_x - field1)) && (test_image2_x<(test_image1_x + field1)) &&
						(test_image2_y>(test_image1_y - field1)) && (test_image2_y < (test_image1_y + field1)))      // a correct match should sit near the same position in both (resized) images
					{
						switch (good_match_number)
						{
						case 0:
							good_matches.push_back(matches[i]);
							pre1_test_image1_x = test_image1_x;
							pre1_test_image1_y = test_image1_y;
							break;

						case 1:
							if (((test_image1_x<(pre1_test_image1_x - field2)) || (test_image1_x>(pre1_test_image1_x + field2))) &&
								((test_image1_y<(pre1_test_image1_y - field2)) || (test_image1_y>(pre1_test_image1_y + field2))))
							{
								good_matches.push_back(matches[i]);
								pre2_test_image1_x = test_image1_x;
								pre2_test_image1_y = test_image1_y;
							}
							break;
						case 2:
							if (((test_image1_x<(pre1_test_image1_x - field2)) || (test_image1_x>(pre1_test_image1_x + field2))) &&
								((test_image1_y<(pre1_test_image1_y - field2)) || (test_image1_y>(pre1_test_image1_y + field2))) &&
								((test_image1_x<(pre2_test_image1_x - field2)) || (test_image1_x>(pre2_test_image1_x + field2))) &&
								((test_image1_y<(pre2_test_image1_y - field2)) || (test_image1_y>(pre2_test_image1_y + field2))))
							{
								good_matches.push_back(matches[i]);
							}
							break;
						default:
							break;
						}
					}
				}
			}
		}
		if (good_matches.size() > 3)
		{
			good_matches.clear();
			coefficient = coefficient - eps;
			for (i = 0; i < descriptors_1.rows; i++)
			{
				good_match_number = good_matches.size();
				if (matches[i].distance < coefficient * min_dist)
				{
					test_image1_x = keypoints_1[matches[i].queryIdx].pt.x;
					test_image1_y = keypoints_1[matches[i].queryIdx].pt.y;
					test_image2_x = keypoints_2[matches[i].trainIdx].pt.x;
					test_image2_y = keypoints_2[matches[i].trainIdx].pt.y;
					if ((test_image2_x >(test_image1_x - field1)) && ((test_image2_x<test_image1_x + field1)) &&
						(test_image2_y>(test_image1_y - field1)) && ((test_image2_y < test_image1_y + field1)))
					{
						switch (good_match_number)
						{
						case 0:
							good_matches.push_back(matches[i]);
							pre1_test_image1_x = test_image1_x;
							pre1_test_image1_y = test_image1_y;
							break;

						case 1:
							if (((test_image1_x<(pre1_test_image1_x - field2)) || (test_image1_x>(pre1_test_image1_x + field2))) &&
								((test_image1_y<(pre1_test_image1_y - field2)) || (test_image1_y>(pre1_test_image1_y + field2))))
							{
								good_matches.push_back(matches[i]);
								pre2_test_image1_x = test_image1_x;
								pre2_test_image1_y = test_image1_y;
							}
							break;
						case 2:
							if (((test_image1_x<(pre1_test_image1_x - field2)) || (test_image1_x>(pre1_test_image1_x + field2))) &&
								((test_image1_y<(pre1_test_image1_y - field2)) || (test_image1_y>(pre1_test_image1_y + field2))) &&
								((test_image1_x<(pre2_test_image1_x - field2)) || (test_image1_x>(pre2_test_image1_x + field2))) &&
								((test_image1_y<(pre2_test_image1_y - field2)) || (test_image1_y>(pre2_test_image1_y + field2))))
							{
								good_matches.push_back(matches[i]);
							}
							break;
						default:
							break;
						}
					}
				}
			}
		}
	}
	if (iter >= Max_iter) {
		printf("\n  ");
		printf(">>    Error: the image is too unclear to recognize, please try again\n  ");
		printf("\n  ");
		return false;
	}
	printf("coefficient = %.2f\n  ", coefficient);

	//【7】Draw the qualifying matches
	Mat img_matches;
	drawMatches(srcImage_1, keypoints_1, srcImage_2, keypoints_2,
		good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
		vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);

	//【8】Print information about the qualifying matches
	Point2f image1_point[3], image2_point[3];

	for (int i = 0; i < (int)good_matches.size(); i++)
	{
		image1_point[i] = keypoints_1[good_matches[i].queryIdx].pt;
		image2_point[i] = keypoints_2[good_matches[i].trainIdx].pt;
		printf("> Good match [%d]  keypoint 1: %d -- keypoint 2: %d    point 1 (%f, %f)  point 2 (%f, %f)\n",
			i + 1, good_matches[i].queryIdx, good_matches[i].trainIdx, image1_point[i].x, image1_point[i].y,
			image2_point[i].x, image2_point[i].y);
	}

	//【9】Show the match visualization
	/*imshow("Match result", img_matches);*/

	//【10】Apply the affine transform
	Mat warpAffine_srcImage_2;
	Mat M = getAffineTransform(image2_point, image1_point);
	warpAffine(srcImage_2, warpAffine_srcImage_2, M, srcImage_1.size());
	//imshow("> debug ... [image after affine transform]", warpAffine_srcImage_2);
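One caveat worth noting: getAffineTransform requires the three point pairs to be non-collinear, otherwise the resulting matrix is degenerate. A minimal sketch of a guard one could add before computing M (this guard and its 1e-3 tolerance are my own assumption, not part of the original code):

	// Hypothetical guard: reject near-collinear point triples via the 2D cross product.
	double cross = (double)(image2_point[1].x - image2_point[0].x) * (image2_point[2].y - image2_point[0].y)
	             - (double)(image2_point[1].y - image2_point[0].y) * (image2_point[2].x - image2_point[0].x);
	if (fabs(cross) < 1e-3) {   // 1e-3 is an assumed tolerance
		printf(">>    Error: the three matched points are nearly collinear, cannot warp\n");
		return false;
	}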

The results are as follows:
[Image: the qualifying matches drawn between template and target]
[Image: the meter image after affine correction]

2 Notes:

  • I am using OpenCV 4.5.2; the SURF algorithm requires the opencv_contrib package! See [OpenCV+VS2015] Adding the opencv_contrib Package for setup.
  • When correcting the meter you need to filter the feature points. My criteria here are: the Euclidean distance between descriptors, the rough positions before and after the transform, and not picking neighboring feature points. A more robust off-the-shelf alternative is sketched below.
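As a hedged alternative (a suggestion under stated assumptions, not the method used above): OpenCV's estimateAffine2D (in opencv2/calib3d.hpp) fits a 2x3 affine matrix from all matches at once and rejects outliers with RANSAC, which removes the need to hand-pick exactly three points. A minimal sketch, reusing keypoints_1, keypoints_2 and matches from the code above:

	// Sketch: RANSAC-based affine estimation as an alternative to manual filtering.
	std::vector<Point2f> pts1, pts2;
	for (size_t k = 0; k < matches.size(); k++) {
		pts1.push_back(keypoints_1[matches[k].queryIdx].pt);
		pts2.push_back(keypoints_2[matches[k].trainIdx].pt);
	}
	std::vector<uchar> inliers;
	Mat M2 = estimateAffine2D(pts2, pts1, inliers, RANSAC);   // maps image 2 onto the template
	if (!M2.empty()) {
		Mat corrected;
		warpAffine(srcImage_2, corrected, M2, srcImage_1.size());
	}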