Cascaded SIFT

1. BFSIFT

https://blog.csdn.net/Small_Munich/article/details/80457521

https://blog.csdn.net/yirant7/article/details/52451145

https://blog.csdn.net/small_munich/category_7597772.html

https://blog.csdn.net/Small_Munich/article/details/103429639

2. Cascaded SIFT

http://rpg.ifi.uzh.ch/docs/teaching/2018/06_feature_detection_2.pdf

基于级联结构的多光谱图像精确特征点匹配 (accurate feature-point matching of multispectral images based on a cascade structure), 荆晶, 北京邮电大学, Master's thesis, 2016-01-10
应用于多源SAR图像匹配的级联SIFT算法 (a cascaded SIFT algorithm for matching multi-source SAR images), 王峰; 尤红建; 傅兴玉; 许宁, 电子学报, journal article, 2016-03-15

I recently wrote a cascaded ORB version of this myself. The limitation of this approach is that the two images must be the same size and the viewpoint offset between them must not be too large.

#include <opencv2/opencv.hpp>
#include "gms_matcher.h"   // gms_matcher class, e.g. from the GMS-Feature-Matcher repository
#ifdef USE_GPU
#include <opencv2/cudafeatures2d.hpp>
using cv::cuda::GpuMat;
#endif

using namespace cv;
using namespace std;

int GmsMatch(Mat &img1, Mat &img2, Mat &img1_ori, Mat &img2_ori);

void runImagePair() {
  Mat img1_ori = imread("Texture_8Bit.png");
  Mat img2_ori = imread("Texture_8Bit.png");   // here the same test image is matched against itself

  Mat img1,img2;
  cv::resize(img1_ori, img1, Size(800,600));
  cv::resize(img2_ori, img2, Size(800,600));
  GmsMatch(img1, img2, img1_ori, img2_ori);

}

int GmsMatch(Mat &img1, Mat &img2, Mat &img1_ori, Mat &img2_ori) {

  float ratio_x = img1_ori.cols / (1.0 * img1.cols);
  float ratio_y = img1_ori.rows / (1.0 * img1.rows);

  int oriH = img1.rows;
  int oriW = img2.cols;
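  // split the resized images into a 2x2 grid of tiles; ORB detection and GMS matching run once per tile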
  vector<Rect> rectVector;
  rectVector.push_back(Rect(0,0, oriW/2, oriH/2));
  rectVector.push_back(Rect(oriW/2,0, oriW/2, oriH/2));
  rectVector.push_back(Rect(0,oriH/2, oriW/2, oriH/2));
  rectVector.push_back(Rect(oriW/2,oriH/2, oriW/2, oriH/2));

  std::vector<cv::Point2f> queryPoints;
  std::vector<cv::Point2f> trainPoints;
  vector<DMatch> goodMatches;
  //https://blog.csdn.net/linxihe123/article/details/70173476
  vector<pair<KeyPoint, KeyPoint> > kp_pairs_temp;

  for (size_t re = 0; re < rectVector.size(); ++re) {
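    // paste each tile into a black full-size canvas at its original position,
    // so that the detected keypoint coordinates stay expressed in full-image coordinates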

    Mat img1ROI = img1(rectVector[re]);
	cv::Mat img1Temp = cv::Mat::zeros(cv::Size(oriW,oriH),CV_8UC3);
	Mat zero1ROI = img1Temp(rectVector[re]);
	Mat temp1 = img1ROI.clone();
	temp1.copyTo(zero1ROI);

	Mat img2ROI = img2(rectVector[re]);
	cv::Mat img2Temp = cv::Mat::zeros(cv::Size(oriW,oriH),CV_8UC3);
	Mat zero2ROI = img2Temp(rectVector[re]);
	Mat temp2 = img2ROI.clone();
	temp2.copyTo(zero2ROI);

	vector<KeyPoint> kp1, kp2;
	Mat d1, d2;
	vector<DMatch> matches_all, matches_gms;
	Ptr<ORB> orb = ORB::create(100000);
	orb->setFastThreshold(0);
	orb->detectAndCompute(img1Temp, Mat(), kp1, d1);
	orb->detectAndCompute(img2Temp, Mat(), kp2, d2);
	cout << "Get total " << kp1.size() << " kp1." << endl;
	cout << "Get total " << kp2.size() << " kp2." << endl;
#ifdef USE_GPU
	GpuMat gd1(d1), gd2(d2);
	Ptr<cuda::DescriptorMatcher> matcher = cv::cuda::DescriptorMatcher::createBFMatcher(NORM_HAMMING);
	matcher->match(gd1, gd2, matches_all);
#else
	BFMatcher matcher(NORM_HAMMING);
    matcher.match(d1, d2, matches_all);
#endif
	// GMS filter
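	// gms_matcher is constructed from the keypoints, image sizes and brute-force matches;
	// GetInlierMask(mask, withScale, withRotation) is called with scale and rotation handling disabled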
	std::vector<bool> vbInliers;
	gms_matcher gms(kp1, img1.size(), kp2, img2.size(), matches_all);
	int num_inliers = gms.GetInlierMask(vbInliers, false, false);
	cout << "Get total " << num_inliers << " gms-matches." << endl;
	// collect matches
	for (size_t i = 0; i < vbInliers.size(); ++i) {
	  if (vbInliers[i] == true) {
		matches_gms.push_back(matches_all[i]);
	  }
	}

	for (size_t i = 0; i < matches_gms.size(); i++) {
	  KeyPoint temp1 = kp1[matches_gms[i].queryIdx];
	  KeyPoint temp2 = kp2[matches_gms[i].trainIdx];
	  goodMatches.push_back(matches_gms[i]);
	  queryPoints.push_back(temp1.pt);
	  trainPoints.push_back(temp2.pt);
	  kp_pairs_temp.push_back(make_pair(temp1, temp2));
	}
  }

  cout << "Get total " << goodMatches.size() << " gms-matches." << endl;

}
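The 2x2 split above is hard-coded into rectVector. As a minimal sketch of the same tiling idea (the helper name makeTileRects and the grid parameters are my own, not part of the original code), the split can be generalized to an arbitrary nx x ny grid, with the last tile in each row/column absorbing the remainder pixels:

#include <vector>
#include <opencv2/core.hpp>

// Hypothetical helper: cover a W x H image with an nx x ny grid of tiles.
// The last tile in each row/column is enlarged so the grid covers the image exactly.
std::vector<cv::Rect> makeTileRects(int W, int H, int nx, int ny) {
  std::vector<cv::Rect> rects;
  const int tw = W / nx;
  const int th = H / ny;
  for (int j = 0; j < ny; ++j) {
    for (int i = 0; i < nx; ++i) {
      const int w = (i == nx - 1) ? W - i * tw : tw;
      const int h = (j == ny - 1) ? H - j * th : th;
      rects.push_back(cv::Rect(i * tw, j * th, w, h));
    }
  }
  return rects;
}

// With nx = ny = 2 this reproduces the four quadrant Rects built above:
//   vector<Rect> rectVector = makeTileRects(img1.cols, img1.rows, 2, 2);

A finer grid should spread the ORB features more evenly over the image, at the cost of running the detector and matcher more times.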

Modified version: in addition to the per-tile ORB+GMS matching above, the matches are filtered through a cascade of geometric checks (homography RANSAC, essential-matrix RANSAC, fundamental-matrix RANSAC, and a depth-validity test) before the final homography is estimated on the surviving full-resolution correspondences:

void find_homography_orbgms(const cv::Mat &img1_src, const cv::Mat &img2_src, const cv::Mat &img1_dep, const cv::Mat &img2_dep, cv::Mat &homography)
{
  float ratio_x = img1_src.cols / 800.0;
  float ratio_y = img1_src.rows / 600.0;

  cv::Mat img1;
  cv::Mat img2;
  cv::resize(img1_src, img1, Size(800, 600));
  cv::resize(img2_src, img2, Size(800, 600));



  int oriH = img1.rows;
  int oriW = img2.cols;
  vector<Rect> rectVector;
  rectVector.push_back(Rect(0,0, oriW/2, oriH/2));
  rectVector.push_back(Rect(oriW/2,0, oriW/2, oriH/2));
  rectVector.push_back(Rect(0,oriH/2, oriW/2, oriH/2));
  rectVector.push_back(Rect(oriW/2,oriH/2, oriW/2, oriH/2));

  std::vector<cv::Point2f> queryPoints;
  std::vector<cv::Point2f> trainPoints;
  vector<DMatch> goodMatches;
  //https://blog.csdn.net/linxihe123/article/details/70173476
  vector<pair<KeyPoint, KeyPoint> > kp_pairs_temp;

  for (size_t re = 0; re < rectVector.size(); ++re) {

	Mat img1ROI = img1(rectVector[re]);
	cv::Mat img1Temp = cv::Mat::zeros(cv::Size(oriW,oriH),CV_8UC3);
	Mat zero1ROI = img1Temp(rectVector[re]);
	Mat temp1 = img1ROI.clone();
	temp1.copyTo(zero1ROI);

	Mat img2ROI = img2(rectVector[re]);
	cv::Mat img2Temp = cv::Mat::zeros(cv::Size(oriW,oriH),CV_8UC3);
	Mat zero2ROI = img2Temp(rectVector[re]);
	Mat temp2 = img2ROI.clone();
	temp2.copyTo(zero2ROI);

	vector<KeyPoint> kp1, kp2;
	Mat d1, d2;
	vector<DMatch> matches_all, matches_gms;
	Ptr<ORB> orb = ORB::create(100000);
	orb->setFastThreshold(0);
	orb->detectAndCompute(img1Temp, Mat(), kp1, d1);
	orb->detectAndCompute(img2Temp, Mat(), kp2, d2);
	cout << "Get total " << kp1.size() << " kp1." << endl;
	cout << "Get total " << kp2.size() << " kp2." << endl;
#ifdef USE_GPU
	GpuMat gd1(d1), gd2(d2);
	Ptr<cuda::DescriptorMatcher> matcher = cv::cuda::DescriptorMatcher::createBFMatcher(NORM_HAMMING);
	matcher->match(gd1, gd2, matches_all);
#else
	BFMatcher matcher(NORM_HAMMING);
    matcher.match(d1, d2, matches_all);
#endif
	// GMS filter
	std::vector<bool> vbInliers;
	gms_matcher gms(kp1, img1.size(), kp2, img2.size(), matches_all);
	int num_inliers = gms.GetInlierMask(vbInliers, false, false);
	cout << "Get total " << num_inliers << " gms-matches." << endl;
	// collect matches
	for (size_t i = 0; i < vbInliers.size(); ++i) {
	  if (vbInliers[i] == true) {
		matches_gms.push_back(matches_all[i]);
	  }
	}

	for (size_t i = 0; i < matches_gms.size(); i++) {
	  KeyPoint temp1 = kp1[matches_gms[i].queryIdx];
	  KeyPoint temp2 = kp2[matches_gms[i].trainIdx];
	  goodMatches.push_back(matches_gms[i]);
	  queryPoints.push_back(temp1.pt);
	  trainPoints.push_back(temp2.pt);
	  kp_pairs_temp.push_back(make_pair(temp1, temp2));
	}
  }

  cout << "Get total " << goodMatches.size() << " gms-matches." << endl;

  // require at least four correspondences before estimating a homography
  const size_t minNumberMatchesAllowed = 4;
  if (queryPoints.size() < minNumberMatchesAllowed)
	return;

  double reprojectionThreshold = 10.0;
  std::vector<unsigned char> inliersMask(goodMatches.size());
  Mat homography_temp = findHomography(queryPoints,
                                       trainPoints,
                                       cv::RANSAC,
                                       reprojectionThreshold,
                                       inliersMask,
                                       2000,
                                       0.995);

  vector<pair<KeyPoint, KeyPoint> > kp_pairs;
  std::vector<cv::Point2f> newp1;
  std::vector<cv::Point2f> newp2;
  std::vector<cv::Point2f> newp1c;
  std::vector<cv::Point2f> newp2c;
  vector<DMatch> goodgoodMatches;
  int hh = img1_src.rows;
  int ww = img1_src.cols;
  for (size_t i = 0; i < inliersMask.size(); i++) {
	if (inliersMask[i]) {
	  goodgoodMatches.push_back(goodMatches[i]);
	  kp_pairs.push_back(kp_pairs_temp[i]);

	  float x1 = kp_pairs_temp[i].first.pt.x * ratio_x;
	  float y1 = kp_pairs_temp[i].first.pt.y * ratio_y;
	  float x2 = kp_pairs_temp[i].second.pt.x * ratio_x;
	  float y2 = kp_pairs_temp[i].second.pt.y * ratio_y;
	  // clamp the back-scaled coordinates to the valid pixel range of the full-resolution
	  // images, since they are later used to index the depth maps
	  x1 = x1 > ww - 1 ? ww - 1 : x1;
	  x1 = x1 < 0 ? 0 : x1;
	  x2 = x2 > ww - 1 ? ww - 1 : x2;
	  x2 = x2 < 0 ? 0 : x2;

	  y1 = y1 > hh - 1 ? hh - 1 : y1;
	  y1 = y1 < 0 ? 0 : y1;
	  y2 = y2 > hh - 1 ? hh - 1 : y2;
	  y2 = y2 < 0 ? 0 : y2;


	  newp1.push_back(cv::Point2f(x1, y1));
	  newp2.push_back(cv::Point2f(x2, y2));

	  newp1c.push_back(kp_pairs_temp[i].first.pt);
	  newp2c.push_back(kp_pairs_temp[i].second.pt);

	}

  }

  std::vector<unsigned char> EssentialMask(newp1.size());
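  // essential-matrix RANSAC as a further geometric filter; the hard-coded pinhole intrinsics
  // K = [fx 0 cx; 0 fy cy; 0 0 1] belong to the camera that produced these images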
  cv::Mat intrinsics = (cv::Mat_<float>(3, 3) << 2269.16, 0, 1065.54, 0, 2268.4, 799.032, 0, 0, 1);
  Mat Essential = findEssentialMat(newp1, newp2, intrinsics, cv::RANSAC, 0.999, 1, EssentialMask);


  std::vector<cv::Point2f> Essential_p1;
  std::vector<cv::Point2f> Essential_p2;
  for (size_t key = 0; key < newp1.size(); key++) {
	if (EssentialMask[key]) {
	  Essential_p1.push_back(newp1[key]);
	  Essential_p2.push_back(newp2[key]);
	}
  }

  std::vector<unsigned char> FundamentalMask(Essential_p1.size());
  Mat Fundamental = findFundamentalMat(Essential_p1, Essential_p2, cv::RANSAC, 3, 0.999, FundamentalMask);

  std::vector<cv::Point2f> Fundamental_p1;
  std::vector<cv::Point2f> Fundamental_p2;
  for (size_t key = 0; key < Essential_p1.size(); key++) {
	if (FundamentalMask[key]) {
	  Fundamental_p1.push_back(Essential_p1[key]);
	  Fundamental_p2.push_back(Essential_p2[key]);
	}
  }

  std::vector<cv::Point2f> trp1_temp;
  std::vector<cv::Point2f> trp2_temp;
  for (size_t key = 0; key < Fundamental_p1.size(); key++) {
	float x1 = Fundamental_p1[key].x;
	float y1 = Fundamental_p1[key].y;
	float x2 = Fundamental_p2[key].x;
	float y2 = Fundamental_p2[key].y;
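	// back-project each matched pixel to a 3D point in the camera frame using the pinhole model:
	// X = (u - cx) * Z / fx, Y = (v - cy) * Z / fy, with Z read from the depth map (assumed CV_32F)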
	float d1 = img1_dep.at<float>(int(y1+0.5), int(x1+0.5));
	cv::Point3f p1;
	p1.z = float(d1) / intrinsics.ptr<float>(2)[2];
	p1.x = (x1 - intrinsics.ptr<float>(0)[2]) * p1.z / intrinsics.ptr<float>(0)[0];
	p1.y = (y1 - intrinsics.ptr<float>(1)[2]) * p1.z / intrinsics.ptr<float>(1)[1];

	float d2 = img2_dep.at<float>(int(y2+0.5), int(x2+0.5));
	cv::Point3f p2;
	p2.z = float(d2) / intrinsics.ptr<float>(2)[2];
	p2.x = (x2 - intrinsics.ptr<float>(0)[2]) * p2.z / intrinsics.ptr<float>(0)[0];
	p2.y = (y2 - intrinsics.ptr<float>(1)[2]) * p2.z / intrinsics.ptr<float>(1)[1];

	// the > 0.001 test discards correspondences whose depth is zero (no valid depth measurement)
	if (((std::fabs(p1.x) + std::fabs(p1.y) + std::fabs(p1.z)) > 0.001) &&
		((std::fabs(p2.x) + std::fabs(p2.y) + std::fabs(p2.z)) > 0.001)) {
	  trp1_temp.push_back(Fundamental_p1[key]);
	  trp2_temp.push_back(Fundamental_p2[key]);
	}
  }

  // final homography, estimated on the full-resolution, depth-validated correspondences
  if (trp1_temp.size() < minNumberMatchesAllowed)
	return;

  double reprojectionThreshold_ = 5.0;
  std::vector<unsigned char> inliersMask_(trp1_temp.size());
  homography = findHomography(trp1_temp,
                              trp2_temp,
                              cv::RANSAC,
                              reprojectionThreshold_,
                              inliersMask_,
                              20,
                              0.995);
}
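A minimal usage sketch for the function above, assuming two RGB frames of equal size and single-channel CV_32F depth maps; the file names, the "depth" node name, and the final warp are my own illustration rather than part of the original post:

#include <opencv2/opencv.hpp>

int main() {
  // hypothetical input files
  cv::Mat img1_src = cv::imread("frame1.png");
  cv::Mat img2_src = cv::imread("frame2.png");

  // depth maps stored as single-channel CV_32F Mats, e.g. in OpenCV YAML files
  cv::Mat img1_dep, img2_dep;
  cv::FileStorage("frame1_depth.yml", cv::FileStorage::READ)["depth"] >> img1_dep;
  cv::FileStorage("frame2_depth.yml", cv::FileStorage::READ)["depth"] >> img2_dep;

  cv::Mat H;
  find_homography_orbgms(img1_src, img2_src, img1_dep, img2_dep, H);

  if (!H.empty()) {
    // H maps full-resolution img1 coordinates to img2 coordinates,
    // so img1 can be warped into img2's frame for a visual check
    cv::Mat warped;
    cv::warpPerspective(img1_src, warped, H, img2_src.size());
    cv::imwrite("warped.png", warped);
  }
  return 0;
}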

 
