OpenCV 2.x learning notes: Features2D + Homography to find a known object (source code)

This post covers keypoint detection and image matching with the Features2D module in OpenCV, and looks at how ORB and brute-force matching are used in object detection and visual tracking.
#include <stdio.h>
#include <iostream>
#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/nonfree/nonfree.hpp"

using namespace cv;

void readme();

/** @function main */
int main( int argc, char** argv )
{
  if( argc != 3 )
  { readme(); return -1; }

  Mat img_object = imread( argv[1], CV_LOAD_IMAGE_GRAYSCALE );
  Mat img_scene = imread( argv[2], CV_LOAD_IMAGE_GRAYSCALE );

  if( !img_object.data || !img_scene.data )
  { std::cout<< " --(!) Error reading images " << std::endl; return -1; }

  //-- Step 1: Detect the keypoints using SURF Detector
  int minHessian = 400;

  SurfFeatureDetector detector( minHessian );

  std::vector<KeyPoint> keypoints_object, keypoints_scene;

  detector.detect( img_object, keypoints_object );
  detector.detect( img_scene, keypoints_scene );

  //-- Step 2: Calculate descriptors (feature vectors)
  SurfDescriptorExtractor extractor;

  Mat descriptors_object, descriptors_scene;

  extractor.compute( img_object, keypoints_object, descriptors_object );
  extractor.compute( img_scene, keypoints_scene, descriptors_scene );

  //-- Step 3: Matching descriptor vectors using FLANN matcher
  FlannBasedMatcher matcher;
  std::vector< DMatch > matches;
  matcher.match( descriptors_object, descriptors_scene, matches );

  double max_dist = 0; double min_dist = 100;

  //-- Quick calculation of max and min distances between keypoints
  for( int i = 0; i < descriptors_object.rows; i++ )
  { double dist = matches[i].distance;
    if( dist < min_dist ) min_dist = dist;
    if( dist > max_dist ) max_dist = dist;
  }

  printf("-- Max dist : %f \n", max_dist );
  printf("-- Min dist : %f \n", min_dist );

  //-- Draw only "good" matches (i.e. whose distance is less than 3*min_dist )
  std::vector< DMatch > good_matches;

  for( int i = 0; i < descriptors_object.rows; i++ )
  { if( matches[i].distance < 3*min_dist )
     { good_matches.push_back( matches[i]); }
  }

  Mat img_matches;
  drawMatches( img_object, keypoints_object, img_scene, keypoints_scene,
               good_matches, img_matches, Scalar::all(-1), Scalar::all(-1),
               vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );

  //-- Localize the object
  // Consistency check on the matched keypoints: collect the corresponding point pairs
  std::vector<Point2f> obj;
  std::vector<Point2f> scene;

  for( int i = 0; i < good_matches.size(); i++ )
  {
    //-- Get the keypoints from the good matches
    obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt );
    scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );
  }

  Mat H = findHomography( obj, scene, CV_RANSAC );

  //-- Get the corners from the image_1 ( the object to be "detected" )
  std::vector<Point2f> obj_corners(4);
  obj_corners[0] = cvPoint(0,0); obj_corners[1] = cvPoint( img_object.cols, 0 );
  obj_corners[2] = cvPoint( img_object.cols, img_object.rows ); obj_corners[3] = cvPoint( 0, img_object.rows );
  std::vector<Point2f> scene_corners(4);

  perspectiveTransform( obj_corners, scene_corners, H);

  //-- Draw lines between the corners (the mapped object in the scene - image_2 )
  line( img_matches, scene_corners[0] + Point2f( img_object.cols, 0), scene_corners[1] + Point2f( img_object.cols, 0), Scalar(0, 255, 0), 4 );
  line( img_matches, scene_corners[1] + Point2f( img_object.cols, 0), scene_corners[2] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );
  line( img_matches, scene_corners[2] + Point2f( img_object.cols, 0), scene_corners[3] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );
  line( img_matches, scene_corners[3] + Point2f( img_object.cols, 0), scene_corners[0] + Point2f( img_object.cols, 0), Scalar( 0, 255, 0), 4 );

  //-- Show detected matches
  imshow( "Good Matches & Object detection", img_matches );

  waitKey(0);
  return 0;
}

/** @function readme */
void readme()
{ std::cout << " Usage: ./SURF_descriptor <img1> <img2>" << std::endl; }
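
One caveat worth noting about the listing above: findHomography with RANSAC needs at least four point correspondences, and the 3*min_dist filter does not guarantee that many good matches survive. A minimal guard that could be spliced in just before the findHomography call (a sketch against the same OpenCV 2.x API; not part of the original tutorial code):

  //-- Guard: RANSAC homography estimation needs at least 4 point pairs
  if( good_matches.size() < 4 )
  { std::cout << " --(!) Not enough good matches to estimate a homography" << std::endl; return -1; }

  Mat H = findHomography( obj, scene, CV_RANSAC );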

OpenCV 3.1.0: feature point detection and image matching (features2d, xfeatures2d)

Feature detection and matching are widely used in object detection, visual tracking, 3D reconstruction and related areas, so it is well worth learning how to use the functions in features2d and xfeatures2d.
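
In OpenCV 3.1.0 the API differs from the 2.x listing above: SURF and SIFT live in the xfeatures2d contrib module, ORB stays in features2d, and detectors/extractors are created through factory functions instead of being constructed directly. A minimal sketch (assuming OpenCV 3.1.0 built with opencv_contrib; the function and variable names are illustrative):

#include "opencv2/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/xfeatures2d.hpp"

using namespace cv;

// Detect keypoints and compute descriptors with the 3.x factory-style API.
void detect_and_compute(const Mat& img, std::vector<KeyPoint>& keypoints, Mat& descriptors)
{
    Ptr<Feature2D> orb = ORB::create();                      // binary descriptors (CV_8U)
    // Ptr<Feature2D> surf = xfeatures2d::SURF::create(400); // float descriptors (CV_32F), needs contrib
    orb->detectAndCompute(img, noArray(), keypoints, descriptors);
}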

  Several methods for feature point matching

(1) Used together with ORB; this gives good results

void match_features_knn(Mat& query, Mat& train, vector<DMatch>& matches)
{
    // Build an LSH index over the query descriptors; binary descriptors (e.g. ORB's)
    // are compared with the Hamming distance.
    flann::Index flannIndex(query, flann::LshIndexParams(12, 20, 2), cvflann::FLANN_DIST_HAMMING);
    Mat matchindex(train.rows, 2, CV_32SC1);
    // With FLANN_DIST_HAMMING the distances returned by knnSearch are integers (CV_32S).
    Mat matchdistance(train.rows, 2, CV_32SC1);
    flannIndex.knnSearch(train, matchindex, matchdistance, 2, flann::SearchParams());
    // Lowe's ratio test: keep a match only if the best distance is clearly smaller
    // than the second-best distance.
    for (int i = 0; i < matchdistance.rows; i++)
    {
        if (matchdistance.at<int>(i, 0) < 0.6 * matchdistance.at<int>(i, 1))
        {
            DMatch dmatches(matchindex.at<int>(i, 0), i, (float)matchdistance.at<int>(i, 0));
            matches.push_back(dmatches);
        }
    }
}
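
A usage sketch for the function above (assuming OpenCV 3.x; the image names and ORB parameters are illustrative). ORB produces CV_8U binary descriptors, which is why the LSH index with FLANN_DIST_HAMMING is the appropriate choice here:

Mat img1 = imread("object.jpg", IMREAD_GRAYSCALE);  // placeholder file names
Mat img2 = imread("scene.jpg", IMREAD_GRAYSCALE);

Ptr<ORB> orb = ORB::create(2000);                    // 2000 keypoints, an illustrative value
std::vector<KeyPoint> kp1, kp2;
Mat desc1, desc2;                                    // CV_8U binary descriptors
orb->detectAndCompute(img1, noArray(), kp1, desc1);
orb->detectAndCompute(img2, noArray(), kp2, desc2);

std::vector<DMatch> matches;
match_features_knn(desc1, desc2, matches);           // desc1 = query, desc2 = train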

(2) In my experience this method performs about the same as brute-force matching; the commented-out variant (plain match() plus a 2*min_dist threshold), however, does not work as well

void match_features_FLANN(Mat& query, Mat& train, vector<DMatch>& matches)  
{  
    FlannBasedMatcher matcher;  
      
    /*vector<DMatch> match; 
    matcher.match(query, train, match); 
    double max_dist = 0; 
    double min_dist = 100; 
    for (int i = 0; i < match.size(); i++) 
    { 
        double dist = match[i].distance; 
        if (dist < min_dist) min_dist = dist; 
        if (dist > max_dist) max_dist = dist; 
    } 
    for (int i = 0; i < match.size(); i++) 
    { 
        if (match[i].distance < 2 * min_dist) matches.push_back(match[i]); 
    }*/  
  
    vector<vector<DMatch>> knn_matches;  
    matcher.knnMatch(query, train, knn_matches, 2);  
  
    // Find the smallest match distance among the matches that pass the ratio test
    float min_dist = FLT_MAX;  
    for (int r = 0; r < knn_matches.size(); ++r)  
    {  
        //Ratio Test  
        if (knn_matches[r][0].distance > 0.6*knn_matches[r][1].distance)  
            continue;  
  
        float dist = knn_matches[r][0].distance;  
        if (dist < min_dist) min_dist = dist;  
    }  
  
    matches.clear();  
    for (size_t r = 0; r < knn_matches.size(); ++r)  
    {  
        // Discard matches that fail the ratio test or whose distance is too large
        if (  
            knn_matches[r][0].distance > 0.6*knn_matches[r][1].distance ||  
            knn_matches[r][0].distance > 5 * max(min_dist, 10.0f)  
            )  
            continue;  
  
        // Keep this match
        matches.push_back(knn_matches[r][0]);  
    }  
  
}  
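
The calling pattern is the same as in the ORB example above, except that FlannBasedMatcher's default KD-tree index works on float (CV_32F) descriptors, which is why this variant pairs naturally with SURF or SIFT rather than ORB. A minimal fragment (the Hessian threshold of 400 is illustrative; requires the xfeatures2d contrib module):

Ptr<Feature2D> surf = xfeatures2d::SURF::create(400); // CV_32F float descriptors
surf->detectAndCompute(img1, noArray(), kp1, desc1);
surf->detectAndCompute(img2, noArray(), kp2, desc2);
match_features_FLANN(desc1, desc2, matches);          // FLANN KD-tree expects float data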

(3) Also known as brute-force matching; this approach is most commonly used together with SIFT or SURF

void match_features(Mat& query, Mat& train, vector<DMatch>& matches)  
{  
    vector<vector<DMatch>> knn_matches;  
    BFMatcher matcher(NORM_L2);  
  
    matcher.knnMatch(query, train, knn_matches, 2);  
  
    // Find the smallest match distance among the matches that pass the ratio test
    float min_dist = FLT_MAX;  
    for (int r = 0; r < knn_matches.size(); ++r)  
    {  
        //Ratio Test  
        if (knn_matches[r][0].distance > 0.6*knn_matches[r][1].distance)  
            continue;  
  
        float dist = knn_matches[r][0].distance;  
        if (dist < min_dist) min_dist = dist;  
    }  
  
    matches.clear();  
    for (size_t r = 0; r < knn_matches.size(); ++r)  
    {  
        // Discard matches that fail the ratio test or whose distance is too large
        if (  
            knn_matches[r][0].distance > 0.6*knn_matches[r][1].distance ||  
            knn_matches[r][0].distance > 5 * max(min_dist, 10.0f)  
            )  
            continue;  
  
        // Keep this match
        matches.push_back(knn_matches[r][0]);  
    }
}
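
BFMatcher(NORM_L2) fits float descriptors such as SIFT and SURF. For binary descriptors such as ORB or BRIEF, the matcher would instead be constructed with the Hamming norm (a one-line variant, noted as an aside):

    BFMatcher matcher(NORM_HAMMING);   // brute-force matching for binary descriptors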
Reference: "OpenCV中feature2D学习——SIFT和SURF算法实现目标检测" (feature2D in OpenCV: object detection with the SIFT and SURF algorithms)