// opencv学习 第十章 估算图像之间的投影关系
// 10.2 计算图像对的基础矩阵
// (OpenCV study, Chapter 10: estimating projective relations between images;
//  10.2: computing the fundamental matrix of an image pair)
#include"stdafx.h"
#include<algorithm>
#include<cmath>
#include<iostream>
#include<vector>
#include<opencv2/core/core.hpp>
#include<opencv2/features2d/features2d.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>
#include<opencv2/opencv.hpp>
#include<opencv2/xfeatures2d.hpp>
using namespace std;
using namespace cv;
int main() {
Mat image1 =imread("故宫5.jpg");
Mat image2 =imread("故宫7.jpg");
resize(image1, image1, Size(360, 480));
resize(image2, image2, Size(360, 480));
if (!image1.data || !image2.data)
return 0;
//特征点的向量
vector<KeyPoint> keypoints1, keypoints2;
//构造SURF特征检测器
Ptr<xfeatures2d::SurfFeatureDetector> ptrSURF = xfeatures2d::SurfFeatureDetector::create(2000.0);//阈值
//检测SURF特征
ptrSURF->detect(image1, keypoints1);
ptrSURF->detect(image2, keypoints2);
//提取SURF描述子
Mat descriptors1, descriptors2;
ptrSURF->compute(image1, keypoints1, descriptors1);
ptrSURF->compute(image2, keypoints2, descriptors2);
//构造匹配器
BFMatcher matcher(NORM_L2);
//匹配两幅图像的描述子
vector<DMatch> matches;
matcher.match(descriptors1, descriptors2, matches);
nth_element(matches.begin(), //初始位置
matches.begin() +7, //排序元素的位置
matches.end()); //终止位置
//移除第7位之后所有的元素
matches.erase(matches.begin() + 7, matches.end());
Mat imageMatches;
drawMatches(
image1, keypoints1, //第一幅图像及其特征点
image2, keypoints2, //第二幅图像及其特征点
matches, //匹配结果
imageMatches, //生成的图像
Scalar(255, 255, 255),//直线的颜色
Scalar(0,255,0)); //点的颜色
imshow("match", imageMatches);
//转换KeyPoint类型到Point2f
vector<Point2f>selPoints1, selPoints2;
vector<int>pointIndexes1, pointIndexes2;
/*
for (std::vector<cv::DMatch>::const_iterator it = matches.begin();
it != matches.end(); ++it)
{
// Get the indexes of the selected matched keypoints
pointIndexes1.push_back(it->queryIdx);
pointIndexes2.push_back(it->trainIdx);
}
*/
KeyPoint::convert(keypoints1, selPoints1, pointIndexes1);
KeyPoint::convert(keypoints2, selPoints2, pointIndexes2);
//从7个矩阵中计算F矩阵
Mat fundemental =findFundamentalMat(
Mat(selPoints1), //图1中的点
Mat(selPoints2), //图2中的点
CV_FM_7POINT); //使用7个点的方法
//计算左图中点的极线 绘制在右图中 在右图中绘制对应的极线
vector<Vec3f> lines1;
computeCorrespondEpilines(
Mat(selPoints1), //图像点
1, //图1(也可以是2)
fundemental, //F矩阵
lines1); //一组极线
//对于所有极线
for (vector<Vec3f>::const_iterator it = lines1.begin(); it != lines1.end(); ++it)
{
//绘制第一列与最后一列之间的直线
line(image2,
Point(0, -(*it)[2] / (*it)[1]),
Point(image2.cols, -((*it)[2] + (*it)[0] * image2.cols) / (*it)[1]), Scalar(255, 255, 255));
}
namedWindow("Left Image Epilines");
//imshow("right", image1);
imshow("Left Image Epilines", image2);
waitKey(0);
return 0;
}
// 运行结果: (run result — screenshot omitted)