特征点的提取
//-- 第一步:检测 Oriented FAST 角点位置
Mat img_1 = imread(argv[1], CV_LOAD_IMAGE_COLOR);
Ptr<FeatureDetector> detector = ORB::create();
std::vector<KeyPoint> keypoints_1;
detector->detect(img_1, keypoints_1);
描述子的计算
//-- 第二步:根据角点位置计算 BRIEF 描述子
Mat descriptors_1;
Ptr<DescriptorExtractor> descriptor = ORB::create();
descriptor->compute(img_1, keypoints_1, descriptors_1);
特征点的匹配(根据描述子的距离)
//-- 第三步:对两幅图像中的BRIEF描述子进行匹配,使用 Hamming 距离
vector<DMatch> matches; //作为输出的匹配对
Ptr<DescriptorMatcher> matcher =
DescriptorMatcher::create("BruteForce-Hamming"); //使用 Hamming 距离
matcher->match(descriptors_1, descriptors_2, matches);
两个2D图像之间使用对极约束求解相机运动
把匹配点转换成vector
//-- 把匹配点转换为vector<Point2f>的形式
vector<Point2f> points1;
vector<Point2f> points2;
for (int i = 0; i < (int)matches.size(); i++) {
points1.push_back(keypoints_1[matches[i].queryIdx].pt);
points2.push_back(keypoints_2[matches[i].trainIdx].pt);
}
计算基础矩阵 (本质矩阵以及单应矩阵)
//-- Compute the fundamental matrix with the 8-point algorithm.
// FM_8POINT is the new-style constant, consistent with the RANSAC flag used
// below (the legacy CV_FM_8POINT alias was removed in OpenCV 4).
Mat fundamental_matrix = findFundamentalMat(points1, points2, FM_8POINT);
cout << "fundamental_matrix is " << endl << fundamental_matrix << endl;

//-- Compute the essential matrix
Point2d principal_point(325.1, 249.7);  // camera optical center, TUM dataset calibration value
double focal_length = 521;              // camera focal length, TUM dataset calibration value
Mat essential_matrix =
    findEssentialMat(points1, points2, focal_length, principal_point);
cout << "essential_matrix is " << endl << essential_matrix << endl;

//-- Compute the homography matrix
// (3 px RANSAC reprojection threshold for the inlier test)
Mat homography_matrix = findHomography(points1, points2, RANSAC, 3);
cout << "homography_matrix is " << endl << homography_matrix << endl;
从本质矩阵恢复旋转和平移信息
//-- Recover the rotation and translation from the essential matrix.
// NOTE(review): R and t are declared elsewhere in this file (not visible in
// this excerpt). The translation recovered from an essential matrix is only
// defined up to scale -- presumably t comes back normalized; confirm against
// the OpenCV recoverPose documentation.
recoverPose(essential_matrix, points1, points2, R, t, focal_length,
principal_point);
cout << "R is " << endl << R << endl;
cout << "t is " << endl << t << endl;