The first step is building opencv_contrib, which is done with CMake by compiling OpenCV directly from source; tutorials are available online. Note that if you want everything compiled into a single opencv_world lib, check the BUILD_opencv_world option in CMake.
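A minimal CMake configure sketch, assuming the opencv and opencv_contrib source trees have already been cloned (the paths below are placeholders):

cmake -S opencv -B build ^
  -D OPENCV_EXTRA_MODULES_PATH=&lt;path to opencv_contrib&gt;/modules ^
  -D BUILD_opencv_world=ON
cmake --build build --config Release

The implementation file: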
#include "measure.h"
static int state = 0;//state machine: 0 = waiting for the first clicked point, 1 = waiting for the second, 2 = both points captured
static std::vector<cv::Point> pointList(2);//pixel coordinates of the two clicked points
static std::vector<cv::Vec3f> Point3D(2);//3D coordinates of those points, read from the point cloud
Mymeasure::Mymeasure() {
calibrationFilePath = "calibration_results.xml";
imgSize = cv::Size(640, 480);
isPaused = false;
block_size = 13;
NumDisparities = 4;
img_channels = 1;
UniquenessRatio = 15;
SpeckleWindowSize = 0;
}
void Mymeasure::getDist(int event, int x, int y, int flag, void* userdata) {
if (event == cv::EVENT_LBUTTONDOWN) {
if (state < 2) {
//the first two clicks each record a point
pointList[state] = cv::Point(x, y);
std::cout << "Point " << state + 1 << " saved: " << pointList[state].x << " ; " << pointList[state].y << std::endl;
state++;
}
else {
//a third click resets the measurement
std::cout << "Reset!" << std::endl;
state = 0;
}
}
}
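//OpenCV mouse callbacks are free functions, so this wrapper forwards the event to the member function through the user-data pointer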
void mouseWrapperOnline(int event, int x, int y, int flags, void* param)
{
Mymeasure* ptr = (Mymeasure*)(param);
ptr->getDist(event, x, y, flags, 0);
}
void Mymeasure::measureDistOnline() {
// Open the XML camera-calibration file
cv::FileStorage fs(calibrationFilePath, cv::FileStorage::READ);
// Left camera intrinsic matrix and distortion coefficients
fs["M1"] >> leftCameraMatrix;
fs["D1"] >> leftDistCoeffs;
// Right camera intrinsic matrix and distortion coefficients
fs["M2"] >> rightCameraMatrix;
fs["D2"] >> rightDistCoeffs;
// Rotation matrix and translation vector between the two cameras
fs["R"] >> R;
fs["T"] >> T;
fs.release(); // close the file
//Create the StereoSGBM matcher and set its parameters
static cv::Ptr<cv::StereoSGBM> matcher = cv::StereoSGBM::create();
matcher->setMinDisparity(0);
matcher->setDisp12MaxDiff(1);
matcher->setPreFilterCap(1);
matcher->setSpeckleRange(1);
matcher->setMode(cv::StereoSGBM::MODE_SGBM);
matcher->setBlockSize(block_size);
//P1 and P2 are the SGBM smoothness penalties; P2 must be larger than P1
matcher->setP1(16 * img_channels * block_size * block_size);
matcher->setP2(32 * img_channels * block_size * block_size);
matcher->setNumDisparities(NumDisparities * 16);//64 disparities is usually a good trade-off
matcher->setUniquenessRatio(UniquenessRatio);
matcher->setSpeckleWindowSize(SpeckleWindowSize);
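//Compute the rectification transforms and reprojection matrix Q, then build the undistort/rectify maps for both cameras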
stereoRectify(leftCameraMatrix, leftDistCoeffs, rightCameraMatrix, rightDistCoeffs, imgSize, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
0, imgSize, &validROI1, &validROI2);
cv::initUndistortRectifyMap(leftCameraMatrix, leftDistCoeffs, R1, P1,
imgSize, CV_16SC2, map1Left, map2Left);
cv::initUndistortRectifyMap(rightCameraMatrix, rightDistCoeffs, R2, P2,
imgSize, CV_16SC2, map1Right, map2Right);
cv::namedWindow("Online measure");
cv::namedWindow("Disparity");
cv::namedWindow("WLSDisparity");
cv::setMouseCallback("Online measure", mouseWrapperOnline, this);//pass this so the wrapper can reach the member function
/*cv::createTrackbar("BlockSize", "Disparity", &block_size, 21, barWrapperBS);
cv::createTrackbar("NumDisparities", "Disparity", &NumDisparities, 16, barWrapperNDC);
cv::createTrackbar("UniquenessRatio", "Disparity", &UniquenessRatio, 30, barWrapperUR);
cv::createTrackbar("SpeckleWindowSize", "Disparity", &SpeckleWindowSize, 100, barWrapperSWSC);*/
cv::Mat imgLeftGray, imgRightGray;
cv::Mat copyL, copyR;
//"G:/CodeFiles/Visual Studio/OpenCV/CV_Demo/CV_Demo/outputleft.avi"
cv::VideoCapture cap1(1), cap2(2);//device indices of the left and right cameras; adjust for your setup
while (1) {
if (isPaused == false) {
//running: grab fresh frames from both cameras
cap1.read(imgLeft);
cap2.read(imgRight);
}
else {
//paused: back up the live frames before the first click, then keep
//restoring them once both points are set so the overlay is redrawn
//on a clean frame each iteration
if (state == 0) {
imgLeft.copyTo(copyL);
imgRight.copyTo(copyR);
}
else if (state == 2) {
copyL.copyTo(imgLeft);
copyR.copyTo(imgRight);
}
}
/*matcher->setBlockSize(block_size);
matcher->setP1(8 * img_channels * block_size * block_size);
matcher->setP2(32 * img_channels * block_size * block_size);
matcher->setNumDisparities(NumDisparities * 16);
matcher->setUniquenessRatio(UniquenessRatio);
matcher->setSpeckleWindowSize(SpeckleWindowSize);*/
//Convert the raw frames to grayscale for the matcher
cv::cvtColor(imgLeft, imgLeftGray, cv::COLOR_BGR2GRAY);
cv::cvtColor(imgRight, imgRightGray, cv::COLOR_BGR2GRAY);
cv::Mat imgLeft2;
//rectified color left image; used below as the guide image for the WLS filter
cv::remap(imgLeft, imgLeft2, map1Left, map2Left, cv::INTER_LINEAR);
//rectify both grayscale images
cv::remap(imgLeftGray, imgLeftRec, map1Left, map2Left, cv::INTER_LINEAR);
cv::remap(imgRightGray, imgRightRec, map1Right, map2Right, cv::INTER_LINEAR);
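//A matcher for the right view is needed so the WLS filter can use left-right consistency for its confidence map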
cv::Ptr<cv::StereoMatcher> right_matcher = cv::ximgproc::createRightMatcher(matcher);
cv::Mat right_disp;
cv::Mat filtered_disp;
matcher->compute(imgLeftRec, imgRightRec, disparityMap);
right_matcher->compute(imgRightRec, imgLeftRec, right_disp);
//matcher->compute(imgLeftRec, imgRightRec, disparityMap);
//disparityMap.convertTo(disparityMap, CV_32F, 1.0 / 16); //divide by 16 to get the true disparity values
//cv::Mat disp8 = cv::Mat(disparityMap.rows, disparityMap.cols, CV_8UC1);
//normalize(disparityMap, disp8, 0, 255, cv::NORM_MINMAX, CV_8UC1);
//cv::reprojectImageTo3D(disparityMap, myPointCloud, Q, true);
//myPointCloud *= 16;
//Placing this block here keeps the matcher and the filter from interfering with each other
cv::Ptr<cv::ximgproc::DisparityWLSFilter> wls_filter = cv::ximgproc::createDisparityWLSFilter(matcher);
//parameters must be set before calling filter(), otherwise they have no effect on this frame
wls_filter->setLambda(lambda);
wls_filter->setSigmaColor(sigma);
wls_filter->filter(disparityMap, imgLeft2, filtered_disp, right_disp);
double vis_mult = 1.0;//getDisparityVis expects a double scale factor
cv::Mat WLSdisp8 = cv::Mat(filtered_disp.rows, filtered_disp.cols, CV_8UC1);
cv::ximgproc::getDisparityVis(filtered_disp, WLSdisp8, vis_mult);
//cv::Mat filtered_disparity;
//wls_filter->filter(disparityMap, imgLeftRec, filtered_disparity, imgRightRec);
//cv::Mat dispWLS = cv::Mat(filtered_disparity.rows, filtered_disparity.cols, CV_8UC1);
//normalize(filtered_disparity, dispWLS, 0, 255, cv::NORM_MINMAX, CV_8UC1);
//SGBM disparities are fixed-point, scaled by 16, so the reprojected cloud comes out 16x too small; scale it back up
cv::reprojectImageTo3D(filtered_disp, myPointCloud, Q, true);
myPointCloud *= 16;
if (state == 2) {
Point3D[0] = myPointCloud.at<cv::Vec3f>(pointList[0]);
Point3D[1] = myPointCloud.at<cv::Vec3f>(pointList[1]);
cv::rectangle(imgLeft2, pointList[0], pointList[1], cv::Scalar(255, 0, 0));
//Euclidean distance between the two reprojected 3D points
float distance = sqrt((Point3D[0][0] - Point3D[1][0]) * (Point3D[0][0] - Point3D[1][0]) +
(Point3D[0][1] - Point3D[1][1]) * (Point3D[0][1] - Point3D[1][1]) +
(Point3D[0][2] - Point3D[1][2]) * (Point3D[0][2] - Point3D[1][2]));
int fontFace = cv::FONT_HERSHEY_SIMPLEX;
cv::putText(imgLeft2, std::to_string(distance), pointList[0], fontFace, 0.8, cv::Scalar(0, 0, 255));
}
//show the rectified left view so clicked pixels line up with the point cloud
cv::imshow("Online measure", imgLeft2);
//cv::imshow("Disparity", disp8);
cv::imshow("WLSDisparity", WLSdisp8);
//'p' toggles pause so that points can be clicked on a frozen frame
if (cv::waitKey(1) == 'p') isPaused = !isPaused;
}
}
void Mymeasure::testWLS() {
//Create the StereoSGBM matcher with the same settings as above
static cv::Ptr<cv::StereoSGBM> matcher = cv::StereoSGBM::create();
matcher->setMinDisparity(0);
matcher->setDisp12MaxDiff(1);
matcher->setPreFilterCap(1);
matcher->setSpeckleRange(1);
matcher->setMode(cv::StereoSGBM::MODE_SGBM);
matcher->setBlockSize(block_size);
matcher->setP1(16 * img_channels * block_size * block_size);
matcher->setP2(32 * img_channels * block_size * block_size);
matcher->setNumDisparities(NumDisparities * 16);//64 disparities is usually a good trade-off
matcher->setUniquenessRatio(UniquenessRatio);
matcher->setSpeckleWindowSize(SpeckleWindowSize);
cv::Mat imgLeftGray, imgRightGray;
imgLeft = cv::imread("ambush_5_left.jpg");
imgRight = cv::imread("ambush_5_right.jpg");
CV_Assert(!imgLeft.empty() && !imgRight.empty());//make sure the test images were found
cv::cvtColor(imgLeft, imgLeftGray, cv::COLOR_BGR2GRAY);
cv::cvtColor(imgRight, imgRightGray, cv::COLOR_BGR2GRAY);
cv::Ptr<cv::StereoMatcher> right_matcher = cv::ximgproc::createRightMatcher(matcher);
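//right-view matcher again, so the WLS filter can run with a confidence map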
cv::Mat right_disp;
cv::Mat filtered_disp;
matcher->compute(imgLeftGray, imgRightGray, disparityMap);
right_matcher->compute(imgRightGray, imgLeftGray, right_disp);
//Placing this block here keeps the matcher and the filter from interfering with each other
cv::Ptr<cv::ximgproc::DisparityWLSFilter> wls_filter = cv::ximgproc::createDisparityWLSFilter(matcher);
wls_filter->filter(disparityMap, imgLeft, filtered_disp, right_disp);
double vis_mult = 1.0;//getDisparityVis expects a double scale factor
cv::Mat disp8 = cv::Mat(disparityMap.rows, disparityMap.cols, CV_8UC1);
cv::ximgproc::getDisparityVis(disparityMap, disp8, vis_mult);
//normalize(disparityMap, disp8, 0, 255, cv::NORM_MINMAX, CV_8UC1);
cv::Mat WLSdisp8 = cv::Mat(filtered_disp.rows, filtered_disp.cols, CV_8UC1);
cv::ximgproc::getDisparityVis(filtered_disp, WLSdisp8, vis_mult);
cv::imshow("WLSDisparity", WLSdisp8);
cv::imshow("Disparity", disp8);
cv::waitKey(0);
}
int main() {
Mymeasure my;
my.measureDistOnline();
//my.testWLS();
system("pause");
return 0;
}
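The class declaration, measure.h: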
#pragma once
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/ximgproc.hpp>
class Mymeasure {
public:
Mymeasure();
void getDist(int event, int x, int y, int flag, void* userdata);
void measureDistOnline();
void testWLS();
private:
bool isPaused;
cv::Size imgSize; //image size; typically 640x480
cv::Mat leftCameraMatrix, leftDistCoeffs, rightCameraMatrix, rightDistCoeffs;
std::vector<cv::Mat> leftRvecs, leftTvecs, rightRvecs, rightTvecs;
cv::Mat R, T, E, F;
std::string calibrationFilePath;
cv::Mat imgLeft, imgRight, imgBackup; //left/right frames used for 3D reconstruction
cv::Mat R1, R2, P1, P2, Q; //rectification transforms and reprojection matrix
cv::Rect validROI1, validROI2;
cv::Mat map1Left, map2Left, map1Right, map2Right;
cv::Mat imgLeftRec, imgRightRec; //rectified left/right images
cv::Mat disparityMap;
cv::Mat myPointCloud; //point cloud in OpenCV's Mat format (one Vec3f per pixel)
int block_size;
int NumDisparities;
int img_channels;
int UniquenessRatio;
int SpeckleWindowSize;
double lambda = 8000.0; // WLS smoothness strength
double sigma = 1.5; // WLS color-similarity weight
int num_iterations = 3; // iteration count (currently unused)
};