使用 C++ OpenCV 接口去除鱼眼图像的畸变,包含 Mei 模型和 KB4 模型。主要参考 CSDN 博客《opencv实现图像去畸变——几种实现方式(含完整代码)&&效果对比图&&详细参数说明&&核心参数变化对应变化效果图&&常见问题》(关键词:cv::fisheye::initUndistortRectifyMap)。
但该博主似乎并没有很好地区分 KB4 和 Mei 模型,混用了两种模型的畸变参数,因而得出"KB4 效果不好"的结论。实际上 KB4 模型是鱼眼去畸变效果最好的模型之一,唯一的劣势在于去畸变(undistortion)不存在解析表达式,只能迭代求解。
代码如下:
#include<iostream>
#include<opencv2/opencv.hpp>
#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/ccalib/omnidir.hpp>
int main()
{
#if 0
int scale = 2;
cv::Size imageSize(848, 800);
cv::Mat map1, map2;
cv::Mat K = cv::Mat::eye(3, 3, CV_32FC1); // 内参矩阵
K.at<float>(0, 0) = 286.047302246094;
K.at<float>(1, 1) = 286.164611816406;
K.at<float>(0, 2) = 413.002807617188;
K.at<float>(1, 2) = 390.916687011719;
cv::Mat D = cv::Mat::zeros(1, 4, CV_32FC1); // 畸变系数矩阵 顺序是[k1, k2, p1, p2]
D.at<float>(0, 0) = -0.00725533021613955;
D.at<float>(0, 1) = 0.0416268110275269;
D.at<float>(0, 2) = -0.038124680519104;
D.at<float>(0, 3) = 0.00613553822040558;
cv::Mat newCameraMatrix = cv::Mat::eye(3, 3, CV_64FC1);
// 相机焦距, 与世界尺度相关
newCameraMatrix.at<double>(0, 0) = 286.047302246094 / scale;
newCameraMatrix.at<double>(1, 1) = 286.164611816406 / scale;
// 主点位置,与图像中心点相关
newCameraMatrix.at<double>(0, 2) = K.at<float>(0, 2) / imageSize.width * imageSize.width;//static_cast<double>(imageSize.width / 2) ;
newCameraMatrix.at<double>(1, 2) = K.at<float>(1, 2) / imageSize.height * imageSize.height;//static_cast<double>(imageSize.height / 2);
//1. 效果不好、不是很明显
//cv::initUndistortRectifyMap(K, D, cv::Mat(), NewCameraMatrix, imageSize, CV_16SC2, map1, map2);
//2. 效果不是很明显
cv::fisheye::initUndistortRectifyMap(K, D, cv::Mat(), newCameraMatrix, imageSize, CV_16SC2, map1, map2);
//3. 效果不错。对应扩展库是opencv_contrib
//cv::omnidir::initUndistortRectifyMap(K, D, xi, cv::Mat::eye(3, 3, CV_64FC1), newCameraMatrix, imageSize, CV_32FC1, g_map1, g_map2, cv::omnidir::RECTIFY_PERSPECTIVE);
cv::Mat bgrImg = cv::imread("test.png");
cv::Mat distortImage;
cv::remap(bgrImg, distortImage, map1, map2, cv::INTER_LINEAR);
cv::imshow("undistored", distortImage);
cv::waitKey(0);
#else
int scale = 8;
cv::Size imageSize(848, 800);
cv::Mat map1, map2;
cv::Mat K = cv::Mat::eye(3, 3, CV_32FC1); // 内参矩阵
K.at<float>(0, 0) = 8.12306004770254e+02;
K.at<float>(1, 1) = 8.108531654989733e+02;
K.at<float>(0, 2) = 4.1268019297647686e+02;
K.at<float>(1, 2) = 3.905885032857454e+02;
cv::Mat D = cv::Mat::zeros(1, 4, CV_32FC1); // 畸变系数矩阵 顺序是[k1, k2, p1, p2]
D.at<float>(0, 0) = 6.617923063969984e-03;
D.at<float>(0, 1) = -2.0971112858027338e-01;
D.at<float>(0, 2) = 8.490272080796737e-04;
D.at<float>(0, 3) = -1.2129804363276184e-03;
cv::Mat xi = cv::Mat::zeros(1, 1, CV_32FC1);
xi.at<float>(0, 0) = 1.7805747674301189e+00;
cv::Mat newCameraMatrix = cv::Mat::eye(3, 3, CV_64FC1);
// 相机焦距, 与世界尺度相关
newCameraMatrix.at<double>(0, 0) = 286.164611816406 / scale;
newCameraMatrix.at<double>(1, 1) = 286.164611816406 / scale;
// 主点位置,与图像中心点相关
newCameraMatrix.at<double>(0, 2) = K.at<float>(0, 2) / imageSize.width * imageSize.width;//static_cast<double>(imageSize.width / 2) ;
newCameraMatrix.at<double>(1, 2) = K.at<float>(1, 2) / imageSize.height * imageSize.height;//static_cast<double>(imageSize.height / 2);
//1. 效果不好、不是很明显
//cv::initUndistortRectifyMap(K, D, cv::Mat(), NewCameraMatrix, imageSize, CV_16SC2, map1, map2);
//2. 效果不是很明显
//cv::fisheye::initUndistortRectifyMap(K, D, cv::Mat(), newCameraMatrix, imageSize, CV_16SC2, map1, map2);
//3. 效果不错。对应扩展库是opencv_contrib
cv::omnidir::initUndistortRectifyMap(K, D, xi, cv::Mat::eye(3, 3, CV_64FC1), newCameraMatrix, imageSize, CV_32FC1, map1, map2, cv::omnidir::RECTIFY_PERSPECTIVE);
cv::Mat bgrImg = cv::imread("test.png");
cv::Mat distortImage;
cv::remap(bgrImg, distortImage, map1, map2, cv::INTER_LINEAR);
cv::imshow("undistored", distortImage);
cv::waitKey(0);
#endif
return 0;
}