A Detailed Explanation of the Fisheye Camera Model in OpenCV

This post walks through a small program that projects 3D points into a fisheye image using ROS and OpenCV. It sets up the camera intrinsic and extrinsic parameters, converts the rotation matrix to a rotation vector with Rodrigues, projects a few 3D points onto the image while applying the fisheye distortion model, and finally compares the pixels produced by cv::fisheye::projectPoints with those from a hand-written projection function.
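For reference, the projection model implemented by both functions below (the equidistant fisheye model documented for OpenCV's cv::fisheye module) can be summarized as follows, where $(k_1, k_2, k_3, k_4)$ are the distortion coefficients and $\alpha$ is the skew term (0 in this example). A world point $X_w$ is first moved into the camera frame, $(X, Y, Z) = R\,X_w + t$, then:

$$x = \frac{X}{Z}, \qquad y = \frac{Y}{Z}, \qquad r = \sqrt{x^2 + y^2}, \qquad \theta = \arctan(r)$$

$$\theta_d = \theta\,(1 + k_1\theta^2 + k_2\theta^4 + k_3\theta^6 + k_4\theta^8)$$

$$x' = \frac{\theta_d}{r}\,x, \qquad y' = \frac{\theta_d}{r}\,y, \qquad u = f_x\,(x' + \alpha y') + c_x, \qquad v = f_y\,y' + c_y$$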

#include <ros/ros.h>
#include <visualization_msgs/Marker.h>
#include <time.h>
#include <unistd.h>
#include <iostream>
#include <eigen3/Eigen/Core>
#include <eigen3/Eigen/Geometry>
#include <opencv2/opencv.hpp>
#include <opencv2/calib3d.hpp>
#include <tf/transform_broadcaster.h>

using namespace cv;
using namespace std;
using namespace Eigen;

// Re-implementation of the projection performed inside cv::fisheye::projectPoints
// (essentially the OpenCV source with the Jacobian computation removed), so that
// intermediate values such as the normalized coordinates, r, theta and theta_d can be printed.
void opencv_project_points(InputArray objectPoints, OutputArray imagePoints, InputArray _rvec,
        InputArray _tvec, InputArray _K, InputArray _D, double alpha)
{
    // will support only 3-channel data now for points
    CV_Assert(objectPoints.type() == CV_32FC3 || objectPoints.type() == CV_64FC3);
    imagePoints.create(objectPoints.size(), CV_MAKETYPE(objectPoints.depth(), 2));
    size_t n = objectPoints.total();

    CV_Assert(_rvec.total() * _rvec.channels() == 3 && (_rvec.depth() == CV_32F || _rvec.depth() == CV_64F));
    CV_Assert(_tvec.total() * _tvec.channels() == 3 && (_tvec.depth() == CV_32F || _tvec.depth() == CV_64F));
    CV_Assert(_tvec.getMat().isContinuous() && _rvec.getMat().isContinuous());

    Vec3d om = _rvec.depth() == CV_32F ? (Vec3d)*_rvec.getMat().ptr<Vec3f>() : *_rvec.getMat().ptr<Vec3d>();
    Vec3d T  = _tvec.depth() == CV_32F ? (Vec3d)*_tvec.getMat().ptr<Vec3f>() : *_tvec.getMat().ptr<Vec3d>();

    CV_Assert(_K.size() == Size(3,3) && (_K.type() == CV_32F || _K.type() == CV_64F) && _D.type() == _K.type() && _D.total() == 4);

    cv::Vec2d f, c;
    if (_K.depth() == CV_32F)
    {
        Matx33f K = _K.getMat();
        f = Vec2f(K(0, 0), K(1, 1));
        c = Vec2f(K(0, 2), K(1, 2));
    }
    else
    {
        Matx33d K = _K.getMat();
        f = Vec2d(K(0, 0), K(1, 1));
        c = Vec2d(K(0, 2), K(1, 2));
    }

    Vec4d k = _D.depth() == CV_32F ? (Vec4d)*_D.getMat().ptr<Vec4f>(): *_D.getMat().ptr<Vec4d>();


    Matx33d R;
    Matx<double, 3, 9> dRdom;
    Rodrigues(om, R, dRdom);   // dRdom (the Jacobian) is not used here; kept from the OpenCV source
    cv::Affine3d aff(om, T);   // rigid transform built directly from the rotation vector and translation

    const Vec3f* Xf = objectPoints.getMat().ptr<Vec3f>();
    const Vec3d* Xd = objectPoints.getMat().ptr<Vec3d>();
    Vec2f *xpf = imagePoints.getMat().ptr<Vec2f>();
    Vec2d *xpd = imagePoints.getMat().ptr<Vec2d>();

    for(size_t i = 0; i < n; ++i)
    {
        Vec3d Xi = objectPoints.depth() == CV_32F ? (Vec3d)Xf[i] : Xd[i];
        Vec3d Y = aff*Xi;

        Vec2d x(Y[0]/Y[2], Y[1]/Y[2]);
         cout<<"a="<<x(0)<<"b="<<x(1)<<endl;
        double r2 = x.dot(x);
        double r = std::sqrt(r2);

        cout<<"r"<<r<<endl;
        // Angle of the incoming ray:
        double theta = atan(r);

        double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
                theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;

        double theta_d = theta + k[0]*theta3 + k[1]*theta5 + k[2]*theta7 + k[3]*theta9;

        double inv_r = r > 1e-8 ? 1.0/r : 1;
        double cdist = r > 1e-8 ? theta_d * inv_r : 1;

        Vec2d xd1 = x * cdist;
        Vec2d xd3(xd1[0] + alpha*xd1[1], xd1[1]);
        Vec2d final_point(xd3[0] * f[0] + c[0], xd3[1] * f[1] + c[1]);

        if (objectPoints.depth() == CV_32F)
            xpf[i] = final_point;
        else
            xpd[i] = final_point;

    }
}

// Hand-written projection of a single 3D point with the same equidistant fisheye model,
// spelled out step by step (camera transform, normalization, distortion, pixel mapping)
// so every intermediate value can be printed and compared with OpenCV's result.
void fish_project_point(Point3d pt3d, Mat R, Mat t, Mat K, Mat coff, Point2d& pt2d)
{
    //1. Transform into Camera Coords.
    Vec3d p3Dw (pt3d);
    cv::Affine3d aff(R, t);
    //cv::Mat p3Dc = R*p3Dw+t;
    Vec3d p3Dc = aff*p3Dw;
    cout<<"p3Dc"<<p3Dc<<endl;
    // 2.Project into norm plan
    double invz = 1/p3Dc(2);
    double a = p3Dc(0)*invz;
    double b = p3Dc(1)*invz;
    //cv::Mat p3Dw (pt3d);
    //cv::Mat p3Dc = R*p3Dw+t;
    //const double invz = 1/p3Dc.at<double>(2);
    //const double a = p3Dc.at<double>(0)*invz;
    //const double b = p3Dc.at<double>(1)*invz;
    cout<<"project norm plan x="<<a<<"y="<<b<<endl;

    // 3. Apply the fisheye (equidistant) distortion to (a, b)
    const double k1=coff.at<double>(0,0);
    const double k2=coff.at<double>(0,1);
    const double k3=coff.at<double>(0,2);
    const double k4=coff.at<double>(0,3);

    //cout<<"k1"<<k1<<" "<<k2<<" "<<k3<<" "<<k4<<endl;
    double r2=a*a+b*b;
    double r=std::sqrt(r2);
    cout<<"r"<<r<<endl;
    double theta = atan(r);

    double theta2 = theta*theta, theta3 = theta2*theta, theta4 = theta2*theta2, theta5 = theta4*theta,
            theta6 = theta3*theta3, theta7 = theta6*theta, theta8 = theta4*theta4, theta9 = theta8*theta;

    double theta_d = theta + k1*theta3 + k2*theta5 + k3*theta7 + k4*theta9;

    double inv_r = r > 1e-8 ? 1.0/r : 1;
    double cdist = r > 1e-8 ? theta_d * inv_r : 1;

    double xd=a*cdist;
    double yd=b*cdist;
    //double xd=theta_d*a*inv_r;
	//double yd=theta_d*b*inv_r;
    double fx=K.at<double>(0,0);
    double fy=K.at<double>(1,1);
    double cx=K.at<double>(0,2);
    double cy=K.at<double>(1,2);
    //cout<<"fx"<<fx<<" "<<fy<<" "<<cx<<" "<<cy<<endl;
    double u = fx*xd+cx;
    double v = fy*yd+cy;
    pt2d.x=u;
    pt2d.y=v;
    return ;
}

int main( int argc, char** argv )
{
  ros::init(argc, argv, "projecttest");
  double len = 4;   // cube edge length; the camera sits at height len/2
  // 1.1 intrinsic parameters: focal length f, image size w x h
  double f = 200, w = 1000, h = 1000;
  cv::Mat K = (Mat_<double>(3, 3) <<
               f, 0, w/2,
               0, f, h/2,
               0, 0, 1);
  cv::Mat coff = (Mat_<double>(1, 4) << 0.02, 0.1, 0.0001, 0.0001);   // fisheye distortion coefficients k1..k4
  // 1.2 extrinsic parameters: camera pose (R12, t12) expressed in the world frame
  Mat cv_R12=(cv::Mat_<double>(3, 3) <<
                   0,0,1,
                   -1,0,0,
                   0,-1,0);
  cv::Mat cv_t12 = (cv::Mat_<double>(3, 1) << 0,0,len/2);
  Eigen::Matrix3d e_R12;
  e_R12<<0,0,1,
          -1,0,0,
          0,-1,0;

  //t
  Eigen::Vector3d e_t12={0,0,len/2};

  // Invert the pose: (R21, t21) = (R12^-1, -R12^-1 * t12) maps world points into the
  // camera frame, which is the form the projection functions expect.
  Mat cv_R21 = cv_R12.inv();
  Mat cv_t21 = -cv_R12.inv()*cv_t12;
  cout << "cv_t21=" << cv_t21 << endl;
  Eigen::Matrix3d e_R21 = e_R12.inverse();
  Eigen::Vector3d e_t21 = -e_R12.inverse()*e_t12;
  cout << "e_t21=" << e_t21 << endl;


  // Cube in front of the camera (x > 0); only the four corners A, B, C, D of one face are used below
  Point3d cube_center={len,0,len/2};
  Point3d A={cube_center.x-len/2,cube_center.y+len/2,cube_center.z-len/2};
  Point3d B={cube_center.x-len/2,cube_center.y-len/2,cube_center.z-len/2};

  Point3d C={cube_center.x+len/2,cube_center.y-len/2,cube_center.z-len/2};
  Point3d D={cube_center.x+len/2,cube_center.y+len/2,cube_center.z-len/2};


  cout<<"ABCD"<<A<< B<<C<<D<<endl;
  vector<Point3d> vp3d;
  vp3d.push_back(A);
  vp3d.push_back(B);
  vp3d.push_back(C);
  vp3d.push_back(D);
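  // Worked example (computed by hand from the matrices above): corner A = (2, 2, 0)
  // in world coordinates maps to p3Dc = R21*A + t21 = (-2, 2, 2) in the camera frame,
  // so its normalized coordinates are (a, b) = (-1, 1) and r = sqrt(2) before distortion.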

  // 3. Project the 3D points into the image
  vector<Point2d> vpt2d;
  vector<Point2d> fishPrj2d;
  cv::Mat rvec21;
  Rodrigues(cv_R21,rvec21);

  cout<<"rvec21"<<rvec21<<endl;
  //opencv implement
  fisheye::projectPoints(vp3d,vpt2d,rvec21,cv_t21,K,coff);
  //opencv_project_points(vp3d,vpt2d, rvec21,cv_t21,K,coff,0);
  //my implement
  for(size_t i=0;i<vp3d.size();i++)
  {
      Point3d p3d=vp3d[i];
      Point2d p2d;
      fish_project_point(p3d,cv_R21,cv_t21,K,coff,p2d);
      fishPrj2d.push_back(p2d);
  }

  for (size_t i = 0; i < vpt2d.size(); i++)
  {
      cout<<"prj x="<<vpt2d[i].x<<" y="<<vpt2d[i].y<<endl;
      cout<<"my prj x="<<fishPrj2d[i].x<<" y="<<fishPrj2d[i].y<<endl;
  }

  return 0;
}
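As an extra sanity check (not part of the original program), the distortion and pixel-mapping steps can also be exercised through cv::fisheye::distortPoints, which takes points already on the normalized image plane (x = X/Z, y = Y/Z) and applies the same theta_d mapping followed by the pinhole mapping. A minimal sketch, assuming the same K and coff as above and the normalized coordinates of corner A from the worked example:

#include <opencv2/opencv.hpp>
#include <opencv2/calib3d.hpp>
#include <iostream>

int main()
{
    // Same intrinsics and distortion coefficients as in the program above.
    cv::Mat K = (cv::Mat_<double>(3, 3) << 200, 0, 500, 0, 200, 500, 0, 0, 1);
    cv::Mat coff = (cv::Mat_<double>(1, 4) << 0.02, 0.1, 0.0001, 0.0001);

    // Point already on the normalized image plane; in the program above these are
    // the values printed as "project norm plane x=... y=...".
    std::vector<cv::Point2d> normPts = { cv::Point2d(-1.0, 1.0) };
    std::vector<cv::Point2d> distPts;

    // Applies theta_d = theta*(1 + k1*theta^2 + k2*theta^4 + k3*theta^6 + k4*theta^8)
    // and then u = fx*x' + cx, v = fy*y' + cy, i.e. steps 3 and 4 of fish_project_point.
    cv::fisheye::distortPoints(normPts, distPts, K, coff);

    std::cout << "distorted pixel: " << distPts[0] << std::endl;
    return 0;
}

Since (-1, 1) is what corner A becomes on the normalized plane, the pixel printed here should match what both cv::fisheye::projectPoints and fish_project_point produce for A.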

 

 
