一、雷达和相机标定
参考:激光雷达和相机联合标定
二、准备工作
数据集:M2DGR
没找到 M2DGR 数据集的相机-雷达联合标定文件;平移量按图中所示取值,旋转按两坐标系轴向关系取 90 度的组合(推导见下);
坐标转换:
雷达坐标系:x前,y左,z上
相机坐标系:z前,x右,y下
lidar->camera:
(1)绕x轴逆时针旋转90度:
(2)绕y轴顺时针旋转90度:
(3)绕z轴旋转0度;
旋转矩阵:
R = Rz · Ry · Rx = [ 0 -1 0 ; 0 0 -1 ; 1 0 0 ](3×3 矩阵按行排列:第一行 0 -1 0,第二行 0 0 -1,第三行 1 0 0)
三、雷达投影到相机图像
projection.h
#include <ros/ros.h>
#include <ros/package.h>
#include <fstream>
#include <iostream>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <opencv2/opencv.hpp>
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
// Runtime parameters loaded from cfg/initial_params.txt by projection::initParams().
// NOTE(review): defining the global instance `i_params` in a header violates the
// ODR if this header is ever included from more than one translation unit;
// consider `inline` (C++17) or moving the definition into a .cpp file.
struct initial_parameters
{
  std::string camera_topic; // image topic to subscribe to
  std::string lidar_topic;  // point-cloud topic to subscribe to
  cv::Mat camtocam_mat;     // 4x4 camera-to-camera rectification matrix
                            // (BUG FIX: initParams() stores into this field,
                            // but the original struct never declared it)
  cv::Mat cameraIn;         // 4x4 homogeneous camera intrinsic/projection matrix
  cv::Mat RT;               // 4x4 lidar -> camera extrinsic transform
} i_params;
projection.cpp
#include "projection.h"
class projection
{
private:
cv::Mat image_proj;
image_transport::Publisher image_publisher;
float maxX = 25.0, maxY = 6.0, minZ = -1.4;
public:
projection()
{
ros::NodeHandle nh("~");
initParams();
message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, i_params.camera_topic, 30);
message_filters::Subscriber<sensor_msgs::PointCloud2> lidar_sub(nh, i_params.lidar_topic, 10);
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::PointCloud2> MySyncPolicy;
message_filters::Synchronizer<MySyncPolicy> sync(MySyncPolicy(5), image_sub, lidar_sub);
sync.registerCallback(boost::bind(&projection::projection_callback, this, _1, _2));
image_transport::ImageTransport imageTranspors(nh);
image_publisher = imageTranspors.advertise("/project_pc_image", 20);
ros::spin();
}
void initParams()
{
std::string pkg_loc = ros::package::getPath("projection");
std::ifstream infile(pkg_loc + "/cfg/initial_params.txt");
infile >> i_params.camera_topic;
infile >> i_params.lidar_topic;
double_t camtocam[12];
double_t cameraIn[16];
double_t RT[16];
for (int i = 0; i < 16; i++)
{
infile >> camtocam[i];
}
cv::Mat(4, 4, 6, &camtocam).copyTo(i_params.camtocam_mat);
for (int i = 0; i < 12; i++)
{
infile >> cameraIn[i];
}
cv::Mat(4, 4, 6, &cameraIn).copyTo(i_params.cameraIn);
for (int i = 0; i < 16; i++)
{
infile >> RT[i];
}
cv::Mat(4, 4, 6, &RT).copyTo(i_params.RT);
std::cout << i_params.RT << std::endl;
}
void projection_callback(const sensor_msgs::ImageConstPtr &img_msg, const sensor_msgs::PointCloud2ConstPtr &lidar_msg)
{
cv_bridge::CvImagePtr cv_ptr;
try
{
cv_ptr = cv_bridge::toCvCopy(img_msg, "bgr8");
}
catch(cv_bridge::Exception &e)
{
return;
}
cv::Mat raw_img = cv_ptr->image;
pcl::PointCloud<pcl::PointXYZI>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZI>);
pcl::fromROSMsg(*lidar_msg, *cloud);
cv::Mat visImg = raw_img.clone();
cv::Mat overlay = visImg.clone();
std::cout << "Get lidar and image data" << std::endl;
cv::Mat X(4,1, cv::DataType<double>::type);
cv::Mat Y(3,1, cv::DataType<double>::type);
pcl::PointCloud<pcl::PointXYZI>::const_iterator it;
for(it = cloud->points.begin(); it != cloud->points.end(); it++)
{
if(it->x > maxX || it->x < 0.0 || abs(it->y) > maxY || it->z < minZ)
continue;
X.at<double>(0,0) = it->x;
X.at<double>(0,1) = it->y;
X.at<double>(0,2) = it->z;
X.at<double>(0,3) = 1;
Y = i_params.cameraIn * i_params.RT * X;
cv::Point pt;
pt.x = Y.at<double>(0,0) / Y.at<double>(0,2);
pt.y = Y.at<double>(1,0) / Y.at<double>(0,2);
float val = it->x;
float maxVal = 20.0;
int red = std::min(255, (int)(255 * abs((val - maxVal) / maxVal)));
int green = std::min(255,(int)(255 * (1 - abs((val - maxVal) / maxVal))));
cv::circle(overlay, pt, 1, cv::Scalar(0, green, red), -1);
}
ros::Time time = ros::Time::now();
cv_ptr->encoding = "bgr8";
cv_ptr->header.stamp = time;
cv_ptr->header.frame_id = "veloydne";
cv_ptr->image = overlay;
image_publisher.publish(cv_ptr->toImageMsg());
std::cout<<"project picture is published!"<<std::endl;
}
};
// Entry point: register the node with the ROS master and hand control to the
// projection class, whose constructor runs the spin loop until shutdown.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "projection");
  projection node; // blocks inside the constructor (ros::spin)
  return 0;
}
效果: