提取图片
- 查看.bag文件的信息
rosbag info xxx.bag
- /home/stone/Desktop/data/2021-06-09-15-16-30.bag （此处填写要提取的 .bag 文件路径）
- /detector_img （此处填写要提取的图像 topic）
<launch>
<node pkg="rosbag" type="play" name="rosbag" args=" /home/stone/Desktop/data/2021-06-09-15-16-30.bag"/>
<node name="extract" pkg="image_view" type="extract_images" respawn="false" output="screen" cwd="ROS_HOME">
<remap from="image" to="/detector_img"/>
</node>
</launch>
- 运行
roscore
roslaunch export.launch
cd ~/.ros
mv ~/.ros/*.jpg /home/stone/Desktop/data/output
# 储存图片的路径
提取点云
rosrun pcl_ros bag_to_pcd /home/stone/Desktop/data/2021-06-09-15-16-30.bag /obstacle_pcl /home/stone/Desktop/data/output-obstacle
- topic 是 /obstacle_pcl
- 保存路径是 /home/stone/Desktop/data/output-obstacle
点云可视化
pcl_viewer xxx.pcd
rosws project
roslaunch ilcc2 pcd2image.launch
<?xml version="1.0"?>
<launch>
<node pkg="ilcc2" type="pcd2image" name="pcd2image" output="screen">
<param name="distance_valid" value="80" />
<param name="extrinsic_file" value="/config/pointgrey1.bin" />
<param name="yaml_path" value= "cam_matrix_b.yaml" />
<param name="image_topic" value= "/a_image" />
<param name="lidar_topic" value= "/livox/lidar" />
</node>
</launch>
rosbag中的点云提取为png深度图
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
// void cv::undistort
// ( InputArray src, // 原始图像
// OutputArray dst, // 矫正图像
// InputArray cameraMatrix, // 原相机内参矩阵
// InputArray distCoeffs, // 相机畸变参数
// InputArray newCameraMatrix = noArray() // 新相机内参矩阵
// )
/// Undistort the camera image and draw every LiDAR point on it, colored
/// by intensity. The fused image is shown and saved as process_data/abc.jpg.
/// @param image  raw BGR camera frame
/// @param cloud  LiDAR cloud in the sensor frame (XYZ + intensity)
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  // Undistort with the intrinsics loaded at startup; passing camK as the
  // new camera matrix keeps the original projection.
  cv::Mat rectifyImage;
  cv::undistort(image, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // Fixed normalization range for the intensity -> hue mapping.
  // (The previous per-cloud min/max scan and the `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  int counter = 0;  // number of points that projected into the image
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    // spaceToPlane returns true only for points that project into the
    // image and pass the distance_valid cut-off.
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      unsigned char r, g, b;
      // Normalize intensity into [0, 255] for the hue channel; clamp so
      // intensities above inten_high do not overflow the hue range.
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      if (h > 255) h = 255;
      if (h < 0)   h = 0;
      image_corners_est->HSVtoRGB(h, 100, 100, &r, &g, &b);
      // NOTE(review): the image is BGR ("bgr8") but the scalar is built as
      // (r,g,b), so red/blue may be swapped in the rendering — confirm.
      // Radius 0.6 truncates to 0; thickness 2 still draws a small dot.
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }

  cv::imshow("img_liar_point", rectifyImage);  // fused (undistorted) image
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+"abc"+".jpg", rectifyImage);
  cv::waitKey(5);
}
/// Synchronized LiDAR+camera callback: converts both messages to their
/// OpenCV/PCL representations and hands them to processData().
void callback_LidarCam(const sensor_msgs::PointCloud2ConstPtr& msg_pc,
const sensor_msgs::ImageConstPtr& msg_img)
{
  // Log the timestamps of the received pair.
  ROS_INFO_STREAM("Velodyne scan received at " << msg_pc->header.stamp.toSec());
  ROS_INFO_STREAM("image received at " << msg_img->header.stamp.toSec());

  // ROS image -> OpenCV BGR matrix.
  cv::Mat bgr_frame = cv_bridge::toCvCopy(msg_img, "bgr8")->image;

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> lidar_cloud;
  pcl::fromROSMsg(*msg_pc, lidar_cloud);

  processData(bgr_frame, lidar_cloud.makeShared());
}
int number=0;
/// LiDAR callback: projects the cloud into a fixed 640x360 8-bit depth
/// image (grey value = normalized intensity) and saves it as
/// process_data/&lt;stamp&gt;.png.
void chatterCallback(const sensor_msgs::PointCloud2& msg_pc)
{
  ROS_INFO("here");
  number = number + 1;  // global frame counter (kept for parity with other nodes)

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> input_cloud;
  pcl::fromROSMsg(msg_pc, input_cloud);

  // Fixed normalization range for intensity -> grey value.
  // (The previous per-cloud min/max scan and `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  // Depth image initialized to black; zeros() replaces the manual
  // row-by-row clearing loop.
  cv::Mat greyimage = cv::Mat::zeros(360, 640, CV_8UC1);

  int counter = 0;
  for (unsigned int index = 0; index < input_cloud.size(); index++)
  {
    const pcl::PointXYZI &pt = input_cloud.points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      // Guard against out-of-bounds writes: spaceToPlane validates against
      // the camera model, whose resolution may differ from 640x360.
      if (x < 0 || x >= greyimage.cols || y < 0 || y >= greyimage.rows)
        continue;
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      // saturate_cast clamps to [0,255] instead of wrapping around for
      // intensities above inten_high.
      greyimage.ptr<uchar>(y)[x] = cv::saturate_cast<uchar>(h);
      counter++;
    }
  }

  // Name the file by the scan timestamp so clouds and images can be paired.
  double secs = msg_pc.header.stamp.toSec();
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+std::to_string(secs)+".png", greyimage);
  cv::waitKey(5);
}
// Node entry point: loads the camera intrinsics (yaml) and the
// LiDAR->camera extrinsics (bin) for the ilcc2 package, then converts
// every incoming LiDAR scan to an 8-bit depth PNG via chatterCallback.
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
// Private node handle so the parameters below are read from this node's
// namespace, with defaults when not set.
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; // extrinsic (LiDAR->camera) file path
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); // camera intrinsics path
image_corners_est->txt2extrinsic( extrinsic_path );
// NOTE(review): these message_filters subscribers still subscribe to both
// topics even though the synchronizer below is commented out.
message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub(nh, lidar_topic,2); // point cloud subscriber
message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, image_topic, 2); // image subscriber
// Only this plain subscriber drives the node (depth-PNG export).
ros::Subscriber sub = nh.subscribe(lidar_topic, 1000, chatterCallback);
// typedef sync_policies::ApproximateTime<sensor_msgs::PointCloud2, sensor_msgs::Image> MySyncPolicy; //时间戳同步
// Synchronizer<MySyncPolicy> sync(MySyncPolicy(10), cloud_sub, image_sub);
// sync.registerCallback(boost::bind(&callback_LidarCam, _1, _2)); //回调函数
// ROS_INFO("waiting for lidar image topic %s %s", lidar_topic.c_str(), image_topic.c_str());
ros::spin();
// while (ros::ok())
// {
// ros::spin();
// }
}
rosbag获取矫正过的图像png
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
// void cv::undistort
// ( InputArray src, // 原始图像
// OutputArray dst, // 矫正图像
// InputArray cameraMatrix, // 原相机内参矩阵
// InputArray distCoeffs, // 相机畸变参数
// InputArray newCameraMatrix = noArray() // 新相机内参矩阵
// )
/// Undistort the camera image and draw every LiDAR point on it, colored
/// by intensity. The fused image is shown and saved as process_data/abc.jpg.
/// @param image  raw BGR camera frame
/// @param cloud  LiDAR cloud in the sensor frame (XYZ + intensity)
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  // Undistort with the intrinsics loaded at startup; passing camK as the
  // new camera matrix keeps the original projection.
  cv::Mat rectifyImage;
  cv::undistort(image, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // Fixed normalization range for the intensity -> hue mapping.
  // (The previous per-cloud min/max scan and the `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  int counter = 0;  // number of points that projected into the image
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    // spaceToPlane returns true only for points that project into the
    // image and pass the distance_valid cut-off.
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      unsigned char r, g, b;
      // Normalize intensity into [0, 255] for the hue channel; clamp so
      // intensities above inten_high do not overflow the hue range.
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      if (h > 255) h = 255;
      if (h < 0)   h = 0;
      image_corners_est->HSVtoRGB(h, 100, 100, &r, &g, &b);
      // NOTE(review): the image is BGR ("bgr8") but the scalar is built as
      // (r,g,b), so red/blue may be swapped in the rendering — confirm.
      // Radius 0.6 truncates to 0; thickness 2 still draws a small dot.
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }

  cv::imshow("img_liar_point", rectifyImage);  // fused (undistorted) image
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+"abc"+".jpg", rectifyImage);
  cv::waitKey(5);
}
/// Synchronized LiDAR+camera callback: converts both messages to their
/// OpenCV/PCL representations and hands them to processData().
void callback_LidarCam(const sensor_msgs::PointCloud2ConstPtr& msg_pc,
const sensor_msgs::ImageConstPtr& msg_img)
{
  // Log the timestamps of the received pair.
  ROS_INFO_STREAM("Velodyne scan received at " << msg_pc->header.stamp.toSec());
  ROS_INFO_STREAM("image received at " << msg_img->header.stamp.toSec());

  // ROS image -> OpenCV BGR matrix.
  cv::Mat bgr_frame = cv_bridge::toCvCopy(msg_img, "bgr8")->image;

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> lidar_cloud;
  pcl::fromROSMsg(*msg_pc, lidar_cloud);

  processData(bgr_frame, lidar_cloud.makeShared());
}
int number=0;
/// Image callback: resizes the frame to 640x360, undistorts it, and saves
/// it as process_image/&lt;stamp&gt;.png.
void camCallback(const sensor_msgs::Image& msg_img)
{
  ROS_INFO_STREAM("image received at " << msg_img.header.stamp.toSec());

  // ROS image -> OpenCV BGR, resized to the working resolution.
  cv::Mat img = cv_bridge::toCvCopy(msg_img,"bgr8")->image;
  cv::resize(img, img, cv::Size(640, 360));

  // Undistort with the loaded intrinsics (camK kept as new camera matrix).
  cv::Mat rectifyImage;
  cv::undistort(img, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // BUG FIX: save the rectified image. The original saved the raw `img`,
  // so the exported "rectified" PNGs were never undistorted.
  double secs = msg_img.header.stamp.toSec();
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_image/"+std::to_string(secs)+".png", rectifyImage);
  cv::waitKey(5);
}
/// LiDAR callback: projects the cloud into a fixed 640x360 8-bit depth
/// image (grey value = normalized intensity) and saves it as
/// process_data/&lt;stamp&gt;.png.
void lidarCallback(const sensor_msgs::PointCloud2& msg_pc)
{
  ROS_INFO("here");
  number = number + 1;  // global frame counter (kept for parity with other nodes)

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> input_cloud;
  pcl::fromROSMsg(msg_pc, input_cloud);

  // Fixed normalization range for intensity -> grey value.
  // (The previous per-cloud min/max scan and `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  // Depth image initialized to black; zeros() replaces the manual
  // row-by-row clearing loop.
  cv::Mat greyimage = cv::Mat::zeros(360, 640, CV_8UC1);

  int counter = 0;
  for (unsigned int index = 0; index < input_cloud.size(); index++)
  {
    const pcl::PointXYZI &pt = input_cloud.points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      // Guard against out-of-bounds writes: spaceToPlane validates against
      // the camera model, whose resolution may differ from 640x360.
      if (x < 0 || x >= greyimage.cols || y < 0 || y >= greyimage.rows)
        continue;
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      // saturate_cast clamps to [0,255] instead of wrapping around for
      // intensities above inten_high.
      greyimage.ptr<uchar>(y)[x] = cv::saturate_cast<uchar>(h);
      counter++;
    }
  }

  // Name the file by the scan timestamp so clouds and images can be paired.
  double secs = msg_pc.header.stamp.toSec();
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+std::to_string(secs)+".png", greyimage);
  cv::waitKey(5);
}
// Node entry point: loads the camera intrinsics (yaml) and the
// LiDAR->camera extrinsics (bin), then saves every incoming camera frame
// as a rectified PNG via camCallback.
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
// Private node handle so the parameters below are read from this node's
// namespace, with defaults when not set.
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; // extrinsic (LiDAR->camera) file path
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); // camera intrinsics path
image_corners_est->txt2extrinsic( extrinsic_path );
// NOTE(review): these message_filters subscribers still subscribe to both
// topics even though the synchronizer below is commented out; lidarCallback
// is defined but never subscribed in this variant.
message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub(nh, lidar_topic,2); // point cloud subscriber
message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, image_topic, 2); // image subscriber
// Only this plain subscriber drives the node (rectified-image export).
ros::Subscriber sub = nh.subscribe(image_topic, 1000, camCallback);
// typedef sync_policies::ApproximateTime<sensor_msgs::PointCloud2, sensor_msgs::Image> MySyncPolicy; //时间戳同步
// Synchronizer<MySyncPolicy> sync(MySyncPolicy(10), cloud_sub, image_sub);
// sync.registerCallback(boost::bind(&callback_LidarCam, _1, _2)); //回调函数
// ROS_INFO("waiting for lidar image topic %s %s", lidar_topic.c_str(), image_topic.c_str());
ros::spin();
// while (ros::ok())
// {
// ros::spin();
// }
}
从rosbag同时获取点云和图像
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
// void cv::undistort
// ( InputArray src, // 原始图像
// OutputArray dst, // 矫正图像
// InputArray cameraMatrix, // 原相机内参矩阵
// InputArray distCoeffs, // 相机畸变参数
// InputArray newCameraMatrix = noArray() // 新相机内参矩阵
// )
/// Undistort the camera image and draw every LiDAR point on it, colored
/// by intensity. The fused image is shown and saved as process_data/abc.jpg.
/// @param image  raw BGR camera frame
/// @param cloud  LiDAR cloud in the sensor frame (XYZ + intensity)
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  // Undistort with the intrinsics loaded at startup; passing camK as the
  // new camera matrix keeps the original projection.
  cv::Mat rectifyImage;
  cv::undistort(image, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // Fixed normalization range for the intensity -> hue mapping.
  // (The previous per-cloud min/max scan and the `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  int counter = 0;  // number of points that projected into the image
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    // spaceToPlane returns true only for points that project into the
    // image and pass the distance_valid cut-off.
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      unsigned char r, g, b;
      // Normalize intensity into [0, 255] for the hue channel; clamp so
      // intensities above inten_high do not overflow the hue range.
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      if (h > 255) h = 255;
      if (h < 0)   h = 0;
      image_corners_est->HSVtoRGB(h, 100, 100, &r, &g, &b);
      // NOTE(review): the image is BGR ("bgr8") but the scalar is built as
      // (r,g,b), so red/blue may be swapped in the rendering — confirm.
      // Radius 0.6 truncates to 0; thickness 2 still draws a small dot.
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }

  cv::imshow("img_liar_point", rectifyImage);  // fused (undistorted) image
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+"abc"+".jpg", rectifyImage);
  cv::waitKey(5);
}
/// Synchronized LiDAR+camera callback: converts both messages to their
/// OpenCV/PCL representations and hands them to processData().
void callback_LidarCam(const sensor_msgs::PointCloud2ConstPtr& msg_pc,
const sensor_msgs::ImageConstPtr& msg_img)
{
  // Log the timestamps of the received pair.
  ROS_INFO_STREAM("Velodyne scan received at " << msg_pc->header.stamp.toSec());
  ROS_INFO_STREAM("image received at " << msg_img->header.stamp.toSec());

  // ROS image -> OpenCV BGR matrix.
  cv::Mat bgr_frame = cv_bridge::toCvCopy(msg_img, "bgr8")->image;

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> lidar_cloud;
  pcl::fromROSMsg(*msg_pc, lidar_cloud);

  processData(bgr_frame, lidar_cloud.makeShared());
}
int number=0;
/// Image callback: resizes the frame to 640x360, undistorts it, and saves
/// it as process_image/&lt;stamp&gt;.png.
void camCallback(const sensor_msgs::Image& msg_img)
{
  ROS_INFO_STREAM("image received at " << msg_img.header.stamp.toSec());

  // ROS image -> OpenCV BGR, resized to the working resolution.
  cv::Mat img = cv_bridge::toCvCopy(msg_img,"bgr8")->image;
  cv::resize(img, img, cv::Size(640, 360));

  // Undistort with the loaded intrinsics (camK kept as new camera matrix).
  cv::Mat rectifyImage;
  cv::undistort(img, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // BUG FIX: save the rectified image. The original saved the raw `img`,
  // so the exported "rectified" PNGs were never undistorted.
  double secs = msg_img.header.stamp.toSec();
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_image/"+std::to_string(secs)+".png", rectifyImage);
  cv::waitKey(5);
}
/// LiDAR callback: projects the cloud into a fixed 640x360 8-bit depth
/// image (grey value = normalized intensity) and saves it as
/// process_data/&lt;stamp&gt;.png.
void lidarCallback(const sensor_msgs::PointCloud2& msg_pc)
{
  ROS_INFO_STREAM("lidar received at " << msg_pc.header.stamp.toSec());
  number = number + 1;  // global frame counter (kept for parity with other nodes)

  // ROS point cloud -> PCL template cloud.
  pcl::PointCloud<pcl::PointXYZI> input_cloud;
  pcl::fromROSMsg(msg_pc, input_cloud);

  // Fixed normalization range for intensity -> grey value.
  // (The previous per-cloud min/max scan and `datas` buffer were dead
  // code: their results were immediately overwritten by these constants.)
  const double inten_low = 0, inten_high = 60;

  // Depth image initialized to black; zeros() replaces the manual
  // row-by-row clearing loop.
  cv::Mat greyimage = cv::Mat::zeros(360, 640, CV_8UC1);

  int counter = 0;
  for (unsigned int index = 0; index < input_cloud.size(); index++)
  {
    const pcl::PointXYZI &pt = input_cloud.points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      // Guard against out-of-bounds writes: spaceToPlane validates against
      // the camera model, whose resolution may differ from 640x360.
      if (x < 0 || x >= greyimage.cols || y < 0 || y >= greyimage.rows)
        continue;
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 255;
      // saturate_cast clamps to [0,255] instead of wrapping around for
      // intensities above inten_high.
      greyimage.ptr<uchar>(y)[x] = cv::saturate_cast<uchar>(h);
      counter++;
    }
  }

  // Name the file by the scan timestamp so clouds and images can be paired.
  double secs = msg_pc.header.stamp.toSec();
  std::string package_path = ros::package::getPath("ilcc2");
  cv::imwrite(package_path+"/process_data/"+std::to_string(secs)+".png", greyimage);
  cv::waitKey(5);
}
// Node entry point: loads the camera intrinsics (yaml) and the
// LiDAR->camera extrinsics (bin), then exports rectified camera PNGs
// (camCallback) and depth PNGs (lidarCallback) simultaneously.
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
// Private node handle so the parameters below are read from this node's
// namespace, with defaults when not set.
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; // extrinsic (LiDAR->camera) file path
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); // camera intrinsics path
image_corners_est->txt2extrinsic( extrinsic_path );
// NOTE(review): these message_filters subscribers still subscribe to both
// topics even though the synchronizer below is commented out.
message_filters::Subscriber<sensor_msgs::PointCloud2> cloud_sub(nh, lidar_topic,2); // point cloud subscriber
message_filters::Subscriber<sensor_msgs::Image> image_sub(nh, image_topic, 2); // image subscriber
// Independent (unsynchronized) subscribers drive the two exporters.
ros::Subscriber sub = nh.subscribe(image_topic, 1000, camCallback);
ros::Subscriber asub = nh.subscribe(lidar_topic, 1000, lidarCallback);
// typedef sync_policies::ApproximateTime<sensor_msgs::PointCloud2, sensor_msgs::Image> MySyncPolicy; //时间戳同步
// Synchronizer<MySyncPolicy> sync(MySyncPolicy(10), cloud_sub, image_sub);
// sync.registerCallback(boost::bind(&callback_LidarCam, _1, _2)); //回调函数
// ROS_INFO("waiting for lidar image topic %s %s", lidar_topic.c_str(), image_topic.c_str());
ros::spin();
// while (ros::ok())
// {
// ros::spin();
// }
}
深度图和rgb重叠显示
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
/// Resize + undistort the camera image and overlay the LiDAR points on it,
/// colored by intensity (here used as a distance proxy). Displays the fused
/// image; saving is disabled.
/// @param image  raw BGR camera frame
/// @param cloud  LiDAR cloud in the sensor frame (XYZ + intensity)
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  // Resize to the working resolution, then undistort with the loaded
  // intrinsics (camK kept as the new camera matrix).
  cv::Mat rectifyImage;
  cv::resize(image, image, cv::Size(640, 360));
  cv::undistort(image, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);

  // One pass over the cloud for the intensity range; only the maximum is
  // used (logged below). The unused `datas` buffer was removed.
  double inten_low = 255, inten_high = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    if (inten_low > pt.intensity)  inten_low = pt.intensity;
    if (inten_high < pt.intensity) inten_high = pt.intensity;
  }
  ROS_INFO("max distance: %d", int(inten_high));

  // Fixed lower bound; the normalization's upper bound below is
  // distance_valid, matching the cut-off applied by spaceToPlane.
  // (The original also set inten_high = 60, but that value was unused.)
  const double norm_low = 0;

  int counter = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      unsigned char r, g, b;
      // Normalize intensity into [0, 255] for the hue channel.
      double h = (pt.intensity - norm_low) / (distance_valid - norm_low) * 255;
      image_corners_est->HSVtoRGB(h, 100, 100, &r, &g, &b);
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }

  cv::imshow("img_liar_point", rectifyImage);  // fused overlay
  // std::string package_path = ros::package::getPath("ilcc2");
  // cv::imwrite(package_path+"/process_data/"+"abc"+".jpg", rectifyImage);
  cv::waitKey(5);
}
std::deque<cv::Mat> img_buffer;
std::deque<pcl::PointCloud<pcl::PointXYZI>> cloud_buffer;
/// Cache the newest camera frame; keep at most the last three.
void ImageHandler(const sensor_msgs::Image::ConstPtr& img_msg) {
  const cv::Mat frame = cv_bridge::toCvCopy(img_msg)->image;
  if (frame.empty())
    return;  // ignore frames cv_bridge could not decode
  img_buffer.push_back(frame);
  while (img_buffer.size() > 3)
    img_buffer.pop_front();
}
/// Cache the newest LiDAR scan; keep at most the last three.
void LiDARHandler(const sensor_msgs::PointCloud2::ConstPtr& lidar_msg) {
  pcl::PointCloud<pcl::PointXYZI> scan;
  pcl::fromROSMsg(*lidar_msg, scan);
  cloud_buffer.push_back(scan);
  while (cloud_buffer.size() > 3)
    cloud_buffer.pop_front();
}
// Node entry point: loads intrinsics/extrinsics, buffers the latest camera
// frames and LiDAR scans, and fuses the newest pair at ~10 Hz.
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
// Private node handle so the parameters below are read from this node's
// namespace, with defaults when not set.
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; // extrinsic (LiDAR->camera) file path
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); // camera intrinsics path
image_corners_est->txt2extrinsic( extrinsic_path );
ros::Subscriber sub_livox =nh.subscribe<sensor_msgs::PointCloud2>(lidar_topic, 10, LiDARHandler);
ros::Subscriber sub_image = nh.subscribe<sensor_msgs::Image>(image_topic, 20, ImageHandler);
// Poll at 10 Hz: fuse the most recent image with the most recent scan.
// NOTE(review): pairing is by recency, not timestamp, and only img_buffer
// is cleared after a fusion (cloud_buffer is not) — confirm intent.
ros::Rate loop_rate(10);
while (ros::ok()) {
if (img_buffer.size() > 0 && cloud_buffer.size() > 0) {
cv::Mat tmp_image = img_buffer.back();
pcl::PointCloud<pcl::PointXYZI> tmp_cloud = cloud_buffer.back();
img_buffer.clear();
processData(tmp_image, tmp_cloud.makeShared());
}
ros::spinOnce();
loop_rate.sleep();
}
return 0;
}
融合的同时保存对应的深度图和RGB图像
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
int num=0;
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
num=num+1;
std::string package_path = ros::package::getPath("ilcc2");
cv::Mat rectifyImage;
cv::resize(image, image, cv::Size(640, 360)); //矫正的图像
cv::undistort(image, rectifyImage, image_corners_est->camK, image_corners_est->distort_param, image_corners_est->camK);
cv::imwrite(package_path+"/data/image/"+std::to_string(num)+".jpg", rectifyImage);
cv::Mat depthimage(360,640,CV_8UC1);
for (int i = 0; i < depthimage.rows; i++)
{
uchar *p = depthimage.ptr<uchar>(i);
for (int j = 0; j < depthimage.cols; j++)
{
p[j] = 0;
}
}
pcl::PointXYZI ptmp;
/// 生成每个点的颜色
double inten_low=255, inten_high=0;
std::vector<double> datas; #include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
double distance_valid;
int num=0;
/// Fuse one camera frame with one LiDAR scan and save three outputs, all
/// cropped to 640x352 and named by a running frame counter:
///   data/image/&lt;n&gt;.png       rectified RGB frame
///   data/lidar_image/&lt;n&gt;.png  rectified frame with colored LiDAR overlay
///   data/depthmap/&lt;n&gt;.png     16-bit depth map (intensity-normalized)
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  num = num + 1;  // frame counter shared by all three output files
  std::string package_path = ros::package::getPath("ilcc2");

  // Resize to the working resolution and undistort with the loaded
  // intrinsics (camK kept as the new camera matrix).
  cv::Mat rectifyImage;
  cv::resize(image, image, cv::Size( 640, 360));
  cv::undistort(image, rectifyImage, image_corners_est->camK,
                image_corners_est->distort_param, image_corners_est->camK);
  cv::resize(rectifyImage, rectifyImage, cv::Size( 640, 360));

  // Crop away 4 top rows -> 640x352 (presumably to make the height a
  // multiple of 32 for downstream consumers — TODO confirm).
  cv::Mat saverectifyImage = rectifyImage(cv::Rect(0,4,640,352));
  cv::imwrite(package_path+"/data/image/"+std::to_string(num)+".png", saverectifyImage);

  // 16-bit depth image initialized to zero; zeros() replaces the manual
  // row-by-row clearing loop.
  cv::Mat depthimage = cv::Mat::zeros(360, 640, CV_16UC1);

  // One pass over the cloud for the intensity range; only the maximum is
  // used (logged below). The unused `datas` buffer was removed.
  double inten_low = 255, inten_high = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    if (inten_low > pt.intensity)  inten_low = pt.intensity;
    if (inten_high < pt.intensity) inten_high = pt.intensity;
  }
  ROS_INFO("max distance: %d", int(inten_high));

  // Fixed normalization range for intensity -> 16-bit depth value.
  inten_low = 0;
  inten_high = 260;

  int counter = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    Eigen::Vector3d Pw(pt.x, pt.y, pt.z);
    Eigen::Vector2d Pcam;
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid)) {
      int x = Pcam[0];
      int y = Pcam[1];
      // Guard against out-of-bounds writes: spaceToPlane validates against
      // the camera model, whose resolution may differ from 640x360.
      if (x < 0 || x >= depthimage.cols || y < 0 || y >= depthimage.rows)
        continue;
      unsigned char r, g, b;
      double h = (pt.intensity - inten_low) / (inten_high - inten_low) * 65535;
      // saturate_cast clamps to [0,65535] instead of wrapping around when
      // the intensity exceeds inten_high.
      depthimage.ptr<unsigned short>(y)[x] = cv::saturate_cast<unsigned short>(h);
      image_corners_est->HSVtoRGB(h/256, 100, 100, &r, &g, &b);
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }

  cv::imshow("img_liar_point", rectifyImage);  // fused overlay

  // Save the overlay image with the same crop as the RGB frame.
  cv::Mat savelidar_image = rectifyImage(cv::Rect(0,4,640,352));
  cv::imwrite(package_path+"/data/lidar_image/"+std::to_string(num)+".png", savelidar_image);

  // PNG compression level 0 keeps the 16-bit depth data lossless and fast
  // to write.
  vector<int> compression_params;
  compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
  compression_params.push_back(0);
  cv::Mat savedepthimage = depthimage(cv::Rect(0,4,640,352));
  cv::imwrite(package_path+"/data/depthmap/"+std::to_string(num)+".png", savedepthimage, compression_params);
  cv::waitKey(5);
}
// Latest camera frames, capped at 3 entries by ImageHandler.
std::deque<cv::Mat> img_buffer;
// Latest LiDAR scans, capped at 3 entries by LiDARHandler.
std::deque<pcl::PointCloud<pcl::PointXYZI>> cloud_buffer;
// Camera callback: decode the ROS image and keep only the three newest
// frames in img_buffer.
void ImageHandler(const sensor_msgs::Image::ConstPtr& img_msg) {
  const cv::Mat frame = cv_bridge::toCvCopy(img_msg)->image;
  if (frame.empty())
    return;
  img_buffer.push_back(frame);
  while (img_buffer.size() > 3)
    img_buffer.pop_front();
}
// LiDAR callback: convert the PointCloud2 message to a PCL cloud and keep
// only the three newest scans in cloud_buffer.
void LiDARHandler(const sensor_msgs::PointCloud2::ConstPtr& lidar_msg) {
  pcl::PointCloud<pcl::PointXYZI> scan;
  pcl::fromROSMsg(*lidar_msg, scan);
  cloud_buffer.push_back(scan);
  while (cloud_buffer.size() > 3)
    cloud_buffer.pop_front();
}
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; //外参路径
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); //内参路径
image_corners_est->txt2extrinsic( extrinsic_path );
ros::Subscriber sub_livox =nh.subscribe<sensor_msgs::PointCloud2>(lidar_topic, 10, LiDARHandler);
ros::Subscriber sub_image = nh.subscribe<sensor_msgs::Image>(image_topic, 20, ImageHandler);
ros::Rate loop_rate(10);
while (ros::ok()) {
if (img_buffer.size() > 0 && cloud_buffer.size() > 0) {
cv::Mat tmp_image = img_buffer.back();
pcl::PointCloud<pcl::PointXYZI> tmp_cloud = cloud_buffer.back();
img_buffer.clear();
processData(tmp_image, tmp_cloud.makeShared());
}
ros::spinOnce();
loop_rate.sleep();
}
return 0;
}
// NOTE(review): truncated listing — this is the tail of an 8-bit variant of
// processData (its opening lines are missing from the document). Depth is
// normalised to 0..255 and written into an 8-bit image here.
for(unsigned int index=0; index<cloud->size(); index++) // find min/max intensity of this frame
{
ptmp = cloud->points[index];
if(inten_low > ptmp.intensity)
inten_low = ptmp.intensity;
if(inten_high < ptmp.intensity)
inten_high = ptmp.intensity;
datas.push_back(ptmp.intensity);
}
ROS_INFO("max distance: %d", int(inten_high));
// Fixed normalisation range (this 8-bit variant uses 0..60).
inten_low = 0;
inten_high = 60;
int counter = 0;
for(unsigned int index=0; index<cloud->size(); index++)
{
ptmp = cloud->points[index];
Eigen::Vector3d Pw(ptmp.x, ptmp.y, ptmp.z);
Eigen::Vector2d Pcam;
// project onto the image plane
if ( image_corners_est->spaceToPlane(Pw, Pcam, distance_valid) ) {
int x = Pcam[0];
int y = Pcam[1];
unsigned char r, g, b;
double h = (ptmp.intensity-inten_low)/(inten_high -inten_low)*255; // normalise depth to 8 bit
uchar *p = depthimage.ptr<uchar>(y);
p[x] = h;
image_corners_est->HSVtoRGB(h, 100, 100, &r, &g, &b);
cv::circle(rectifyImage, cv::Point2d(x,y), 0.6, cv::Scalar(r,g,b), 2);
counter++;
}
}
// std::cout << counter << " points ok\n";
cv::resize(rectifyImage, rectifyImage, cv::Size(rectifyImage.cols/1, rectifyImage.rows/1));
cv::imshow("img_liar_point", rectifyImage); // rectified image with projected points
cv::imwrite(package_path+"/data/lidar_image/"+std::to_string(num)+".jpg", rectifyImage);
cv::imwrite(package_path+"/data/depthmap/"+std::to_string(num)+".png", depthimage);
cv::waitKey(5);
}
// Latest camera frames, capped at 3 entries by ImageHandler.
std::deque<cv::Mat> img_buffer;
// Latest LiDAR scans, capped at 3 entries by LiDARHandler.
std::deque<pcl::PointCloud<pcl::PointXYZI>> cloud_buffer;
// Camera callback: decode the ROS image and keep only the three newest
// frames in img_buffer.
void ImageHandler(const sensor_msgs::Image::ConstPtr& img_msg) {
  const cv::Mat frame = cv_bridge::toCvCopy(img_msg)->image;
  if (frame.empty())
    return;
  img_buffer.push_back(frame);
  while (img_buffer.size() > 3)
    img_buffer.pop_front();
}
// LiDAR callback: convert the PointCloud2 message to a PCL cloud and keep
// only the three newest scans in cloud_buffer.
void LiDARHandler(const sensor_msgs::PointCloud2::ConstPtr& lidar_msg) {
  pcl::PointCloud<pcl::PointXYZI> scan;
  pcl::fromROSMsg(*lidar_msg, scan);
  cloud_buffer.push_back(scan);
  while (cloud_buffer.size() > 3)
    cloud_buffer.pop_front();
}
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; //外参路径
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); //内参路径
image_corners_est->txt2extrinsic( extrinsic_path );
ros::Subscriber sub_livox =nh.subscribe<sensor_msgs::PointCloud2>(lidar_topic, 10, LiDARHandler);
ros::Subscriber sub_image = nh.subscribe<sensor_msgs::Image>(image_topic, 20, ImageHandler);
ros::Rate loop_rate(10);
while (ros::ok()) {
if (img_buffer.size() > 0 && cloud_buffer.size() > 0) {
cv::Mat tmp_image = img_buffer.back();
pcl::PointCloud<pcl::PointXYZI> tmp_cloud = cloud_buffer.back();
img_buffer.clear();
processData(tmp_image, tmp_cloud.makeShared());
}
ros::spinOnce();
loop_rate.sleep();
}
return 0;
}
深度图改为 16bit：深度缓冲使用 CV_16UC1，数值归一化到 0–65535 后以无压缩 PNG 保存
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
// Camera intrinsics/extrinsics helper; re-created in main() from the YAML config.
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
// Max projection distance forwarded to spaceToPlane(); set from ROS param "distance_valid".
double distance_valid;
// Frame counter used to name the saved image/depth files.
int num=0;
// Project one LiDAR scan into the rectified camera image and write, per frame:
//   data/image/<n>.png       - rectified RGB image (cropped to 640x352)
//   data/lidar_image/<n>.png - rectified image with the projected points drawn on it
//   data/depthmap/<n>.png    - 16-bit depth map built from point intensity
// `image` is the raw camera frame, `cloud` the matching LiDAR cloud.
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  num = num + 1;  // frame counter, used to name the output files
  std::string package_path = ros::package::getPath("ilcc2");

  // Undistort at the fixed working resolution 640x360.
  cv::Mat rectifyImage;
  cv::resize(image, image, cv::Size(640, 360));
  cv::undistort(image, rectifyImage, image_corners_est->camK, image_corners_est->distort_param, image_corners_est->camK);
  cv::resize(rectifyImage, rectifyImage, cv::Size(640, 360));

  // Drop the 4-pixel top border so the saved images are 640x352.
  cv::Mat saverectifyImage = rectifyImage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/image/" + std::to_string(num) + ".png", saverectifyImage);

  // 16-bit depth buffer; 0 means "no LiDAR return at this pixel".
  cv::Mat depthimage = cv::Mat::zeros(360, 640, CV_16UC1);

  // One pass over the cloud to log this frame's intensity range.
  double inten_low = 255, inten_high = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    if (inten_low > pt.intensity)
      inten_low = pt.intensity;
    if (inten_high < pt.intensity)
      inten_high = pt.intensity;
  }
  ROS_INFO("max distance: %d", int(inten_high));

  // Fixed normalisation range so every frame shares the same depth scale.
  inten_low = 0;
  inten_high = 260;

  int counter = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    pcl::PointXYZI ptmp = cloud->points[index];
    Eigen::Vector3d Pw(ptmp.x, ptmp.y, ptmp.z);
    Eigen::Vector2d Pcam;
    // Project the 3D point onto the rectified image plane.
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid))
    {
      int x = Pcam[0];
      int y = Pcam[1];
      // Skip projections outside the buffer: the original wrote out of
      // bounds whenever spaceToPlane yielded a pixel off the 640x360 image.
      if (x < 0 || x >= depthimage.cols || y < 0 || y >= depthimage.rows)
        continue;
      // Normalise intensity to the 16-bit range and clamp so values above
      // inten_high cannot wrap when stored in an unsigned short.
      double h = (ptmp.intensity - inten_low) / (inten_high - inten_low) * 65535;
      if (h < 0) h = 0;
      if (h > 65535) h = 65535;
      depthimage.ptr<unsigned short>(y)[x] = (unsigned short)h;
      unsigned char r, g, b;
      image_corners_est->HSVtoRGB(h / 256, 100, 100, &r, &g, &b);
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }
  // counter == number of cloud points that landed on the image.

  cv::imshow("img_liar_point", rectifyImage);

  // Save the overlay and the cropped depth map. PNG compression level 0
  // keeps the write fast and the 16-bit values exact.
  cv::Mat savelidar_image = rectifyImage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/lidar_image/" + std::to_string(num) + ".png", savelidar_image);
  vector<int> compression_params;
  compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
  compression_params.push_back(0);
  cv::Mat savedepthimage = depthimage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/depthmap/" + std::to_string(num) + ".png", savedepthimage, compression_params);
  cv::waitKey(5);
}
// Latest camera frames, capped at 3 entries by ImageHandler.
std::deque<cv::Mat> img_buffer;
// Latest LiDAR scans, capped at 3 entries by LiDARHandler.
std::deque<pcl::PointCloud<pcl::PointXYZI>> cloud_buffer;
// Camera callback: decode the ROS image and keep only the three newest
// frames in img_buffer.
void ImageHandler(const sensor_msgs::Image::ConstPtr& img_msg) {
  const cv::Mat frame = cv_bridge::toCvCopy(img_msg)->image;
  if (frame.empty())
    return;
  img_buffer.push_back(frame);
  while (img_buffer.size() > 3)
    img_buffer.pop_front();
}
// LiDAR callback: convert the PointCloud2 message to a PCL cloud and keep
// only the three newest scans in cloud_buffer.
void LiDARHandler(const sensor_msgs::PointCloud2::ConstPtr& lidar_msg) {
  pcl::PointCloud<pcl::PointXYZI> scan;
  pcl::fromROSMsg(*lidar_msg, scan);
  cloud_buffer.push_back(scan);
  while (cloud_buffer.size() > 3)
    cloud_buffer.pop_front();
}
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; //外参路径
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); //内参路径
image_corners_est->txt2extrinsic( extrinsic_path );
ros::Subscriber sub_livox =nh.subscribe<sensor_msgs::PointCloud2>(lidar_topic, 10, LiDARHandler);
ros::Subscriber sub_image = nh.subscribe<sensor_msgs::Image>(image_topic, 20, ImageHandler);
ros::Rate loop_rate(10);
while (ros::ok()) {
if (img_buffer.size() > 0 && cloud_buffer.size() > 0) {
cv::Mat tmp_image = img_buffer.back();
pcl::PointCloud<pcl::PointXYZI> tmp_cloud = cloud_buffer.back();
img_buffer.clear();
processData(tmp_image, tmp_cloud.makeShared());
}
ros::spinOnce();
loop_rate.sleep();
}
return 0;
}
png 深度图转为点云：读取保存的 16bit 深度 PNG 与对应 RGB 图，反投影为彩色点云并发布到 /rgb_lidar
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
#include "ilcc2/config.h"
using namespace std;
using namespace message_filters;
// Camera intrinsics/extrinsics helper; re-created in main() from the YAML config.
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
// Publisher for the reconstructed coloured cloud (/rgb_lidar).
ros::Publisher pubLaserCloud;
// Declared for parity with the other tools; unused in this program.
double distance_valid;
int main(int argc, char** argv){
ros::init(argc,argv,"rgb_lidar");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
ros::NodeHandle nh_private("~");
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data_backup/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file;
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) );
image_corners_est->txt2extrinsic( extrinsic_path );
cv::Mat rgb, depth;
rgb = cv::imread("/home/stone/fsdownload/1.png");
const char filename[] = "/home/stone/Desktop/rosws/src/ilcc2/data/depthmap/1.png";
//“2”拿深度
depth = cv::imread(filename, 2);
cv::imshow("img", depth);
cv::waitKey(0);
pcl::PointCloud<pcl::PointXYZRGB>::Ptr rgbcloud(new pcl::PointCloud<pcl::PointXYZRGB>);
Eigen::Matrix3d m_Rinverse;
m_Rinverse=image_corners_est-> m_R;
cout <<"inverse.............." << endl;
m_Rinverse = m_Rinverse.inverse().eval();
cout <<"inverse get.............." << endl;
pubLaserCloud = nh.advertise<sensor_msgs::PointCloud2>("/rgb_lidar", 10);
ros::Rate loop_rate(10);
while (ros::ok()) {
for (int m = 0; m < depth.rows; m++)
{
for (int n = 0; n < depth.cols; n++)
{
ushort d = depth.ptr<ushort>(m)[n];
if (d == 0)
continue;
pcl::PointXYZRGB rgbpoint;
Eigen::Vector3d P_tmp;
P_tmp[0] = d*(m-image_corners_est->m_cx)/image_corners_est->m_fx;
P_tmp[1] = d*(n-(image_corners_est->m_cy-4))/image_corners_est->m_fy;
P_tmp[2] = d;
Eigen::Vector3d P_w;
P_w = m_Rinverse*(P_tmp-image_corners_est->m_t);
rgbpoint.x = P_w[0];
rgbpoint.y = P_w[1];
rgbpoint.z = P_w[2];
rgbpoint.b = rgb.ptr<uchar>(n)[m*3];
rgbpoint.g = rgb.ptr<uchar>(n)[m*3+1];
rgbpoint.r = rgb.ptr<uchar>(n)[m*3+2];
rgbcloud->push_back(rgbpoint);
}
}
sensor_msgs::PointCloud2 cloud_msg;
pcl::toROSMsg(*rgbcloud, cloud_msg);
cloud_msg.header.stamp = ros::Time::now();
cloud_msg.header.frame_id = "/livox_frame";
pubLaserCloud.publish(cloud_msg);
rgbcloud->is_dense = false;
cout <<"here.............." << endl;
try {
//保存点云图
pcl::io::savePCDFile("/home/stone/pcd.pcd", *rgbcloud);
}
catch (pcl::IOException &e) {
cout << e.what() << endl;
}
ros::spinOnce();
loop_rate.sleep();
}
}
前面整错了：归一化用的是 intensity，深度图应该用点云的 z 值。下面的版本改用 z 作为深度
#include <fstream>
#include <iostream>
#include <math.h>
#include <ros/package.h> /// ros::package::getPath
#include <Eigen/Core>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <sstream>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/common/transforms.h> /// pcl::transformPointCloud
#include "ilcc2/ImageCornersEst.h"
using namespace std;
using namespace message_filters;
// Camera intrinsics/extrinsics helper; re-created in main() from the YAML config.
ImageCornersEst::Ptr image_corners_est(new ImageCornersEst);
// Max projection distance forwarded to spaceToPlane(); set from ROS param "distance_valid".
double distance_valid;
// Frame counter used to name the saved image/depth files.
int num=0;
// Same pipeline as the intensity-based version, but the 16-bit depth map
// encodes each point's z coordinate (range [-2, 40] metres) instead of its
// intensity. Outputs per frame:
//   data/image/<n>.png, data/lidar_image/<n>.png, data/depthmap/<n>.png
void processData(cv::Mat image, pcl::PointCloud<pcl::PointXYZI>::Ptr cloud)
{
  num = num + 1;  // frame counter, used to name the output files
  std::string package_path = ros::package::getPath("ilcc2");

  // Undistort at the fixed working resolution 640x360.
  cv::Mat rectifyImage;
  cv::resize(image, image, cv::Size(640, 360));
  cv::undistort(image, rectifyImage, image_corners_est->camK, image_corners_est->distort_param, image_corners_est->camK);
  cv::resize(rectifyImage, rectifyImage, cv::Size(640, 360));

  // Drop the 4-pixel top border so the saved images are 640x352.
  cv::Mat saverectifyImage = rectifyImage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/image/" + std::to_string(num) + ".png", saverectifyImage);

  // 16-bit depth buffer; 0 means "no LiDAR return at this pixel".
  cv::Mat depthimage = cv::Mat::zeros(360, 640, CV_16UC1);

  // One pass over the cloud to log this frame's z range.
  double z_low = 255, z_high = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    const pcl::PointXYZI &pt = cloud->points[index];
    if (z_low > pt.z)
      z_low = pt.z;
    if (z_high < pt.z)
      z_high = pt.z;
  }
  ROS_INFO("max distance: %lf ,min distance: %lf", z_high, z_low);

  // Fixed normalisation range so every frame shares the same depth scale.
  z_low = -2;
  z_high = 40;

  int counter = 0;
  for (unsigned int index = 0; index < cloud->size(); index++)
  {
    pcl::PointXYZI ptmp = cloud->points[index];
    Eigen::Vector3d Pw(ptmp.x, ptmp.y, ptmp.z);
    Eigen::Vector2d Pcam;
    // Project the 3D point onto the rectified image plane.
    if (image_corners_est->spaceToPlane(Pw, Pcam, distance_valid))
    {
      int x = Pcam[0];
      int y = Pcam[1];
      // Skip projections outside the buffer: the original wrote out of
      // bounds whenever spaceToPlane yielded a pixel off the 640x360 image.
      if (x < 0 || x >= depthimage.cols || y < 0 || y >= depthimage.rows)
        continue;
      // Normalise z to the 16-bit range and clamp so z outside [-2, 40]
      // cannot wrap when stored in an unsigned short.
      double h = (ptmp.z - z_low) / (z_high - z_low) * 65535;
      if (h < 0) h = 0;
      if (h > 65535) h = 65535;
      depthimage.ptr<unsigned short>(y)[x] = (unsigned short)h;
      unsigned char r, g, b;
      image_corners_est->HSVtoRGB(h / 256, 100, 100, &r, &g, &b);
      cv::circle(rectifyImage, cv::Point2d(x, y), 0.6, cv::Scalar(r, g, b), 2);
      counter++;
    }
  }
  // counter == number of cloud points that landed on the image.

  cv::imshow("img_liar_point", rectifyImage);

  // Save the overlay and the cropped depth map. PNG compression level 0
  // keeps the write fast and the 16-bit values exact.
  cv::Mat savelidar_image = rectifyImage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/lidar_image/" + std::to_string(num) + ".png", savelidar_image);
  vector<int> compression_params;
  compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
  compression_params.push_back(0);
  cv::Mat savedepthimage = depthimage(cv::Rect(0, 4, 640, 352));
  cv::imwrite(package_path + "/data/depthmap/" + std::to_string(num) + ".png", savedepthimage, compression_params);
  cv::waitKey(5);
}
// Latest camera frames, capped at 3 entries by ImageHandler.
std::deque<cv::Mat> img_buffer;
// Latest LiDAR scans, capped at 3 entries by LiDARHandler.
std::deque<pcl::PointCloud<pcl::PointXYZI>> cloud_buffer;
// Camera callback: decode the ROS image and keep only the three newest
// frames in img_buffer.
void ImageHandler(const sensor_msgs::Image::ConstPtr& img_msg) {
  const cv::Mat frame = cv_bridge::toCvCopy(img_msg)->image;
  if (frame.empty())
    return;
  img_buffer.push_back(frame);
  while (img_buffer.size() > 3)
    img_buffer.pop_front();
}
// LiDAR callback: convert the PointCloud2 message to a PCL cloud and keep
// only the three newest scans in cloud_buffer.
void LiDARHandler(const sensor_msgs::PointCloud2::ConstPtr& lidar_msg) {
  pcl::PointCloud<pcl::PointXYZI> scan;
  pcl::fromROSMsg(*lidar_msg, scan);
  cloud_buffer.push_back(scan);
  while (cloud_buffer.size() > 3)
    cloud_buffer.pop_front();
}
int main(int argc, char** argv){
ros::init(argc,argv,"pcd2image");
ros::NodeHandle nh;
std::string package_path = ros::package::getPath("ilcc2");
ROS_INFO("ilcc2 package at %s", package_path.c_str());
std::string yaml_path, image_topic, lidar_topic;
std::string extrinsic_file;
ros::NodeHandle nh_private("~");
nh_private.param<double>("distance_valid", distance_valid, 5);
nh_private.param<std::string>("extrinsic_file", extrinsic_file, "/process_data/pose.bin");
nh_private.param<std::string>("yaml_path", yaml_path, "30w.yaml");
nh_private.param<std::string>("image_topic", image_topic, "/front");
nh_private.param<std::string>("lidar_topic", lidar_topic, "/velodyne_points");
std::string extrinsic_path = package_path + extrinsic_file; //外参路径
ROS_INFO("load T_lidar2cam from %s", extrinsic_path.c_str());
image_corners_est = ImageCornersEst::Ptr(new ImageCornersEst(package_path+"/config/"+yaml_path, true) ); //内参路径
image_corners_est->txt2extrinsic( extrinsic_path );
ros::Subscriber sub_livox =nh.subscribe<sensor_msgs::PointCloud2>(lidar_topic, 10, LiDARHandler);
ros::Subscriber sub_image = nh.subscribe<sensor_msgs::Image>(image_topic, 20, ImageHandler);
ros::Rate loop_rate(10);
while (ros::ok()) {
if (img_buffer.size() > 0 && cloud_buffer.size() > 0) {
cv::Mat tmp_image = img_buffer.back();
pcl::PointCloud<pcl::PointXYZI> tmp_cloud = cloud_buffer.back();
img_buffer.clear();
processData(tmp_image, tmp_cloud.makeShared());
}
ros::spinOnce();
loop_rate.sleep();
}
return 0;
}