雷达相机融合(七)--显示鼠标位置处的实际空间位置

6 篇文章 0 订阅

思路

通过鼠标回调函数获取鼠标所在位置处的像素坐标,从而得出鼠标所在像素坐标处的点云坐标,由于点云稀疏,如果该像素没有对应雷达点,则遍历,寻找出附近最近的雷达点。

为了得到像素对应位置的gps,在点云密集化的程序中将所需的位姿信息发布出来,然后在该程序中订阅。

鼠标位置处的雷达坐标

//*************************************************************************************************
//    显示图像 获取鼠标位置处的实际空间位置
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion fire_img_show
//
//****************************************************************************************************

#include <ros/ros.h>
#include <boost/thread.hpp>
#include<iostream>
#include<sstream>

#include <cv_bridge/cv_bridge.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>

#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>


using namespace std;
using namespace cv;
static const std::string FIRE = "fire_img_show";
static int mouse_x;
static int mouse_y;

// Viewer node: subscribes to the dense point-cloud image ("cloud_img",
// 64FC4 — one lidar point (x, y, z, pad) per pixel) and the camera frame
// ("image_fire"), shows the camera frame in an OpenCV window and overlays
// the lidar coordinate of the pixel under the mouse cursor.
class fire_img_show
{

public:

  fire_img_show()
  {
      cout<< "show:image"<<endl;
      row = 480; // image height in pixels (rows)
      col = 640; // image width in pixels (columns)
      sub1.subscribe(n,"cloud_img",100);
      sub2.subscribe(n,"image_fire",300);
      sync.connectInput( sub1,  sub2);
      sync.registerCallback(  boost::bind(&fire_img_show::chatterCallback, this,_1, _2)  );
      cv::namedWindow(FIRE);


  }
  ~fire_img_show()
  {
       cv::destroyWindow(FIRE);
  }
  // Synchronized handler for a (cloud image, camera image) pair.
  void chatterCallback(const  boost::shared_ptr<const sensor_msgs::Image>& cloud_img,const  boost::shared_ptr<const sensor_msgs::Image>& image_fire );

  // OpenCV mouse hook: records the cursor position in the file-scope
  // mouse_x / mouse_y (static because HighGUI wants a plain function ptr).
  static void on_Mouse(int event, int x, int y, int flags, void* param);

  // Finds the valid lidar pixel nearest to (r, c); result in mouse_r/mouse_c.
  bool cloud_position(int r,int c);


private:
   ros::NodeHandle n;

   message_filters::Subscriber<sensor_msgs::Image> sub1; // cloud image
   message_filters::Subscriber<sensor_msgs::Image> sub2; // camera image
   typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> MySyncPolicy;
   message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)}; // large queue: the two topics arrive at different rates

   ros::Time time;     // stamp of the most recent cloud image
   int row; // image height (rows)
   int col; // image width (columns)
   int mouse_r;        // row of the lidar pixel chosen by cloud_position()
   int mouse_c;        // column of that pixel
   cv::Mat cloud_img;  // 64FC4 lidar-point-per-pixel image
   cv::Mat image_fire; // BGR camera frame being annotated
   string str;         // overlay text "(x,y,z)" (persists across frames)

};
// Mouse hook: on every cursor move, remember the pixel under the cursor in
// the file-scope mouse_x / mouse_y so the image callback can look it up.
void fire_img_show::on_Mouse(int event, int x, int y, int flags, void* param)
{
    if (event == CV_EVENT_MOUSEMOVE)
    {
        mouse_x = x;
        mouse_y = y;
    }
}

//-------------------------------------------------------------------------
// Synchronized callback: converts the two ROS images to cv::Mat, looks up
// the lidar coordinate under the mouse cursor (falling back to the nearest
// valid pixel when the cloud is sparse there) and draws it on the frame.
// Note: `str` is a member, so if no point is found near the cursor the
// previously shown text is drawn again (original behaviour, kept).
//-------------------------------------------------------------------------
void fire_img_show::chatterCallback(const  boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,const  boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg )
{
    time=cloud_img_msg->header.stamp;

    // Extract the OpenCV images from the ROS messages.
    cv_bridge::CvImagePtr cv_ptr1 = cv_bridge::toCvCopy(cloud_img_msg, sensor_msgs::image_encodings::TYPE_64FC4 );
    cloud_img  = cv_ptr1 -> image;

    cv_bridge::CvImagePtr cv_ptr2 = cv_bridge::toCvCopy(image_fire_msg, sensor_msgs::image_encodings::BGR8);
    image_fire  = cv_ptr2 -> image;

    // (Re-)register the mouse hook; redundant after the first call but harmless.
    setMouseCallback(FIRE, on_Mouse, 0);

    // Stay far enough from the border that cloud_position() can search its
    // +/-4 column / +/-9 row neighbourhood without leaving the image.
    if(20<mouse_x && mouse_x<620 && 20<mouse_y && mouse_y<460)
    {
        const Vec4d& p = cloud_img.at<Vec4d>(mouse_y, mouse_x);
        if (p[0] > 0)                                   // pixel has a lidar point: use it
        {
            ostringstream inStr;
            inStr << setiosflags(ios::fixed) << setprecision(9);
            inStr << "(" << p[0] << "," << p[1] << "," << p[2] << ")";
            str = inStr.str();
        }
        else if (p[0] == 0 && cloud_position(mouse_y, mouse_x))  // sparse: nearest neighbour
        {
            const Vec4d& q = cloud_img.at<Vec4d>(mouse_r, mouse_c);
            ostringstream inStr;
            inStr << setiosflags(ios::fixed) << setprecision(9);
            inStr << "(" << q[0] << "," << q[1] << "," << q[2] << ")";
            str = inStr.str();
        }

        // Draw with the C++ API instead of the deprecated IplImage /
        // cvInitFont / cvPutText C API. CV_RGB(255,0,0) == Scalar(0,0,255)
        // in BGR order; scale 0.4 / thickness 1 match the old cvInitFont.
        const Point org = (mouse_x > 300)
                              ? Point(mouse_x - 300 - 15, mouse_y - 5)  // keep text inside the image
                              : Point(mouse_x + 5, mouse_y - 5);
        putText(image_fire, str, org, FONT_HERSHEY_SIMPLEX, 0.4, Scalar(0, 0, 255), 1, CV_AA);
    }

    // Mark the cursor position itself with a small filled red dot.
    circle(image_fire, Point(mouse_x, mouse_y), 2, Scalar(0, 0, 255), -1);
    imshow(FIRE, image_fire);
    waitKey(1);
}

bool fire_img_show::cloud_position(int r,int c)
{
    cv::Mat point(4,3,CV_64F,Scalar());
    for(int j=c;j>(c-5);j--)
    {
        if(cloud_img.at<Vec4d>(r, j)[0]>0)
        {
            point.at<double>(0,0)=r;
            point.at<double>(0,1)=j;
            point.at<double>(0,2)=c-j;
            break;
        }
    }
    for(int j=c;j<(c+5);j++)
    {
        if(cloud_img.at<Vec4d>(r, j)[0]>0)
        {
            point.at<double>(1,0)=r;
            point.at<double>(1,1)=j;
            point.at<double>(1,2)=j-c;
            break;
        }
    }
    if(point.at<double>(0,2)>0 || point.at<double>(1,2)>0)
    {
        if(point.at<double>(0,2)>0 && point.at<double>(1,2)>0)
        {
            if(point.at<double>(0,2)<point.at<double>(1,2))
            {
                mouse_r=point.at<double>(0,0);
                mouse_c=point.at<double>(0,1);
            }else
            {
                mouse_r=point.at<double>(1,0);
                mouse_c=point.at<double>(1,1);
            }
        }else
        {
            if(point.at<double>(0,2)>0)
            {
                mouse_r=point.at<double>(0,0);
                mouse_c=point.at<double>(0,1);
            }else
            {
                mouse_r=point.at<double>(1,0);
                mouse_c=point.at<double>(1,1);
            }
        }

         return true;
    }
    //--------------------------------------------------
    for(int i=r;i>(r-10);i--)
    {
        if(cloud_img.at<Vec4d>(i, c)[0]>0)
        {
            point.at<double>(2,0)=i;
            point.at<double>(2,1)=c;
            point.at<double>(2,2)=r-i;
            break;
        }
    }
    for(int i=r;i<(r+10);i++)
    {
        if(cloud_img.at<Vec4d>(i, c)[0]>0)
        {
            point.at<double>(3,0)=i;
            point.at<double>(3,1)=c;
            point.at<double>(3,2)=i-r;
            break;
        }
    }
    if(point.at<double>(2,2)>0 || point.at<double>(3,2)>0)
    {
        if(point.at<double>(2,2)>0 && point.at<double>(3,2)>0)
        {
            if(point.at<double>(2,2)<point.at<double>(3,2))
            {
                mouse_r=point.at<double>(2,0);
                mouse_c=point.at<double>(2,1);
            }else
            {
                mouse_r=point.at<double>(3,0);
                mouse_c=point.at<double>(3,1);
            }
        }else
        {
            if(point.at<double>(2,2)>0)
            {
                mouse_r=point.at<double>(2,0);
                mouse_c=point.at<double>(2,1);
            }else
            {
                mouse_r=point.at<double>(3,0);
                mouse_c=point.at<double>(3,1);
            }
        }

        return true;
    }
    return false;
}

//-------------------------------------------------------------------------
// Entry point: bring up the viewer node and hand control to the ROS loop.
//-------------------------------------------------------------------------
int main(int argc,char** argv)
{
  ros::init(argc, argv, "fire_img_show");

  fire_img_show viewer;  // subscribes and opens the window in its constructor
  ros::spin();           // single-threaded callback processing

  return 0;
}




鼠标位置处的GPS

对上述程序做简单修改:

//*************************************************************************************************
//    显示图像 获取鼠标位置处的实际空间位置
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion fire_img_show
//
//****************************************************************************************************

#include <ros/ros.h>
#include <boost/thread.hpp>
#include<iostream>
#include<sstream>

#include <cv_bridge/cv_bridge.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>

#include <sensor_msgs/Image.h>
#include <sensor_msgs/PointCloud2.h>

#define PI 3.1415926
using namespace std;
using namespace cv;
static const std::string FIRE = "fire_img_show";
static int mouse_x;
static int mouse_y;

// GPS-enabled viewer: same as the plain viewer, but additionally subscribes
// to the packed pose image "CV_GPS_R_IL" and converts the lidar point under
// the cursor into absolute GPS coordinates before drawing it.
class fire_img_show
{

public:

  fire_img_show()
  {
      cout<< "show:image"<<endl;
      row = 480; // image height in pixels (rows)
      col = 640; // image width in pixels (columns)
      sub1.subscribe(n,"cloud_img",100);
      sub2.subscribe(n,"image_fire",300);
      sub3.subscribe(n,"CV_GPS_R_IL",100);
      sync.connectInput( sub1,  sub2 ,sub3);
      sync.registerCallback(  boost::bind(&fire_img_show::chatterCallback, this,_1, _2 ,_3)  );
      cv::namedWindow(FIRE);


  }
  ~fire_img_show()
  {
       cv::destroyWindow(FIRE);
  }
  // Synchronized handler for (cloud image, camera image, pose image).
  void chatterCallback(const  boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,const  boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg,const  boost::shared_ptr<const sensor_msgs::Image>& CV_G_R_ni_msg );

  // OpenCV mouse hook: records the cursor position in mouse_x / mouse_y.
  static void on_Mouse(int event, int x, int y, int flags, void* param);

  // Finds the valid lidar pixel nearest to (r, c); result in mouse_r/mouse_c.
  bool cloud_position(int r,int c);

  // In-place conversion of a metric point (initial frame) to (lat, lon, alt).
  void mi_to_du(cv::Mat I_point);

  // Unpacks GPS / R / I2L from the latest CV_G_R_IL pose image.
  void get_CV_G_R_IL();


private:
   ros::NodeHandle n;

   message_filters::Subscriber<sensor_msgs::Image> sub1; // cloud image
   message_filters::Subscriber<sensor_msgs::Image> sub2; // camera image
   message_filters::Subscriber<sensor_msgs::Image> sub3; // packed pose image
   typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image ,sensor_msgs::Image> MySyncPolicy;
   message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)}; // large queue: topics arrive at different rates

   ros::Time time;     // stamp of the most recent cloud image
   int row; // image height (rows)
   int col; // image width (columns)
   int mouse_r;        // row chosen by cloud_position()
   int mouse_c;        // column chosen by cloud_position()
   cv::Mat cloud_img;  // 64FC4 lidar-point-per-pixel image
   cv::Mat image_fire; // BGR camera frame
   cv::Mat CV_G_R_IL;  // 4x4x3 packed pose image (see get_CV_G_R_IL)
   cv::Mat GPS;// 3x1: current GPS fix
   cv::Mat R; // 4x4 rotation: initial frame -> IMU frame
   cv::Mat I2L;// 4x4 lidar -> IMU transform (rotation + translation)

   string str;         // overlay text, persists across frames

};
// Mouse hook: on every cursor move, remember the pixel under the cursor in
// the file-scope mouse_x / mouse_y so the image callback can look it up.
void fire_img_show::on_Mouse(int event, int x, int y, int flags, void* param)
{
    if (event == CV_EVENT_MOUSEMOVE)
    {
        mouse_x = x;
        mouse_y = y;
    }
}

//-------------------------------------------------------------------------
// Synchronized callback: converts the three ROS images to cv::Mat, finds
// the lidar point under the cursor (or its nearest valid neighbour),
// transforms it into the initial frame via R * I2L, converts metres to
// GPS degrees with mi_to_du() and draws the result on the camera frame.
// Note: `str` is a member, so the previous text is re-drawn when no point
// is found near the cursor (original behaviour, kept).
//-------------------------------------------------------------------------
void fire_img_show::chatterCallback(const  boost::shared_ptr<const sensor_msgs::Image>& cloud_img_msg,const  boost::shared_ptr<const sensor_msgs::Image>& image_fire_msg,const  boost::shared_ptr<const sensor_msgs::Image>& CV_G_R_ni_msg )
{
    time=cloud_img_msg->header.stamp;

    // Extract the OpenCV images from the ROS messages.
    cv_bridge::CvImagePtr cv_ptr1 = cv_bridge::toCvCopy(cloud_img_msg, sensor_msgs::image_encodings::TYPE_64FC4 );
    cloud_img  = cv_ptr1 -> image;

    cv_bridge::CvImagePtr cv_ptr2 = cv_bridge::toCvCopy(image_fire_msg, sensor_msgs::image_encodings::BGR8);
    image_fire  = cv_ptr2 -> image;

    cv_bridge::CvImagePtr cv_ptr3 = cv_bridge::toCvCopy(CV_G_R_ni_msg, sensor_msgs::image_encodings::TYPE_64FC3 );
    CV_G_R_IL  = cv_ptr3 -> image;

    get_CV_G_R_IL();   // unpack GPS / R / I2L from the pose image

    // (Re-)register the mouse hook; redundant after the first call but harmless.
    setMouseCallback(FIRE, on_Mouse, 0);

    // Stay far enough from the border for cloud_position()'s search window.
    if(20<mouse_x && mouse_x<620 && 20<mouse_y && mouse_y<460)
    {
        const Vec4d& p = cloud_img.at<Vec4d>(mouse_y, mouse_x);

        // Pick the source pixel: the cursor itself if it has a lidar point,
        // otherwise the nearest valid neighbour. r < 0 means "no point".
        int r = -1, c = -1;
        if (p[0] > 0)
        {
            r = mouse_y;
            c = mouse_x;
        }
        else if (p[0] == 0 && cloud_position(mouse_y, mouse_x))
        {
            r = mouse_r;
            c = mouse_c;
        }

        if (r >= 0)
        {
            const Vec4d& q = cloud_img.at<Vec4d>(r, c);
            Mat point = (Mat_<double>(4, 1) << q[0], q[1], q[2], 1);
            Mat I_point = R * I2L * point;  // lidar -> IMU -> initial frame
            mi_to_du(I_point);              // metres -> (lat, lon, alt), in place
            ostringstream inStr;
            inStr << setiosflags(ios::fixed) << setprecision(9);
            inStr << "(" << I_point.at<double>(0) << "," << I_point.at<double>(1) << "," << I_point.at<double>(2) << ")";
            str = inStr.str();
        }

        // Draw with the C++ API instead of the deprecated IplImage /
        // cvInitFont / cvPutText C API. CV_RGB(255,0,0) == Scalar(0,0,255).
        const Point org = (mouse_x > 300)
                              ? Point(mouse_x - 300 - 15, mouse_y - 5)  // keep text inside the image
                              : Point(mouse_x + 5, mouse_y - 5);
        putText(image_fire, str, org, FONT_HERSHEY_SIMPLEX, 0.4, Scalar(0, 0, 255), 1, CV_AA);
    }

    // Mark the cursor position itself with a small filled red dot.
    circle(image_fire, Point(mouse_x, mouse_y), 2, Scalar(0, 0, 255), -1);
    imshow(FIRE, image_fire);
    waitKey(1);
}
// Converts I_point from metric offsets in the initial frame into absolute
// GPS coordinates, in place. The by-value cv::Mat still acts as an
// out-parameter because cv::Mat copies share their data buffer.
//   element 0: north offset [m] -> latitude  (1 deg latitude ~= 111 km)
//   element 1: east  offset [m] -> longitude (scaled by cos(latitude))
//   element 2: height offset [m] -> altitude
// GPS layout (from get_CV_G_R_IL / the publisher): GPS(0)=lon, GPS(1)=lat,
// GPS(2)=alt. The sign flip on element 2 mirrors the -nz used when the
// publisher built its transform — TODO confirm the vertical convention.
void fire_img_show::mi_to_du(cv::Mat I_point)
{
    I_point.at<double>(0)=I_point.at<double>(0)/111000+GPS.at<double>(1);// latitude
    double lon=111000*cos(GPS.at<double>(1)/180*PI);// metres per degree of longitude at this latitude
    I_point.at<double>(1)=I_point.at<double>(1)/lon+GPS.at<double>(0);// longitude
    I_point.at<double>(2)=-I_point.at<double>(2)+GPS.at<double>(2);// altitude

}
// Unpacks the 4x4, 3-channel pose image published by the dense-cloud node:
//   channel 0: row 0 = GPS fix, row 1 = metric offsets, row 2 = quaternion
//   channel 1: 3x3 rotation (padded to 4x4), initial frame -> IMU frame
//   channel 2: full 4x4 lidar -> IMU transform (rotation + translation)
void fire_img_show::get_CV_G_R_IL()
{
     //get gps
     GPS= (Mat_<double>(3, 1) << CV_G_R_IL.at<Vec3d>(0,0)[0],CV_G_R_IL.at<Vec3d>(0,1)[0],CV_G_R_IL.at<Vec3d>(0,2)[0]);
     //cout << "gps"<<GPS<<setprecision(9)<<endl;
     // translation from the initial frame:  CV_G_R_IL.at<Vec3d>(1,0)[0]  (1,1)[0]  (1,2)[0]
     // quaternion:  CV_G_R_IL.at<Vec3d>(2,0)[0]  (2,1)[0]  (2,2)[0]  (2,3)[0]
     // channel 1: 4x4 rotation, initial frame -> IMU frame
     R= (Mat_<double>(4, 4)<< CV_G_R_IL.at<Vec3d>(0,0)[1] ,CV_G_R_IL.at<Vec3d>(0,1)[1],CV_G_R_IL.at<Vec3d>(0,2)[1],0,CV_G_R_IL.at<Vec3d>(1,0)[1] ,CV_G_R_IL.at<Vec3d>(1,1)[1],CV_G_R_IL.at<Vec3d>(1,2)[1],0,CV_G_R_IL.at<Vec3d>(2,0)[1] ,CV_G_R_IL.at<Vec3d>(2,1)[1],CV_G_R_IL.at<Vec3d>(2,2)[1],0,0,0,0,1);
     // channel 2: 4x4 lidar -> IMU transform
     I2L= (Mat_<double>(4, 4)<< CV_G_R_IL.at<Vec3d>(0,0)[2] ,CV_G_R_IL.at<Vec3d>(0,1)[2],CV_G_R_IL.at<Vec3d>(0,2)[2],CV_G_R_IL.at<Vec3d>(0,3)[2],CV_G_R_IL.at<Vec3d>(1,0)[2] ,CV_G_R_IL.at<Vec3d>(1,1)[2],CV_G_R_IL.at<Vec3d>(1,2)[2],CV_G_R_IL.at<Vec3d>(1,3)[2],CV_G_R_IL.at<Vec3d>(2,0)[2] ,CV_G_R_IL.at<Vec3d>(2,1)[2],CV_G_R_IL.at<Vec3d>(2,2)[2],CV_G_R_IL.at<Vec3d>(2,3)[2],0,0,0,1);
}

bool fire_img_show::cloud_position(int r,int c)
{
    cv::Mat point(4,3,CV_64F,Scalar());
    for(int j=c;j>(c-5);j--)
    {
        if(cloud_img.at<Vec4d>(r, j)[0]>0)
        {
            point.at<double>(0,0)=r;
            point.at<double>(0,1)=j;
            point.at<double>(0,2)=c-j;
            break;
        }
    }
    for(int j=c;j<(c+5);j++)
    {
        if(cloud_img.at<Vec4d>(r, j)[0]>0)
        {
            point.at<double>(1,0)=r;
            point.at<double>(1,1)=j;
            point.at<double>(1,2)=j-c;
            break;
        }
    }
    if(point.at<double>(0,2)>0 || point.at<double>(1,2)>0)
    {
        if(point.at<double>(0,2)>0 && point.at<double>(1,2)>0)
        {
            if(point.at<double>(0,2)<point.at<double>(1,2))
            {
                mouse_r=point.at<double>(0,0);
                mouse_c=point.at<double>(0,1);
            }else
            {
                mouse_r=point.at<double>(1,0);
                mouse_c=point.at<double>(1,1);
            }
        }else
        {
            if(point.at<double>(0,2)>0)
            {
                mouse_r=point.at<double>(0,0);
                mouse_c=point.at<double>(0,1);
            }else
            {
                mouse_r=point.at<double>(1,0);
                mouse_c=point.at<double>(1,1);
            }
        }

         return true;
    }
    //--------------------------------------------------
    for(int i=r;i>(r-10);i--)
    {
        if(cloud_img.at<Vec4d>(i, c)[0]>0)
        {
            point.at<double>(2,0)=i;
            point.at<double>(2,1)=c;
            point.at<double>(2,2)=r-i;
            break;
        }
    }
    for(int i=r;i<(r+10);i++)
    {
        if(cloud_img.at<Vec4d>(i, c)[0]>0)
        {
            point.at<double>(3,0)=i;
            point.at<double>(3,1)=c;
            point.at<double>(3,2)=i-r;
            break;
        }
    }
    if(point.at<double>(2,2)>0 || point.at<double>(3,2)>0)
    {
        if(point.at<double>(2,2)>0 && point.at<double>(3,2)>0)
        {
            if(point.at<double>(2,2)<point.at<double>(3,2))
            {
                mouse_r=point.at<double>(2,0);
                mouse_c=point.at<double>(2,1);
            }else
            {
                mouse_r=point.at<double>(3,0);
                mouse_c=point.at<double>(3,1);
            }
        }else
        {
            if(point.at<double>(2,2)>0)
            {
                mouse_r=point.at<double>(2,0);
                mouse_c=point.at<double>(2,1);
            }else
            {
                mouse_r=point.at<double>(3,0);
                mouse_c=point.at<double>(3,1);
            }
        }

        return true;
    }
    return false;
}

//-------------------------------------------------------------------------
// Entry point: bring up the GPS viewer node and hand control to ROS.
//-------------------------------------------------------------------------
int main(int argc,char** argv)
{
  ros::init(argc, argv, "fire_img_show");

  fire_img_show viewer;  // subscribes and opens the window in its constructor
  ros::spin();           // single-threaded callback processing

  return 0;
}


修改后的点云密集化程序如下:

//*************************************************************************************************
//    获取密集点云,迭代发布:cloud_cut
//    source ~/catkin_ws/devel/setup.bash &&  rosrun my_lidar_cam_fusion my_cloud_dense
//
//    http://docs.ros.org/en/melodic/api/sensor_msgs/html/image__encodings_8h.html
//
//****************************************************************************************************



#include "ros/ros.h"
#include <math.h>
#include <iostream>
#include <cmath>
#include "std_msgs/String.h"
#include <boost/thread.hpp>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h> //ROS图象类型的编码函数
#include <image_transport/image_transport.h> //在ROS系统中的话题上发布和订阅图象消息
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <sbg_driver/SbgImuData.h>
#include <sbg_driver/SbgEkfQuat.h>
#include <sbg_driver/SbgGpsPos.h>
#include <sbg_driver/SbgEkfNav.h>

#include <message_filters/subscriber.h>
#include <message_filters/synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/sync_policies/exact_time.h>
#include <message_filters/time_synchronizer.h>

#include <geometry_msgs/TransformStamped.h>
#include "tf/transform_datatypes.h"

#include <pcl/io/pcd_io.h>
#include <pcl/point_cloud.h>
#include <pcl/common/common.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl_ros/point_cloud.h>
#include <pcl_ros/transforms.h>
#include <pcl/point_types.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/sample_consensus/ransac.h>
#include <pcl/sample_consensus/sac_model_plane.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/filters/passthrough.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/statistical_outlier_removal.h>

#include <sensor_msgs/Imu.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/PointCloud.h>

#include <Eigen/Eigen>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <Eigen/Eigenvalues>

#define PI 3.1415926

#define rQ 40 //雷达旋转角度
#define TX 0.18//0.14 //平移0.18
#define TY 0
#define TZ 0.24//0.30  //0.24

using namespace sensor_msgs;
using namespace std;
using namespace cv;


// Dense-cloud node: synchronizes lidar scans with SBG EKF GPS/quaternion
// messages, accumulates the scans in an initial (world) frame, maps the
// accumulation back into the current lidar frame, cuts it to the camera
// frustum and publishes the result plus the pose needed to georeference it.
class cloud_dense
{

public:

  cloud_dense()
  {
      cout<< "获取密集点云,迭代发布:cloud_CUT   cloud_img  "<<endl;
      k=true;
      // Camera intrinsics (3x3) and lidar->camera extrinsics (3x4),
      // hard-coded from an offline calibration.
      M= (Mat_<double>(3, 3) << 1120.793247366556, 0, 404.6240125739656, 0, 1126.958397947255, 248.9162767468943, 0, 0, 1);
      cout << "M=" << M << endl;
      RT= (Mat_<double>(3, 4) << -0.0209971, - 0.999327, 0.0300679 , 0.01784, - 0.0238557 ,- 0.0295651, - 0.999278, - 0.0963614, 0.999495 ,- 0.0216992, - 0.0232189, 0.00614244 );
      cout << "RT=" << RT << endl;
      row = 480;
      col = 640;
      cloud_pub = n.advertise<sensor_msgs::PointCloud2>("cloud_cut", 1);// publishes the stitched & cut cloud
      imu_pub = n.advertise<sensor_msgs::Imu>("imu_RT", 1);// publishes the pose (RT)
      //image_pub = n.advertise<sensor_msgs::Image>("image_raw", 1);
      cloud_img_pub =  n.advertise<sensor_msgs::Image>("cloud_img", 1);// per-pixel cloud image
      CV_GPS_R_IL =  n.advertise<sensor_msgs::Image>("CV_GPS_R_IL", 1);// packed GPS/rotation/extrinsics image
      //sub = n.subscribe<sensor_msgs::Image>("/image_raw", 30, &ThreadListener::myCallback5,this,ros::TransportHints().tcpNoDelay());
      sub1.subscribe(n,"points_raw",10);// lidar scans
      sub2.subscribe(n,"/gps_raw",10);
      sub3.subscribe(n,"/quat_raw",100);

      sync.connectInput( sub1,  sub2, sub3 );
      sync.registerCallback(  boost::bind(&cloud_dense::chatterCallback, this,_1, _2, _3 )  );


  }
  // Synchronized handler for (point cloud, EKF nav, EKF quaternion).
  void chatterCallback(const  boost::shared_ptr<const sensor_msgs::PointCloud2>& pc2,const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg);

  // Builds T_matrix / T_ni from the latest quaternion + GPS fix.
  void get_Quat_gps(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg);

  // Voxel-grid downsampling helper.
  void Down_sampling(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_tmp,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);

  // Converts a GPS delta (lon/lat/alt) into metric offsets.
  void GetDirectDistance(double srcLon, double srcLat,double gz, double destLon, double destLat,double tz,double *x,double *y,double *z);

  // Builds the lidar->IMU transform and records the first GPS fix.
  void lidar_to_imu(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg);

  // Accumulates the scan into the initial frame (cloud_new1).
  void cloud_to_Initial_coord(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud );

  // Maps the accumulated cloud back into the current lidar frame.
  void Initial_to_lidar_coord( pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2 ,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar );

  // Keeps only points that project into the (extended) camera image.
  void Cloud_Cut(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut);

  // Saves the cut points (re-expressed in the initial frame) for iteration.
  void Effective_point(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut);

  // Publishes the cut cloud, the pose, and the packed pose image.
  void Publishe_msg(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut );

  // Packs tx/ty/tz, nx/ny/nz, quaternion, T_matrix and lidartoimu into cv_g_r.
  void Publishe_RT();

  // Publishes a per-pixel cloud image on "cloud_img".
  void Publishe_cloudImg_msg(cv::Mat cloudImg);

private:
   ros::NodeHandle n;

   message_filters::Subscriber<sensor_msgs::PointCloud2> sub1;
   message_filters::Subscriber<sbg_driver::SbgEkfNav> sub2;
   message_filters::Subscriber<sbg_driver::SbgEkfQuat> sub3;
   typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::PointCloud2, sbg_driver::SbgEkfNav,sbg_driver::SbgEkfQuat> MySyncPolicy;
   message_filters::Synchronizer<MySyncPolicy> sync{MySyncPolicy(2000)};


   ros::Publisher cloud_pub;
   ros::Publisher imu_pub;
   ros::Publisher cloud_img_pub;
   ros::Publisher CV_GPS_R_IL;
   //ros::Subscriber sub;

   bool k;// first-callback flag (debugging aid)

   double gx,gy,gz ;// first GPS fix: translation reference of the initial frame
   double x,y,z,w,tx,ty,tz;   // current quaternion and GPS fix
   // metric displacement from the first fix
   double nx,ny,nz;

   Eigen::Matrix4d lidartoimu; // 4x4 lidar -> IMU transform
   Eigen::Matrix4d l2i_ni;     // its inverse
   Eigen::Matrix4d T_matrix;   // 4x4 initial frame -> IMU transform
   Eigen::Matrix4d T_ni;       // its inverse
   pcl::PointCloud<pcl::PointXYZ> cloud_new1; // accumulated cloud (initial frame)

   ros::Time time; // stamp of the current scan

   cv::Mat M;      // 3x3 camera intrinsics
   cv::Mat RT;     // 3x4 lidar -> camera extrinsics
   cv::Mat cv_g_r; // packed 4x4x3 pose image (see Publishe_RT)
   int row; // image height (rows)
   int col; // image width (columns)

};

// Voxel-grid downsampling of cloud_tmp into cloud: keeps one representative
// point per 8 cm cube to bound the size of the accumulated cloud.
void cloud_dense::Down_sampling(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_tmp,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
{
    pcl::VoxelGrid<pcl::PointXYZ> sor;
    sor.setInputCloud(cloud_tmp);
    sor.setLeafSize(0.08f, 0.08f, 0.08f);  // 8 cm leaf (the old "20cm" note was stale)
    sor.filter(*cloud);
}

// Converts the difference between two GPS fixes (lon, lat, alt) into metric
// offsets with the flat-earth approximation (1 degree of latitude ~ 111 km;
// a degree of longitude shrinks with cos(latitude)). Results are written
// through the x / y / z out-pointers.
void cloud_dense::GetDirectDistance(double srcLon, double srcLat,double gz, double destLon, double destLat,double tz,double *x,double *y,double *z)
{
    *z = tz - gz;                                          // vertical: plain altitude delta
    *y = (destLat - srcLat) * 111000;                      // north-south metres
    *x = (destLon - srcLon) * 111000 * cos(srcLat / 180 * PI);  // east-west metres
}
//-------------------------------------------------------------------------
// Builds the fixed lidar -> IMU transform (mounting rotation about Y by rQ
// degrees, an axis permutation, and the TX/TY/TZ lever arm) and records
// the first GPS fix as the translation reference of the initial frame.
// Called once; clears the first-callback flag `k`.
//-------------------------------------------------------------------------
void cloud_dense::lidar_to_imu(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg)
{
    const double RQ = rQ * PI / 180;  // lidar mounting angle, degrees -> radians

    // First GPS fix (note the x/y swap: position.y is used as longitude,
    // matching GetDirectDistance's argument order elsewhere in this file).
    gx = gpsmsg->position.y;
    gy = gpsmsg->position.x;
    gz = gpsmsg->position.z;

    // 3x3 rotation about the Y axis by the mounting angle.
    Eigen::Matrix3d r_y;
    r_y <<  cos(RQ), 0, sin(RQ),
            0,       1, 0,
           -sin(RQ), 0, cos(RQ);

    // 3x3 axis permutation taking lidar axes into IMU axes.
    Eigen::Matrix3d r_l2I;
    r_l2I << 0,  0, 1,
             0, -1, 0,
             1,  0, 0;

    const Eigen::Matrix3d r_l_i = r_y * r_l2I;

    // Homogeneous 4x4 lidar -> IMU transform: rotation block + lever arm.
    lidartoimu.setIdentity();
    lidartoimu.block<3,3>(0,0) = r_l_i;
    lidartoimu(0,3) = TX;
    lidartoimu(1,3) = TY;
    lidartoimu(2,3) = TZ;

    l2i_ni = lidartoimu.inverse();
    k = false;  // initialization done
}

// Transforms the current scan into the initial (world) frame, accumulates
// it into cloud_new1, downsamples the accumulation and hands the result to
// Initial_to_lidar_coord(), which maps it back, cuts it and publishes it.
void cloud_dense::cloud_to_Initial_coord( pcl::PointCloud<pcl::PointXYZ>::Ptr cloud )
{
    for (std::size_t i = 0; i <cloud->size(); i++)
    {
        // Keep points with |x| > 3 m (drop the sensor's immediate
        // surroundings) and |y| within +/-20 m (limit the lateral range).
        if(3<cloud->points[i].x || cloud->points[i].x<-3)
        {
            if(-20<cloud->points[i].y && cloud->points[i].y<20)
            {
               Eigen::MatrixXd pointxyz(4,1);// homogeneous point coordinates
               pointxyz(0,0)=cloud->points[i].x;
               pointxyz(1,0)=cloud->points[i].y;
               pointxyz(2,0)=cloud->points[i].z;
               pointxyz(3,0)=1;

               Eigen::MatrixXd n=T_matrix*lidartoimu*pointxyz; // lidar -> IMU -> initial frame (4x4)
               pcl::PointXYZ point_1(  n(0,0),  n(1,0),  n(2,0) );
               cloud_new1.push_back(point_1);
               /* // dead experiment: additionally require the point to project
                  // into the camera image before accumulating it ====================

               Mat uv(3, 1, CV_64F);
               Mat point = (Mat_<double>(4, 1) << cloud->points[i].x, cloud->points[i].y, cloud->points[i].z, 1);
               uv = M * RT*point;
               if (uv.at<double>(2) == 0)
               {
                   cout << "uv.at<double>(2)=0" << endl;
                   break;
               }
               double u = uv.at<double>(0) / uv.at<double>(2);
               double v = uv.at<double>(1) / uv.at<double>(2);
               int px = int(u + 0.5);
               int py = int(v + 0.5);
               //cout << "(u,v)=" << px << "  "<< py << endl;
               // note: image.at<Vec3b>(row,col), but pixel coordinates are (x,y)=(u,v)=(col,row)
               if (0 <= px && px < col && 0 <= py && py < row)
               {
                   Eigen::MatrixXd n=T_matrix*lidartoimu*pointxyz; //4X4
                   pcl::PointXYZ point_1(  n(0,0),  n(1,0),  n(2,0) );
                   cloud_new1.push_back(point_1);
               }
               */

            }//if
       }//if
    }//for
    // Downsample the accumulated cloud before further processing.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2(new pcl::PointCloud<pcl::PointXYZ>);
    Down_sampling(cloud_new1.makeShared(), cloud2);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar(new pcl::PointCloud<pcl::PointXYZ>);
    Initial_to_lidar_coord( cloud2, cloud_lidar );
}

//-------------------------------------------------------------------------
// Maps the downsampled accumulated cloud (initial frame) back into the
// current lidar frame via (lidar->IMU)^-1 * (initial->IMU)^-1, then cuts it
// to the camera frustum, stores the cut points for the next iteration and
// publishes everything.
//-------------------------------------------------------------------------
void cloud_dense::Initial_to_lidar_coord( pcl::PointCloud<pcl::PointXYZ>::Ptr cloud2 ,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar )
{
    // Compose the transform once (instead of two 4x4 multiplies per point)
    // and use fixed-size vectors so no heap allocation happens per point.
    const Eigen::Matrix4d to_lidar = l2i_ni * T_ni;
    cloud_lidar->reserve(cloud2->size());
    for (std::size_t i = 0; i <cloud2->size(); i++)
    {
        Eigen::Vector4d p(cloud2->points[i].x,
                          cloud2->points[i].y,
                          cloud2->points[i].z,
                          1.0);
        const Eigen::Vector4d q = to_lidar * p;
        cloud_lidar->push_back(pcl::PointXYZ(q(0), q(1), q(2)));
    }

    // Cut to the camera frustum.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut(new pcl::PointCloud<pcl::PointXYZ>);
    Cloud_Cut(cloud_lidar,cloud_cut);
    // Keep the cut points as the accumulation seed for the next iteration.
    Effective_point(cloud_cut);
    Publishe_msg( cloud_cut );
}

//-------------------------------------------------------------------------
// Keeps only the lidar points whose camera projection lands inside the
// image area, extended by 100 rows at the bottom. M is the 3x3 intrinsic
// matrix, RT the 3x4 lidar->camera extrinsics. (Two earlier variants — an
// x-axis passthrough filter and a version that also rendered a per-pixel
// cloud image — existed here as dead, commented-out code and were removed.)
//-------------------------------------------------------------------------
void cloud_dense::Cloud_Cut(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_lidar,pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut)
{
    const Mat P = M * RT;  // full 3x4 projection, hoisted out of the loop
    for (std::size_t i = 0; i <cloud_lidar->size(); i++)
    {
        Mat point = (Mat_<double>(4, 1) << cloud_lidar->points[i].x, cloud_lidar->points[i].y, cloud_lidar->points[i].z, 1);
        Mat uv = P * point;
        if (uv.at<double>(2) == 0)
        {
            // Degenerate projection: abort the whole scan (original behaviour).
            cout << "uv.at<double>(2)=0" << endl;
            break;
        }
        // Perspective divide, rounded to the nearest pixel. Note that
        // image.at<Vec..>(row,col) indexing is (v,u) while pixel coords
        // are (u,v) = (col,row).
        double u = uv.at<double>(0) / uv.at<double>(2);
        double v = uv.at<double>(1) / uv.at<double>(2);
        int px = int(u + 0.5);
        int py = int(v + 0.5);
        if (0 <= px && px < col && 0 <= py && py < (row+100))
        {
            cloud_cut->push_back(cloud_lidar->points[i]);
        }
    }
}

// Publishes the per-pixel cloud image (64FC3: the x,y,z of the lidar point
// projecting to each pixel) on the "cloud_img" topic.
// NOTE(review): frame_id is "CV_GPS_R_IL" although this goes out on
// cloud_img_pub — looks like a copy-paste slip; confirm before relying on it.
void cloud_dense::Publishe_cloudImg_msg(cv::Mat cloudImg)
{
    cv_bridge::CvImage cvi;
    cvi.header.stamp = time;
    cvi.header.frame_id = "CV_GPS_R_IL";
    cvi.encoding = "64FC3";
    cvi.image = cloudImg;

    sensor_msgs::Image msg;
    cvi.toImageMsg(msg);
    cloud_img_pub.publish(msg);

}

//-------------------------------------------------------------------------
// Re-expresses the published (lidar-frame) cut cloud back in the initial
// frame and keeps it as the accumulation seed for the next iteration.
//-------------------------------------------------------------------------
void cloud_dense::Effective_point(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut)
{
    // Compose the lidar -> initial transform once and use fixed-size Eigen
    // vectors to avoid a heap allocation per point.
    const Eigen::Matrix4d to_initial = T_matrix * lidartoimu;
    pcl::PointCloud<pcl::PointXYZ> cloud_new2;
    cloud_new2.reserve(cloud_cut->size());
    for (std::size_t i = 0; i <cloud_cut->size(); i++)
    {
        Eigen::Vector4d p(cloud_cut->points[i].x,
                          cloud_cut->points[i].y,
                          cloud_cut->points[i].z,
                          1.0);
        const Eigen::Vector4d q = to_initial * p;
        cloud_new2.push_back(pcl::PointXYZ(q(0), q(1), q(2)));
    }
    cloud_new1=cloud_new2;
}
//-------------------------------------------------------------------------
// Publishes the cut cloud, the current pose (packed into a sensor_msgs::Imu
// used as a plain container: quaternion in `orientation`, metric GPS offset
// in `angular_velocity`), and the packed pose image cv_g_r.
//-------------------------------------------------------------------------
void cloud_dense::Publishe_msg(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_cut )
{
    // Point cloud.
    sensor_msgs::PointCloud2 output;
    pcl::toROSMsg(*cloud_cut, output);
    output.header.stamp = time;
    output.header.frame_id = "map";
    cloud_pub.publish(output);

    // Pose.
    sensor_msgs::Imu outimu;
    outimu.header.stamp = time;
    outimu.header.frame_id = "lidar_RT";
    outimu.orientation.x=x;
    outimu.orientation.y=y;
    outimu.orientation.z=z;   // BUG FIX: was a duplicated ".y = y"; z was never set
    outimu.orientation.w=w;
    outimu.angular_velocity.x=ny;   // axis swap + sign match the T_matrix translation (ny, nx, -nz)
    outimu.angular_velocity.y=nx;
    outimu.angular_velocity.z=-nz;
    imu_pub.publish(outimu);

    // GPS / rotation / lidar->IMU extrinsics, packed as a 4x4 3-channel image.
    cv_bridge::CvImage cvi;
    cvi.header.stamp = time;
    cvi.header.frame_id = "cloud_image";
    cvi.encoding = "64FC3";
    cvi.image = cv_g_r;

    sensor_msgs::Image msg;
    cvi.toImageMsg(msg);
    CV_GPS_R_IL.publish(msg);

}
void cloud_dense::get_Quat_gps(const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg)
{
    // Cache the orientation quaternion from the EKF message.
    x = quatmsg->quaternion.x;
    y = quatmsg->quaternion.y;
    z = quatmsg->quaternion.z;
    w = quatmsg->quaternion.w;
    // Cache GPS position; x/y are deliberately swapped here to match the
    // local frame convention used by the rest of the pipeline.
    tx = gpsmsg->position.y;
    ty = gpsmsg->position.x;
    tz = gpsmsg->position.z;

    // Displacement (nx, ny, nz) of the current fix from the origin (gx, gy, gz).
    GetDirectDistance(gx, gy, gz, tx, ty, tz, &nx, &ny, &nz);

    // Quaternion -> 3x3 rotation matrix.
    Eigen::Quaterniond quaternion(w, x, y, z);
    Eigen::Matrix3d rotation_matrix = quaternion.toRotationMatrix();

    // Assemble the homogeneous 4x4 transform
    //   [ R | t ]  with t = (ny, nx, -nz)   x->east, y->north, z->-height
    //   [ 0 | 1 ]
    // Block assignment replaces the original 16 element-wise writes.
    T_matrix.block<3,3>(0,0) = rotation_matrix;
    T_matrix(0,3) = ny;
    T_matrix(1,3) = nx;
    T_matrix(2,3) = -nz;
    T_matrix.row(3) << 0, 0, 0, 1;
    //cout<< T_matrix << endl;
    T_ni = T_matrix.inverse();

    Publishe_RT();

}

void cloud_dense::Publishe_RT()
{
    // Pack pose data into a 4x4, 3-channel double matrix (cv_g_r):
    //   channel 0: row0 = GPS (tx,ty,tz), row1 = GPS delta (nx,ny,nz),
    //              row2 = quaternion (x,y,z,w)
    //   channel 1: 4x4 transform initial frame -> IMU (T_matrix)
    //   channel 2: 4x4 transform IMU -> lidar (lidartoimu)
    cv_g_r = Mat(4, 4, CV_64FC3, Scalar());

    // Channel 0: GPS fix, displacement and orientation quaternion.
    cv_g_r.at<Vec3d>(0,0)[0] = tx;
    cv_g_r.at<Vec3d>(0,1)[0] = ty;
    cv_g_r.at<Vec3d>(0,2)[0] = tz;
    cv_g_r.at<Vec3d>(1,0)[0] = nx;
    cv_g_r.at<Vec3d>(1,1)[0] = ny;
    cv_g_r.at<Vec3d>(1,2)[0] = nz;
    cv_g_r.at<Vec3d>(2,0)[0] = x;
    cv_g_r.at<Vec3d>(2,1)[0] = y;
    cv_g_r.at<Vec3d>(2,2)[0] = z;
    cv_g_r.at<Vec3d>(2,3)[0] = w;

    // Channels 1 and 2: copy the two 4x4 transforms.
    // Loops replace the original 32 hand-written assignments.
    for (int r = 0; r < 4; ++r)
    {
        for (int c = 0; c < 4; ++c)
        {
            cv_g_r.at<Vec3d>(r,c)[1] = T_matrix(r,c);
            cv_g_r.at<Vec3d>(r,c)[2] = lidartoimu(r,c);
        }
    }

}

//  Synchronized callback: one point cloud plus matching GPS and quaternion
//  messages arrive together (ApproximateTime policy).
void cloud_dense::chatterCallback(const  boost::shared_ptr<const sensor_msgs::PointCloud2>& pc2,const  boost::shared_ptr<const sbg_driver::SbgEkfNav>& gpsmsg,const  boost::shared_ptr<const sbg_driver::SbgEkfQuat>& quatmsg)
{
    time = pc2->header.stamp;

    // Convert the ROS message to a PCL cloud, then down-sample it.
    pcl::PointCloud<pcl::PointXYZ>::Ptr raw_cloud(new pcl::PointCloud<pcl::PointXYZ>);
    pcl::fromROSMsg(*pc2, *raw_cloud);
    pcl::PointCloud<pcl::PointXYZ>::Ptr sampled(new pcl::PointCloud<pcl::PointXYZ>);
    Down_sampling(raw_cloud, sampled);

    if (k)
    {
        // Calibration phase: compute the lidar-to-IMU rotation/translation.
        lidar_to_imu(gpsmsg);
        return;
    }

    // Normal phase: extract quaternion and GPS, then map the cloud into
    // the initial coordinate frame.
    get_Quat_gps(gpsmsg, quatmsg);
    cloud_to_Initial_coord(sampled);
}

int main(int argc, char **argv)
{
    // Start the node; a 3-thread spinner services the synchronized
    // subscriptions concurrently.
    ros::init(argc, argv, "my_cloud_dense");
    cloud_dense node;

    ros::MultiThreadedSpinner spinner(3);
    spinner.spin();
    return 0;
}


效果

1.显示鼠标处的雷达坐标

在这里插入图片描述

2.显示鼠标处的GPS

在这里插入图片描述

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
随着自动驾驶技术的不断发展,多模态感知成为了实现自动驾驶的关键技术之一。激光雷达和相机是自动驾驶中最常用的两种传感器,它们分别具有高精度测距和高分辨率成像的特点。如何将激光雷达和相机的信息融合起来,实现更加全面、准确的环境感知,成为了研究的热点。 面向自动驾驶多模态感知的激光雷达-相机融合框架主要包括以下几个步骤: 1. 数据预处理:对激光雷达和相机采集到的数据进行预处理,包括去噪、校准、配准等操作,以确保数据的准确性和一致性。 2. 特征提取:对激光雷达和相机数据进行特征提取,提取出各自的特征信息。激光雷达可以提取出点云数据,相机可以提取出图像特征点、颜色等信息。 3. 特征融合:将激光雷达和相机提取出的特征融合起来,形成一个多模态感知的环境模型。常用的融合方法包括点云-图像投影融合、特征点匹配融合等。 4. 目标检测与跟踪:利用融合后的环境模型,进行目标检测与跟踪。可以利用深度学习等方法进行目标检测,利用卡尔曼滤波等方法进行目标跟踪。 5. 场景分割与建图:根据融合后的环境模型,对环境进行场景分割,将场景分成不同的区域,同时进行三维建图,建立起环境模型。 6. 路径规划与控制:基于环境模型和目标检测结果,进行路径规划与控制,实现自动驾驶。 总之,面向自动驾驶多模态感知的激光雷达-相机融合框架可以有效提高自动驾驶系统的环境感知能力,为实现自动驾驶提供更加可靠、安全的技术支持。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值