Running R3Live on a Custom Dataset (1)

R3Live has no built-in support for Velodyne LiDARs; the modifications below follow this fork:

https://github.com/Winsleo/r3live

The main change is in r3live_lio.cpp. Note that the raw Velodyne points are first converted to pcl::PointXYZINormal by the r3live_LiDAR_front_end node (lidar_type = 2), so it is the pcl::PointXYZINormal branch below that ends up handling them:

bool R3LIVE::get_pointcloud_data_from_ros_message( sensor_msgs::PointCloud2::ConstPtr &msg, pcl::PointCloud< pcl::PointXYZINormal > &pcl_pc )
{

    // printf("Frame [%d] %.3f ", g_LiDAR_frame_index,  msg->header.stamp.toSec() - g_camera_lidar_queue.m_first_imu_time);
    pcl::PointCloud< pcl::PointXYZI > res_pc;
    scope_color( ANSI_COLOR_YELLOW_BOLD );
    // printf_field_name(msg);
    if ( msg->fields.size() < 3 )
    {
        cout << "Get pointcloud data from ros messages fail!!!" << endl;
        scope_color( ANSI_COLOR_RED_BOLD );
        printf_field_name( msg );
        return false;
    }
    else
    {
        if ( ( msg->fields.size() == 8 ) && ( msg->fields[ 3 ].name == "intensity" ) &&
             ( msg->fields[ 4 ].name == "normal_x" ) ) // Input message type is pcl::PointXYZINormal
        {
            pcl::fromROSMsg( *msg, pcl_pc );
            return true;
        }
        // else if ( ( msg->fields.size() == 6 ) && ( msg->fields[ 3 ].name == "intensity" ) && ( msg->fields[ 4 ].name == "ring" ) && msg->fields[5].name=="time") // Input message type is pcl::PointXYZIRT
        // {
        //     pcl::PointCloud<velodyne_ros::Point> pcl_pc_in;
        //     pcl::fromROSMsg( *msg, pcl_pc_in );
        //     int    pt_count = 0;

        //     pcl_pc.resize( pcl_pc_in.size() );
        //     for ( size_t i = 0; i < pcl_pc_in.points.size(); i++ )
        //     {
        //         pcl::PointXYZINormal pcl_pc_in_point;
        //         pcl_pc_in_point.x = pcl_pc_in.points[ i ].x;
        //         pcl_pc_in_point.y = pcl_pc_in.points[ i ].y;
        //         pcl_pc_in_point.z = pcl_pc_in.points[ i ].z;
        //         pcl_pc_in_point.intensity = pcl_pc_in.points[ i ].intensity;
        //         pcl_pc_in_point.curvature = 0;
        //         // pcl_pc_in_point.normal_x = pcl_pc_in.points[ i ].ring;
        //         // pcl_pc_in_point.normal_y = pcl_pc_in.points[ i ].ring;
        //         // pcl_pc_in_point.normal_z = pcl_pc_in.points[ i ].ring;
        //         // pcl_pc_in_point.ring = pcl_pc_in.points[ i ].time;
        //         pcl_pc.points[ pt_count ] = pcl_pc_in_point;
        //         pt_count++;
        //         // res_pc.points.push_back( pcl_pc );
        //     }
        //     pcl_pc.points.resize(pt_count);

        //     return true;
        // }
        else if ( ( msg->fields.size() == 4 ) && ( msg->fields[ 3 ].name == "rgb" ) )
        {
            double maximum_range = 5;
            get_ros_parameter< double >( m_ros_node_handle, "iros_range", maximum_range, 5 );
            pcl::PointCloud< pcl::PointXYZRGB > pcl_rgb_pc;
            pcl::fromROSMsg( *msg, pcl_rgb_pc );
            double lidar_point_time = msg->header.stamp.toSec();
            int    pt_count = 0;
            pcl_pc.resize( pcl_rgb_pc.points.size() );
            for ( int i = 0; i < pcl_rgb_pc.size(); i++ )
            {
                pcl::PointXYZINormal temp_pt;
                temp_pt.x = pcl_rgb_pc.points[ i ].x;
                temp_pt.y = pcl_rgb_pc.points[ i ].y;
                temp_pt.z = pcl_rgb_pc.points[ i ].z;
                double frame_dis = sqrt( temp_pt.x * temp_pt.x + temp_pt.y * temp_pt.y + temp_pt.z * temp_pt.z );
                if ( frame_dis > maximum_range )
                {
                    continue;
                }
                temp_pt.intensity = ( pcl_rgb_pc.points[ i ].r + pcl_rgb_pc.points[ i ].g + pcl_rgb_pc.points[ i ].b ) / 3.0;
                temp_pt.curvature = 0;
                pcl_pc.points[ pt_count ] = temp_pt;
                pt_count++;
            }
            pcl_pc.points.resize( pt_count );
            return true;
        }
        else // TODO, can add by yourself
        {
            cout << "Get pointcloud data from ros messages fail!!! ";
            scope_color( ANSI_COLOR_RED_BOLD );
            printf_field_name( msg );
            return false;
        }
    }
}
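In this setup the raw /velodyne_points cloud goes through the LiDAR front end first, so the PointXYZIRT branch stays commented out. If you prefer to have this function parse the raw Velodyne cloud directly, the commented block can be restored roughly as follows; this is only a sketch, assuming the velodyne_ros::Point struct registered in modify.h below (6 fields: x, y, z, intensity, ring, time):

// Sketch: handle a raw Velodyne cloud (6 fields: x, y, z, intensity, ring, time).
// Assumes the velodyne_ros::Point struct from modify.h; as in the original commented-out
// code, ring and the per-point time are not used here (curvature is simply set to 0).
else if ( ( msg->fields.size() == 6 ) && ( msg->fields[ 3 ].name == "intensity" ) &&
          ( msg->fields[ 4 ].name == "ring" ) && ( msg->fields[ 5 ].name == "time" ) )
{
    pcl::PointCloud< velodyne_ros::Point > pcl_pc_in;
    pcl::fromROSMsg( *msg, pcl_pc_in );
    int pt_count = 0;
    pcl_pc.resize( pcl_pc_in.size() );
    for ( size_t i = 0; i < pcl_pc_in.points.size(); i++ )
    {
        pcl::PointXYZINormal temp_pt;
        temp_pt.x = pcl_pc_in.points[ i ].x;
        temp_pt.y = pcl_pc_in.points[ i ].y;
        temp_pt.z = pcl_pc_in.points[ i ].z;
        temp_pt.intensity = pcl_pc_in.points[ i ].intensity;
        temp_pt.curvature = 0;
        pcl_pc.points[ pt_count ] = temp_pt;
        pt_count++;
    }
    pcl_pc.points.resize( pt_count );
    return true;
}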

Add the custom PCL point types in modify.h:

#include <cmath>
#include <pcl/point_types.h>
namespace velodyne_ros {
  struct EIGEN_ALIGN16 Point {
      PCL_ADD_POINT4D;
      float intensity;
      float time;
      std::uint16_t ring;
      EIGEN_MAKE_ALIGNED_OPERATOR_NEW
  };
}  // namespace velodyne_ros
POINT_CLOUD_REGISTER_POINT_STRUCT(velodyne_ros::Point,
    (float, x, x)
    (float, y, y)
    (float, z, z)
    (float, intensity, intensity)
    (float, time, time)
    (std::uint16_t, ring, ring)
)

//ANCHOR robosense modify
namespace robosense_ros {
    struct EIGEN_ALIGN16 Point {
        PCL_ADD_POINT4D;
        std::uint8_t intensity;
        std::uint16_t ring;
        double timestamp;
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    };
}  // namespace robosense_ros
POINT_CLOUD_REGISTER_POINT_STRUCT(robosense_ros::Point,
    (float, x, x)
    (float, y, y)
    (float, z, z)
    (std::uint8_t, intensity, intensity)
    (std::uint16_t, ring, ring)
    (double, timestamp, timestamp)
)

namespace ouster_ros {

struct EIGEN_ALIGN16 Point {
    PCL_ADD_POINT4D;
    float intensity;
    uint32_t t;
    uint16_t reflectivity;
    uint8_t ring;
    uint16_t ambient;
    uint32_t range;
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};
}  // namespace ouster_ros

// clang-format off
POINT_CLOUD_REGISTER_POINT_STRUCT(ouster_ros::Point,
    (float, x, x)
    (float, y, y)
    (float, z, z)
    (float, intensity, intensity)
    // use std::uint32_t to avoid conflicting with pcl::uint32_t
    (std::uint32_t, t, t)
    (std::uint16_t, reflectivity, reflectivity)
    (std::uint8_t, ring, ring)
    (std::uint16_t, ambient, ambient)
    (std::uint32_t, range, range)
)
// clang-format on
template<typename T>
bool has_nan(T point) {
    // Remove NaN points, otherwise the feature association will crash:
    // the surf points may contain NaN, and PCL's removeNaN does not handle them reliably here.
    return std::isnan(point.x) || std::isnan(point.y) || std::isnan(point.z);
}
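For reference, has_nan can be used to drop invalid points when copying a converted cloud. The helper below is only an illustrative sketch (remove_nan_points is not part of R3Live):

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>

// Sketch: copy a cloud while dropping NaN points using has_nan() from modify.h.
inline pcl::PointCloud< pcl::PointXYZINormal >
remove_nan_points( const pcl::PointCloud< pcl::PointXYZINormal > &cloud_in )
{
    pcl::PointCloud< pcl::PointXYZINormal > cloud_out;
    cloud_out.points.reserve( cloud_in.points.size() );
    for ( const auto &pt : cloud_in.points )
    {
        if ( !has_nan( pt ) )
        {
            cloud_out.points.push_back( pt );
        }
    }
    cloud_out.width = cloud_out.points.size();
    cloud_out.height = 1;
    cloud_out.is_dense = true;
    return cloud_out;
}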

After these changes R3Live runs on the Velodyne data. The launch file and YAML configuration are as follows:

<launch>
    <!-- Subscribed topics -->
    <param name="/LiDAR_pointcloud_topic" type="string" value= "/velodyne_points" />
    <param name="/IMU_topic" type="string" value= "/zedm/zed_node/imu/data_raw" />
    <param name="/Image_topic" type="string" value= "/zedm/zed_node/left/image_rect_color" />
    <param name="map_output_dir" type="string" value="$(env HOME)/Documents/r3live_output/indoor" />
    <rosparam command="load" file="$(find r3live)/../config/r3live_config_redwall.yaml" />
    
    <!-- set LiDAR type as velodyne spinning LiDAR -->
    <param name="/Lidar_front_end/lidar_type" type="int" value= "2" /> 
    <param name="/Lidar_front_end/point_step" type="int" value="1" />
    <param name="r3live_lio/lio_update_point_step" type="int" value="3" />
        
    <node pkg="r3live" type="r3live_LiDAR_front_end" name="r3live_LiDAR_front_end"  output="screen" required="true"/>
    <node pkg="r3live" type="r3live_mapping" name="r3live_mapping" output="screen" required="true" />
    
    <arg name="rviz" default="1" />
    <group if="$(arg rviz)">
        <node name="rvizvisualisation" pkg="rviz" type="rviz" output="log" args="-d $(find r3live)/../config/rviz/r3live_rviz_config_ouster.rviz" />
    </group>
</launch>

The YAML configuration file (r3live_config_redwall.yaml):

Lidar_front_end:
   lidar_type: 2   # 1 for Livox-avia, 2 for velodyne, 3 for Ouster-OS1-64, 4 for RoboSense
   N_SCANS: 16     # number of LiDAR scan lines
   using_raw_point: 1  # whether to use the raw LiDAR point data
   point_step: 1   # point stride (downsampling step) of the input scan
   
r3live_common:
   if_save_pose_image: 0            # If recording camera pose and image. [default = 0]
   if_dump_log: 0                   # If recording ESIKF update log. [default = 0]
   record_offline_map: 1            # If recording offline map. [default = 1]
   pub_pt_minimum_views: 3          # Publish points which have been render up to "pub_pt_minimum_views" time. [default = 3]
   minimum_pts_size: 0.01           # The minimum distance for every two points in Global map (unit in meter). [default = 0.01] 
   image_downsample_ratio: 1        # The downsample ratio of the input image. [default = 1]
   estimate_i2c_extrinsic: 1        # If enable estimate the extrinsic between camera and IMU. [default = 1] 
   estimate_intrinsic: 1            # If enable estimate the online intrinsic calibration of the camera lens. [default = 1] 
   maximum_vio_tracked_pts: 600     # The maximum points for tracking. [default = 600]
   append_global_map_point_step: 2  # The point step of append point to global map. [default = 4]
   recent_visited_voxel_activated_time: 0 
   maximum_image_buffer: 150000     # Maximum size of the image buffer.
   tracker_minimum_depth: 1         # Minimum depth of visually tracked points.
   tracker_maximum_depth: 10        # Maximum depth of visually tracked points.
   track_windows_size: 40           # Sliding window size.
   esikf_iter_times: 20             # Number of ESIKF iterations.
   

r3live_vio:
   image_width: 640   
   image_height: 360  
   camera_intrinsic:
      [354.38533994265794, 0., 316.9161407913751, 
      0., 355.65442412870925, 186.00204948797, 
       0., 0., 1. ] 
   camera_dist_coeffs: [0., 0., 0., 0., 0. ]  #k1, k2, p1, p2, k3
   # Fine extrinsic value, from camera-LiDAR calibration.
   # camera_ext_R: [ 0.999895, -0.007797, 0.012203, 
   #                 0.007883, 0.999944, -0.007073, 
   #                 -0.012147, 0.007168, 0.999901]
   camera_ext_R: [ 0.999895, 0.007883, -0.012147,
                   -0.007797, 0.999944, 0.007168,
                   0.012203, -0.007073, 0.999901]
   # camera_ext_t: [ -0.068703, -0.094532, 0.067275]
   camera_ext_t: [ 0.068703, 0.094532, -0.067275]
   # Rough extrinsic value, from the CAD model; not accurate enough, but can be calibrated online in our datasets.
   # camera_ext_R:
   #    [0, 0, 1,
   #     -1, 0, 0,
   #     0, -1, 0]
   # camera_ext_t: [0,0,0] 
   
r3live_lio:        
   lio_update_point_step: 2   # Point step used for LIO update.  
   max_iteration: 5           # Maximum times of LIO esikf.
   lidar_time_delay: 0.127214  # The time-offset between LiDAR and IMU, provided by user. 
   filter_size_corner: 0.30   
   filter_size_surf: 0.30
   filter_size_surf_z: 0.30
   filter_size_map: 0.30
   # from Lidar to IMU
   lidar_imu_r: 
      [ 0.999895, -0.007797, 0.012203, 
        0.007883, 0.999944, -0.007073, 
        -0.012147, 0.007168, 0.999901]  
   lidar_imu_t: [ -0.068703, -0.094532, 0.067275]


There are two main remaining issues.

1. What coordinate transform does camera_ext_R in the config file represent: LiDAR-to-camera or camera-to-LiDAR?

A related issue has already been raised:

https://github.com/hku-mars/r3live/issues/16


💡 So the camera extrinsic represents the transform from the camera frame to the LiDAR frame.
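One way to sanity-check this is to invert the commented-out calibration result and compare it with the values actually used. For a transform (R, t), the inverse is (Rᵀ, -Rᵀt); since R is nearly the identity here, -Rᵀt is very close to the simple negation -t that the config uses. A minimal Eigen sketch with the matrices copied from the YAML above:

#include <iostream>
#include <Eigen/Dense>

int main()
{
    // Commented-out calibration result from the YAML (same values as lidar_imu_r / lidar_imu_t).
    Eigen::Matrix3d R;
    R << 0.999895, -0.007797,  0.012203,
         0.007883,  0.999944, -0.007073,
        -0.012147,  0.007168,  0.999901;
    Eigen::Vector3d t( -0.068703, -0.094532, 0.067275 );

    // Inverse transform: R' = R^T, t' = -R^T * t.
    Eigen::Matrix3d R_inv = R.transpose();
    Eigen::Vector3d t_inv = -R_inv * t;

    std::cout << "R_inv =\n" << R_inv << std::endl;            // matches the active camera_ext_R (the transpose)
    std::cout << "t_inv = " << t_inv.transpose() << std::endl; // close to the active camera_ext_t = -t
    return 0;
}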

2. The point cloud map cannot be saved

Export the whole offline map to file: /home/redwall/Documents/r3live_output/test.r3live
Saving globale map...
Saving global map: 100 %
Save globale map cost 499.006 ms
Saving views and camera poses 100 % ...
Save views and camera poses cost 123.524 ms
Save Rgb points to /home/redwall/Documents/r3live_output/rgb_pt
Saving offline map 100% ...
Total have 0 points.
Now write to: /home/redwall/Documents/r3live_output/rgb_pt
[rvizvisualisation-3] process has finished cleanly
log file: /home/redwall/.ros/log/a207363a-b66c-11ee-9adb-ac1203c93a01/rvizvisualisation-3*.log

The example datasets save the point cloud map without problems, but with my own dataset it reports 0 points.

Someone raised a similar problem in the issues; it may be related to the spatial and temporal calibration between the camera and the LiDAR, which still needs further investigation.
