刚性物体的鲁棒姿态估计

在本教程中,我们将展示如何在存在杂乱和遮挡的场景中估计刚性物体的对齐姿态。

一、The code

1#include <Eigen/Core>
  2
  3#include <pcl/point_types.h>
  4#include <pcl/point_cloud.h>
  5#include <pcl/common/time.h>
  6#include <pcl/console/print.h>
  7#include <pcl/features/normal_3d_omp.h>
  8#include <pcl/features/fpfh_omp.h>
  9#include <pcl/filters/filter.h>
 10#include <pcl/filters/voxel_grid.h>
 11#include <pcl/io/pcd_io.h>
 12#include <pcl/registration/sample_consensus_prerejective.h>
 13#include <pcl/visualization/pcl_visualizer.h>
 14
 15// Types
 16typedef pcl::PointNormal PointNT;
 17typedef pcl::PointCloud<PointNT> PointCloudT;
 18typedef pcl::FPFHSignature33 FeatureT;
 19typedef pcl::FPFHEstimationOMP<PointNT,PointNT,FeatureT> FeatureEstimationT;
 20typedef pcl::PointCloud<FeatureT> FeatureCloudT;
 21typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT;
 22
 23// Align a rigid object to a scene with clutter and occlusions
 24int
 25main (int argc, char **argv)
 26{
 27  // Point clouds
 28  PointCloudT::Ptr object (new PointCloudT);
 29  PointCloudT::Ptr object_aligned (new PointCloudT);
 30  PointCloudT::Ptr scene_before_downsampling (new PointCloudT);
 31  PointCloudT::Ptr scene (new PointCloudT);
 32  FeatureCloudT::Ptr object_features (new FeatureCloudT);
 33  FeatureCloudT::Ptr scene_features (new FeatureCloudT);
 34  
 35  // Get input object and scene
 36  if (argc != 3)
 37  {
 38    pcl::console::print_error ("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
 39    return (1);
 40  }
 41  
 42  // Load object and scene
 43  pcl::console::print_highlight ("Loading point clouds...\n");
 44  if (pcl::io::loadPCDFile<PointNT> (argv[1], *object) < 0 ||
 45      pcl::io::loadPCDFile<PointNT> (argv[2], *scene_before_downsampling) < 0)
 46  {
 47    pcl::console::print_error ("Error loading object/scene file!\n");
 48    return (1);
 49  }
 50  
 51  // Downsample
 52  pcl::console::print_highlight ("Downsampling...\n");
 53  pcl::VoxelGrid<PointNT> grid;
 54  const float leaf = 0.005f;
 55  grid.setLeafSize (leaf, leaf, leaf);
 56  grid.setInputCloud (object);
 57  grid.filter (*object);
 58  grid.setInputCloud (scene_before_downsampling);
 59  grid.filter (*scene);
 60  
 61  // Estimate normals for scene
 62  pcl::console::print_highlight ("Estimating scene normals...\n");
 63  pcl::NormalEstimationOMP<PointNT,PointNT> nest;
 64  nest.setRadiusSearch (0.005);
 65  nest.setInputCloud (scene);
 66  nest.setSearchSurface (scene_before_downsampling);
 67  nest.compute (*scene);
 68  
 69  // Estimate features
 70  pcl::console::print_highlight ("Estimating features...\n");
 71  FeatureEstimationT fest;
 72  fest.setRadiusSearch (0.025);
 73  fest.setInputCloud (object);
 74  fest.setInputNormals (object);
 75  fest.compute (*object_features);
 76  fest.setInputCloud (scene);
 77  fest.setInputNormals (scene);
 78  fest.compute (*scene_features);
 79  
 80  // Perform alignment
 81  pcl::console::print_highlight ("Starting alignment...\n");
 82  pcl::SampleConsensusPrerejective<PointNT,PointNT,FeatureT> align;
 83  align.setInputSource (object);
 84  align.setSourceFeatures (object_features);
 85  align.setInputTarget (scene);
 86  align.setTargetFeatures (scene_features);
 87  align.setMaximumIterations (50000); // Number of RANSAC iterations
 88  align.setNumberOfSamples (3); // Number of points to sample for generating/prerejecting a pose
 89  align.setCorrespondenceRandomness (5); // Number of nearest features to use
 90  align.setSimilarityThreshold (0.95f); // Polygonal edge length similarity threshold
 91  align.setMaxCorrespondenceDistance (2.5f * leaf); // Inlier threshold
 92  align.setInlierFraction (0.25f); // Required inlier fraction for accepting a pose hypothesis
 93  {
 94    pcl::ScopeTime t("Alignment");
 95    align.align (*object_aligned);
 96  }
 97  
 98  if (align.hasConverged ())
 99  {
100    // Print results
101    printf ("\n");
102    Eigen::Matrix4f transformation = align.getFinalTransformation ();
103    pcl::console::print_info ("    | %6.3f %6.3f %6.3f | \n", transformation (0,0), transformation (0,1), transformation (0,2));
104    pcl::console::print_info ("R = | %6.3f %6.3f %6.3f | \n", transformation (1,0), transformation (1,1), transformation (1,2));
105    pcl::console::print_info ("    | %6.3f %6.3f %6.3f | \n", transformation (2,0), transformation (2,1), transformation (2,2));
106    pcl::console::print_info ("\n");
107    pcl::console::print_info ("t = < %0.3f, %0.3f, %0.3f >\n", transformation (0,3), transformation (1,3), transformation (2,3));
108    pcl::console::print_info ("\n");
109    pcl::console::print_info ("Inliers: %i/%i\n", align.getInliers ().size (), object->size ());
110    
111    // Show alignment
112    pcl::visualization::PCLVisualizer visu("Alignment");
113    visu.addPointCloud (scene, ColorHandlerT (scene, 0.0, 255.0, 0.0), "scene");
114    visu.addPointCloud (object_aligned, ColorHandlerT (object_aligned, 0.0, 0.0, 255.0), "object_aligned");
115    visu.spin ();
116  }
117  else
118  {
119    pcl::console::print_error ("Alignment failed!\n");
120    return (1);
121  }
122  
123  return (0);
124}

二、解释

1、我们从定义便利类型开始,以免代码混乱。

// Types
typedef pcl::PointNormal PointNT;
typedef pcl::PointCloud<PointNT> PointCloudT;
typedef pcl::FPFHSignature33 FeatureT;
typedef pcl::FPFHEstimationOMP<PointNT,PointNT,FeatureT> FeatureEstimationT;
typedef pcl::PointCloud<FeatureT> FeatureCloudT;
typedef pcl::visualization::PointCloudColorHandlerCustom<PointNT> ColorHandlerT;

2、然后我们实例化必要的数据容器,检查输入参数并加载物体和场景点云。虽然我们定义的基本点类型包含法线字段,但通常只有物体点云自带法线信息,场景点云没有——我们将在下面为场景估计法线。

 // Point clouds
  PointCloudT::Ptr object (new PointCloudT);
  PointCloudT::Ptr object_aligned (new PointCloudT);
  PointCloudT::Ptr scene_before_downsampling (new PointCloudT);
  PointCloudT::Ptr scene (new PointCloudT);
  FeatureCloudT::Ptr object_features (new FeatureCloudT);
  FeatureCloudT::Ptr scene_features (new FeatureCloudT);
  
  // Get input object and scene
  if (argc != 3)
  {
    pcl::console::print_error ("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
    return (1);
  }
  
  // Load object and scene
  pcl::console::print_highlight ("Loading point clouds...\n");
  if (pcl::io::loadPCDFile<PointNT> (argv[1], *object) < 0 ||
      pcl::io::loadPCDFile<PointNT> (argv[2], *scene_before_downsampling) < 0)
  {
    pcl::console::print_error ("Error loading object/scene file!\n");
    return (1);
  }

3、为了加快处理速度,我们使用 PCL 的 VoxelGrid 类将对象和场景点云的采样率降低到 5 毫米的分辨率。

// Downsample
  pcl::console::print_highlight ("Downsampling...\n");
  pcl::VoxelGrid<PointNT> grid;
  const float leaf = 0.005f;
  grid.setLeafSize (leaf, leaf, leaf);
  grid.setInputCloud (object);
  grid.filter (*object);
  grid.setInputCloud (scene_before_downsampling);
  grid.filter (*scene);

4、现在使用 PCL 的 NormalEstimationOMP 估计场景缺失的表面法线。计算下面用于匹配的特征需要表面法线。

// Estimate normals for scene
  pcl::console::print_highlight ("Estimating scene normals...\n");
  pcl::NormalEstimationOMP<PointNT,PointNT> nest;
  nest.setRadiusSearch (0.005);
  nest.setInputCloud (scene);
  nest.setSearchSurface (scene_before_downsampling);
  nest.compute (*scene);

5、对于下采样点云中的每个点,我们现在使用 PCL 的 FPFHEstimationOMP 类来计算在对齐过程中用于匹配的快速点特征直方图 (FPFH) 描述符。

// Estimate features
  pcl::console::print_highlight ("Estimating features...\n");
  FeatureEstimationT fest;
  fest.setRadiusSearch (0.025);
  fest.setInputCloud (object);
  fest.setInputNormals (object);
  fest.compute (*object_features);
  fest.setInputCloud (scene);
  fest.setInputNormals (scene);
  fest.compute (*scene_features);

6、我们现在准备好设置对齐过程。我们使用 SampleConsensusPrerejective 类,它实现了一个高效的 RANSAC 姿态估计循环:通过 CorrespondenceRejectorPoly 类提前剔除不良的姿态假设来加速搜索。

// Perform alignment
  pcl::console::print_highlight ("Starting alignment...\n");
  pcl::SampleConsensusPrerejective<PointNT,PointNT,FeatureT> align;
  align.setInputSource (object);
  align.setSourceFeatures (object_features);
  align.setInputTarget (scene);
  align.setTargetFeatures (scene_features);
  align.setMaximumIterations (50000); // Number of RANSAC iterations
  align.setNumberOfSamples (3); // Number of points to sample for generating/prerejecting a pose
  align.setCorrespondenceRandomness (5); // Number of nearest features to use
  align.setSimilarityThreshold (0.95f); // Polygonal edge length similarity threshold
  align.setMaxCorrespondenceDistance (2.5f * leaf); // Inlier threshold
  align.setInlierFraction (0.25f); // Required inlier fraction for accepting a pose hypothesis

  • 8
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

小小蜗牛,大大梦想

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值