PCL之粗配准

基于FPFH点云特征粗配准

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/time.h>
#include <pcl/console/print.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/fpfh_omp.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/visualization/pcl_visualizer.h>

// Convenience aliases so the FPFH registration pipeline below stays readable.
// Types
using PointNT = pcl::PointNormal;
using PointCloudT = pcl::PointCloud<PointNT>;
using FeatureT = pcl::FPFHSignature33;
using FeatureEstimationT = pcl::FPFHEstimationOMP<PointNT, PointNT, FeatureT>;
using FeatureCloudT = pcl::PointCloud<FeatureT>;
using ColorHandlerT = pcl::visualization::PointCloudColorHandlerCustom<PointNT>;

// Align a rigid object to a scene with clutter and occlusions
int
main(int argc, char **argv)
{
	// 然后,我们实例化必要的数据容器,检查输入参数并加载对象和场景点云。 
	// Point clouds
	PointCloudT::Ptr object(new PointCloudT);
	PointCloudT::Ptr object_aligned(new PointCloudT);
	PointCloudT::Ptr scene(new PointCloudT);
	FeatureCloudT::Ptr object_features(new FeatureCloudT);
	FeatureCloudT::Ptr scene_features(new FeatureCloudT);

	// Get input object and scene
	/*if (argc != 3)
	{
		pcl::console::print_error("Syntax is: %s object.pcd scene.pcd\n", argv[0]);
		return (1);
	}*/

	// Load object and scene
	pcl::console::print_highlight("Loading point clouds...\n");
	if (pcl::io::loadPCDFile<PointNT>("data//registration/bun090.pcd", *object) < 0 ||
		pcl::io::loadPCDFile<PointNT>("data//registration/bun045.pcd", *scene) < 0)
	{
		pcl::console::print_error("Error loading object/scene file!\n");
		return (1);
	}

	// Downsample
	// 为了加快处理速度,我们使用PCL的:pcl::VoxelGrid类将对象和场景点云的采样率下采样至5 mm。
	pcl::console::print_highlight("Downsampling...\n");
	pcl::VoxelGrid<PointNT> grid;
	const float leaf = 0.005f;
	grid.setLeafSize(leaf, leaf, leaf);
	grid.setInputCloud(object);
	grid.filter(*object);
	grid.setInputCloud(scene);
	grid.filter(*scene);

	// Estimate normals for scene
	// 首先, 生成法线
	pcl::console::print_highlight("Estimating scene normals...\n");
	pcl::NormalEstimationOMP<PointNT, PointNT> nest;
	nest.setRadiusSearch(0.01);
	nest.setInputCloud(scene);
	nest.compute(*scene);

	nest.setInputCloud(object);
	nest.compute(*object);

	// Estimate features
	// 对于下采样点云中的每个点,我们现在使用PCL的pcl::FPFHEstimationOMP<>类来计算用于对齐过程中用于匹配的快速点特征直方图(FPFH)描述符。
	pcl::console::print_highlight("Estimating features...\n");
	FeatureEstimationT fest;
	fest.setRadiusSearch(0.025);
	fest.setInputCloud(object);
	fest.setInputNormals(object);
	fest.compute(*object_features);
	fest.setInputCloud(scene);
	fest.setInputNormals(scene);
	fest.compute(*scene_features);

	// Perform alignment
	// SampleConsensusPrerejective 实现了有效的RANSAC姿势估计循环
	pcl::console::print_highlight("Starting alignment...\n");
	pcl::SampleConsensusPrerejective<PointNT, PointNT, FeatureT> align;
	align.setInputSource(object);
	align.setSourceFeatures(object_features);
	align.setInputTarget(scene);
	align.setTargetFeatures(scene_features);
	align.setMaximumIterations(50000); // 最大迭代次数
	align.setNumberOfSamples(3); // 设置采样点数
	align.setCorrespondenceRandomness(5); // 设置随机匹配点数
	align.setSimilarityThreshold(0.9f); // 相似度阈值
	align.setMaxCorrespondenceDistance(2.5f * leaf); // 最大匹配距离
	align.setInlierFraction(0.25f); // 内点比例
	{
		pcl::ScopeTime t("Alignment");
		align.align(*object_aligned); 
	}

	if (align.hasConverged())
	{
		// Print results
		printf("\n");
		Eigen::Matrix4f transformation = align.getFinalTransformation();  //获取最终变换矩阵
		pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(0, 0), transformation(0, 1), transformation(0, 2));
		pcl::console::print_info("R = | %6.3f %6.3f %6.3f | \n", transformation(1, 0), transformation(1, 1), transformation(1, 2));
		pcl::console::print_info("    | %6.3f %6.3f %6.3f | \n", transformation(2, 0), transformation(2, 1), transformation(2, 2));
		pcl::console::print_info("\n");
		pcl::console::print_info("t = < %0.3f, %0.3f, %0.3f >\n", transformation(0, 3), transformation(1, 3), transformation(2, 3));
		pcl::console::print_info("\n");
		pcl::console::print_info("Inliers: %i/%i\n", align.getInliers().size(), object->size());

		// Show alignment
		pcl::visualization::PCLVisualizer visu("Alignment");
		visu.addPointCloud(scene, ColorHandlerT(scene, 0.0, 255.0, 0.0), "scene");
		visu.addPointCloud(object_aligned, ColorHandlerT(object_aligned, 0.0, 0.0, 255.0), "object_aligned");
		visu.spin();
	}
	else
	{
		pcl::console::print_error("Alignment failed!\n");
		return (1);
	}

	return (0);
}

基于PCA粗配准

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/time.h>
#include <pcl/console/print.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/fpfh_omp.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/common/transforms.h>
#include <pcl/common/common.h>
#include <pcl/common/pca.h>

// Convenience aliases used by the PCA registration example below.
// Types

using PointXYZRGB = pcl::PointXYZRGB;
using PointNT = pcl::PointNormal;
using PointCloudT = pcl::PointCloud<PointXYZRGB>;

// Coarse registration by aligning the principal (PCA) axes of the object
// cloud with those of the scene cloud.
//
// input_obj        : source cloud to be registered
// input_scene      : target cloud
// projected_obj    : out — input_obj transformed into the scene frame
// result_transform : out — 4x4 rigid transform = scene_pca * obj_pca^-1
//
// Side effect: saves the transformed cloud to
// "data//registration//projected.pcd" and prints the intermediate matrices.
void PCA_registration(pcl::PointCloud<PointXYZRGB>::Ptr &input_obj, pcl::PointCloud<PointXYZRGB>::Ptr &input_scene, pcl::PointCloud<PointXYZRGB>::Ptr &projected_obj, Eigen::Matrix4f &result_transform) // PCA coarse registration
{
	using PointType = PointXYZRGB;
	pcl::PCA<PointType> pca;

	// ---- Object PCA frame ------------------------------------------------
	// getEigenVectors()/getMean() trigger the PCA computation themselves; no
	// explicit projection of the cloud is needed here.
	pca.setInputCloud(input_obj);
	const Eigen::Matrix3f EigenSpaceObj = pca.getEigenVectors(); // principal axes
	const Eigen::Vector3f PcaTransObj = pca.getMean().head<3>(); // centroid

	// Transform mapping PCA-space coordinates into the object frame.
	Eigen::Matrix4f transform_obj = Eigen::Matrix4f::Identity();
	transform_obj.topLeftCorner<3, 3>() = EigenSpaceObj;
	transform_obj.topRightCorner<3, 1>() = PcaTransObj;

	std::cout << transform_obj << std::endl;

	// Inverse of transform_obj: for a rigid transform, R^-1 = R^T and
	// t^-1 = -R^T * t (the eigenvector basis is orthonormal).
	const Eigen::Matrix3f EigenSpaceObjT = EigenSpaceObj.transpose();
	const Eigen::Vector3f PcaTransObj_inv = -EigenSpaceObjT * PcaTransObj;
	Eigen::Matrix4f transform_obj_inv = Eigen::Matrix4f::Identity();
	transform_obj_inv.topLeftCorner<3, 3>() = EigenSpaceObjT;
	transform_obj_inv.topRightCorner<3, 1>() = PcaTransObj_inv;

	std::cout << transform_obj_inv << std::endl;

	// ---- Scene PCA frame -------------------------------------------------
	pca.setInputCloud(input_scene);
	const Eigen::Matrix3f EigenSpaceScene = pca.getEigenVectors(); // principal axes
	const Eigen::Vector4f PcaTransScene = pca.getMean();           // centroid (homogeneous)

	Eigen::Matrix4f transform_scene = Eigen::Matrix4f::Identity();
	transform_scene.topLeftCorner<3, 3>() = EigenSpaceScene;
	transform_scene.topRightCorner<3, 1>() = PcaTransScene.head<3>();

	std::cout << transform_scene << std::endl;

	// Chain: object frame -> shared PCA space -> scene frame.
	result_transform = transform_scene * transform_obj_inv;
	pcl::transformPointCloud(*input_obj, *projected_obj, result_transform);
	pcl::io::savePCDFileBinaryCompressed("data//registration//projected.pcd", *projected_obj);
}

// Align a rigid object to a scene with clutter and occlusions
int main(int argc, char **argv)
{
	// 然后,我们实例化必要的数据容器,检查输入参数并加载对象和场景点云。 
	// Point clouds
	PointCloudT::Ptr object(new PointCloudT);
	PointCloudT::Ptr projected_obj(new PointCloudT);
	PointCloudT::Ptr scene(new PointCloudT);

	pcl::io::loadPCDFile("data//registration/room_scan1.pcd", *object);
	pcl::io::loadPCDFile("data//registration/room_scan2.pcd", *scene);

	Eigen::Matrix4f result_transform;

	PCA_registration(object, scene, projected_obj, result_transform);

	return (0);
}

4PCS算法粗配准

#include <Eigen/Core>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/common/time.h>
#include <pcl/console/print.h>
#include <pcl/features/normal_3d_omp.h>
#include <pcl/features/fpfh_omp.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/sample_consensus_prerejective.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/visualization/pcl_visualizer.h>
#include <pcl/common/transforms.h>
#include <pcl/common/common.h>
#include <pcl/common/pca.h>

// Types
// NOTE(review): the original code aliased pcl::PointXYZ under the misleading
// name "PointXYZRGB" even though the points carry no color. A clearer alias
// is introduced; the old name is kept for backward compatibility.
using PointT = pcl::PointXYZ;
using PointXYZRGB = pcl::PointXYZ; // deprecated, misleading — prefer PointT
using PointCloudT = pcl::PointCloud<PointT>;


// PCL 4PCS (4-Points Congruent Sets) coarse registration.
#include <pcl/registration/ia_fpcs.h> // 4PCS algorithm
#include <iostream>                   // std::cout (was relied on transitively)
// Coarse-register source_cloud onto target_cloud with the 4PCS initial
// alignment, print the resulting transform, and save the aligned cloud to
// "data//registration//pcs.pcd". Does nothing on alignment failure.
void _4PCS(pcl::PointCloud<pcl::PointXYZ>::Ptr& target_cloud,
	pcl::PointCloud<pcl::PointXYZ>::Ptr& source_cloud)
{
	//-------------------- set up the 4PCS aligner --------------------
	pcl::registration::FPCSInitialAlignment<pcl::PointXYZ, pcl::PointXYZ> fpcs;
	fpcs.setInputSource(source_cloud);  // source cloud
	fpcs.setInputTarget(target_cloud);  // target cloud
	fpcs.setApproxOverlap(0.7);         // approximate source/target overlap fraction
	fpcs.setDelta(0.01);                // correspondence distance accuracy (meters)
	fpcs.setNumberOfSamples(100);       // samples used to validate a candidate pose

	// align() already writes the source cloud transformed by the final pose
	// into pcs, so no extra transformPointCloud pass is needed afterwards.
	pcl::PointCloud<pcl::PointXYZ>::Ptr pcs(new pcl::PointCloud<pcl::PointXYZ>);
	fpcs.align(*pcs);

	if (!fpcs.hasConverged())
	{
		pcl::console::print_error("4PCS alignment failed!\n");
		return;
	}

	std::cout << "变换矩阵:" << fpcs.getFinalTransformation() << std::endl;
	pcl::io::savePCDFileBinaryCompressed("data//registration//pcs.pcd", *pcs);
}



// Align a rigid object to a scene with clutter and occlusions
int main(int argc, char **argv)
{
	// 然后,我们实例化必要的数据容器,检查输入参数并加载对象和场景点云。 
	// Point clouds
	PointCloudT::Ptr object(new PointCloudT);
	PointCloudT::Ptr projected_obj(new PointCloudT);
	PointCloudT::Ptr scene(new PointCloudT);

	pcl::io::loadPCDFile("data//registration/bun090.pcd", *object);
	pcl::io::loadPCDFile("data//registration/bun270.pcd", *scene);

	Eigen::Matrix4f result_transform;

	_4PCS(object, scene);

	return (0);
}
  • 2
    点赞
  • 13
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

AICVer

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值