# Configured Open3D a few days ago; implementing the manual-registration demo today.
# coding:utf-8
import numpy as np
import copy
import open3d as o3d
#用于可视化配准后的点云
def draw_registration_result(source, target, transformation):
    """Visualize how well *source* aligns to *target* under *transformation*.

    Deep copies are used so the caller's clouds keep their original
    colors and poses; only the copies are recolored and transformed.
    """
    src = copy.deepcopy(source)
    tgt = copy.deepcopy(target)
    src.paint_uniform_color([1, 0.706, 0])       # source: yellow
    tgt.paint_uniform_color([0, 0.651, 0.929])   # target: blue
    src.transform(transformation)                # apply the 4x4 matrix to the source copy
    o3d.visualization.draw_geometries([src, tgt])
#点云选点
def pick_points(pcd):
    """Let the user interactively pick points on *pcd*.

    Opens an editing visualizer (shift+left-click picks a point,
    shift+right-click undoes, Q closes the window) and returns the
    indices of the picked points.
    """
    print("we: 394467238 ")
    print("1) 至少选取3个点用于配准, 使用按键 [shift + 鼠标左键] 选取")
    print(" 使用按键 [shift + 鼠标右键] 取消选取点")
    print("2) 选取点结束后,按键 Q 结束选取,进入配准计算。")
    picker = o3d.visualization.VisualizerWithEditing()
    picker.create_window()
    picker.add_geometry(pcd)
    picker.run()  # blocks until the user closes the window
    picker.destroy_window()
    print("")
    return picker.get_picked_points()
def demo_manual_registration():
    """Manual ICP demo: pick >= 3 corresponding points in two clouds,
    estimate a rough point-to-point transform from them, then refine
    it with ICP and visualize the result.

    Raises:
        ValueError: if fewer than 3 points are picked in either cloud,
            or the two pick lists differ in length.
    """
    print("手动 ICP 示例")
    print("we: 394467238 ,")
    # Load the two clouds to register.
    source = o3d.io.read_point_cloud("C:\\Users\\Len\\Desktop\\2.pcd")
    target = o3d.io.read_point_cloud("C:\\Users\\Len\\Desktop\\3.pcd")
    # Pick corresponding points in each cloud (returns point indices).
    picked_id_source = pick_points(source)
    picked_id_target = pick_points(target)
    # Explicit validation instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip these safety checks.
    if len(picked_id_source) < 3 or len(picked_id_target) < 3:
        raise ValueError("at least 3 points must be picked in each cloud")
    if len(picked_id_source) != len(picked_id_target):
        raise ValueError("both clouds must have the same number of picked points")
    # Build an (n, 2) correspondence array: [source_index, target_index].
    corr = np.zeros((len(picked_id_source), 2))
    corr[:, 0] = picked_id_source
    corr[:, 1] = picked_id_target
    # Rough alignment from the user-given correspondences.
    print("Compute a rough transform using the correspondences given by user")
    p2p = o3d.pipelines.registration.TransformationEstimationPointToPoint()
    trans_init = p2p.compute_transformation(source, target,
                                            o3d.utility.Vector2iVector(corr))
    print(o3d.utility.Vector2iVector(corr))
    # Point-to-point ICP refinement starting from the rough transform.
    print("Perform point-to-point ICP refinement")
    threshold = 0.03  # 3 cm max correspondence distance
    reg_p2p = o3d.pipelines.registration.registration_icp(
        source, target, threshold, trans_init,
        o3d.pipelines.registration.TransformationEstimationPointToPoint())
    # Show and report the refined transformation.
    draw_registration_result(source, target, reg_p2p.transformation)
    print(reg_p2p.transformation)
# Script entry point: run the manual registration demo when executed directly.
if __name__ == "__main__":
    demo_manual_registration()
#include <boost/lexical_cast.hpp>
#include "NumCpp.hpp"
#include <iostream>
#include <memory>
#include <thread>
#include <random>
#include <cstdlib>
#include <Open3D/Open3D.h>
#include <Eigen/Dense>
#include <cstdlib>
#include "open3d/geometry/KDTreeFlann.h"
#include "open3d/geometry/PointCloud.h"
#include "open3d/utility/Console.h"
#include "open3d/utility/Helper.h"
// Open an editing visualizer so the user can pick points on `cloud_ptr`
// (shift+left-click picks, Q closes the window). Returns the indices of
// the picked points.
std::vector<size_t> a(std::shared_ptr<const open3d::geometry::Geometry> cloud_ptr) {
    open3d::visualization::VisualizerWithEditing editor;
    editor.CreateVisualizerWindow();
    editor.AddGeometry(cloud_ptr);
    editor.Run();  // blocks until the user closes the window
    editor.DestroyVisualizerWindow();
    return editor.GetPickedPoints();
}
// Visualize how well `source` aligns to `target` under `transformation`.
// Copies are colored (source yellow, target blue) so the caller's clouds
// are left untouched; only the source copy is transformed.
void draw_registration_result(const open3d::geometry::PointCloud& source,
                              const open3d::geometry::PointCloud& target,
                              const Eigen::Matrix4d& transformation)
{
    auto src = std::make_shared<open3d::geometry::PointCloud>(source);
    auto tgt = std::make_shared<open3d::geometry::PointCloud>(target);
    src->PaintUniformColor(Eigen::Vector3d(1, 0.706, 0));
    tgt->PaintUniformColor(Eigen::Vector3d(0, 0.651, 0.929));
    src->Transform(transformation);
    open3d::visualization::DrawGeometries({ src, tgt }, "Registration result");
}
// NOTE(review): these directives sit mid-file; conventionally <vector>
// belongs with the includes at the top, and broad using-directives are
// best avoided in favor of qualified names.
using namespace std;
#include <vector>
using namespace open3d::registration;
// Manual ICP demo (C++ twin of the Python script above): pick >= 3
// corresponding points in two clouds, estimate a rough transform from
// them, refine with point-to-point ICP, then print and visualize it.
int main() {
    auto cloud_ptr = std::make_shared<open3d::geometry::PointCloud>();  // target cloud
    auto cloud_old = std::make_shared<open3d::geometry::PointCloud>();  // source cloud
    open3d::io::ReadPointCloud("C:\\Users\\Len\\Desktop/2.pcd", *cloud_old);
    open3d::io::ReadPointCloud("C:\\Users\\Len\\Desktop/3.pcd", *cloud_ptr);

    // Interactive picking: indices of corresponding points in each cloud.
    std::vector<size_t> mo = a(cloud_old);
    std::vector<size_t> mn = a(cloud_ptr);

    // Was unchecked: the correspondence loop ran to mn.size() while
    // indexing mo[j], reading out of bounds if the user picked fewer
    // points in the source cloud. Also require >= 3 picks, matching the
    // Python version's preconditions.
    if (mo.size() != mn.size() || mo.size() < 3) {
        std::cerr << "Pick the same number of points (>= 3) in both clouds.\n";
        return 1;
    }

    // Build the correspondence set: (source_index, target_index) pairs.
    open3d::registration::CorrespondenceSet corres_ji;
    for (size_t j = 0; j < mn.size(); j++) {
        corres_ji.push_back(Eigen::Vector2i(static_cast<int>(mo[j]),
                                            static_cast<int>(mn[j])));
    }

    // Rough alignment from the user-given correspondences.
    open3d::registration::TransformationEstimationPointToPoint p2p;
    auto sl = p2p.ComputeTransformation(*cloud_old, *cloud_ptr, corres_ji);

    // Point-to-point ICP refinement starting from the rough transform.
    double threshold = 0.03;  // 3 cm max correspondence distance
    open3d::registration::RegistrationResult res = open3d::registration::RegistrationICP(
        *cloud_old, *cloud_ptr, threshold, sl,
        open3d::registration::TransformationEstimationPointToPoint());

    std::cout << res.transformation_;
    draw_registration_result(*cloud_old, *cloud_ptr, res.transformation_);
    return 0;
}