Point Cloud Segmentation with K-Means: Principle + Code

K-means is a clustering method from unsupervised learning. Its principle is very simple: it is essentially an iterative segmentation method that repeatedly reassigns points based on the cluster centroids (centers of mass).

Its main steps are:
(1) Initialize k center points.
(2) For each point, compute the Euclidean distance to every center, collecting the results in a set dist.
(3) Find the index i of the minimum value in dist and assign the point to the i-th cluster.
(4) Recompute the center of every cluster, and repeat steps 2-3 until the centers no longer change or the maximum number of iterations is reached.
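
Written out, these two alternating steps greedily minimize the within-cluster sum of squared distances (a standard formulation, added here for reference rather than taken from the original post):

$$
J=\sum_{k=1}^{K}\sum_{p\in S_k}\lVert p-c_k\rVert^2,\qquad c_k=\frac{1}{\lvert S_k\rvert}\sum_{p\in S_k}p
$$

where $S_k$ is the set of points currently assigned to center $c_k$: steps (2)-(3) minimize $J$ with the centers fixed, and step (4) minimizes $J$ with the assignments fixed.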

For the detailed mathematical derivation, see the relevant articles on Zhihu.

Show the code

Language: C++
Dependencies: PCL 1.9.1
// KMeans.h
#pragma once

#include <iostream>
#include <algorithm>
#include <pcl/point_cloud.h>
#include <pcl/io/pcd_io.h>
#include <pcl/common/centroid.h>

class KMeans
{
private:
	unsigned int max_iteration_;        // maximum number of iterations
	const unsigned int cluster_num_;    // number of clusters k
	// Euclidean distance between two points
	double pointsDist(const pcl::PointXYZ& p1, const pcl::PointXYZ& p2);

public:
	// Cluster centers; must be filled with cluster_num_ initial points before calling kMeans().
	pcl::PointCloud<pcl::PointXYZ>::Ptr centre_points_;

	KMeans(unsigned int k, unsigned int max_iteration);
	// Partitions cloud into cluster_num_ clusters and writes them to cluster_cloud1.
	void kMeans(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, std::vector<pcl::PointCloud<pcl::PointXYZ>>& cluster_cloud1);

	~KMeans() {}
};
// KMeans.cpp
#include "KMeans.h"

// Euclidean distance between two points
double KMeans::pointsDist(const pcl::PointXYZ& p1, const pcl::PointXYZ& p2)
{
	return std::sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y) + (p1.z - p2.z) * (p1.z - p2.z));
}

KMeans::KMeans(unsigned int k, unsigned int max_iteration)
	: max_iteration_(max_iteration), cluster_num_(k), centre_points_(new pcl::PointCloud<pcl::PointXYZ>)
{
}

void KMeans::kMeans(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud, std::vector<pcl::PointCloud<pcl::PointXYZ>>& cluster_cloud1)
{
	if (!cloud->empty() && !centre_points_->empty())
	{
		unsigned int iterations = 0;
		double sum_diff = 0.2;	// total movement of the centers between two iterations
		std::vector<pcl::PointCloud<pcl::PointXYZ>> cluster_cloud;
		// Stop when the maximum number of iterations is reached or the centers move less than 0.05 in total.
		while (!(iterations >= max_iteration_ || sum_diff <= 0.05))
		{
			sum_diff = 0;
			cluster_cloud.clear();
			cluster_cloud.resize(cluster_num_);
			// Assignment step: put every point into the cluster of its nearest center.
			for (size_t i = 0; i < cloud->points.size(); ++i)
			{
				std::vector<double> dists;
				dists.reserve(cluster_num_);
				for (size_t j = 0; j < cluster_num_; ++j)
				{
					dists.emplace_back(pointsDist(cloud->points[i], centre_points_->points[j]));
				}
				std::vector<double>::const_iterator min_dist = std::min_element(dists.cbegin(), dists.cend());
				unsigned int it = std::distance(dists.cbegin(), min_dist);	// index of the smallest distance (0-based)
				cluster_cloud[it].points.push_back(cloud->points[i]);	// assign the point to the nearest cluster
			}
			// Update step: recompute the centroid of every cluster.
			pcl::PointCloud<pcl::PointXYZ> new_centre;
			for (size_t k = 0; k < cluster_num_; ++k)
			{
				pcl::PointXYZ centre = centre_points_->points[k];	// keep the old center if the cluster turns out empty
				if (!cluster_cloud[k].empty())
				{
					Eigen::Vector4f centroid;
					pcl::compute3DCentroid(cluster_cloud[k], centroid);
					centre.x = centroid[0];
					centre.y = centroid[1];
					centre.z = centroid[2];
				}
				new_centre.points.push_back(centre);
			}
			// Accumulate how far the centers moved in this iteration.
			for (size_t s = 0; s < cluster_num_; ++s)
			{
				std::cerr << "centre " << centre_points_->points[s] << std::endl;
				std::cerr << "new centre " << new_centre.points[s] << std::endl;
				sum_diff += pointsDist(new_centre.points[s], centre_points_->points[s]);
			}
			std::cerr << "sum_diff " << sum_diff << std::endl;
			*centre_points_ = new_centre;

			++iterations;
		}
		for (size_t k = 0; k < cluster_num_; ++k)
			std::cerr << "cluster " << k << " size " << cluster_cloud[k].size() << std::endl;

		cluster_cloud1.assign(cluster_cloud.cbegin(), cluster_cloud.cend());	// copy the clusters to the output
	}
}

// main.cpp
#include "KMeans.h"

int main()
{
    KMeans test(2, 10);    // k = 2 clusters, at most 10 iterations

    // Set the initial cluster centers (hand-picked for this particular cloud).
    test.centre_points_->points.push_back(pcl::PointXYZ(15.88, -5.5, 11.12));
    test.centre_points_->points.push_back(pcl::PointXYZ(15.69, -4.98, 11.05));

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
    std::vector<pcl::PointCloud<pcl::PointXYZ>> output_cloud;
    pcl::io::loadPCDFile("2.pcd", *cloud);
    std::cerr << "raw cloud size " << cloud->points.size() << std::endl;
    test.kMeans(cloud, output_cloud);

    // Save each cluster as a separate PCD file.
    for (int i = 0; i < 2; ++i)
    {
        std::cerr << "cluster " << i << " size " << output_cloud[i].points.size() << std::endl;
        output_cloud[i].width = static_cast<std::uint32_t>(output_cloud[i].points.size());
        output_cloud[i].height = 1;
        output_cloud[i].resize(output_cloud[i].width * output_cloud[i].height);
        pcl::io::savePCDFile("kmeans" + std::to_string(i) + ".pcd", output_cloud[i]);
    }
    return 0;
}
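
The two initial centers in main.cpp are hand-picked for this particular cloud. As a minimal sketch of an alternative (the helper randomInitCentres below is hypothetical and not part of the original code), the initial centers could instead be sampled at random from the loaded cloud:

// Hypothetical helper: sample k points from the cloud as initial centers.
#include <random>

void randomInitCentres(const pcl::PointCloud<pcl::PointXYZ>::Ptr& cloud,
                       unsigned int k,
                       pcl::PointCloud<pcl::PointXYZ>::Ptr& centres)
{
    std::mt19937 gen(std::random_device{}());
    std::uniform_int_distribution<size_t> pick(0, cloud->points.size() - 1);
    centres->points.clear();
    for (unsigned int i = 0; i < k; ++i)
        centres->points.push_back(cloud->points[pick(gen)]);  // duplicates are possible, but unlikely for large clouds
}

Calling randomInitCentres(cloud, 2, test.centre_points_) after loading the PCD file and before test.kMeans(...) would replace the two hand-picked points; k-means++-style seeding would be more robust still, but is beyond the scope of this post.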

Experimental Results

[Figure: segmentation result]

Summary

1. The number of clusters must be known in advance, and reasonable initial center values are required (a sketch of sampling them from the cloud is given after main.cpp above).
2. It works poorly on pole-like (rod-shaped) point clouds, but reasonably well on planar, surface-like ones.
3. K-means ultimately relies on similarity. Like all segmentation methods based on Euclidean distance (Euclidean cluster extraction, DBSCAN), the distance distribution of the point cloud is decisive for the final result; switching to a different distance or metric may work better, as sketched below.
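
As one concrete reading of point 3 (a hypothetical variation, not something evaluated in the original post), pointsDist could be swapped for another metric, for example the Manhattan (L1) distance, without changing the rest of the algorithm:

// Hypothetical drop-in alternative to KMeans::pointsDist using the Manhattan (L1) metric.
#include <cmath>

double manhattanDist(const pcl::PointXYZ& p1, const pcl::PointXYZ& p2)
{
    return std::fabs(p1.x - p2.x) + std::fabs(p1.y - p2.y) + std::fabs(p1.z - p2.z);
}

Note that with a non-Euclidean metric the arithmetic mean computed in the update step is no longer the exact minimizer of the within-cluster cost (for L1 it would be the component-wise median), so this is a heuristic variation rather than textbook k-means.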

