深蓝学院 高翔《自动驾驶与机器人中的SLAM技术》: Chapter 6 Homework


Problem 1

  • Use g2o
  • Perform the nearest-neighbor search outside of the graph optimization
  • Implement point-to-point ICP and point-to-line ICP separately
// Add the following two member functions to the ICP class (Icp2d)
    bool AlignGaussNewtonPoint2PointWithg2o(SE2& init_pose);

    bool AlignGaussNewtonPoint2PlaneWithg2o(SE2& init_pose);

Point-to-point ICP

  • First, define the edge needed by the graph optimization: add an EdgeSE2Point2Point class in g2o_types.h
class EdgeSE2Point2Point : public g2o::BaseUnaryEdge<2, Vec2d, VertexSE2> {
   public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
    EdgeSE2Point2Point(const Vec2d& point)
        : point_(point){}  // point: laser point in the scan (body) frame

    void computeError() override {
        VertexSE2* v = (VertexSE2*)_vertices[0];
        SE2 pose = v->estimate();
        Vec2d pw = pose * point_;       // transform the scan point into the world frame
        _error = pw - measurement();    // measurement() holds the associated nearest target point
    }

    bool read(std::istream& is) override { return true; }
    bool write(std::ostream& os) const override { return true; }

   private:
    Vec2d point_;
};
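Since the edge above does not override linearizeOplus(), g2o falls back to numeric differentiation, which is sufficient for this assignment. If an analytic Jacobian is preferred, a minimal sketch (to be placed inside the class, assuming VertexSE2 applies its update as t += [dx, dy] and theta += dtheta, as in the course code) could look like this:

    // Optional analytic Jacobian; without it g2o differentiates numerically.
    // Error e = R(theta) * p + t - q, so de/dt = I and de/dtheta = dR/dtheta * p.
    void linearizeOplus() override {
        VertexSE2* v = (VertexSE2*)_vertices[0];
        double theta = v->estimate().so2().log();
        _jacobianOplusXi << 1, 0, -std::sin(theta) * point_[0] - std::cos(theta) * point_[1],
                            0, 1,  std::cos(theta) * point_[0] - std::sin(theta) * point_[1];
    }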
  • Implement point-to-point ICP
bool Icp2d::AlignGaussNewtonPoint2PointWithg2o(SE2& init_pose)
{
    int iterations = 10;
    SE2 current_pose = init_pose;
    const float max_dis = 0.3;      // maximum nearest-neighbor distance (note: nearestKSearch returns squared distances)
    const int min_effect_pts = 20;  // minimum number of effective points

    // pose graph optimization
    using BlockSolverType = g2o::BlockSolver<g2o::BlockSolverTraits<3, 1>>;
    using LinearSolverType = g2o::LinearSolverCholmod<BlockSolverType::PoseMatrixType>;
    auto* solver = new g2o::OptimizationAlgorithmLevenberg(
        g2o::make_unique<BlockSolverType>(g2o::make_unique<LinearSolverType>()));
    g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm(solver);

    for (int i = 0; i < iterations; i++) {
        optimizer.clear();

        auto* v = new VertexSE2();
        v->setId(0);
        v->setEstimate(current_pose);
        optimizer.addVertex(v);
        
        int effective_num = 0;  // number of effective (inlier) points
        for (size_t j = 0; j < source_scan_->ranges.size(); ++j) {
            float r = source_scan_->ranges[j];
            if (r < source_scan_->range_min || r > source_scan_->range_max) {
                continue;
            }

            float angle = source_scan_->angle_min + j * source_scan_->angle_increment;
            Vec2d pd = Vec2d(r * std::cos(angle), r * std::sin(angle));
            Vec2d pw = current_pose * pd;
            Point2d pt;
            pt.x = pw.x();
            pt.y = pw.y();

            std::vector<int> nn_idx;
            std::vector<float> dis;
            kdtree_.nearestKSearch(pt, 1, nn_idx, dis);

            if (nn_idx.size() > 0 && dis[0] < max_dis) {
                effective_num++;

                auto* edge = new EdgeSE2Point2Point(pd);
                edge->setId(j);
                edge->setVertex(0, v);
                edge->setMeasurement(Vec2d(target_cloud_->points[nn_idx[0]].x, target_cloud_->points[nn_idx[0]].y));
                edge->setInformation(Mat2d::Identity());
                optimizer.addEdge(edge);
            }
        }

        if (effective_num < min_effect_pts) {
            return false;
        }

        optimizer.setVerbose(true);
        optimizer.initializeOptimization();
        optimizer.optimize(5);

        current_pose = v->estimate();

    }

    init_pose = current_pose;
    LOG(INFO) << "estimated pose: " << current_pose.translation().transpose()
              << ", theta: " << current_pose.so2().log();


    return true;
}
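A minimal usage sketch for the new method, assuming the Icp2d interface from the course code (SetTarget()/SetSource()) and two already-loaded Scan2d frames; target_scan and source_scan are hypothetical variable names:

sad::Icp2d icp;
icp.SetTarget(target_scan);   // builds the KD-tree over the target scan internally
icp.SetSource(source_scan);
sad::SE2 pose;                // identity as the initial guess
if (icp.AlignGaussNewtonPoint2PointWithg2o(pose)) {
    LOG(INFO) << "aligned pose: " << pose.translation().transpose()
              << ", theta: " << pose.so2().log();
}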

Point-to-line ICP

  • Likewise, define the edge needed by the graph optimization: add an EdgeSE2Point2Plane class in g2o_types.h
class EdgeSE2Point2Plane : public g2o::BaseUnaryEdge<1, double, VertexSE2> {
   public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
    EdgeSE2Point2Plane(const Vec2d& point, const Vec3d& plane)
        : point_(point), plane_(plane){}

    void computeError() override {
        VertexSE2* v = (VertexSE2*)_vertices[0];
        SE2 pose = v->estimate();
        Vec2d pw = pose * point_;   // transform the scan point into the world frame
        // signed point-to-line residual for the fitted line a*x + b*y + c = 0
        _error[0] = plane_[0] * pw[0] + plane_[1] * pw[1] + plane_[2];
    }

    bool read(std::istream& is) override { return true; }
    bool write(std::ostream& os) const override { return true; }

   private:
    Vec2d point_;
    Vec3d plane_;
};
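As with the point-to-point edge, an analytic Jacobian is optional. Since the residual is e = a*pw.x + b*pw.y + c, the chain rule gives the 1x3 Jacobian n^T * [I, dR/dtheta * p] with n = (a, b). A sketch under the same VertexSE2 update convention:

    // Optional analytic Jacobian: chain rule through pw = R(theta) * p + t.
    void linearizeOplus() override {
        VertexSE2* v = (VertexSE2*)_vertices[0];
        double theta = v->estimate().so2().log();
        Eigen::Matrix<double, 2, 3> dpw_dpose;   // derivative of pw w.r.t. (x, y, theta)
        dpw_dpose << 1, 0, -std::sin(theta) * point_[0] - std::cos(theta) * point_[1],
                     0, 1,  std::cos(theta) * point_[0] - std::sin(theta) * point_[1];
        _jacobianOplusXi = plane_.head<2>().transpose() * dpw_dpose;   // 1x2 * 2x3 = 1x3
    }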
  • Implement point-to-line ICP
bool Icp2d::AlignGaussNewtonPoint2PlaneWithg2o(SE2& init_pose)
{
    int iterations = 10;
    SE2 current_pose = init_pose;
    const float max_dis = 0.3;      // maximum nearest-neighbor distance (note: nearestKSearch returns squared distances)
    const int min_effect_pts = 20;  // minimum number of effective points

    // pose graph optimization
    using BlockSolverType = g2o::BlockSolver<g2o::BlockSolverTraits<3, 1>>;
    using LinearSolverType = g2o::LinearSolverCholmod<BlockSolverType::PoseMatrixType>;
    auto* solver = new g2o::OptimizationAlgorithmLevenberg(
        g2o::make_unique<BlockSolverType>(g2o::make_unique<LinearSolverType>()));
    g2o::SparseOptimizer optimizer;
    optimizer.setAlgorithm(solver);

    for (int iter = 0; iter < iterations; iter++) {
        optimizer.clear();

        auto* v = new VertexSE2();
        v->setId(0);
        v->setEstimate(current_pose);
        optimizer.addVertex(v);

        int effective_num = 0;  // number of effective (inlier) points

        for (size_t i = 0; i < source_scan_->ranges.size(); ++i) {
            float r = source_scan_->ranges[i];
            if (r < source_scan_->range_min || r > source_scan_->range_max) {
                continue;
            }

            float angle = source_scan_->angle_min + i * source_scan_->angle_increment;
            Vec2d pd = Vec2d(r * std::cos(angle), r * std::sin(angle));
            Vec2d pw = current_pose * pd;
            Point2d pt;
            pt.x = pw.x();
            pt.y = pw.y();

            // find the 5 nearest neighbors
            std::vector<int> nn_idx;
            std::vector<float> dis;
            kdtree_.nearestKSearch(pt, 5, nn_idx, dis);

            std::vector<Vec2d> effective_pts;  // valid neighbors within max_dis
            for (size_t j = 0; j < nn_idx.size(); ++j) {
                if (dis[j] < max_dis) {
                    effective_pts.emplace_back(
                        Vec2d(target_cloud_->points[nn_idx[j]].x, target_cloud_->points[nn_idx[j]].y));
                }
            }

            // not enough valid neighbors to fit a line; skip this point
            if (effective_pts.size() < 3) {
                continue;
            }

            // fit a line to the neighbors and add a point-to-line edge
            Vec3d line_coeffs;
            if (math::FitLine2D(effective_pts, line_coeffs)) {
                effective_num++;

                auto* edge = new EdgeSE2Point2Plane(pd, line_coeffs);
                edge->setId(i);
                edge->setVertex(0, v);
                edge->setInformation(Eigen::Matrix<double, 1, 1>::Identity());
                optimizer.addEdge(edge);
            }

        } 

        if (effective_num < min_effect_pts) {
            return false;
        }

        optimizer.setVerbose(true);
        optimizer.initializeOptimization();
        optimizer.optimize(5);

        current_pose = v->estimate();
    }

    init_pose = current_pose;
    LOG(INFO) << "estimated pose: " << current_pose.translation().transpose()
              << ", theta: " << current_pose.so2().log();

    return true;
}

Problem 2

  • Use GetPixelValue() from math_utils.h for bilinear interpolation, and modify the image-gradient computation in likelihood_field.cc (a bilinear-interpolation sketch follows the code below)
    // Image gradient: the likelihood field is treated as a smooth function. The original code used central differences on integer pixels:
    // float dx = 0.5 * (field_.at<float>(pf[1], pf[0] + 1) - field_.at<float>(pf[1], pf[0] - 1));
    // float dy = 0.5 * (field_.at<float>(pf[1] + 1, pf[0]) - field_.at<float>(pf[1] - 1, pf[0]));

    // Bilinear interpolation
    float dx = 0.5 * (math::GetPixelValue<float>(field_, pf[0] + 1, pf[1]) -
                        math::GetPixelValue<float>(field_, pf[0] - 1, pf[1]));
    float dy = 0.5 * (math::GetPixelValue<float>(field_, pf[0], pf[1] + 1) -
                        math::GetPixelValue<float>(field_, pf[0], pf[1] - 1));
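For reference, bilinear interpolation on a cv::Mat can be sketched as below. This is only an illustration of the idea, not the exact GetPixelValue() in math_utils.h, which may differ in boundary handling:

#include <algorithm>
#include <opencv2/core/core.hpp>

// Sketch of bilinear interpolation on an image of type T (e.g. the float likelihood field).
template <typename T>
inline float GetPixelValueSketch(const cv::Mat& img, float x, float y) {
    // clamp to the valid interpolation range so that (ix + 1, iy + 1) stays inside the image
    x = std::max(0.0f, std::min(x, img.cols - 2.0f));
    y = std::max(0.0f, std::min(y, img.rows - 2.0f));
    int ix = static_cast<int>(x), iy = static_cast<int>(y);
    float dx = x - ix, dy = y - iy;
    // blend the four neighboring pixels with weights from the fractional coordinates
    return (1 - dx) * (1 - dy) * img.at<T>(iy, ix) +
           dx * (1 - dy) * img.at<T>(iy, ix + 1) +
           (1 - dx) * dy * img.at<T>(iy + 1, ix) +
           dx * dy * img.at<T>(iy + 1, ix + 1);
}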

Problem 3

  • First, take two frames from the example data as the point clouds used for degeneracy detection
  • Cluster each point cloud with Euclidean clustering
  • For every cluster containing more than 150 points, fit a line with FitLine2D() from math_utils.h
  • Compute the slope and angle of each fitted line
  • Compare the angle differences between all fitted lines. If some pair of lines differs by a large angle, the frame is not degenerate; if the angles of all fitted lines are nearly the same, the scene is either an open area where few lines can be fitted or a degenerate scene such as a long corridor.
// Load the point cloud
pcl::PointCloud<PointType>::Ptr cloud(new pcl::PointCloud<PointType>);
pcl::PointCloud<PointType>::Ptr cloud_filtered(new pcl::PointCloud<PointType>);
pcl::io::loadPCDFile(FLAGS_pcd_path, *cloud);

LOG(INFO) << "cloud points: " << cloud->size();

pcl::StatisticalOutlierRemoval<PointType> sor;  // remove statistical outliers before clustering
sor.setInputCloud(cloud);
sor.setMeanK(30);
sor.setStddevMulThresh(1.0);
sor.filter(*cloud_filtered);

pcl::search::KdTree<PointType>::Ptr kdtree_;
kdtree_ = boost::make_shared<pcl::search::KdTree<PointType>>();
kdtree_->setInputCloud(cloud_filtered);

pcl::EuclideanClusterExtraction<PointType> cluster;
std::vector<pcl::PointIndices> clusters;   // vector holding the clustering results

cluster.setClusterTolerance(0.03);  // cluster distance threshold: 0.03 m
cluster.setMinClusterSize(10);      // minimum cluster size: 10 points
cluster.setMaxClusterSize(1000);    // maximum cluster size: 1000 points
cluster.setSearchMethod(kdtree_);   // search method for the point cloud
cluster.setInputCloud(cloud_filtered);
cluster.extract(clusters);          // extract clusters; the point indices are stored in clusters

LOG(INFO) << "cluster size: " << clusters.size();

pcl::visualization::PCLVisualizer cluster_viewer("cluster viewer");
cluster_viewer.setBackgroundColor(0, 0, 0);
cluster_viewer.addPointCloud<PointType>(cloud_filtered, "cloud");

int cluster_num = 1;
int line_num = 0;
std::vector<Eigen::Vector2d> linecoeffs_cloud;
std::vector<double> slope;
std::vector<double> angle;
for (const auto& indice : clusters) {

    if(indice.indices.size() < 150) {
        LOG(INFO) << "Cluster " << cluster_num << " has " << indice.indices.size()
                  << " points, which is too few for line fitting.";
        cluster_num++;
        continue;
    }
    
    LOG(INFO) << "Cluster " << cluster_num << " has " << indice.indices.size() << " points.";
    pcl::PointCloud<PointType>::Ptr cluster_cloud(new pcl::PointCloud<PointType>);
    pcl::copyPointCloud(*cloud_filtered, indice, *cluster_cloud);

    // Convert the pcl points to Eigen::Vector2d
    
    std::vector<Eigen::Vector2d> pts;
    pts.reserve(cluster_cloud->size());

    for (const PointType& pt : *cluster_cloud) {
        pts.emplace_back(Eigen::Vector2d(pt.x, pt.y));
    }
        
    Eigen::Vector3d line_coeffs;
    // Fit the coefficients a, b, c of ax + by + c = 0 to the cluster points via SVD (a sketch of the idea follows this listing)
    if (sad::math::FitLine2D(pts, line_coeffs)) {
        if(line_coeffs[0] < 0.0)
            line_coeffs = -line_coeffs;   // normalize the sign so that a >= 0

        double s = -line_coeffs[0] / line_coeffs[1];   // slope of ax + by + c = 0 (assumes b != 0, i.e. the line is not vertical)
        slope.emplace_back(s);
        double a = std::atan(slope[line_num]);
        angle.emplace_back(a);
        LOG(INFO) << "line_coeffs: " << line_coeffs[0] << ", " << line_coeffs[1] << ", " << line_coeffs[2];
        LOG(INFO) << "slope: " << slope[line_num];
        LOG(INFO) << "angle: " << angle[line_num];
        line_num++;

        linecoeffs_cloud.emplace_back(line_coeffs.head<2>());
    }


    // assign a random color to each cluster
    double r = static_cast<double>(rand()) / RAND_MAX;
    double g = static_cast<double>(rand()) / RAND_MAX;
    double b = static_cast<double>(rand()) / RAND_MAX;

    std::string clusterId = "cluster_" + std::to_string(cluster_num);
    cluster_viewer.addPointCloud<PointType>(cluster_cloud, clusterId);
    cluster_viewer.setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_COLOR, r, g, b, clusterId);

    cluster_num++;
}

// Compare the orientations of all fitted lines; a large pairwise angle difference means the frame is not degenerate.
bool maybe_degenerate = true;
for (size_t n = 0; n < angle.size(); n++) {
    for (size_t m = n + 1; m < angle.size(); m++) {
        if (std::abs(angle[n] - angle[m]) > 0.2) {
            LOG(INFO) << "Some fitted lines point in clearly different directions; this frame is not degenerate.";
            maybe_degenerate = false;
        }
    }
}

if (maybe_degenerate) {
    LOG(INFO) << "The angles between the fitted lines are small; this frame may be in a degenerate scenario.";
}
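FitLine2D() fits a*x + b*y + c = 0 to the cluster in a least-squares sense. A rough equivalent (a sketch, not the exact sad::math::FitLine2D implementation) stacks one row [x_i, y_i, 1] per point and takes the right singular vector associated with the smallest singular value:

#include <vector>
#include <Eigen/Dense>

// Sketch of an SVD-based 2D line fit for ax + by + c = 0 (coefficients determined up to scale).
bool FitLine2DSketch(const std::vector<Eigen::Vector2d>& pts, Eigen::Vector3d& coeffs) {
    if (pts.size() < 2) return false;
    Eigen::MatrixXd A(pts.size(), 3);
    for (size_t i = 0; i < pts.size(); ++i) {
        A.row(i) << pts[i].x(), pts[i].y(), 1.0;   // each point should satisfy a*x + b*y + c = 0
    }
    Eigen::JacobiSVD<Eigen::MatrixXd> svd(A, Eigen::ComputeThinV);
    coeffs = svd.matrixV().col(2);                 // direction associated with the smallest singular value
    return true;
}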
  • Normal scene

  • Three main groups of fitted lines, shown in different colors, can be seen in the figure; two of the groups differ by a large angle.
    (figure omitted: clustering and line-fitting result for a normal scene)

  • Degenerate scene

  • Two main groups of fitted lines, shown in different colors, can be seen in the figure; the angle between them is very small, as expected in a corridor-like degenerate scene.
    (figure omitted: clustering and line-fitting result for a degenerate scene)

Problem 4

  • Add sensors: add extra sensors (e.g., an RGB-D depth camera) to capture 3D depth information of the actual scene, build a better map, and avoid hanging and low-lying objects.

  • Manual intervention: for hanging and low-lying objects present in a specific scene, their positions and heights can be added to the map in advance through manual annotation or prior modeling, helping the robot navigate and avoid obstacles more accurately.
