RoadLib: A Mapping and Localization Project Worth a Close Look

0. Introduction

It has been a while since I came across such a well-organized open-source project. RoadLib looks like a solid foundation for currently popular topics such as memory-based driving. Although it uses relatively few feature types, it is largely sufficient for mapping and localization. In this article we walk through the logic of the code; the annotated source and further details have been open-sourced on GitHub. Since the author's understanding is limited, discussion and corrections are welcome.


Lightweight road-marking feature map (open-sourced)

1. RoadLib: roadlib.h

This header defines the module for road-marking detection and map construction. It mainly uses the Eigen and OpenCV libraries to process camera and image data and to recognize and manage road features. Its core functionality covers sensor configuration, the definition and handling of road instance patches, and the management and fusion of frames and the map.

First, the code defines a SensorConfig struct that stores the sensor configuration, e.g. the camera configuration, smoothing parameters, patch-related parameters, and mapping parameters. These settings govern how the sensor data is processed and analyzed and keep the processing consistent and accurate.

Next, the code defines the RoadInstancePatch class, which represents a single road instance patch. Each patch carries several attributes: an ID, a road-marking class, the ID of the associated frame, the four corners of its bounding box with their distance uncertainties, line-related parameters, and the raw point coordinates. The class stores and manages each detected road feature and provides methods for computing the bounding-box height, width, and direction.

The RoadInstancePatchFrame class then gathers all road-marking information of a single frame. It holds the frame's unique ID, timestamp, rotation matrix and translation vector, plus a map from patch type to the patches of that type. Its main job is to convert the road features detected in the image frame into metric features.

Finally, the RoadInstancePatchMap class accumulates the per-frame information into a map. It stores patches grouped by type, a reference vector, queued poses and timestamps, and offers methods such as adding a frame, merging patches, cleaning the map, saving/loading the map, and building a KD-tree for map matching. A minimal usage sketch of how these pieces interact follows below.
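The sketch below is hypothetical glue code, not taken from the repository's demo program: the pose variables, input images and function placement are assumptions, and only functions declared in this header are called.

// Hypothetical glue code: how one keyframe flows through the roadlib data structures.
void processKeyframe(const SensorConfig& config, const gv::IPMProcesser& ipm,
	const cv::Mat& ipm_raw, const cv::Mat& ipm_class,
	const Eigen::Matrix3d& Rwv, const Eigen::Vector3d& twv,
	RoadInstancePatchMap& road_map)
{
	// 1) Cluster the semantic IPM image into the per-frame road instance patches (image frame).
	RoadInstancePatchFrame frame = generateInstancePatch(config, ipm, ipm_raw, ipm_class);

	// 2) Attach the frame pose and convert the patches to metric scale.
	frame.R = Rwv;
	frame.t = twv;
	frame.generateMetricPatches(config, ipm);

	// 3) Push the frame into the incremental map and merge overlapping instances.
	road_map.addFrame(frame);
	road_map.mergePatches(config, 0 /* mode 0: incremental mapping */, Rwv, twv);
}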

#pragma once
#include <Eigen/Core>
#include <Eigen/Dense>
#include <Eigen/SVD>
#include<Eigen/StdVector>
#include <opencv2/opencv.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/flann.hpp>

#include <iostream>
#include <filesystem>
#include <chrono>
#include <unordered_map>
#include <iomanip>
#include <set>
#include <random>
#include "gv_utils.h"
#include "ipm_processer.h"
#include "utils.hpp"
#include "gviewer.h"

using namespace Eigen;
using namespace std;

/// @brief Configuration parameters used throughout roadlib
struct SensorConfig
{
public:
	SensorConfig(string path);
	SensorConfig() {};
public:
	gv::CameraConfig cam; // camera configuration

	int pose_smooth_window = 20; // smoothing window size, used for pose smoothing
	bool need_smooth = true; // whether pose smoothing is applied
	double large_slope_thresold = 1.5; // large-slope threshold
	double t_start; // start time
	double t_end; // end time

	int patch_min_size = 50; // minimum patch size (pixels)
	double patch_dashed_min_h = 1.35; // minimum height of a dashed-line patch
	double patch_dashed_max_h = 10.0; // maximum height of a dashed-line patch
	double patch_dashed_max_dist = 12.0; // maximum distance of a dashed-line patch
	double patch_guide_min_h = 0.0; // minimum height of a guide-marking patch
	double patch_guide_max_h = 1000.0; // maximum height of a guide-marking patch
	double patch_guide_max_dist = 20.0; // maximum distance of a guide-marking patch
	double patch_solid_max_dist = 15.0; // maximum distance of a solid-line patch
	double patch_stop_max_dist = 12.0; // maximum distance of a stop-line patch

	int mapping_step = 10; // mapping step
	double mapping_patch_freeze_distance = 10.0; // patch freeze distance
	double mapping_line_freeze_distance = 10.0; // line freeze distance
	double mapping_line_freeze_max_length = 50.0; // maximum frozen line length
	double mapping_line_cluster_max_dist = 1.0; // maximum distance within a line cluster
	double mapping_line_cluster_max_across_dist1 = 1.0; // maximum across-track distance 1 within a line cluster
	double mapping_line_cluster_max_across_dist2 = 0.4; // maximum across-track distance 2 within a line cluster
	double mapping_line_cluster_max_theta = 10.0; // maximum angle within a line cluster

	int localization_max_windowsize = 100; // maximum localization window size
	int localization_force_last_n_frames = 2; // always include the last n frames in localization
	int localization_every_n_frames = 5; // run localization every n frames
	int localization_min_keyframe_dist = 1.0; // minimum keyframe distance for localization
	int localization_max_strict_match_dist = 1.0; // maximum strict-match distance for localization
	int localization_solid_sample_interval = 3.0; // sampling interval of solid lines for localization

	bool enable_vis_image = true;
	bool enable_vis_3d = true;
};

extern gviewer viewer;
extern vector<VisualizedInstance> vis_instances;
extern std::normal_distribution<double> noise_distribution;
extern std::default_random_engine random_engine;

enum PatchType { EMPTY = -1, SOLID = 0, DASHED = 1, GUIDE = 2, ZEBRA = 3, STOP = 4 };
/// @brief Convert a gray value (semantic label) to a patch type
/// @param gray gray value (semantic label)
inline PatchType gray2class(int gray)
{
	if (gray == 2) return PatchType::DASHED;
	else if (gray == 3) return PatchType::GUIDE;
	else if (gray == 4) return PatchType::ZEBRA;
	else if (gray == 5) return PatchType::STOP;
	else if (gray > 0) return PatchType::SOLID;
	else return PatchType::EMPTY;
}
/// @brief Convert a patch type to its string name
/// @param PatchType patch type
inline string PatchType2str(PatchType PatchType)
{
	if (PatchType == PatchType::DASHED) return "dashed";
	else if (PatchType == PatchType::GUIDE) return "guide";
	else if (PatchType == PatchType::ZEBRA) return "zebra";
	else if (PatchType == PatchType::SOLID) return "solid";
	else if (PatchType == PatchType::STOP) return "stop";
	else if (PatchType == PatchType::EMPTY) return "empty";
	else return "unknown";
}

/// @brief Road instance patch, holding the information of one road-marking instance
class RoadInstancePatch
{
public:
	static long long next_id; // counter used to generate the id of the next instance
	EIGEN_MAKE_ALIGNED_OPERATOR_NEW

public:
	//** ID/Class
	long long id; // unique identifier of the instance
	PatchType road_class; // road-marking class of this instance
	map<PatchType, int> road_class_count; // per-class counts of this instance (used for voting)

	//** Flags
	bool line_valid;
	bool merged = false;
	bool frozen = false;
	bool valid_add_to_map = false;
	bool out_range = false;

	long long frame_id; // id of the associated frame

	// Linked to RoadInstancePatchMap::timestamps and queued_poses.
	// For patch-like instances -> linked_frames[0]
	// For line-like instances  -> linked_frames[i], i = 0, ..., line_points_metric.size()
	vector<vector<long long>> linked_frames; // ids of the frames linked to this instance

	//** Bounding box related
	// Main parameters for patch-like instances.
	// 
	//    p3----p2
	//    |      |
	//    |      |
	//    p0 --- p1
	//
	Eigen::Vector3d b_point[4];        // the four corners of the bounding box, in the image (IPM) frame
	Eigen::Vector3d b_point_metric[4]; // the four corners of the bounding box, in the metric (map/body) frame
	double b_unc_dist[4];              // distance uncertainty of the four bounding-box corners

	//** Line related
	// Main parameters for line-like instances.
	Eigen::VectorXd line_koef; // polynomial coefficients of the fitted line
	vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>> line_points; // line points in the image (IPM) frame; aligned_allocator guarantees memory alignment
	vector<Eigen::Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>> line_points_metric; // line points in the metric (map/body) frame
	vector<Eigen::Matrix3d, Eigen::aligned_allocator<Eigen::Matrix3d>> line_points_uncertainty; // uncertainty matrices of the line points

	//** Raw points related (image frame and metric frame)
	// General attributes.
	// 1) Image frame
	double top, left, width, height; // position and size of the bounding box (image frame)
	Eigen::Vector3d mean;
	Eigen::Matrix3d cov;
	Eigen::Vector3d direction;
	double eigen_value[3];
	vector<Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>> points; // raw point coordinates (image frame)

	// 2) Body/map frame
	Eigen::Vector3d mean_metric; // mean of the points in the metric frame
	vector<Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>> points_metric; // raw point coordinates in the metric frame
	Eigen::Matrix3d mean_uncertainty;
	double percept_distance = 10000; // perception distance


public:
	RoadInstancePatch()
	{
		id = next_id++;
		for (int ii = 0; ii < 4; ii++)
		{
			b_point[ii].setZero();
			b_point_metric[ii].setZero();
		}
	}

	// Height of bounding box.
	double h() const;

	// Width of bounding box.
	double w() const;

	// Direction of bounding box/line.
	Eigen::Vector3d d() const;

};

/// @brief Road instance patch frame, holding all road-marking information of one frame
class RoadInstancePatchFrame
{
public:
	static long long next_id; // counter used to generate a unique id for each RoadInstancePatchFrame object
	EIGEN_MAKE_ALIGNED_OPERATOR_NEW
public:
	long long id; // unique id of this RoadInstancePatchFrame object
	double time;
	Eigen::Matrix3d R; // rotation matrix of the transform between the world frame and the body frame
	Eigen::Vector3d t; // translation vector of the transform between the world frame and the body frame
	map<PatchType, vector<shared_ptr<RoadInstancePatch>>> patches; // patches of this frame grouped by type; each type maps to a vector of shared pointers to patches
public:
	RoadInstancePatchFrame()
	{
		id = next_id++; // assign a unique id
	}

	// Calculate metric-scale properties of the patches.
	// Image frame -> body frame.
	int generateMetricPatches(const SensorConfig &config, const gv::IPMProcesser &ipm);
};

/// @brief Road instance patch map; the per-frame information is accumulated and organized into this map
class RoadInstancePatchMap
{
public:
	EIGEN_MAKE_ALIGNED_OPERATOR_NEW
	map<PatchType, vector<shared_ptr<RoadInstancePatch>>> patches; // patches of the map grouped by type; each type maps to a vector of shared pointers to patches
	Eigen::Vector3d ref; // reference vector of the map

	map<long long, pair<Matrix3d, Vector3d>> queued_poses; // queued poses, keyed by frame id
	map<long long, double> timestamps;
public:
	int addFrame(const RoadInstancePatchFrame & frame);

	// Merge patches in the current map.
	// mode 0 : incremental mapping; mode 1: map merging/map checking
	// For mode 0, Rwv and twv are used to determine active instances and freeze old instances.
	// For mode 1, Rwv and twv are useless. Just use dummy values. (To be improved.)
	int mergePatches(const SensorConfig &config, const int mode, const Eigen::Matrix3d &Rwv = Eigen::Matrix3d::Identity(), const Eigen::Vector3d & twv = Eigen::Vector3d::Zero());
	
	// Simply stacking the patches from two maps.
	// The patches wouldn't be merged.
	int mergeMap(const RoadInstancePatchMap& road_map_other);

	// Clear the map.
	int clearMap();

	// Unfreeze the patches (for further merging).
	int unfreeze();
	
	// Integrity checking.
	int cleanMap();

	// Save/load functions.
	// Notice that only metric-scale properties are saved.
	int saveMapToFileBinaryRaw(string filename);
	int loadMapFromFileBinaryRaw(string filename);

	// Build KDtree for map matching.
	int buildKDTree();

	// Instance-level nearest matching.
	map<PatchType, vector<pair<int, int>>> mapMatch(const SensorConfig &config, 
		RoadInstancePatchFrame &frame, int mode = 0); // mode : 0(normal), 1(strict)
	
	// Line segment matching (for measurement construction).
	vector<pair<int, int>> getLineMatch(const SensorConfig &config, RoadInstancePatchFrame &frame, PatchType road_class,
		int frame_line_count, int map_line_count, int mode =0);

	// Geo-register the map elements based on linked frames.
	// Function 'mergePatches' should be called later for consistency.
	int geoRegister(const Trajectory& new_traj,
		vector<VisualizedInstance>& lines_vis);

public:
	map<long long, double> ignore_frame_ids; // frame_id - distance threshold
private:

};


/**
 * @brief					Merge the line instance cluster.
 * @param lines_in			The raw line cluster.
 * @param line_est			The merged line instance.
 *
 * @return					Success flag.
 */
extern int LineCluster2SingleLine(const PatchType road_class, const vector<shared_ptr<RoadInstancePatch>>& lines_in, shared_ptr<RoadInstancePatch>& line_est, Eigen::Matrix3d Rwv = Eigen::Matrix3d::Identity());

/**
 * @brief					Use the semantic IPM image to generate the road marking instances.
 * @param config			The sensor configuration.
 * @param ipm				IPM processor with up-to-date camera-ground parameters.
 * @param ipm_raw			RGB IPM image.
 * @param ipm_class			Semantic IPM image. (label = 0,1,2,3,4,5, other)
 *
 * @return					Success flag.
 */
extern RoadInstancePatchFrame generateInstancePatch(const SensorConfig& config, const gv::IPMProcesser& ipm, const cv::Mat& ipm_raw, const cv::Mat& ipm_class);

/**
 * @brief					Calculate the uncertainty of the element on the IPM image based on the pixel coordinates (uI, vI).
 * @param uI, vI			Pixel coordinates on the IPM image.
 *
 * @return					Uncertainty matrix.
 */
extern Matrix3d calcUncertainty(const gv::IPMProcesser& ipm, double uI, double vI);

extern int PointCloud2Curve2D(const vector<Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>>& points, int dim, VectorXd& K);

extern int LabelClustering(const cv::Mat& ipm_class, cv::Mat& ipm_label, cv::Mat& stats, cv::Mat& centroids);

2. RoadLib: roadlib.cpp

This file processes the road image data: it clusters semantic labels, computes uncertainties and generates road instance patches, thereby implementing the detection and management of road features. The core content can be summarized as follows:

  1. Initialization and dependencies: the code uses Eigen and OpenCV for matrix operations and image processing, and pulls in the project's own headers and namespaces through #include "roadlib.h".

  2. Label clustering (LabelClustering): clusters the labels of the input semantic image, computes per-cluster statistics (such as area and centroid) and stores them in the output matrices. It is built on OpenCV's connectedComponentsWithStats and handles special semantic classes, such as stop lines, separately. (A short sketch of the stats/centroids layout follows this list.)

  3. Uncertainty computation (calcUncertainty): based on the IPM (Inverse Perspective Mapping) configuration, computes the uncertainty of a pixel on the IPM image. It accounts for pixel error, pitch-angle error and camera-height error, and combines them via matrix operations into a 3x3 uncertainty matrix.

  4. Road instance patch generation (generateInstancePatch): for each frame, uses the clustering result to generate road instance patches. It initializes a RoadInstancePatchFrame containing the different patch types (solid, dashed, guide, ...), and computes each patch's statistics (mean, covariance, eigenvectors, ...). Using the clustering result and the raw IPM image, it assigns pixels to the corresponding patches and performs feature extraction and uncertainty computation.

  5. Road instance patch frame (RoadInstancePatchFrame): manages all road patch information of one frame. Through generateMetricPatches, the patch information is converted from the image frame to metric space, and the metric properties and perception distance of each patch are computed from the sensor configuration and the IPM processor.

  6. Road instance patch (RoadInstancePatch): stores and manages the information of a single patch, including its ID, road class, bounding-box corners, line features, raw points and metric points. It provides methods to compute the patch height, width and direction, using Eigen for matrix operations and eigenvalue decomposition.

  7. Sensor configuration (SensorConfig): reads the camera- and sensor-related parameters from a configuration file, such as the IPM image size and resolution, the smoothing window, and the minimum patch size and distance thresholds. Parsing the configuration file initializes the camera configuration and sensor parameters that the subsequent image processing and feature extraction rely on.
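As a quick reference for item 2 above, the short standalone sketch below shows how the outputs of cv::connectedComponentsWithStats are laid out; the binary test image is made up purely for illustration.

// Standalone illustration of the connected-component statistics used by LabelClustering.
#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
	cv::Mat binary = cv::Mat::zeros(100, 100, CV_8UC1);
	cv::rectangle(binary, cv::Point(10, 10), cv::Point(30, 40), cv::Scalar(255), cv::FILLED); // one blob

	cv::Mat labels, stats, centroids;
	// Row 0 of stats/centroids describes the background; rows 1..n-1 describe the blobs.
	int n = cv::connectedComponentsWithStats(binary, labels, stats, centroids, 4, CV_16U);

	for (int i = 1; i < n; i++)
	{
		std::cout << "blob " << i
			<< ": x=" << stats.at<int>(i, cv::CC_STAT_LEFT)
			<< " y=" << stats.at<int>(i, cv::CC_STAT_TOP)
			<< " w=" << stats.at<int>(i, cv::CC_STAT_WIDTH)
			<< " h=" << stats.at<int>(i, cv::CC_STAT_HEIGHT)
			<< " area=" << stats.at<int>(i, cv::CC_STAT_AREA)
			<< " centroid=(" << centroids.at<double>(i, 0) << "," << centroids.at<double>(i, 1) << ")\n";
	}
	return 0;
}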

#include "roadlib.h"

long long RoadInstancePatch::next_id = 0;
long long RoadInstancePatchFrame::next_id = 0;

vector<RoadInstancePatch> patches_temp;
/// @brief Cluster the semantic labels in the image and compute per-cluster statistics (e.g. area and centroid)
/// @param ipm_semantic input semantic image
/// @param ipm_label output label image
/// @param stats output statistics of each cluster (area, bounding box, etc.)
/// @param centroids output cluster centroids
/// @return 
int LabelClustering(const cv::Mat& ipm_semantic, cv::Mat& ipm_label, cv::Mat& stats, cv::Mat& centroids)
{
	cv::Mat ipm_label_stop, stats_stop, centroids_stop;
	cv::Mat ipm_semantic_temp;
	ipm_semantic.copyTo(ipm_semantic_temp);
	cv::Mat ipm_semantic_stop = cv::Mat::zeros(ipm_semantic.size(), CV_8UC1);
	ipm_semantic_stop.setTo(255, ipm_semantic == 5);//将所有值为5的像素设置为255,以便单独处理停车线
	ipm_semantic_temp.setTo(0, ipm_semantic == 5);//将所有值为5的像素设置为0,以便排除不需要的语义类别
	// https://blog.csdn.net/qq_40784418/article/details/106023288:stats:表示每一个标记的统计信息,是一个5列的矩阵,每一行对应每个连通区域的外接矩形的x、y、width、height和面积,示例如下: 0 0 720 720 291805
	cv::connectedComponentsWithStats(ipm_semantic_temp, ipm_label, stats, centroids, 4, CV_16U);//对除停车线外的语义类别进行聚类,得到另一个标签图像、统计信息和中心点
	cv::connectedComponentsWithStats(ipm_semantic_stop, ipm_label_stop, stats_stop, centroids_stop, 4, CV_16U);//对停车线进行聚类,得到另一个标签图像、统计信息和中心点
	cv::Mat stats_all, centroids_all;
	if (stats_stop.rows > 1)//如果停车线聚类的数量大于1
	{
		cv::vconcat(stats, stats_stop(cv::Rect(0, 1, stats_stop.cols, stats_stop.rows - 1)), stats);//将停车线聚类的统计信息拼接到所有聚类的统计信息中
		cv::vconcat(centroids, centroids_stop(cv::Rect(0, 1, centroids_stop.cols, centroids_stop.rows - 1)), centroids);//将停车线聚类的中心点拼接到所有聚类的中心点中
	}
	for (int i = 1; i < stats_stop.rows; i++)//将停车线聚类的标签值设置为停车线的标签值,然后将第二个标签图像的标签映射到第一个标签图像的索引上
	{
		ipm_label.setTo(stats.rows - stats_stop.rows + i, ipm_label_stop == i);
	}
	return 0;
}
/// @brief Compute the uncertainty of a pixel on the IPM image, typically used to estimate the error of reconstruction or feature extraction
/// @param ipm IPM processor, holding the image-processing configuration and parameters
/// @param uI pixel coordinate u (horizontal) on the image
/// @param vI pixel coordinate v (vertical) on the image
/// @return 
Matrix3d calcUncertainty(const gv::IPMProcesser& ipm, double uI, double vI)
{

	double fx = ipm._fx;
	double fy = ipm._fy;

	double reso_temp = ipm._config.IPM_RESO;

	double d = ipm._config.cg.getH();

	Eigen::Matrix3d K_IPM_i_d;
	K_IPM_i_d << reso_temp, 0.0, -ipm._config.IPM_WIDTH * reso_temp / 2,
		0.0, reso_temp, -(ipm._config.IPM_HEIGHT - 1) * reso_temp,
		0.0, 0.0, d;

	Eigen::Matrix3d K_IPM = K_IPM_i_d.inverse() * d;//给出IPM矩阵

	Vector3d xyI, xy;
	double xI, yI, x, y;
	Eigen::MatrixXd F, F1, F2, F3;
	xyI = K_IPM.inverse() * Vector3d(uI, vI, 1);//将像素坐标转换为归一化坐标xyI
	xI = xyI(0); yI = xyI(1);
	y = -1 / yI; x = xI * y;//将归一化坐标转换为像素坐标

	// Pixel error.
	F1 = (Eigen::MatrixXd(2, 2) << d / reso_temp, 0, 0, d / reso_temp).finished();//给出像素误差的F矩阵
	F2 = (Eigen::MatrixXd(2, 2) << 1 / y, -x / y / y, 0, 1 / y / y).finished();
	F3 = (Eigen::MatrixXd(2, 2) << 1 / fx, 0, 0, 1 / fy).finished();
	F = F1 * F2 * F3;//计算像素误差的F矩阵
	MatrixXd var_pixel = (Eigen::MatrixXd(2, 2) << 2, 0, 0, 2).finished();
	var_pixel = var_pixel * var_pixel;
	Matrix2d var_pixel_error = F * var_pixel * F.transpose(); //计算像素误差的协方差矩阵

	// 计算俯仰角误差的协方差矩阵
	F1 = (Eigen::MatrixXd(2, 2) << d / reso_temp, 0, 0, d / reso_temp).finished();
	F2 = (Eigen::MatrixXd(2, 1) << x / y / y, (1 + y * y) / (y * y)).finished();
	F = F1 * F2;
	double var_pitch = 0.3 / 57.3;
	var_pitch = var_pitch * var_pitch;
	Matrix2d var_pitch_error = F * var_pitch * F.transpose(); // (in pixels)

	// 计算高度误差的协方差矩阵
	F = (Eigen::MatrixXd(2, 1) << xI / reso_temp, yI / reso_temp).finished();
	double var_D = 0.05;
	var_D = var_D * var_D;
	Matrix2d var_D_error = F * var_D * F.transpose(); // (in pixels)

	Matrix3d uncertainty = Matrix3d::Identity(3, 3);
	uncertainty.block(0, 0, 2, 2) = (var_pixel_error + var_pitch_error + var_D_error) * (reso_temp * reso_temp);
	uncertainty(2, 2) = pow((0.5 * sqrt(uncertainty(1, 1))), 2);//将这些误差矩阵组合成一个3x3的不确定性矩阵
	return uncertainty;
}
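// Note on the math above: each error source is propagated to the IPM ground plane through a
// first-order Jacobian F, and the 2x2 horizontal covariance is the sum of the three terms
// var_pixel_error + var_pitch_error + var_D_error, i.e.
//   F_pix*Sigma_pix*F_pix' + F_pitch*sigma_pitch^2*F_pitch' + F_h*sigma_h^2*F_h',
// with assumed sigmas of 2 px (pixel), 0.3 deg (pitch) and 0.05 m (camera height).
// The sum is scaled by IPM_RESO^2 to convert pixels to meters, and the vertical variance
// is set heuristically to (0.5*sqrt(Sigma(1,1)))^2.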
/// @brief Generate the road instance patches of one frame from the semantic IPM image
/// @param config reference to the sensor configuration
/// @param ipm reference to a gv::IPMProcesser object
/// @param ipm_raw OpenCV matrix holding the raw IPM image
/// @param ipm_class OpenCV matrix holding the IPM class (semantic) labels
/// @return 
RoadInstancePatchFrame generateInstancePatch(const SensorConfig& config, const gv::IPMProcesser& ipm, const cv::Mat& ipm_raw, const cv::Mat& ipm_class)
{
	cv::Mat ipm_instance, stats, centroids;
	LabelClustering(ipm_class, ipm_instance, stats, centroids);//对ipm_class进行标签聚类,得到ipm_instance、stats和centroids

	// 初始化RoadInstancePatchFrame对象,对应一帧当中的元素特征
	RoadInstancePatchFrame frame;
	frame.patches[PatchType::SOLID] = vector<shared_ptr<RoadInstancePatch>>();
	frame.patches[PatchType::DASHED] = vector<shared_ptr<RoadInstancePatch>>();
	frame.patches[PatchType::GUIDE] = vector<shared_ptr<RoadInstancePatch>>();
	frame.patches[PatchType::ZEBRA] = vector<shared_ptr<RoadInstancePatch>>();
	frame.patches[PatchType::STOP] = vector<shared_ptr<RoadInstancePatch>>();

	patches_temp = vector<RoadInstancePatch>(centroids.rows);//初始化对应patch信息
	vector<int> patches_count(centroids.rows);

	for (int i = 0; i < centroids.rows; i++)
	{
		patches_temp[i].points.resize(stats.at<int>(i, cv::CC_STAT_AREA));//对应mat下输出的统计信息,设置patch的点数
		patches_temp[i].top = stats.at<int>(i, cv::CC_STAT_TOP);
		patches_temp[i].left = stats.at<int>(i, cv::CC_STAT_LEFT);
		patches_temp[i].width = stats.at<int>(i, cv::CC_STAT_WIDTH);
		patches_temp[i].height = stats.at<int>(i, cv::CC_STAT_HEIGHT);
	}

	// Count every patch.
	uint16_t cur_label;
	for (int i = 0; i < ipm_instance.rows; i++)
	{
		for (int j = 0; j < ipm_instance.cols; j++)//遍历ipm_instance,根据标签将像素点添加到对应的patches_temp中
		{
			cur_label = ipm_instance.at<uint16_t>(i, j);//获取当前像素点的标签
			patches_temp[cur_label].points[patches_count[cur_label]] = Eigen::Vector3d(j, i, 1);//将像素点添加到对应的patch中
			patches_temp[cur_label].road_class_count[gray2class(ipm_class.at<uchar>(i, j))]++;//统计每个patch中的语义类别
			if (cur_label > 0)//判断是否在边界上
			{
				if (i + 1 >= ipm_instance.rows || j <= 0 || j >= ipm_instance.cols - 1 ||
					ipm_raw.at<cv::Vec3b>(i + 1, j) == cv::Vec3b(0, 0, 0) ||
					ipm_raw.at<cv::Vec3b>(i + 1, j - 1) == cv::Vec3b(0, 0, 0) ||
					ipm_raw.at<cv::Vec3b>(i + 1, j + 1) == cv::Vec3b(0, 0, 0))
				{
					patches_temp[cur_label].out_range = true;//如果在边界上,将out_range设置为true
				}
			}
			patches_count[cur_label]++;//访问下一个像素点
		}
	}
	for (int i = 0; i < patches_temp.size(); i++)
	{
		assert(patches_temp[i].points.size() == patches_count[i]);//检查patch的点数是否和统计信息中的点数一致
	}

	// 清除冗余的补丁
	auto it = patches_temp.begin();
	while (it != patches_temp.end()) {
		int max_class_count = 0;
		PatchType max_class = PatchType::EMPTY;
		for (auto class_it : it->road_class_count)//遍历patch中的语义类别
		{
			if (class_it.second > max_class_count)//找到出现次数最多的语义类别
			{
				max_class_count = class_it.second;
				max_class = class_it.first;
			}
		}
		it->road_class = max_class;//将patch的road_class设置为出现次数最多的语义类别
		if (max_class == PatchType::EMPTY)//如果没有语义类别,将patch删除
		{
			it = patches_temp.erase(it);
			continue;
		}

		bool out_range = false;
		if (it->points.size() < config.patch_min_size
			|| (it->road_class == PatchType::DASHED && out_range)
			|| (it->road_class == PatchType::GUIDE && out_range)
			)//如果patch的点数小于最小点数,或者是虚线且在边界上,或者是引导线且在边界上,将patch删除
		{
			it = patches_temp.erase(it);
			continue;
		}
		it++;
	}

	// 计算每个补丁的特征值,如均值、协方差、特征向量等
	for (int i = 0; i < patches_temp.size(); i++)
	{
		auto& patch = patches_temp[i];
		patch.mean.setZero();
		patch.cov.setZero();
		Eigen::Vector3d vector_temp;

		for (int j = 0; j < patch.points.size(); j++)
		{
			patch.mean += patch.points[j];
		}
		patch.mean /= patch.points.size();//对于每个patch,将其所有点相加得到均值

		for (int j = 0; j < patch.points.size(); j++)
		{
			vector_temp = patch.points[j] - patch.mean;
			patch.cov += vector_temp * vector_temp.transpose();//计算每个点与均值的差值,更新协方差矩阵
		}
		patch.cov /= patch.points.size();

		JacobiSVD<Eigen::MatrixXd> svd(patch.cov, ComputeThinU | ComputeThinV);//使用JacobiSVD分解协方差矩阵,得到特征向量和特征值
		Matrix3d V = svd.matrixV(), U = svd.matrixU();
		Matrix3d S = U.inverse() * patch.cov * V.transpose().inverse();//A=UΣV',其中U和V是正交矩阵,Σ是对角阵,对角线元素为奇异值
		patch.eigen_value[0] = sqrt(S(0, 0));
		patch.eigen_value[1] = sqrt(S(1, 1));
		patch.eigen_value[2] = sqrt(S(2, 2));

		patch.direction = U.block(0, 0, 3, 1);//根据特征值计算patch的方向
		patch.mean_uncertainty = calcUncertainty(ipm, patch.mean(0), patch.mean(1));//平均不确定性


		if (patch.direction(1) < 0) patch.direction *= -1;//如在特征向量计算过程中,特征向量的方向是无法确定的,只有其方向的相对性是可确定的。所以在实际应用中,为了保持一致性,可以将特征向量的方向调整为统一的方向。
        //特征值比例大于2且road_class为SOLID或STOP
		if (patch.eigen_value[0] / patch.eigen_value[1] > 2
			&& (patches_temp[i].road_class == PatchType::SOLID || patches_temp[i].road_class == PatchType::STOP))
		{
			auto points_direction = patch.points;//获取点云
			double direction_angle = atan2(patch.direction(1), patch.direction(0)) + M_PI / 2; //根据特征向量计算方向角度
			Matrix3d direction_rotation;
			direction_rotation << cos(direction_angle), sin(direction_angle), 0,
				-sin(direction_angle), cos(direction_angle), 0,
				0, 0, 1;//根据方向角度计算旋转矩阵
			for (int i = 0; i < points_direction.size(); i++)
			{
				points_direction[i] = direction_rotation * (patch.points[i] - patch.mean) + patch.mean;//将点云旋转到特征向量的方向
			}

			// 将线状点云转换为骨架点
			if (PointCloud2Curve2D(points_direction, 4, patch.line_koef) < 0)
				patch.line_valid = false;
			else
			{
				patch.line_valid = true;
				double yy, xx;
				Vector3d pt_img;
				double min_uncertainty = 1e9;
				double reso_temp = ipm._config.IPM_RESO;
				for (int ll = (int)ceil(-patch.eigen_value[0] * 2 / (0.5 / reso_temp));
					ll < patch.eigen_value[0] * 2 / (0.5 / reso_temp); ll++)//从patch的中心点开始,沿着特征向量的方向,计算线状点云的不确定性
				{
					yy = patch.mean(1) + ll * 0.5 / reso_temp;//计算线状点云的y坐标
					xx = 0.0;
					for (int i = 0; i < patch.line_koef.rows(); i++)
						xx += pow(yy, i) * patch.line_koef(i);//计算线状点云的x坐标,这个是曲线信息
					pt_img = direction_rotation.transpose() * (Vector3d(xx, yy, 1) - patch.mean) + patch.mean;//将线状点云旋转到特征向量的方向

					if (pt_img(1) > (patch.top + patch.height)) continue;

					double valid_distance = 20;
					if (patches_temp[i].road_class == PatchType::SOLID) valid_distance = config.patch_solid_max_dist;//根据road_class的不同,设置不同的有效距离
					else if (patches_temp[i].road_class == PatchType::STOP) valid_distance = config.patch_stop_max_dist;

					if ((ipm._config.IPM_HEIGHT - pt_img(1)) * reso_temp < valid_distance)//如果点云的y坐标小于有效距离,将点云添加到patch的线状点云中
					{
						patch.line_points.push_back(pt_img);
						patch.line_points_uncertainty.push_back(calcUncertainty(ipm, pt_img(0), pt_img(1)));
						min_uncertainty = min(min_uncertainty,
							sqrt(patch.line_points_uncertainty.back()(0, 0) + patch.line_points_uncertainty.back()(1, 1)));//计算线状点云的不确定性
					}
				}
				if (patch.line_points.size() < 2 || min_uncertainty > 1.0)
					patch.line_valid = false;
			}
		}
		else
			patch.line_valid = false;
	}

	// We collect nearby lane line segments to determine the direction of patch-like markings.
	// This could be improved.
	vector<pair<Vector3d, Vector3d>> direction_field;
	for (int i = 0; i < patches_temp.size(); i++)
	{
		auto& patch = patches_temp[i];

		if (patch.road_class == PatchType::DASHED)
			direction_field.push_back(make_pair(patch.mean, patch.direction));
		if (patches_temp[i].road_class == PatchType::SOLID && patch.line_valid)
			for (int ii = 0; ii < patch.line_points.size() - 1; ii++)
				direction_field.push_back(make_pair(patch.line_points[ii], patch.line_points[ii + 1] - patch.line_points[ii]));//将线状点云的方向添加到方向场中
	}

	//计算边界框。使用局部方向场确定补丁式标记的方向
	for (int i = 0; i < patches_temp.size(); i++)
	{
		auto patch = make_shared<RoadInstancePatch>(patches_temp[i]);

		if (patch->road_class == PatchType::GUIDE || patch->road_class == PatchType::DASHED)//如果是引导线或虚线
		{
			double min_dist = 999999;
			int min_ii = -1;
			for (int ii = 0; ii < direction_field.size(); ii++)//计算patch的方向
			{
				double dist = (direction_field[ii].first - patch->mean).norm();//计算patch的中心点和方向场中的点的距离
				if (dist < min_dist)
				{
					min_dist = dist;
					min_ii = ii;
				}
			}
			if (min_dist < 10000)//如果距离小于10000,计算patch的边界框
			{
				Eigen::Vector3d b_direction = direction_field[min_ii].second / direction_field[min_ii].second.norm();//计算patch的方向
				if (b_direction(1) < 0)  b_direction *= -1;//如果方向向下,将方向调整为向上

				double bb= -1e9; Vector3d tt_support;
				double tt = 1e9; Vector3d bb_support;
				double ll = 1e9; Vector3d ll_support;
				double rr = -1e9; Vector3d rr_support;

				double direction_angle = -atan2(b_direction(1), b_direction(0)) + M_PI / 2;
				Matrix3d direction_rotation;
				direction_rotation << cos(direction_angle), sin(direction_angle), 0,
					-sin(direction_angle), cos(direction_angle), 0,
					0, 0, 1;//根据方向角度计算旋转矩阵

				for (int jj = 0; jj < patch->points.size(); jj++)
				{
					Vector3d pt = direction_rotation.transpose() * (patch->points[jj] - patch->mean);//根据最小距离的方向,将patch的点云旋转到特征向量的方向
					if (pt.x() < ll) { ll = pt.x(); ll_support = patch->points[jj]; }//计算patch的边界框
					if (pt.x() > rr) { rr = pt.x(); rr_support = patch->points[jj]; }
					if (pt.y() < tt) { tt = pt.y(); tt_support = patch->points[jj]; }
					if (pt.y() > bb) { bb = pt.y(); bb_support = patch->points[jj]; }
					//if (patch->road_class == PatchType::GUIDE)
					//	std::cerr << patch->points[jj].transpose() << std::endl;

				}
				patch->b_point[0] = direction_rotation * Vector3d(ll, bb, 0) + patch->mean;//将patch的边界框旋转到特征向量的方向
				patch->b_point[1] = direction_rotation * Vector3d(rr, bb, 0) + patch->mean;
				patch->b_point[2] = direction_rotation * Vector3d(rr, tt, 0) + patch->mean;
				patch->b_point[3] = direction_rotation * Vector3d(ll, tt, 0) + patch->mean;

				Eigen::Matrix3d P0 = calcUncertainty(ipm, ll_support(0), ll_support(1));//计算patch的不确定性
				Eigen::Matrix3d P1 = calcUncertainty(ipm, rr_support(0), rr_support(1));
				Eigen::Matrix3d P2 = calcUncertainty(ipm, bb_support(0), bb_support(1));
				Eigen::Matrix3d P3 = calcUncertainty(ipm, tt_support(0), tt_support(1));
#ifdef DEBUG
				//cv::Mat mm(1000, 1000, CV_8UC3);
				//cv::circle(mm, cv::Point2f(ll_support(0), ll_support(1)), 5, cv::Scalar(255, 0, 0));
				//cv::circle(mm, cv::Point2f(rr_support(0), rr_support(1)), 5, cv::Scalar(0, 255, 0));
				//cv::circle(mm, cv::Point2f(bb_support(0), bb_support(1)), 5, cv::Scalar(0, 0, 255));
				//cv::circle(mm, cv::Point2f(tt_support(0), tt_support(1)), 5, cv::Scalar(255, 255, 0));

				drawUncertainty(mm, ll_support, P0/config.IPM_RESO/config.IPM_RESO);
				drawUncertainty(mm, rr_support, P1/config.IPM_RESO/config.IPM_RESO);
				drawUncertainty(mm, bb_support, P2/config.IPM_RESO/config.IPM_RESO);
				drawUncertainty(mm, tt_support, P3/config.IPM_RESO/config.IPM_RESO);
				//drawUncertainty_dist(mm, ll_support, P0 / config.IPM_RESO / config.IPM_RESO, Vector3d(-b_direction(1), b_direction(0), b_direction(2)));
				//drawUncertainty_dist(mm, rr_support, P1 / config.IPM_RESO / config.IPM_RESO, Vector3d(-b_direction(1), b_direction(0), b_direction(2)));
				//drawUncertainty_dist(mm, bb_support, P2 / config.IPM_RESO / config.IPM_RESO, b_direction);
				//drawUncertainty_dist(mm, tt_support, P3 / config.IPM_RESO / config.IPM_RESO, b_direction);


				//cv::imshow("mm_debug", mm);
				//cv::waitKey(0);
#endif

				Vector3d b_direction_t = Vector3d(-b_direction(1), b_direction(0), b_direction(2));
				patch->b_unc_dist[0] = sqrt(b_direction.transpose() * P2 * b_direction);
				patch->b_unc_dist[1] = sqrt(b_direction_t.transpose() * P1 * b_direction_t);
				patch->b_unc_dist[2] = sqrt(b_direction.transpose() * P3 * b_direction);
				patch->b_unc_dist[3] = sqrt(b_direction_t.transpose() * P0 * b_direction_t);
				if (patch->out_range)
				{
					patch->b_unc_dist[0] *=100;
					patch->b_unc_dist[1] *=100;
					patch->b_unc_dist[3] *=100;
				}
			}
			else
			{
				continue;
			}
		}

		if (patches_temp[i].road_class == PatchType::SOLID ||
			patches_temp[i].road_class == PatchType::DASHED ||
			patches_temp[i].road_class == PatchType::GUIDE ||
			patches_temp[i].road_class == PatchType::ZEBRA ||
			patches_temp[i].road_class == PatchType::STOP)
		{
			frame.patches[patches_temp[i].road_class].push_back(patch);
		}
	}
	return frame;
}

/// @brief Convert the road instances of this frame from the image frame to metric scale
/// @param config sensor configuration
/// @param ipm IPM processor
/// @return 
int RoadInstancePatchFrame::generateMetricPatches(const SensorConfig& config, const gv::IPMProcesser& ipm)
{
	auto cam = ipm._config;//首先使用 IPM 处理器对象获取摄像头参数 cam
	for (auto iter_class : patches)
	{
		for (int i = 0; i < iter_class.second.size(); i++)
		{
			auto& patch = iter_class.second[i];

			patch->points_metric.resize(patch->points.size());

			for (int j = 0; j < patch->points.size(); j++)
			{
				patch->points_metric[j] = cam.tic
					+ cam.Ric * ipm.IPM2Metric(cv::Point2f(patch->points[j].x(), patch->points[j].y()));//将patch的点云转换到度量空间
			}

			if (patch->line_valid)
			{
				patch->line_points_metric.resize(patch->line_points.size());
				for (int j = 0; j < patch->line_points.size(); j++)
				{
					patch->line_points_metric[j] = cam.tic
						+ cam.Ric * ipm.IPM2Metric(cv::Point2f(patch->line_points[j].x(), patch->line_points[j].y()));
				}
			}

			if (patch->road_class == PatchType::DASHED || patch->road_class == PatchType::GUIDE)
			{
				for (int ii = 0; ii < 4; ii++)
				{
					patch->b_point_metric[ii] = cam.tic
						+ cam.Ric * ipm.IPM2Metric(cv::Point2f(patch->b_point[ii].x(), patch->b_point[ii].y()));
				}
			}

			patch->mean_metric.setZero();
			Eigen::Vector3d vector_temp;

			for (int j = 0; j < patch->points_metric.size(); j++)
			{
				patch->mean_metric += patch->points_metric[j];
			}
			patch->mean_metric /= patch->points_metric.size();//计算补丁的平均度量
			patch->percept_distance = patch->mean_metric(1);//计算补丁的感知距离

			if (iter_class.first == PatchType::DASHED
				&& patch->h() > config.patch_dashed_min_h
				&& patch->h() < config.patch_dashed_max_h
				&& patch->mean_metric(1) < config.patch_dashed_max_dist)
				patch->valid_add_to_map = true;
			else if (iter_class.first == PatchType::GUIDE
				&& patch->h() > config.patch_guide_min_h
				&& patch->h() < config.patch_guide_max_h
				&& patch->mean_metric(1) < config.patch_guide_max_dist)
				patch->valid_add_to_map = true;
			else if (patch->line_valid)
				patch->valid_add_to_map = true;

		}
	}
	return 0;
}
/// @brief Compute the height of the patch (bounding box)
/// @return 
double RoadInstancePatch::h() const
{
	return (b_point_metric[2] - b_point_metric[1]).norm();
}
/// @brief Compute the width of the patch (bounding box)
/// @return 
double RoadInstancePatch::w() const
{
	return (b_point_metric[1] - b_point_metric[0]).norm();
}
/// @brief Compute the direction of the patch according to its type
/// @return 
Eigen::Vector3d RoadInstancePatch::d() const
{
		if (road_class == PatchType::DASHED || road_class == PatchType::GUIDE) 
			return (b_point_metric[2] - b_point_metric[1]).normalized();
		else if (road_class == PatchType::SOLID || road_class == PatchType::STOP)
		{
			assert(line_points_metric.size() >= 2);
			return (line_points_metric.back() - line_points_metric.front()).normalized();
		}
		else
			throw exception();
}
/// @brief Read the camera- and sensor-related configuration parameters from a file
/// @param path file path
SensorConfig::SensorConfig(string path)
{
	gv::CameraConfig conf;

	cv::FileStorage fs_config(path, cv::FileStorage::READ);

	conf.IPM_HEIGHT = (int)fs_config["IPM_HEIGHT"];
	conf.IPM_WIDTH = (int)fs_config["IPM_WIDTH"];
	conf.IPM_RESO = (double)fs_config["IPM_RESO"];
	double t_start = (double)fs_config["t_start"];
	double t_end = (double)fs_config["t_end"];
	conf.RAW_RESIZE = 1;
	int pn = path.find_last_of('/');
	std::string configPath = path.substr(0, pn);

	conf.camera = camodocal::CameraFactory::instance()->generateCameraFromYamlFile(configPath + '/' + (string)fs_config["cam0_calib"]);

	conf.cg = gv::CameraGroundGeometry((double)fs_config["cg_alpha"], (double)fs_config["cg_theta"], (double)fs_config["cg_h"]);

	cv::Mat cv_T;
	fs_config["body_T_cam0"] >> cv_T;
	Eigen::Matrix4d Tic;
	cv::cv2eigen(cv_T, Tic);
	conf.Ric = Tic.block<3, 3>(0, 0);
	conf.tic = Tic.block<3, 1>(0, 3);
	conf.Tic.setIdentity();
	conf.Tic.block<3, 3>(0, 0) = conf.Ric;
	conf.Tic.block<3, 1>(0, 3) = conf.tic;
	this->cam = conf;
	this->pose_smooth_window = (int)fs_config["pose_smooth_window"];
	this->need_smooth = (bool)(int)fs_config["need_smooth"];
	this->large_slope_thresold = (double)fs_config["large_slope_thresold"];

	this->patch_min_size = (int)fs_config["patch.min_size"];
	this->patch_dashed_min_h = (double)fs_config["patch.dashed_min_h"];
	this->patch_dashed_max_h = (double)fs_config["patch.dashed_max_h"];
	this->patch_dashed_max_dist = (double)fs_config["patch.dashed_max_dist"];
	this->patch_guide_min_h = (double)fs_config["patch.guide_min_h"];
	this->patch_guide_max_h = (double)fs_config["patch.guide_max_h"];
	this->patch_guide_max_dist = (double)fs_config["patch.guide_max_dist"];
	this->patch_solid_max_dist = (double)fs_config["patch.solid_max_dist"];
	this->patch_stop_max_dist = (double)fs_config["patch.stop_max_dist"];

	this->mapping_step = (int)fs_config["mapping.step"];
	this->mapping_patch_freeze_distance = (double)fs_config["mapping.patch_freeze_distance"];
	this->mapping_line_freeze_distance = (double)fs_config["mapping.line_freeze_distance"];
	this->mapping_line_freeze_max_length = (double)fs_config["mapping.line_freeze_max_length"];
	this->mapping_line_cluster_max_dist = (double)fs_config["mapping.line_cluster_max_dist"];
	this->mapping_line_cluster_max_across_dist1 = (double)fs_config["mapping.line_cluster_max_across_dist1"];
	this->mapping_line_cluster_max_across_dist2 = (double)fs_config["mapping.line_cluster_max_across_dist2"];
	this->mapping_line_cluster_max_theta = (double)fs_config["mapping.line_cluster_max_theta"];

	this->localization_every_n_frames = (int)fs_config["localization.every_n_frames"];
	this->localization_force_last_n_frames = (int)fs_config["localization.force_last_n_frames"];
	this->localization_max_windowsize = (int)fs_config["localization.max_windowsize"];
	this->localization_min_keyframe_dist = (int)fs_config["localization.min_keyframe_dist"];
	this->localization_max_strict_match_dist = (int)fs_config["localization.max_strict_match_dist"];
	this->localization_solid_sample_interval = (int)fs_config["localization.solid_sample_interval"];

	this->enable_vis_image = (bool)(int)fs_config["enable_vis_image"];
	this->enable_vis_3d = (bool)(int)fs_config["enable_vis_3d"];

	this->t_start = (double)fs_config["t_start"];
	this->t_end = (double)fs_config["t_end"];

}

3. RoadLib: roadlib_optim.cpp

This file implements two main functions: fitting 3D point-cloud data to a 2D curve, and merging a cluster of line segments into a single line. The core content can be summarized as follows:

Fitting a point cloud to a 2D curve (PointCloud2Curve2D)

This function fits 3D point-cloud data to a 2D polynomial curve. The steps are:

  1. Initialize the matrices: build a matrix U and a vector Y, where each row of U contains the powers of one point's y coordinate and Y holds the target values (the points' x coordinates).
  2. Fill in the data: iterate over the input 3D points, writing the powers of each point's y coordinate into U and its x coordinate into Y.
  3. Least-squares solve: solve for the coefficient vector K such that U * K ≈ Y.
  4. Compute the residual: compute the fitting error; return success if it is below the threshold, failure otherwise. (A standalone sketch of this fit follows this list.)
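The following self-contained Eigen sketch reproduces the same polynomial fit on synthetic data. It is illustrative only; a QR solve is used here instead of the explicit normal-equation inverse, which is numerically more robust.

// Fit x = k0 + k1*y + k2*y^2 + k3*y^3 to synthetic (x, y, 1) points, mirroring PointCloud2Curve2D.
#include <Eigen/Dense>
#include <iostream>
#include <vector>
#include <cmath>

int main()
{
	const int dim = 4; // number of polynomial coefficients
	std::vector<Eigen::Vector3d> points;
	for (double y = 0.0; y <= 10.0; y += 0.5)
		points.push_back(Eigen::Vector3d(0.1 * y * y - 0.5 * y + 2.0, y, 1.0));

	Eigen::MatrixXd U = Eigen::MatrixXd::Zero(points.size(), dim);
	Eigen::VectorXd Y = Eigen::VectorXd::Zero(points.size());
	for (int i = 0; i < (int)points.size(); i++)
	{
		for (int j = 0; j < dim; j++) U(i, j) = std::pow(points[i].y(), j);
		Y(i) = points[i].x();
	}
	// Solve U*K ≈ Y in the least-squares sense.
	Eigen::VectorXd K = U.colPivHouseholderQr().solve(Y);
	double rms = std::sqrt((U * K - Y).squaredNorm() / points.size());
	std::cout << "coefficients: " << K.transpose() << "\nRMS residual: " << rms << std::endl;
	return 0;
}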
Merging a line-segment cluster into a single line (LineCluster2SingleLine)

This function merges several line segments into one line. The steps are:

  1. Pre-processing: remove empty segments from the input.
  2. Mean and covariance of all points: gather the points of all segments into one vector and compute their mean and covariance.
  3. Eigenvalue decomposition: decompose the covariance matrix to obtain the principal direction.
  4. Coordinate transform: transform all points into a new coordinate system so that the principal direction is aligned with the vehicle heading.
  5. Polyline fitting: in the new coordinate system, fit a parameterized polyline; the optimization accounts for the point-to-segment distances and a continuity constraint and yields the estimated line. (The normal equations being solved are sketched after this list.)
  6. Covariance computation: compute the uncertainty covariance of the line points using a sparse matrix and a linear solver.
  7. Final line instance: add the fitted line points and their uncertainties to the output line instance and compute the line's mean.
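As referenced in step 5, each iteration in the code below assembles weighted normal equations over the stacked polyline node coordinates and applies a Gauss-Newton style update; in the notation of the variables BtPB, BtPl and dy used in LineCluster2SingleLine:

BtPB * dy = BtPl,   then   y_est <- y_est - dy

Here the (implicit) design matrix B stacks the Jacobians of the point-to-segment residuals, weighted by the point uncertainties, plus the second-difference continuity constraints; the final covariance of the line points is obtained by inverting the observation-only part BtPB_obs.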
#include "roadlib.h"
#include <ceres/ceres.h>
#include <Eigen/Sparse>
/// @brief Fit the given 3D point-cloud data to a 2D polynomial curve
/// @param points input 3D point cloud
/// @param dim number of polynomial coefficients (fit order + 1)
/// @param K fitted coefficients, of type VectorXd&
/// @return 
int PointCloud2Curve2D(const vector<Vector3d, Eigen::aligned_allocator<Eigen::Vector3d>>& points, int dim, VectorXd& K)
{
	MatrixXd U = MatrixXd::Zero(points.size(), dim);//U是一个矩阵,每行代表一个点在不同维度上的幂次方值
	VectorXd Y = VectorXd::Zero(points.size());//Y是目标值(即x坐标)
	for (int i = 0; i < points.size(); i++)
	{
		for (int j = 0; j < dim; j++)
			U(i, j) = pow(points[i].y(), j);
		Y(i) = points[i].x();
	}
	K = (U.transpose() * U).inverse() * U.transpose() * Y;//使用最小二乘法求解系数K
	double res = sqrt(pow((U * K - Y).norm(), 2) / points.size());//得到拟合的误差
	if (res < 5) return 0;
	else return -1;
}
/// @brief  Merge a cluster of line segments into a single line
/// @param road_class road-marking class
/// @param lines_in input line segments
/// @param line_est output estimated line
/// @param Rwv rotation matrix (used to obtain the vehicle yaw)
/// @return 
int LineCluster2SingleLine(const PatchType road_class, const vector<shared_ptr<RoadInstancePatch>>& lines_in, shared_ptr<RoadInstancePatch>& line_est, Eigen::Matrix3d Rwv)
{
	vector<RoadInstancePatch> lines;
    //对输入的线段进行预处理,去除空线段
	for (int i = 0; i < lines_in.size(); i++)
	{
		if (lines_in[i]->line_points_metric.size() < 1) continue;
		lines.push_back(*lines_in[i]);
	}
	if (lines_in.size() == 1)
	{
		line_est = lines_in.front();
		return 0;
	}
	if (lines_in.size() == 0)
		return -1;
	vector<Vector3d> all_pts;
	Eigen::Vector3d vector_temp;
	Eigen::Vector3d all_mean; all_mean.setZero();
	Eigen::Matrix3d all_cov; all_cov.setZero();
    //将所有的点放入一个向量中
	for (auto& line_seg : lines)
	{
		for (int i = 0; i < line_seg.line_points_metric.size(); i++)
		{
			all_pts.push_back(line_seg.line_points_metric[i]);
		}
	}
	//if (merge_mode == 0 && all_pts.size() < 10) return -1;
	for (int j = 0; j < all_pts.size(); j++)
	{
		all_mean += all_pts[j];
	}
	all_mean /= all_pts.size();//计算所有点的均值

	for (int j = 0; j < all_pts.size(); j++)
	{
		vector_temp = all_pts[j] - all_mean;//计算所有点相对于均值的向量
		all_cov += vector_temp * vector_temp.transpose();//累加所有点的协方差
	}
	all_cov /= all_pts.size();

	double eigen_value[3];
	Eigen::Vector3d direction;
	JacobiSVD<Eigen::MatrixXd> svd(all_cov, ComputeThinU | ComputeThinV);
	Matrix3d V = svd.matrixV(), U = svd.matrixU();
	Matrix3d S = U.inverse() * all_cov * V.transpose().inverse();
	eigen_value[0] = sqrt(S(0, 0));
	eigen_value[1] = sqrt(S(1, 1));
	eigen_value[2] = sqrt(S(2, 2));
	direction = U.block(0, 0, 3, 1);
	double yaw = m2att(Rwv).z() * 180 / M_PI+90;//获取yaw角度
	double direction_angle = atan2(direction(1), direction(0));// 通过特征值分解获取主方向
	if (fabs(yaw - direction_angle * 180 / M_PI) < 90) {}
	else
	{
		direction_angle += M_PI;
	}
	Matrix3d direction_rotation;
	direction_rotation << cos(direction_angle), sin(direction_angle), 0,
		-sin(direction_angle), cos(direction_angle), 0,
		0, 0, 1;

	//std::cerr << direction_rotation << std::endl;
	bool is_ref_set = false;
	Vector3d point_ref;
	for (auto& line_seg : lines)
	{
		for (int i = 0; i < line_seg.line_points_metric.size(); i++)
		{
			if (!is_ref_set)
			{
				point_ref = line_seg.line_points_metric[i];
				is_ref_set = true;
			}
			line_seg.line_points_metric[i] = direction_rotation * (line_seg.line_points_metric[i] - point_ref);//坐标变换,使得主方向与车辆航向角对齐
		}
	}

	if (isnan(point_ref.x()))
	{
		std::cerr << "NaN!!" << std::endl;
		return -1;
	}
	double min_x = 1e9;
	double max_x = -1e9;

    //计算所有点的x坐标的最大值和最小值
	for (auto& line_seg : lines)
	{
		for (int i = 0; i < line_seg.line_points_metric.size(); i++)
		{
			//std::cerr << line_seg.line_points_metric[i].x() << " ";
			if (line_seg.line_points_metric[i].x() < min_x) min_x = line_seg.line_points_metric[i].x();
			if (line_seg.line_points_metric[i].x() > max_x) max_x = line_seg.line_points_metric[i].x();
		}
	}
    //计算x坐标的估计值
	VectorXd x_est((int)ceil(max_x) - (int)floor(min_x) + 1);
	for (int i = 0; i < x_est.rows(); i++)
	{
		x_est(i) = min_x + i;
	}
	//x_est(0) = min_x;
	x_est(x_est.rows() - 1) = max_x;
	int nq = x_est.rows();//nq为x_est的行数
	//std::cerr << x_est << std::endl;

	VectorXd y_est = VectorXd::Zero(nq * 2);
	MatrixXd BtPB = MatrixXd::Zero(nq * 2, nq * 2);
	VectorXd BtPl = MatrixXd::Zero(nq * 2, 1);
	MatrixXd BtPB_obs;
	//开始迭代优化,在新坐标系下,对线段进行参数化拟合,优化过程中考虑点之间的距离和连续性。
	for (int i_iter = 0; i_iter < 5; i_iter++)
	{
		SparseMatrix<double> BtPBs(nq * 2, nq * 2);
		SparseMatrix<double> BtPls(nq * 2, 1);
		BtPB = MatrixXd::Zero(nq * 2, nq * 2);//BtPB为法方程矩阵,每次迭代前重新清零
		BtPl = MatrixXd::Zero(nq * 2, 1);

		// Line segments (weighted)
		double x, y, z; double xn, yn, zn; double x0, y0, z0; double x1, y1, z1;
		for (int i = 0; i < lines.size(); i++)//对每个线段进行处理
		{
			// Point distance (y-axis direction)
			for (int j = 0; j < lines[i].line_points_metric.size(); j++)//对每个线段的每个点进行处理
			{
				x = lines[i].line_points_metric[j].x();
				y = lines[i].line_points_metric[j].y();
				z = lines[i].line_points_metric[j].z();
				int seg_index = -1;
				for (int m = 0; m < nq - 1; m++) if (x_est(m) < x + 1e-3 && x_est(m + 1) > x - 1e-3) seg_index = m;//找到点所在的线段,需要确保点的平滑性

				x0 = x_est(seg_index); y0 = y_est(seg_index); z0 = y_est(seg_index + nq);//x0,y0,z0为线段的起点
				x1 = x_est(seg_index + 1); y1 = y_est(seg_index + 1); z1 = y_est(seg_index + 1 + nq);//x1,y1,z1为线段的终点
				Eigen::VectorXd l = Eigen::VectorXd::Zero(2); Eigen::MatrixXd B = Eigen::MatrixXd::Zero(2, nq * 2);

				l(0) = y - (y0 + (y1 - y0) / (x1 - x0) * (x - x0));//l为点到线段在y轴上的投影距离
				l(1) = z - (z0 + (z1 - z0) / (x1 - x0) * (x - x0));//l为点到线段在z轴上的投影距离
				B(0, seg_index) = -(1 - (x - x0) / (x1 - x0)); B(0, seg_index + 1) = -(x - x0) / (x1 - x0);//用于存储关于线段的信息
				B(1, seg_index + nq) = -(1 - (x - x0) / (x1 - x0)); B(1, seg_index + 1 + nq) = -(x - x0) / (x1 - x0);

				Eigen::MatrixXd BB0(1,2),BB1(1,2);
				BB0 << -(1 - (x - x0) / (x1 - x0)), -(x - x0) / (x1 - x0);//存储了与B相同的数值
				BB1 << -(1 - (x - x0) / (x1 - x0)), -(x - x0) / (x1 - x0);

				double point_error = sqrt(lines[i].line_points_uncertainty[j](0, 0) + lines[i].line_points_uncertainty[j](1, 1)) / sqrt(2.0);//计算点的误差

				Eigen::MatrixXd P = Eigen::MatrixXd::Identity(2, 2) / pow(point_error, 2);
				//BtPB = BtPB + B.transpose() * P * B; BtPl = BtPl + B.transpose() * P * l;
				BtPB.block(seg_index, seg_index, 2, 2) += BB0.transpose()*BB0 / pow(point_error, 2);//计算BtPB,更新雅可比矩阵中的特定块
				BtPB.block(seg_index+nq, seg_index + nq, 2, 2) += BB1.transpose()*BB1 / pow(point_error, 2);
				BtPl.segment(seg_index, 2) += BB0.transpose() * l(0) / pow(point_error, 2);//更新梯度向量中的特定部分
				BtPl.segment(seg_index+nq, 2) += BB1.transpose() * l(1) / pow(point_error, 2);
			}
			BtPB_obs = BtPB;

		}

		//连续性约束
		for (int i = 0; i < nq - 2; i++)
		{
			Eigen::VectorXd l = Eigen::VectorXd::Zero(2); Eigen::MatrixXd B = Eigen::MatrixXd::Zero(2, nq * 2);
			l(0, 0) = (y_est(i + 1) - y_est(i)) - (y_est(i + 2) - y_est(i + 1));//l为点到线段在y轴上的投影距离
			l(1, 0) = (y_est(nq + i + 1) - y_est(nq + i)) - (y_est(nq + i + 2) - y_est(nq + i + 1));//l为点到线段在z轴上的投影距离

			B(0, i) = -1; B(0, i + 1) = 2; B(0, i + 2) = -1;
			B(1, nq + i) = -1; B(1, nq + i + 1) = 2; B(1, nq + i + 2) = -1;

			Eigen::MatrixXd BB0 = Eigen::MatrixXd(1, 3); BB0 << -1, 2, -1;//存储了与B相同的数值
			Eigen::MatrixXd BB1 = Eigen::MatrixXd(1, 3); BB1 << -1, 2, -1;

			Eigen::MatrixXd P;
			//Eigen::MatrixXd P = Eigen::MatrixXd::Identity(2, 2)/0.04;
			if (road_class == PatchType::SOLID)
				P = Eigen::MatrixXd::Identity(2, 2) / 0.04;
			else if (road_class == PatchType::STOP)
				P = Eigen::MatrixXd::Identity(2, 2) / (0.01 * 0.01);
			//BtPB = BtPB + B.transpose() * P * B; BtPl = BtPl + B.transpose() * P * l;

			BtPB.block(i, i, 3, 3) += BB0.transpose() * P(0,0) * BB0;//计算BtPB,更新雅可比矩阵中的特定块
			BtPB.block(nq+i, nq+i, 3, 3) += BB1.transpose() *P(1,1) * BB1;
			BtPl.segment(i, 3) += BB0.transpose() * P(0, 0) * l(0);
			BtPl.segment(nq + i, 3) += BB1.transpose() * P(1, 1) * l(1);
		}

		BtPBs = BtPB.sparseView();//将BtPB转换为稀疏矩阵
		BtPls = BtPl.sparseView();
		SimplicialLDLT< SparseMatrix<double>>solver;
		solver.compute(BtPBs);
		Eigen::VectorXd dy = solver.solve(BtPls);//求解线性方程组
		
		y_est = y_est - dy;//更新y_est
	}

	SparseMatrix<double> BtPBs_obs = (BtPB_obs + MatrixXd::Identity(nq * 2, nq * 2) * 0.00001).sparseView();//将BtPB_obs转换为稀疏矩阵
	SimplicialLDLT< SparseMatrix<double>>solver;
	solver.compute(BtPBs_obs);//求解线性方程组
	SparseMatrix<double> I(BtPB_obs.rows(), BtPB_obs.rows());
	I.setIdentity();

	MatrixXd Cov = solver.solve(I).toDense();

	line_est = make_shared<RoadInstancePatch>();
	line_est->road_class = lines_in[0]->road_class;

	int i_start = 0;
	int i_end = nq - 1;
	for (int i = i_start; i <= i_end; i++)
	{
		if (i >= i_start + (i_end-i_start)/2 && Cov(i, i) > 0.35) break;
		line_est->line_points_metric.push_back(direction_rotation.transpose() * Vector3d(x_est(i), y_est(i), y_est(i + nq)) + point_ref);//将点放入line_est中
		line_est->line_points_uncertainty.push_back((Eigen::Matrix3d() << Cov(i, i), 0, 0, 0, Cov(i, i), 0, 0, 0, 1).finished());
	}
	if (line_est->line_points_metric.size() <= 3)
		return -1;
	for (int iii = 0; iii < line_est->line_points_metric.size(); iii++)
	{
		if (isnan(line_est->line_points_metric[iii].x()))
			return -1;
	}

	line_est->valid_add_to_map = true;
	line_est->line_valid = true;
	line_est->merged = true;
	line_est->mean_metric.setZero();
	vector_temp.setZero();

	for (int j = 0; j < line_est->line_points_metric.size(); j++)
	{
		line_est->mean_metric += line_est->line_points_metric[j];
	}
	line_est->mean_metric /= line_est->line_points_metric.size();

	return 0;

}

4. RoadLib: roadlib_map.cpp

This file contains the core algorithms of the project. It implements the management of road instance patches: adding new frame data, cleaning the map, saving and loading patch data, building a KD-tree for fast nearest-neighbor search, patch matching, patch merging, and geo-registration. The core content is summarized as follows:

… For the full walkthrough of roadlib_map.cpp, please refer to the original article on Guyuehome (古月居).
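Since that walkthrough is elided here, below is a small, hypothetical sketch of the kind of KD-tree nearest-neighbor query that buildKDTree/mapMatch rely on, using the cv::flann header already included by roadlib.h. The 2D point layout is an assumption for illustration, not the project's actual descriptor or matching logic.

// Hypothetical KD-tree matching sketch with cv::flann (not the project's actual implementation).
#include <opencv2/opencv.hpp>
#include <opencv2/flann.hpp>
#include <iostream>

int main()
{
	// Map-side patch centers (x, y), one row per patch; cv::flann expects CV_32F data.
	cv::Mat map_points = (cv::Mat_<float>(4, 2) << 0.f, 0.f,
	                                               5.f, 0.f,
	                                               0.f, 5.f,
	                                               5.f, 5.f);
	cv::flann::Index kdtree(map_points, cv::flann::KDTreeIndexParams(4));

	// Query: a patch center observed in the current frame.
	cv::Mat query = (cv::Mat_<float>(1, 2) << 4.6f, 0.3f);
	cv::Mat indices, dists;
	kdtree.knnSearch(query, indices, dists, /*knn=*/1, cv::flann::SearchParams(32));

	// For the default L2 metric, dists holds squared distances.
	std::cout << "nearest map patch: " << indices.at<int>(0, 0)
	          << ", squared distance: " << dists.at<float>(0, 0) << std::endl;
	return 0;
}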
