LIO-SAM Source Code Reading -- mapOptmization.cpp (4)

References:

LIO-SAM源码阅读与分析(5)--mapOptmization.cpp - 知乎

LIO-SAM源码解析(四):MapOptimization - 知乎

https://blog.csdn.net/qq_32761549/category_10320716.html?spm=1001.2014.3001.5482

LIO-SAM框架:点云匹配前戏之初值计算及局部地图构建_月照银海似蛟龙的博客-CSDN博客_transformtobemapped

LIO-SAM框架:点云配准之角点面点的残差及梯度构建_月照银海似蛟龙的博客-CSDN博客_点云梯度

lio-sam框架:点云匹配之手写高斯牛顿下降优化求状态量更新_月照银海似蛟龙的博客-CSDN博客

lio-sam框架:回环检测及位姿计算_月照银海似蛟龙的博客-CSDN博客

lio-sam框架:后端里程计、回环、gps融合_月照银海似蛟龙的博客-CSDN博客

#include "utility.h"
#include "lio_sam/cloud_info.h"
#include "lio_sam/save_map.h"   // ROS 服务?

#include <gtsam/geometry/Rot3.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/navigation/GPSFactor.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/navigation/CombinedImuFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/inference/Symbol.h>

#include <gtsam/nonlinear/ISAM2.h>

using namespace gtsam;

using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
using symbol_shorthand::V; // Vel   (xdot,ydot,zdot)
using symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)
using symbol_shorthand::G; // GPS pose

/*
    * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is time stamp)
    */
struct PointXYZIRPYT
{
    PCL_ADD_POINT4D
    PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding
    float roll;
    float pitch;
    float yaw;
    double time;
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW   // make sure our new allocators are aligned
} EIGEN_ALIGN16;                    // enforce SSE padding for correct memory alignment

POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,
                                   (float, x, x) (float, y, y)
                                   (float, z, z) (float, intensity, intensity)
                                   (float, roll, roll) (float, pitch, pitch) (float, yaw, yaw)
                                   (double, time, time))

typedef PointXYZIRPYT  PointTypePose;


class mapOptimization : public ParamServer
{

public:

    // gtsam
    NonlinearFactorGraph gtSAMgraph;
    Values initialEstimate;
    Values optimizedEstimate;
    ISAM2 *isam;
    Values isamCurrentEstimate;
    Eigen::MatrixXd poseCovariance; // pose covariance matrix

    ros::Publisher pubLaserCloudSurround;
    ros::Publisher pubLaserOdometryGlobal;
    ros::Publisher pubLaserOdometryIncremental;
    ros::Publisher pubKeyPoses;
    ros::Publisher pubPath;

    ros::Publisher pubHistoryKeyFrames;
    ros::Publisher pubIcpKeyFrames;
    ros::Publisher pubRecentKeyFrames;
    ros::Publisher pubRecentKeyFrame;
    ros::Publisher pubCloudRegisteredRaw;
    ros::Publisher pubLoopConstraintEdge;  // visualization of loop-closure constraint edges

    ros::Publisher pubSLAMInfo;

    ros::Subscriber subCloud;
    ros::Subscriber subGPS;
    ros::Subscriber subLoop;  // effectively unused: no external loop-closure detector is provided

    ros::ServiceServer srvSaveMap;   // service for saving the map

    std::deque<nav_msgs::Odometry> gpsQueue;
    lio_sam::cloud_info cloudInfo;

    // corner-point clouds of all historical key frames (downsampled)
    vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;
    // surface-point clouds of all historical key frames (downsampled)
    vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;

    /* 3D (position only) and 6D (full pose) key-frame poses */
    pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;  // positions (x, y, z) of all historical key frames
    pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D; // full poses (x, y, z, roll, pitch, yaw) of all historical key frames
    pcl::PointCloud<PointType>::Ptr copy_cloudKeyPoses3D;
    pcl::PointCloud<PointTypePose>::Ptr copy_cloudKeyPoses6D;

    pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization (current frame corner points)
    pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization (current frame surface points)
    pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner feature set from odoOptimization
    pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf feature set from odoOptimization

    // corner/surface points of the current frame that were matched against the local map, collected into one cloud;
    // coeffSel holds the corresponding residual direction and magnitude for each point
    pcl::PointCloud<PointType>::Ptr laserCloudOri;
    pcl::PointCloud<PointType>::Ptr coeffSel;


    std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation
    std::vector<PointType> coeffSelCornerVec;
    std::vector<bool> laserCloudOriCornerFlag;

    std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation
    std::vector<PointType> coeffSelSurfVec;
    std::vector<bool> laserCloudOriSurfFlag;
    
    // cache of transformed key-frame clouds: key = key-frame index, value.first = corner cloud, value.second = surface cloud
    map<int, pair<pcl::PointCloud<PointType>, pcl::PointCloud<PointType>>> laserCloudMapContainer;
    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap; // corner points of the local map
    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;   // surface points of the local map

    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;  // corner points of the local map, downsampled
    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;    // surface points of the local map, downsampled

    // kd-trees built on the local map assembled from surrounding key frames, used by scan-to-map matching to find neighbours
    pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;
    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;

    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;
    pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;

    // voxel-grid downsampling filters
    pcl::VoxelGrid<PointType> downSizeFilterCorner;
    pcl::VoxelGrid<PointType> downSizeFilterSurf;
    pcl::VoxelGrid<PointType> downSizeFilterICP;   // used to downsample the clouds fed to loop-closure ICP (see loopFindNearKeyframes)
    pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization

    // time stamp of the current cloud info, recorded in the laserCloudInfoHandler callback
    ros::Time timeLaserInfoStamp;  // stamp
    double timeLaserInfoCur;    // stamp.toSec()

    // pose of the current frame: six values stored as Euler angles roll, pitch, yaw followed by position x, y, z
    float transformTobeMapped[6];

    std::mutex mtx;
    std::mutex mtxLoopInfo;

    bool isDegenerate = false;
    cv::Mat matP;

    int laserCloudCornerFromMapDSNum = 0;  // number of corner points in the local map (after downsampling)
    int laserCloudSurfFromMapDSNum = 0;    // number of surface points in the local map (after downsampling)

    int laserCloudCornerLastDSNum = 0;     // number of corner points in the current frame (after downsampling)
    int laserCloudSurfLastDSNum = 0;       // number of surface points in the current frame (after downsampling)

    bool aLoopIsClosed = false;
    map<int, int> loopIndexContainer; // from new to old
    vector<pair<int, int>> loopIndexQueue;
    vector<gtsam::Pose3> loopPoseQueue;
    vector<gtsam::noiseModel::Diagonal::shared_ptr> loopNoiseQueue;
    deque<std_msgs::Float64MultiArray> loopInfoVec;

    nav_msgs::Path globalPath;

    Eigen::Affine3f transPointAssociateToMap;
    Eigen::Affine3f incrementalOdometryAffineFront;
    Eigen::Affine3f incrementalOdometryAffineBack;


    mapOptimization()
    {
        // iSAM2 parameters
        ISAM2Params parameters;
        parameters.relinearizeThreshold = 0.1;
        parameters.relinearizeSkip = 1;
        isam = new ISAM2(parameters);

        // publish the historical key-frame poses (trajectory)
        pubKeyPoses                 = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectory", 1);
        // publish the feature cloud of the local key-frame map
        pubLaserCloudSurround       = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_global", 1);
        // publish the laser odometry, shown in rviz as a coordinate axis
        pubLaserOdometryGlobal      = nh.advertise<nav_msgs::Odometry> ("lio_sam/mapping/odometry", 1);
        // publish the laser odometry; essentially the same as the one above, except that roll/pitch are blended with the imu data and z is constrained
        pubLaserOdometryIncremental = nh.advertise<nav_msgs::Odometry> ("lio_sam/mapping/odometry_incremental", 1);
        // publish the laser odometry path, shown in rviz as the trajectory of the platform
        pubPath                     = nh.advertise<nav_msgs::Path>("lio_sam/mapping/path", 1);

        // subscribe to the current laser frame's cloud info, published by featureExtraction
        subCloud = nh.subscribe<lio_sam::cloud_info>("lio_sam/feature/cloud_info", 1, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());
        // subscribe to the GPS odometry
        subGPS   = nh.subscribe<nav_msgs::Odometry> (gpsTopic, 200, &mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay());
        // subscribe to loop-closure data from an external loop-closure detector; none is provided with this package, so this is effectively unused
        subLoop  = nh.subscribe<std_msgs::Float64MultiArray>("lio_loop/loop_closure_detection", 1, &mapOptimization::loopInfoHandler, this, ros::TransportHints().tcpNoDelay());

        // advertise the map-saving service
        srvSaveMap  = nh.advertiseService("lio_sam/save_map", &mapOptimization::saveMapService, this);

        // publish the local map around the loop-closure candidate key frame (used for ICP matching)
        pubHistoryKeyFrames   = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_history_cloud", 1);
        // publish the current key frame's feature cloud after applying the loop-closure corrected pose
        pubIcpKeyFrames       = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_corrected_cloud", 1);
        // publish loop-closure edges, shown in rviz as lines between the looped frames
        pubLoopConstraintEdge = nh.advertise<visualization_msgs::MarkerArray>("/lio_sam/mapping/loop_closure_constraints", 1);

        // publish the downsampled surface points of the local map
        pubRecentKeyFrames    = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_local", 1);
        // publish the current key frame's downsampled corner and surface points, registered to the map frame
        pubRecentKeyFrame     = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered", 1);
        // publish the current frame's raw (deskewed) point cloud after registration
        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered_raw", 1);

        pubSLAMInfo           = nh.advertise<lio_sam::cloud_info>("lio_sam/mapping/slam_info", 1);

        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
        downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization

        allocateMemory();
    }

    void allocateMemory()
    {
        cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
        cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
        copy_cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
        copy_cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());

        kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
        kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());

        laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization
        laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization
        laserCloudCornerLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled corner featuer set from odoOptimization
        laserCloudSurfLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled surf featuer set from odoOptimization

        laserCloudOri.reset(new pcl::PointCloud<PointType>());
        coeffSel.reset(new pcl::PointCloud<PointType>());

        laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);
        coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);
        laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);
        laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);
        coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);
        laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);

        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false); // initial value: false
        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);

        laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());
        laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());
        laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());
        laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());

        kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());
        kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());

        for (int i = 0; i < 6; ++i){
            transformTobeMapped[i] = 0;
        }

        matP = cv::Mat(6, 6, CV_32F, cv::Scalar::all(0));
    }

    /**
     * Callback for the current laser frame's cloud info, published by featureExtraction
     */
    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
    {
        // extract time stamp
        timeLaserInfoStamp = msgIn->header.stamp;
        timeLaserInfoCur = msgIn->header.stamp.toSec();

        // extract info and feature cloud
        cloudInfo = *msgIn;
        pcl::fromROSMsg(msgIn->cloud_corner,  *laserCloudCornerLast);
        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);

        std::lock_guard<std::mutex> lock(mtx);

        static double timeLastProcessing = -1;
        if (timeLaserInfoCur - timeLastProcessing >= mappingProcessInterval)  // mappingProcessInterval defaults to 0.15 s
        {
            timeLastProcessing = timeLaserInfoCur;

            
            // 1. initialize the pose estimate of the current frame
            updateInitialGuess();
            // 2. extract the surrounding key frames' corner/surface clouds and build the local map
            extractSurroundingKeyFrames();
            // 3. downsample the current frame's corner and surface point sets
            downsampleCurrentScan();
            // 4. scan-to-map optimization of the current frame's pose
            /**
             * Steps 1 and 2 above give the current laser frame an initial pose estimate, which is used to project it
             * into the map frame and to build the local map around it.
             * With those two in place, the corner and surface residuals and their gradient directions can be constructed
             *            [the residual is the point-to-line / point-to-plane distance, and the gradient is the direction in which that distance shrinks]
             */
            scan2MapOptimization();
            // 5. save the current frame as a key frame and run the factor-graph optimization
            saveKeyFramesAndFactor();
            // 6. update the poses of all variable nodes in the factor graph, i.e. all historical key-frame poses, and update the odometry trajectory
            correctPoses();
            // 7. publish the laser odometry
            publishOdometry();
            // 8. publish odometry, point clouds, and the path
            publishFrames();
        }
    }

    void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)
    {
        gpsQueue.push_back(*gpsMsg);
    }

    /**
     * Transform a point into the map frame using transPointAssociateToMap
     */
    void pointAssociateToMap(PointType const * const pi, PointType * const po)
    {
        // the transform matrix is computed in updatePointAssociateToMap()
        po->x = transPointAssociateToMap(0,0) * pi->x + transPointAssociateToMap(0,1) * pi->y + transPointAssociateToMap(0,2) * pi->z + transPointAssociateToMap(0,3);
        po->y = transPointAssociateToMap(1,0) * pi->x + transPointAssociateToMap(1,1) * pi->y + transPointAssociateToMap(1,2) * pi->z + transPointAssociateToMap(1,3);
        po->z = transPointAssociateToMap(2,0) * pi->x + transPointAssociateToMap(2,1) * pi->y + transPointAssociateToMap(2,2) * pi->z + transPointAssociateToMap(2,3);
        po->intensity = pi->intensity;
    }
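    // The loop above is just a hand-written affine transform; a minimal equivalent sketch using Eigen
    // (transPointAssociateToMap is an Eigen::Affine3f, so operator* applies rotation plus translation):
    //   Eigen::Vector3f p_map = transPointAssociateToMap * Eigen::Vector3f(pi->x, pi->y, pi->z);
    //   po->x = p_map.x(); po->y = p_map.y(); po->z = p_map.z(); po->intensity = pi->intensity;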

    pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose* transformIn)
    {
        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());

        int cloudSize = cloudIn->size();
        // cloudOut has the same size as cloudIn
        cloudOut->resize(cloudSize);
        // build the transform matrix from the key-frame pose
        Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z, transformIn->roll, transformIn->pitch, transformIn->yaw);
        
        #pragma omp parallel for num_threads(numberOfCores)
        for (int i = 0; i < cloudSize; ++i)
        {
            const auto &pointFrom = cloudIn->points[i];
            cloudOut->points[i].x = transCur(0,0) * pointFrom.x + transCur(0,1) * pointFrom.y + transCur(0,2) * pointFrom.z + transCur(0,3);
            cloudOut->points[i].y = transCur(1,0) * pointFrom.x + transCur(1,1) * pointFrom.y + transCur(1,2) * pointFrom.z + transCur(1,3);
            cloudOut->points[i].z = transCur(2,0) * pointFrom.x + transCur(2,1) * pointFrom.y + transCur(2,2) * pointFrom.z + transCur(2,3);
            cloudOut->points[i].intensity = pointFrom.intensity;
        }
        return cloudOut;
    }
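    // Note: a roughly equivalent single call would be pcl::transformPointCloud(*cloudIn, *cloudOut, transCur)
    // from <pcl/common/transforms.h>; the hand-written loop above is presumably kept so the per-point work
    // can be parallelised explicitly with numberOfCores OpenMP threads.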

    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)
    {
        return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),
                                  gtsam::Point3(double(thisPoint.x),    double(thisPoint.y),     double(thisPoint.z)));
    }

    gtsam::Pose3 trans2gtsamPose(float transformIn[])
    {
        return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]), 
                                  gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));
    }

    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)
    { 
        return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch, thisPoint.yaw);
    }

    /**
     * Eigen::Affine3f pcl::getTransformation(x, y, z, roll, pitch, yaw)
     * builds a transform matrix from a translation and Euler angles;
     * the parameter order is float x, float y, float z, float roll, float pitch, float yaw,
     * while transformTobeMapped stores the angles first: [roll, pitch, yaw, x, y, z]
     */
    Eigen::Affine3f trans2Affine3f(float transformIn[])
    {
        return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1], transformIn[2]);
    }

    PointTypePose trans2PointTypePose(float transformIn[])
    {
        PointTypePose thisPose6D;
        thisPose6D.x = transformIn[3];
        thisPose6D.y = transformIn[4];
        thisPose6D.z = transformIn[5];
        thisPose6D.roll  = transformIn[0];
        thisPose6D.pitch = transformIn[1];
        thisPose6D.yaw   = transformIn[2];
        return thisPose6D;
    }
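    // A minimal sketch (illustrative only) of how these conversion helpers relate: transformTobeMapped stores
    // [roll, pitch, yaw, x, y, z], so converting to an Eigen transform and back looks like
    //   Eigen::Affine3f T = trans2Affine3f(transformTobeMapped);
    //   float x, y, z, roll, pitch, yaw;
    //   pcl::getTranslationAndEulerAngles(T, x, y, z, roll, pitch, yaw);
    //   // x..z now match transformTobeMapped[3..5] and roll..yaw match [0..2] (up to floating-point precision)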

    













    bool saveMapService(lio_sam::save_mapRequest& req, lio_sam::save_mapResponse& res)
    {
      string saveMapDirectory;

      cout << "****************************************************" << endl;
      cout << "Saving map to pcd files ..." << endl;
      if(req.destination.empty()) saveMapDirectory = std::getenv("HOME") + savePCDDirectory;
      else saveMapDirectory = std::getenv("HOME") + req.destination;
      cout << "Save destination: " << saveMapDirectory << endl;
      // create directory and remove old files;
      int unused = system((std::string("exec rm -r ") + saveMapDirectory).c_str());
      unused = system((std::string("mkdir -p ") + saveMapDirectory).c_str());
      // save key frame transformations
      pcl::io::savePCDFileBinary(saveMapDirectory + "/trajectory.pcd", *cloudKeyPoses3D);
      pcl::io::savePCDFileBinary(saveMapDirectory + "/transformations.pcd", *cloudKeyPoses6D);
      // extract global point cloud map
      pcl::PointCloud<PointType>::Ptr globalCornerCloud(new pcl::PointCloud<PointType>());
      pcl::PointCloud<PointType>::Ptr globalCornerCloudDS(new pcl::PointCloud<PointType>());
      pcl::PointCloud<PointType>::Ptr globalSurfCloud(new pcl::PointCloud<PointType>());
      pcl::PointCloud<PointType>::Ptr globalSurfCloudDS(new pcl::PointCloud<PointType>());
      pcl::PointCloud<PointType>::Ptr globalMapCloud(new pcl::PointCloud<PointType>());
      for (int i = 0; i < (int)cloudKeyPoses3D->size(); i++) {
          *globalCornerCloud += *transformPointCloud(cornerCloudKeyFrames[i],  &cloudKeyPoses6D->points[i]);
          *globalSurfCloud   += *transformPointCloud(surfCloudKeyFrames[i],    &cloudKeyPoses6D->points[i]);
          cout << "\r" << std::flush << "Processing feature cloud " << i << " of " << cloudKeyPoses6D->size() << " ...";
      }

      if(req.resolution != 0)
      {
        cout << "\n\nSave resolution: " << req.resolution << endl;

        // down-sample and save corner cloud
        downSizeFilterCorner.setInputCloud(globalCornerCloud);
        downSizeFilterCorner.setLeafSize(req.resolution, req.resolution, req.resolution);
        downSizeFilterCorner.filter(*globalCornerCloudDS);
        pcl::io::savePCDFileBinary(saveMapDirectory + "/CornerMap.pcd", *globalCornerCloudDS);
        // down-sample and save surf cloud
        downSizeFilterSurf.setInputCloud(globalSurfCloud);
        downSizeFilterSurf.setLeafSize(req.resolution, req.resolution, req.resolution);
        downSizeFilterSurf.filter(*globalSurfCloudDS);
        pcl::io::savePCDFileBinary(saveMapDirectory + "/SurfMap.pcd", *globalSurfCloudDS);
      }
      else
      {
        // save corner cloud
        pcl::io::savePCDFileBinary(saveMapDirectory + "/CornerMap.pcd", *globalCornerCloud);
        // save surf cloud
        pcl::io::savePCDFileBinary(saveMapDirectory + "/SurfMap.pcd", *globalSurfCloud);
      }

      // save global point cloud map
      *globalMapCloud += *globalCornerCloud;
      *globalMapCloud += *globalSurfCloud;

      int ret = pcl::io::savePCDFileBinary(saveMapDirectory + "/GlobalMap.pcd", *globalMapCloud);
      res.success = ret == 0;

      downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
      downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);

      cout << "****************************************************" << endl;
      cout << "Saving map to pcd files completed\n" << endl;

      return true;
    }
    /**
     * Thread that periodically publishes the global map for visualization
     */
    void visualizeGlobalMapThread()
    {
        ros::Rate rate(0.2);
        while (ros::ok()){
            rate.sleep();
            publishGlobalMap();
        }

        if (savePCD == false)
            return;

        lio_sam::save_mapRequest  req;
        lio_sam::save_mapResponse res;

        if(!saveMapService(req, res)){
            cout << "Fail to save map" << endl;
        }
    }

    void publishGlobalMap()
    {
        if (pubLaserCloudSurround.getNumSubscribers() == 0)
            return;

        if (cloudKeyPoses3D->points.empty() == true)
            return;

        pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());;
        pcl::PointCloud<PointType>::Ptr globalMapKeyPoses(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalMapKeyPosesDS(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalMapKeyFrames(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr globalMapKeyFramesDS(new pcl::PointCloud<PointType>());

        // kd-tree to find near key frames to visualize
        std::vector<int> pointSearchIndGlobalMap;
        std::vector<float> pointSearchSqDisGlobalMap;
        // search near key frames to visualize
        mtx.lock();
        kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);
        kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->back(), globalMapVisualizationSearchRadius, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);
        mtx.unlock();

        for (int i = 0; i < (int)pointSearchIndGlobalMap.size(); ++i)
            globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);
        // downsample near selected key frames
        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses; // for global map visualization
        downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity); // for global map visualization
        downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
        downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
        for(auto& pt : globalMapKeyPosesDS->points)
        {
            kdtreeGlobalMap->nearestKSearch(pt, 1, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap);
            pt.intensity = cloudKeyPoses3D->points[pointSearchIndGlobalMap[0]].intensity;
        }

        // extract visualized and downsampled key frames
        for (int i = 0; i < (int)globalMapKeyPosesDS->size(); ++i){
            if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) > globalMapVisualizationSearchRadius)
                continue;
            int thisKeyInd = (int)globalMapKeyPosesDS->points[i].intensity;
            *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],  &cloudKeyPoses6D->points[thisKeyInd]);
            *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd],    &cloudKeyPoses6D->points[thisKeyInd]);
        }
        // downsample visualized points
        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames; // for global map visualization
        downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize, globalMapVisualizationLeafSize); // for global map visualization
        downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
        downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
        publishCloud(pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, odometryFrame);
    }











/**
 * With scan-to-map matching covered, we can now look at the loop-closure code.
 * Its entry point is in main(), where a dedicated loop-closure thread is spawned.
 */
    void loopClosureThread()
    {
        // if loop closure is not required (configurable in the params file), exit this thread immediately
        if (loopClosureEnableFlag == false)
            return;
        // set the loop-closure rate; loopClosureFrequency defaults to 1 Hz, there is no need to run it more often
        ros::Rate rate(loopClosureFrequency);
        /**
         * Then enter an endless while loop.
         * Each iteration must sleep for a while, otherwise this thread's CPU usage becomes very high.
         * Loop closure is performed via performLoopClosure() and visualized via visualizeLoopClosure().
        */
        while (ros::ok())
        {
            rate.sleep();
            performLoopClosure();
            visualizeLoopClosure();
        }
    }
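    // A minimal sketch (assuming the standard LIO-SAM main(), which is not shown in this post) of how this
    // thread is launched alongside the global-map visualization thread:
    //   mapOptimization MO;
    //   std::thread loopthread(&mapOptimization::loopClosureThread, &MO);
    //   std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);
    //   ros::spin();
    //   loopthread.join();
    //   visualizeMapThread.join();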
    /**
     * Callback for loop closures from an external detector; not used here
     */
    void loopInfoHandler(const std_msgs::Float64MultiArray::ConstPtr& loopMsg)
    {
        std::lock_guard<std::mutex> lock(mtxLoopInfo);
        if (loopMsg->data.size() != 2)
            return;

        loopInfoVec.push_back(*loopMsg);

        while (loopInfoVec.size() > 5)
            loopInfoVec.pop_front();
    }
    /**
     * Perform one round of loop-closure detection and ICP registration (called from the loop-closure thread)
     */
    void performLoopClosure()
    {
        // without any key frames loop closure is impossible, so return immediately
        if (cloudKeyPoses3D->points.empty() == true)
            return;

        mtx.lock();
        // copy the clouds that store the key-frame poses, to avoid racing with the main thread
        // cloudKeyPoses3D holds the key-frame positions, cloudKeyPoses6D the full key-frame poses
        *copy_cloudKeyPoses3D = *cloudKeyPoses3D;
        *copy_cloudKeyPoses6D = *cloudKeyPoses6D;
        mtx.unlock();

        // find keys
        int loopKeyCur;
        int loopKeyPre;
        // first check the loop information reported by an external detector (there is none in practice)
        if (detectLoopClosureExternal(&loopKeyCur, &loopKeyPre) == false)
            // then detect loops based on odometry distance; if that fails too, simply return
            if (detectLoopClosureDistance(&loopKeyCur, &loopKeyPre) == false)
                return;

        // a loop closure has been detected!
        // now compute the pose transform between the two frames involved
        // extract cloud
        pcl::PointCloud<PointType>::Ptr cureKeyframeCloud(new pcl::PointCloud<PointType>());  // cloud of the current key frame
        pcl::PointCloud<PointType>::Ptr prevKeyframeCloud(new pcl::PointCloud<PointType>());  // cloud around the historical loop frame (a local map)
        {
            // the current key frame uses only its own cloud (searchNum = 0)
            loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 0);
            // the loop frame gathers the clouds of its neighbours, turning this into a frame-to-local-map matching problem
            loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum);
            // too few points, return
            if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
                return;
            // publish the local map so it can be visualized in rviz
            if (pubHistoryKeyFrames.getNumSubscribers() != 0)
                publishCloud(pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, odometryFrame);
        }

        // We now have the current key frame's cloud and the historical loop frame's local map, both in the map frame,
        // so the relative pose between them can be solved with ICP.
        // ** register the current key-frame cloud against the historical loop-frame cloud with ICP **
        // ICP Settings
        static pcl::IterativeClosestPoint<PointType, PointType> icp;
        // use plain ICP for this frame-to-local-map registration
        icp.setMaxCorrespondenceDistance(historyKeyframeSearchRadius*2);   // maximum correspondence distance
        icp.setMaximumIterations(100);   // maximum number of iterations
        icp.setTransformationEpsilon(1e-6);   // convergence threshold on the transform change per iteration
        icp.setEuclideanFitnessEpsilon(1e-6);  // convergence threshold on the mean squared residual
        icp.setRANSACIterations(0);

        // Align clouds
        // set the two clouds
        icp.setInputSource(cureKeyframeCloud);  // source cloud
        icp.setInputTarget(prevKeyframeCloud);  // target cloud

        // run the registration
        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
        icp.align(*unused_result);
        // check whether ICP converged and whether the fitness score meets the threshold
        if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)  // historyKeyframeFitnessScore defaults to 0.3
            return;

        // publish corrected cloud
        // i.e. the current cloud after applying the ICP correction, for visualization
        if (pubIcpKeyFrames.getNumSubscribers() != 0)
        {
            pcl::PointCloud<PointType>::Ptr closed_cloud(new pcl::PointCloud<PointType>());
            pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
            publishCloud(pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, odometryFrame);
        }

        // Get pose transformation
        float x, y, z, roll, pitch, yaw;
        Eigen::Affine3f correctionLidarFrame;
        // get the transform between the two clouds from ICP
        correctionLidarFrame = icp.getFinalTransformation();    // transform between the two clouds
        // transform from world origin to wrong pose
        Eigen::Affine3f tWrong = pclPointToAffine3f(copy_cloudKeyPoses6D->points[loopKeyCur]);   // the current frame's (uncorrected) pose
        // transform from world origin to corrected pose
        Eigen::Affine3f tCorrect = correctionLidarFrame * tWrong;// apply the ICP correction to obtain a more accurate pose for the current frame; pre-multiplying -> successive rotation about a fixed frame
        // convert the corrected pose of the current frame into translation and Euler angles
        pcl::getTranslationAndEulerAngles (tCorrect, x, y, z, roll, pitch, yaw);
        
        // convert the corrected pose into gtsam form; poseFrom and poseTo form a between-frame constraint, where poseTo is the historical loop frame's pose
        gtsam::Pose3 poseFrom = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z));
        gtsam::Pose3 poseTo = pclPointTogtsamPose3(copy_cloudKeyPoses6D->points[loopKeyPre]);

        // use the ICP fitness score as the noise of this constraint
        gtsam::Vector Vector6(6);
        float noiseScore = icp.getFitnessScore();
        Vector6 << noiseScore, noiseScore, noiseScore, noiseScore, noiseScore, noiseScore;
        noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);

        // Add pose constraint
        // push the two frame indices, their relative pose, and the noise into the loop-constraint queues
        mtx.lock();
        loopIndexQueue.push_back(make_pair(loopKeyCur, loopKeyPre));
        loopPoseQueue.push_back(poseFrom.between(poseTo));
        loopNoiseQueue.push_back(constraintNoise);
        mtx.unlock();

        // add loop constraint
        // record this constraint pair so it is not added again
        loopIndexContainer[loopKeyCur] = loopKeyPre;
    }
    /**
     * Detect loop closures based on odometry distance
     */
    bool detectLoopClosureDistance(int *latestID, int *closestID)
    {
        // check whether the latest key frame forms a loop with any earlier frame
        // index of the latest key frame
        int loopKeyCur = copy_cloudKeyPoses3D->size() - 1;
        int loopKeyPre = -1;

        // check loop constraint added before
        // if the latest frame is already part of a loop constraint, skip it;
        // since the current frame has only just been created, this rarely triggers
        auto it = loopIndexContainer.find(loopKeyCur);
        if (it != loopIndexContainer.end())
            return false;

        // find the closest history key frame
        std::vector<int> pointSearchIndLoop;
        std::vector<float> pointSearchSqDisLoop;
        // fill the kd-tree with the cloud that holds only the key-frame positions
        kdtreeHistoryKeyPoses->setInputCloud(copy_cloudKeyPoses3D);
        // starting from the latest key frame's position, search for other key frames within a given radius
        kdtreeHistoryKeyPoses->radiusSearch(copy_cloudKeyPoses3D->back(), historyKeyframeSearchRadius, 
                                            pointSearchIndLoop, pointSearchSqDisLoop, 0);
        // historyKeyframeSearchRadius is the search radius set in the params file
        // iterate over the candidate key frames found
        for (int i = 0; i < (int)pointSearchIndLoop.size(); ++i)
        {
            int id = pointSearchIndLoop[i];
            // the historical frame must be at least historyKeyframeSearchTimeDiff (30 s by default) older than the current frame to count as a valid loop
            if (abs(copy_cloudKeyPoses6D->points[id].time - timeLaserInfoCur) > historyKeyframeSearchTimeDiff)  // historyKeyframeSearchTimeDiff = 30
            {
                loopKeyPre = id;
                break;
            }
            // if the time requirement is met, the historical loop frame has been found,
            // so its id is recorded and the loop breaks
        }
        // if no loop was found, or the loop points back to the current frame itself, this loop search has failed
        if (loopKeyPre == -1 || loopKeyCur == loopKeyPre)
            return false;

        // at this point both the current key frame and the historical loop frame have been found;
        // write back their ids
        *latestID = loopKeyCur;  // current frame
        *closestID = loopKeyPre;  // loop frame

        return true;
    }

    bool detectLoopClosureExternal(int *latestID, int *closestID)
    {
        // this function is not used yet, please ignore it
        int loopKeyCur = -1;
        int loopKeyPre = -1;

        std::lock_guard<std::mutex> lock(mtxLoopInfo);
        if (loopInfoVec.empty())
            return false;

        double loopTimeCur = loopInfoVec.front().data[0];
        double loopTimePre = loopInfoVec.front().data[1];
        loopInfoVec.pop_front();

        if (abs(loopTimeCur - loopTimePre) < historyKeyframeSearchTimeDiff)
            return false;

        int cloudSize = copy_cloudKeyPoses6D->size();
        if (cloudSize < 2)
            return false;

        // latest key
        loopKeyCur = cloudSize - 1;
        for (int i = cloudSize - 1; i >= 0; --i)
        {
            if (copy_cloudKeyPoses6D->points[i].time >= loopTimeCur)
                loopKeyCur = round(copy_cloudKeyPoses6D->points[i].intensity);
            else
                break;
        }

        // previous key
        loopKeyPre = 0;
        for (int i = 0; i < cloudSize; ++i)
        {
            if (copy_cloudKeyPoses6D->points[i].time <= loopTimePre)
                loopKeyPre = round(copy_cloudKeyPoses6D->points[i].intensity);
            else
                break;
        }

        if (loopKeyCur == loopKeyPre)
            return false;

        auto it = loopIndexContainer.find(loopKeyCur);
        if (it != loopIndexContainer.end())
            return false;

        *latestID = loopKeyCur;
        *closestID = loopKeyPre;

        return true;
    }

    void loopFindNearKeyframes(pcl::PointCloud<PointType>::Ptr& nearKeyframes, const int& key, const int& searchNum)
    {
        // extract near keyframes
        nearKeyframes->clear();
        int cloudSize = copy_cloudKeyPoses6D->size();
        // searchNum is the search range: key frames with indices in [key - searchNum, key + searchNum] are collected
        for (int i = -searchNum; i <= searchNum; ++i)
        {
            // index of the neighbouring key frame
            int keyNear = key + i;
            // skip it if the index is out of range
            if (keyNear < 0 || keyNear >= cloudSize )
                continue;
            // transform the corresponding corner and surface clouds into the world frame and accumulate them
            *nearKeyframes += *transformPointCloud(cornerCloudKeyFrames[keyNear], &copy_cloudKeyPoses6D->points[keyNear]);
            *nearKeyframes += *transformPointCloud(surfCloudKeyFrames[keyNear],   &copy_cloudKeyPoses6D->points[keyNear]);
        }
        // no valid points collected
        if (nearKeyframes->empty())
            return;
        // downsample near keyframes
        pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());
        downSizeFilterICP.setInputCloud(nearKeyframes);
        downSizeFilterICP.filter(*cloud_temp);
        *nearKeyframes = *cloud_temp;
    }

    void visualizeLoopClosure()
    {
        if (loopIndexContainer.empty())
            return;
        
        visualization_msgs::MarkerArray markerArray;
        // loop nodes
        visualization_msgs::Marker markerNode;
        markerNode.header.frame_id = odometryFrame;
        markerNode.header.stamp = timeLaserInfoStamp;
        markerNode.action = visualization_msgs::Marker::ADD;
        markerNode.type = visualization_msgs::Marker::SPHERE_LIST;
        markerNode.ns = "loop_nodes";
        markerNode.id = 0;
        markerNode.pose.orientation.w = 1;
        markerNode.scale.x = 0.3; markerNode.scale.y = 0.3; markerNode.scale.z = 0.3; 
        markerNode.color.r = 0; markerNode.color.g = 0.8; markerNode.color.b = 1;
        markerNode.color.a = 1;
        // loop edges
        visualization_msgs::Marker markerEdge;
        markerEdge.header.frame_id = odometryFrame;
        markerEdge.header.stamp = timeLaserInfoStamp;
        markerEdge.action = visualization_msgs::Marker::ADD;
        markerEdge.type = visualization_msgs::Marker::LINE_LIST;
        markerEdge.ns = "loop_edges";
        markerEdge.id = 1;
        markerEdge.pose.orientation.w = 1;
        markerEdge.scale.x = 0.1;
        markerEdge.color.r = 0.9; markerEdge.color.g = 0.9; markerEdge.color.b = 0;
        markerEdge.color.a = 1;

        for (auto it = loopIndexContainer.begin(); it != loopIndexContainer.end(); ++it)
        {
            int key_cur = it->first;
            int key_pre = it->second;
            geometry_msgs::Point p;
            p.x = copy_cloudKeyPoses6D->points[key_cur].x;
            p.y = copy_cloudKeyPoses6D->points[key_cur].y;
            p.z = copy_cloudKeyPoses6D->points[key_cur].z;
            markerNode.points.push_back(p);
            markerEdge.points.push_back(p);
            p.x = copy_cloudKeyPoses6D->points[key_pre].x;
            p.y = copy_cloudKeyPoses6D->points[key_pre].y;
            p.z = copy_cloudKeyPoses6D->points[key_pre].z;
            markerNode.points.push_back(p);
            markerEdge.points.push_back(p);
        }

        markerArray.markers.push_back(markerNode);
        markerArray.markers.push_back(markerEdge);
        pubLoopConstraintEdge.publish(markerArray);
    }







    

    /**
     * 1. For the first frame, initialize the current pose (rotation part) with the RPY from the raw imu data.
     * 2. For later frames, compute the incremental pose between the two frames from the imu odometry and apply it
     *    to the previous laser pose to obtain the current laser pose.
     */
    void updateInitialGuess()
    {
        // save current transformation before any processing
        // pose of the previous frame; note: this is the lidar pose, simply called "pose" below
        incrementalOdometryAffineFront = trans2Affine3f(transformTobeMapped);

        // initial attitude of the previous frame (from raw imu data), used to estimate the first frame's pose (rotation part)
        static Eigen::Affine3f lastImuTransformation;
        // initialization
        // if the key-frame set is empty (first frame), do the initialization below
        if (cloudKeyPoses3D->points.empty())
        {
            // initialize the rotation part of the current pose with the RPY from the laser frame info (taken from raw imu data)
            transformTobeMapped[0] = cloudInfo.imuRollInit;
            transformTobeMapped[1] = cloudInfo.imuPitchInit;
            transformTobeMapped[2] = cloudInfo.imuYawInit;

            if (!useImuHeadingInitialization)
                transformTobeMapped[2] = 0;

            // save the imu rotation for the next call
            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
            return;
        }

        // use imu pre-integration estimation for pose guess
        // compute the relative pose between the imu-odometry poses of the current and previous frames,
        // then apply it to the previous frame's pose to get the current frame's pose, stored in transformTobeMapped
        static bool lastImuPreTransAvailable = false;
        static Eigen::Affine3f lastImuPreTransformation;
        if (cloudInfo.odomAvailable == true)
        {
            // initialGuessX etc. are filled in odomDeskewInfo() in imageProjection.cpp
            Eigen::Affine3f transBack = pcl::getTransformation(cloudInfo.initialGuessX,    cloudInfo.initialGuessY,     cloudInfo.initialGuessZ, 
                                                               cloudInfo.initialGuessRoll, cloudInfo.initialGuessPitch, cloudInfo.initialGuessYaw);
            if (lastImuPreTransAvailable == false)
            {
                // store it as the previous frame's imu-odometry pose
                lastImuPreTransformation = transBack;
                lastImuPreTransAvailable = true;
            } else {
                // pose change of the current frame relative to the previous frame, computed from the imu odometry
                Eigen::Affine3f transIncre = lastImuPreTransformation.inverse() * transBack;
                // pose of the previous frame
                Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
                // pose of the current frame: the final transform used to fill transformTobeMapped
                Eigen::Affine3f transFinal = transTobe * transIncre;
                // decompose the affine matrix transFinal into x, y, z, roll, pitch, yaw [some precision is lost here]
                pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], 
                                                              transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
                // store it as the previous frame's imu-odometry pose
                lastImuPreTransformation = transBack;

                // save the imu rotation for the next call
                lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
                return;
            }
        }

        // use imu incremental estimation for pose guess (only rotation)
        // reached only when the branch above did not return (e.g. when the imu odometry is not available yet);
        // the raw imu rotation increment is used to update only the rotation part of the current pose
        if (cloudInfo.imuAvailable == true)
        {
            Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);
            // rotation change of the current frame relative to the previous frame
            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;

            Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
            // pose of the current frame: the final transform
            Eigen::Affine3f transFinal = transTobe * transIncre;
            // decompose the affine matrix transFinal into x, y, z, roll, pitch, yaw [some precision is lost here]
            pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], 
                                                          transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
            // save the imu rotation for the next call
            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
            return;
        }
    }
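    // To summarize the odomAvailable branch above as a formula (notation mine, not from the source):
    //   T_map_cur = T_map_prev * (T_odom_prev^{-1} * T_odom_cur)
    // i.e. the increment measured by the imu odometry between two laser frames is chained onto the
    // previous scan-to-map pose to produce the initial guess for the current frame.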

    // this function is not used
    void extractForLoopClosure()
    {
        pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());
        int numPoses = cloudKeyPoses3D->size();
        for (int i = numPoses-1; i >= 0; --i)
        {
            if ((int)cloudToExtract->size() <= surroundingKeyframeSize)
                cloudToExtract->push_back(cloudKeyPoses3D->points[i]);
            else
                break;
        }

        extractCloud(cloudToExtract);
    }

    // extract the surrounding key frames
    void extractNearby()
    {
        pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>()); // nearby key-frame poses
        pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>()); // downsampled nearby key-frame poses, plus key frames that are close in time
        // temporaries used by the kd-tree search
        std::vector<int> pointSearchInd;
        std::vector<float> pointSearchSqDis;

        /**
         *  extract all the nearby key poses and downsample them
         */
        // kd-tree input: the positions of all historical key frames
        kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D); // create kd-tree
        // around the most recent key frame, radiusSearch finds the key frames that are spatially nearby
        // surroundingKeyframeSearchRadius is the search radius (50 m by default)
        kdtreeSurroundingKeyPoses->radiusSearch(cloudKeyPoses3D->back(), (double)surroundingKeyframeSearchRadius, 
                                                                                 pointSearchInd, pointSearchSqDis);
        // copy the radius-search results (spatially nearby key frames) into surroundingKeyPoses
        for (int i = 0; i < (int)pointSearchInd.size(); ++i)
        {
            int id = pointSearchInd[i];
            surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);
        }
        // downsample the query results to avoid keeping too many key frames
        downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);
        downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);

        /**
         * this for loop recovers, for each downsampled pose, the index of the original key frame (stored in intensity)
         */
        for(auto& pt : surroundingKeyPosesDS->points)
        {
            kdtreeSurroundingKeyPoses->nearestKSearch(pt, 1, pointSearchInd, pointSearchSqDis);
            pt.intensity = cloudKeyPoses3D->points[pointSearchInd[0]].intensity;
        }

        // also extract some latest key frames in case the robot rotates in one position
        // the step above collected spatially nearby key frames; now also collect key frames that are close in time
        int numPoses = cloudKeyPoses3D->size();
        for (int i = numPoses-1; i >= 0; --i)
        {
            // also keep key frames from the last ten seconds
            if (timeLaserInfoCur - cloudKeyPoses6D->points[i].time < 10.0)
                surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);
            else
                break;
        }

        // build the local map from the selected key frames
        extractCloud(surroundingKeyPosesDS);
    }
    // build the local map from the selected key frames; it serves as the local point-cloud map for scan-to-map matching
    void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)
    {
        // fuse the map
        laserCloudCornerFromMap->clear();  // local corner map
        laserCloudSurfFromMap->clear();    // local surface map
        // iterate over the selected key frames
        for (int i = 0; i < (int)cloudToExtract->size(); ++i)
        {     
            // pointDistance(a,b) computes the distance between two points; discard key frames that are farther from the latest key frame than the search radius
            if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)
                continue;

            // index of the selected key frame
            int thisKeyInd = (int)cloudToExtract->points[i].intensity;
            // if this key frame's transformed clouds are already stored in the map container, take them directly from the container and add them to the local map
            if (laserCloudMapContainer.find(thisKeyInd) != laserCloudMapContainer.end()) 
            {
                // transformed cloud available
                // take it straight from the container and add it to the local map
                *laserCloudCornerFromMap += laserCloudMapContainer[thisKeyInd].first;
                *laserCloudSurfFromMap   += laserCloudMapContainer[thisKeyInd].second;
            } else {
                // transformed cloud not available
                // if this cloud has not been cached yet, transform the key frame's clouds from its own pose into the world frame
                pcl::PointCloud<PointType> laserCloudCornerTemp = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],  
                                                                                       &cloudKeyPoses6D->points[thisKeyInd]);
                pcl::PointCloud<PointType> laserCloudSurfTemp = *transformPointCloud(surfCloudKeyFrames[thisKeyInd], 
                                                                                       &cloudKeyPoses6D->points[thisKeyInd]);
                // add them to the local map
                *laserCloudCornerFromMap += laserCloudCornerTemp;
                *laserCloudSurfFromMap   += laserCloudSurfTemp;
                // cache them in the container: first is the corner cloud, second the surface cloud
                laserCloudMapContainer[thisKeyInd] = make_pair(laserCloudCornerTemp, laserCloudSurfTemp);
            }
            
        }
        /**
         * With the selected key frames' clouds now in the world frame, downsample the corner and surface local maps
         * to keep the point density under control
         */
        // Downsample the surrounding corner key frames (or map)
        downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
        downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
        laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();
        // Downsample the surrounding surf key frames (or map)
        downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
        downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
        laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();

        // clear map cache if too large
        // if the cache grows too large, clear it to keep memory usage bounded
        if (laserCloudMapContainer.size() > 1000)
            laserCloudMapContainer.clear();
    }
    /**
     * Extract surrounding key frames via extractNearby(), which in turn builds the local map via extractCloud(surroundingKeyPosesDS)
     */
    void extractSurroundingKeyFrames()
    {
        if (cloudKeyPoses3D->points.empty() == true)
            return; 
        
        // if (loopClosureEnableFlag == true)
        // {
        //     extractForLoopClosure();    
        // } else {
        //     extractNearby();
        // }
        // extract key frames
        extractNearby();
    }

    void downsampleCurrentScan()
    {
        // downsample the corner and surface clouds received in the laserCloudInfoHandler callback
        // Downsample cloud from current scan
        laserCloudCornerLastDS->clear();
        downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
        downSizeFilterCorner.filter(*laserCloudCornerLastDS);
        laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();

        laserCloudSurfLastDS->clear();
        downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
        downSizeFilterSurf.filter(*laserCloudSurfLastDS);
        laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
    }

    void updatePointAssociateToMap()
    {
        // affine transform of the current frame; used by pointAssociateToMap() to project points into the map frame
        transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
    }

    void cornerOptimization()
    {
        // convert the current frame's prior pose (from the initial guess), given as Euler angles, into an Eigen transform
        updatePointAssociateToMap();

        #pragma omp parallel for num_threads(numberOfCores)
        // iterate over the current frame's corner points
        // laserCloudCornerLastDSNum is the number of corner points after downsampling in downsampleCurrentScan()
        for (int i = 0; i < laserCloudCornerLastDSNum; i++)
        {
            PointType pointOri, pointSel, coeff;

            // buffers used by the kd-tree nearest-neighbour search
            std::vector<int> pointSearchInd;  // indices into points[]
            std::vector<float> pointSearchSqDis; // squared distances

            // temporary holding the current point
            pointOri = laserCloudCornerLastDS->points[i];
            // transform the point from the current frame into the map frame using the initial pose
            pointAssociateToMap(&pointOri, &pointSel);
            // find the 5 points in the corner map closest to this point
            kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));  // covariance matrix
            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));  // eigenvalues of the covariance matrix
            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));  // eigenvectors of the covariance matrix

            // check the farthest of the 5 neighbours: if it is too far away this constraint is unreliable and is skipped;
            // the squared distance must be less than 1 m
            if (pointSearchSqDis[4] < 1.0) {

                // compute the mean of the 5 neighbours
                float cx = 0, cy = 0, cz = 0;
                for (int j = 0; j < 5; j++) {
                    cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
                    cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
                    cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
                }
                cx /= 5; cy /= 5;  cz /= 5;

                // compute the covariance matrix
                //     [xx   xy   xz]
                //P =  [yx   yy   yz]
                //     [zx   zy   zz]
                // covariance entries:
                // xx = 1/n * [(x1-mean_x)*(x1-mean_x) + (x2-mean_x)*(x2-mean_x) + ... + (xn-mean_x)*(xn-mean_x)]
                // xy = 1/n * [(x1-mean_x)*(y1-mean_y) + (x2-mean_x)*(y2-mean_y) + ... + (xn-mean_x)*(yn-mean_y)]
                // the other entries follow the same pattern,
                // and since xy = yx, xz = zx, yz = zy the matrix is symmetric:
                //     [a11   a12   a13]
                //A1 = [a12   a22   a23]
                //     [a13   a23   a33]
                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
                for (int j = 0; j < 5; j++) {
                    float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
                    float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
                    float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;

                    a11 += ax * ax; a12 += ax * ay; a13 += ax * az;
                    a22 += ay * ay; a23 += ay * az;
                    a33 += az * az;
                }
                a11 /= 5; a12 /= 5; a13 /= 5; a22 /= 5; a23 /= 5; a33 /= 5;

                matA1.at<float>(0, 0) = a11; matA1.at<float>(0, 1) = a12; matA1.at<float>(0, 2) = a13;
                matA1.at<float>(1, 0) = a12; matA1.at<float>(1, 1) = a22; matA1.at<float>(1, 2) = a23;
                matA1.at<float>(2, 0) = a13; matA1.at<float>(2, 1) = a23; matA1.at<float>(2, 2) = a33;
                //
                //  cv::eigen(InputArray src, OutputArray eigenvalues, OutputArray eigenvectors)
                //  returns the eigenvalues (sorted from largest to smallest) and the eigenvectors
                //
                // the eigen-decomposition is used to check whether these 5 points form a line
                cv::eigen(matA1, matD1, matV1);

                // require the largest eigenvalue to be more than 3 times the second largest --> line feature
                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {
                    
                    
                    //
                    // point-to-line distance:
                    // point O(x0, y0, z0),
                    // line defined by two points: A(x1, y1, z1) and B(x2, y2, z2)
                    //

                    // the current point, transformed into the map frame
                    float x0 = pointSel.x;
                    float y0 = pointSel.y;
                    float z0 = pointSel.z;

                    // construct the line by extending from the mean of the neighbours in both directions (adding/subtracting a multiple of the eigenvector)
                    // **the eigenvector of the largest eigenvalue is the direction vector of the line**
                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);
                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);
                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);

                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);
                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);
                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);

                    //
                    // we now have the point O and the two constructed points A and B;
                    // next, compute the distance and direction from O to the line AB, i.e. the residual and its gradient direction
                    //
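                    // As a formula (my notation, not from the source): with OA = A - O and OB = B - O,
                    //   a012 = |OA x OB|     (twice the area of triangle OAB)
                    //   l12  = |A - B|       (length of the base AB)
                    //   ld2  = a012 / l12    (height of the triangle = point-to-line distance)
                    // and (la, lb, lc) below is the direction along which ld2 changes, used as the residual's gradient direction.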
                    float a012 = sqrt(((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) * ((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) 
                                    + ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) * ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) 
                                    + ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)) * ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)));
                    // this is the length of AB
                    float l12 = sqrt((x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2) + (z1 - z2)*(z1 - z2));

                    float la = ((y1 - y2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) 
                              + (z1 - z2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))) / a012 / l12;

                    float lb = -((x1 - x2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) 
                               - (z1 - z2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;

                    float lc = -((x1 - x2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) 
                               + (y1 - y2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;

                    // the distance, i.e. the residual, obtained from the triangle-area relation
                    float ld2 = a012 / l12;
                    // a simple robust kernel: the larger the residual, the lower the weight
                    float s = 1 - 0.9 * fabs(ld2);

                    // residual: x, y, z give the gradient direction, intensity the magnitude
                    coeff.x = s * la;
                    coeff.y = s * lb;
                    coeff.z = s * lc;
                    coeff.intensity = s * ld2;
                    // if the weight is above 0.1 (i.e. the residual is below 1 m), accept this as a valid constraint
                    if (s > 0.1) {
                        laserCloudOriCornerVec[i] = pointOri;
                        coeffSelCornerVec[i] = coeff;
                        laserCloudOriCornerFlag[i] = true;
                    }
                }
            }
        }
    }
    // surface-point optimization
    void surfOptimization()
    {
        // analogous to the corner optimization:
        // convert the current frame's prior pose (from the initial guess), given as Euler angles, into an Eigen transform
        updatePointAssociateToMap();

        #pragma omp parallel for num_threads(numberOfCores)
        // iterate over every surface point of the current frame
        for (int i = 0; i < laserCloudSurfLastDSNum; i++)
        {
            PointType pointOri, pointSel, coeff;
            std::vector<int> pointSearchInd;
            std::vector<float> pointSearchSqDis;

            // take the point
            pointOri = laserCloudSurfLastDS->points[i];
            // transform the point from the current frame into the map frame using the initial pose
            pointAssociateToMap(&pointOri, &pointSel); 
            // find the 5 points in the surface map closest to this point
            kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

            // unlike the corner case, the plane normal is not obtained by eigen-decomposition here,
            // but by solving an over-determined system Ax = b; the three variables below correspond to A, b and x
            Eigen::Matrix<float, 5, 3> matA0;
            Eigen::Matrix<float, 5, 1> matB0;
            Eigen::Vector3f matX0;
            
            // plane equation: Ax + By + Cz + 1 = 0
            matA0.setZero();
            matB0.fill(-1);
            matX0.setZero();
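            // A sketch of the math (my wording): stacking the 5 neighbours as the rows of matA0 and setting
            // matB0 = -1 turns the plane equation a*x + b*y + c*z + 1 = 0 into the least-squares problem
            //   matA0 * [a b c]^T = matB0,
            // whose solution [a b c] (matX0) is an (unnormalized) normal vector of the fitted plane.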
            // the farthest neighbour must be within 1 m
            if (pointSearchSqDis[4] < 1.0) {
                // fill matA0 with the neighbours' coordinates
                for (int j = 0; j < 5; j++) {
                    matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
                    matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
                    matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
                }
                //
                // solve the over-determined system Ax = b via QR decomposition
                //
                matX0 = matA0.colPivHouseholderQr().solve(matB0);
                
                // the solution x is the normal vector of the plane
                float pa = matX0(0, 0);
                float pb = matX0(1, 0);
                float pc = matX0(2, 0);
                float pd = 1;

                // normalize ---> unit normal vector
                float ps = sqrt(pa * pa + pb * pb + pc * pc);
                pa /= ps; pb /= ps; pc /= ps; pd /= ps;


                bool planeValid = true;
                // plug each neighbour into the plane equation; if any point is farther than 0.2 m from the plane, the points are not planar enough and the plane is rejected
                for (int j = 0; j < 5; j++) {
                    if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
                             pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
                             pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
                        planeValid = false;
                        break;
                    }
                }
                // 如果 平面是有效的, 
                if (planeValid) {
                    // 计算残差,点到平面的距离 --> pd2
                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;
                    // convert the residual into a weight:
                    // the farther the point is from the sensor, the larger the denominator, hence the larger the weight (a distant point is allowed a larger residual)
                    float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointOri.x * pointOri.x
                            + pointOri.y * pointOri.y + pointOri.z * pointOri.z));
                    // 残差 x y z 代表梯度方向 intensity 为大小
                    coeff.x = s * pa;
                    coeff.y = s * pb;
                    coeff.z = s * pc;
                    coeff.intensity = s * pd2;

                    // 如果权重大于阈值,就认为是一个有效的约束
                    if (s > 0.1) {
                        laserCloudOriSurfVec[i] = pointOri;
                        coeffSelSurfVec[i] = coeff;
                        laserCloudOriSurfFlag[i] = true;
                    }
                }
            }
        }
    }
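// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the plane fit above,
// rewritten as a compact Eigen helper (names are ours). Five neighbours are stacked
// into A, the system A·x = -1 is solved for the plane Ax + By + Cz + 1 = 0, the
// normal is normalised, and the fit is rejected if any neighbour lies more than
// 0.2 m from the plane.
#include <Eigen/Dense>
#include <cmath>
#include <vector>

// returns false if the five points do not form a sufficiently flat plane
inline bool fitPlane(const std::vector<Eigen::Vector3f>& pts,   // expects exactly 5 points
                     Eigen::Vector3f& unitNormal, float& d)
{
    Eigen::Matrix<float, 5, 3> A;
    Eigen::Matrix<float, 5, 1> b = Eigen::Matrix<float, 5, 1>::Constant(-1.0f);
    for (int j = 0; j < 5; ++j)
        A.row(j) = pts[j].transpose();

    // over-determined system solved by column-pivoting QR, as in the code above
    const Eigen::Vector3f x = A.colPivHouseholderQr().solve(b);

    const float norm = x.norm();
    unitNormal = x / norm;   // (pa, pb, pc) after normalisation
    d = 1.0f / norm;         // pd after normalisation

    for (int j = 0; j < 5; ++j)
        if (std::fabs(unitNormal.dot(pts[j]) + d) > 0.2f)
            return false;    // plane too "curved", do not use it
    return true;
}
// ------------------------------------------------------------------------------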

    void combineOptimizationCoeffs()
    {
        // combine corner coeffs
        // 遍历 角点, 只有标记为true的时候才是有效约束
        for (int i = 0; i < laserCloudCornerLastDSNum; ++i){
            // laserCloudOriCornerFlag 在 函数cornerOptimization()中赋值true
            if (laserCloudOriCornerFlag[i] == true){

                // 点和残差及梯度加入到 laserCloudOri 和coeffSel 中
                laserCloudOri->push_back(laserCloudOriCornerVec[i]);  // 点
                coeffSel->push_back(coeffSelCornerVec[i]);   // 残差及梯度
            }
        }
        // combine surf coeffs   同理处理面点,只有标记为true的时候才是有效约束
        for (int i = 0; i < laserCloudSurfLastDSNum; ++i){
            //  laserCloudOriSurfFlag 在 函数surfOptimization()中赋值true
            if (laserCloudOriSurfFlag[i] == true){

                // 点和残差及梯度加入到 laserCloudOri 和coeffSel 中
                laserCloudOri->push_back(laserCloudOriSurfVec[i]);
                coeffSel->push_back(coeffSelSurfVec[i]);
            }
        }
        // reset flag for next iteration
        // 
        // 标志位清零
        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
    }
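// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the two feature loops above
// run under "#pragma omp parallel for", so they never push_back into a shared cloud.
// Instead each thread writes only to slot i of a pre-sized per-point buffer plus a
// validity flag, and combineOptimizationCoeffs() compacts the valid entries serially.
// A minimal version of that pattern (names are ours):
#include <omp.h>
#include <vector>

void parallelSelect(int n, std::vector<float>& out)
{
    std::vector<float> buf(n);
    std::vector<char>  valid(n, 0);      // char (not bool) so per-index writes stay independent

    #pragma omp parallel for num_threads(4)
    for (int i = 0; i < n; ++i) {
        float residual = 0.01f * i;      // stand-in for the per-point residual computation
        if (residual < 1.0f) {           // keep only "good" constraints
            buf[i]   = residual;         // each thread touches only index i: no data race
            valid[i] = 1;
        }
    }

    out.clear();                         // serial compaction, analogous to combineOptimizationCoeffs()
    for (int i = 0; i < n; ++i)
        if (valid[i]) out.push_back(buf[i]);
}
// ------------------------------------------------------------------------------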

    // 当数据准备好之后,即可开始下面的优化
    bool LMOptimization(int iterCount)
    {
        // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation
        // 相机到雷达坐标系的变换         雷达到相机坐标系的变换
        // lidar <- camera      ---     camera <- lidar
        // x = z                ---     x = y
        // y = x                ---     y = z
        // z = y                ---     z = x
        // roll = yaw           ---     roll = pitch
        // pitch = roll         ---     pitch = yaw
        // yaw = pitch          ---     yaw = roll

        // lidar -> camera  0 x, 1 y, 2 z 正好对应上面的变换
        float srx = sin(transformTobeMapped[1]); // y
        float crx = cos(transformTobeMapped[1]); // y
        float sry = sin(transformTobeMapped[2]); // z
        float cry = cos(transformTobeMapped[2]); // z
        float srz = sin(transformTobeMapped[0]); // x
        float crz = cos(transformTobeMapped[0]); // x
        
        // check how many constraints were selected; if fewer than 50, skip this iteration
        int laserCloudSelNum = laserCloudOri->size();
        if (laserCloudSelNum < 50) {
            return false;
        }

        // nonlinear least squares, solved with a hand-written Gauss-Newton iteration
        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));  // 雅克比矩阵J
        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0)); // 雅克比矩阵转置 J^T
        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));               // 矩阵 J^TJ
        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));               // -JTe
        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
        // 临时变量
        PointType pointOri, coeff;
        // 迭代每一个约束点进行优化求解
        for (int i = 0; i < laserCloudSelNum; i++) {
            // lidar -> camera: first transform the current point into the camera convention
            pointOri.x = laserCloudOri->points[i].y;
            pointOri.y = laserCloudOri->points[i].z;
            pointOri.z = laserCloudOri->points[i].x;
            // lidar -> camera  将当前点到线(面)的 单位向量 转到相机系
            coeff.x = coeffSel->points[i].y;
            coeff.y = coeffSel->points[i].z;
            coeff.z = coeffSel->points[i].x;
            coeff.intensity = coeffSel->points[i].intensity;

            // in camera
            // 旋转矩阵 R  网上有公式,有矩阵形式,可查找参考
            //
            //  X0 = R * Xi + T
            //  R = R_yaw * R_pitch * R_roll
            //    = R_ry * R_rx * R_rz
            //    = |cry  0 sry|   |1  0   0  |   |crz -srz 0|
            //      |0    1  0 | * |0 crx -srx| * |srz crz  0|
            //      |-sry 0 cry|   |0 srx  crx|   | 0   0   1|
            // 
            //    = |crycrz+srxsrysrz   srxsrycrz-crysrz   crxsry|
            //      |    crxsrz             crxcrz           -srx|
            //      |srxcrysrz-srycrz   srysrz+srxcrycrz   crxcry|   
            //
            //  error = (R * point + T) * coeff
            //  求导:
            //** 距离对旋转矩阵R 的求导
            //                |crxsrysrz   crxsrycrz   -srxsry|
            //   derror/drx = |-srxsrz     -srxcrz       -crx |
            //                |crxcrysrz   crxcrycrz   -srxcry|
            // 
            //
            //                |srxcrysrz-srycrz   srxcrycrz+srysrz   crxcry|
            //   derror/dry = |      0                0                0   |
            //                |-srxsrysrz-crycrz  crysrz-srxsrycrz  -crxsry|
            //
            // 
            //                |srxsrycrz-crysrz  -srxsrysrz-crycrz  0|
            //   derror/drz = |      crxcrz           -crxsrz       0|
            //                |srxcrycrz+srysrz   srycrz-srxcrysrz  0|
            //   最后再乘以 coeff
            //   
            //** 距离对平移的偏导【雅克比矩阵】
            //   derror/dtx = coeff.x
            //   derror/dty = coeff.y
            //   derror/dtz = coeff.z
            //
            //
            //
            float arx = (crx*sry*srz*pointOri.x + crx*crz*sry*pointOri.y - srx*sry*pointOri.z) * coeff.x
                      + (-srx*srz*pointOri.x - crz*srx*pointOri.y - crx*pointOri.z) * coeff.y
                      + (crx*cry*srz*pointOri.x + crx*cry*crz*pointOri.y - cry*srx*pointOri.z) * coeff.z;

            float ary = ((cry*srx*srz - crz*sry)*pointOri.x 
                      + (sry*srz + cry*crz*srx)*pointOri.y + crx*cry*pointOri.z) * coeff.x
                      + ((-cry*crz - srx*sry*srz)*pointOri.x 
                      + (cry*srz - crz*srx*sry)*pointOri.y - crx*sry*pointOri.z) * coeff.z;

            float arz = ((crz*srx*sry - cry*srz)*pointOri.x + (-cry*crz-srx*sry*srz)*pointOri.y)*coeff.x
                      + (crx*crz*pointOri.x - crx*srz*pointOri.y) * coeff.y
                      + ((sry*srz + cry*crz*srx)*pointOri.x + (crz*sry-cry*srx*srz)*pointOri.y)*coeff.z;
            
            // camera -> lidar
            // 这里就是把camera转到lidar了, 赋值 雅克比矩阵
            matA.at<float>(i, 0) = arz;
            matA.at<float>(i, 1) = arx;
            matA.at<float>(i, 2) = ary;
            // 雅可比矩阵中距离对平移的偏导
            matA.at<float>(i, 3) = coeff.z;
            matA.at<float>(i, 4) = coeff.x;
            matA.at<float>(i, 5) = coeff.y;
            matB.at<float>(i, 0) = -coeff.intensity;
        }
        
        // 下面 构造 JTJ 以及 -JTe 矩阵
        cv::transpose(matA, matAt);
        matAtA = matAt * matA;
        matAtB = matAt * matB;
        // solve for the increment with OpenCV:  (J^T J) Δx = -J^T e
        // this is the Gauss-Newton normal equation: J is the Jacobian (matA here), e the residual vector
        // (the minus sign was already folded in when matB was filled)
        // the 6x6 system matAtA * matX = matAtB is solved via QR decomposition
        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);

        // degeneracy check
        // on the first iteration only, check whether the approximate Hessian (J^T·J) is degenerate,
        // i.e. (nearly) singular: eigenvalues close to zero mean some directions are unobservable
        if (iterCount == 0) {
            
            
            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));  // 特征值
            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));  // 特征向量
            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));  // 特征向量 copy
            // 对JTJ[matAtA] 进行特征值分解
            cv::eigen(matAtA, matE, matV);
            matV.copyTo(matV2);

            isDegenerate = false;
            float eignThre[6] = {100, 100, 100, 100, 100, 100};
            //
            // 特征值从小到大遍历,如果小于阈值就认为退化
            // 对应的特征向量全部置0
            // 退化标志位 为 true
            for (int i = 5; i >= 0; i--) {
                if (matE.at<float>(0, i) < eignThre[i]) {
                    for (int j = 0; j < 6; j++) {
                        matV2.at<float>(i, j) = 0;
                    }
                    isDegenerate = true;
                } else {
                    break;
                }
            }
            matP = matV.inv() * matV2;
        }

        // 如果发生退化,就对增量进行修改,退化方向不更新
        if (isDegenerate)
        {
            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
            matX.copyTo(matX2);
            matX = matP * matX2;
        }
        // 增量更新
        transformTobeMapped[0] += matX.at<float>(0, 0);
        transformTobeMapped[1] += matX.at<float>(1, 0);
        transformTobeMapped[2] += matX.at<float>(2, 0);
        transformTobeMapped[3] += matX.at<float>(3, 0);
        transformTobeMapped[4] += matX.at<float>(4, 0);
        transformTobeMapped[5] += matX.at<float>(5, 0);

        // 计算更新的旋转和平移大小
        float deltaR = sqrt(
                            pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +
                            pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
                            pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
        float deltaT = sqrt(
                            pow(matX.at<float>(3, 0) * 100, 2) +
                            pow(matX.at<float>(4, 0) * 100, 2) +
                            pow(matX.at<float>(5, 0) * 100, 2));
        
        // if the update is tiny (rotation < 0.05 deg and translation < 0.05 cm), consider the optimization converged
        if (deltaR < 0.05 && deltaT < 0.05) {
            return true; // converged
        }
        // 否则继续优化
        return false; // keep optimizing
    }
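// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the degeneracy handling in
// LMOptimization() follows LOAM. The approximate Hessian J^T J is eigen-decomposed
// once on the first iteration; eigenvalues below a threshold (100 in the code above)
// mark directions the point-to-feature constraints cannot observe, and the
// Gauss-Newton update is projected so those directions are left untouched.
// The same idea expressed with Eigen instead of cv::Mat (names are ours):
#include <Eigen/Dense>

// dx : 6-dof update [rx, ry, rz, tx, ty, tz],  JtJ : 6x6 approximate Hessian
inline Eigen::Matrix<float, 6, 1> projectIfDegenerate(const Eigen::Matrix<float, 6, 6>& JtJ,
                                                      const Eigen::Matrix<float, 6, 1>& dx,
                                                      float eigThreshold = 100.0f)
{
    Eigen::SelfAdjointEigenSolver<Eigen::Matrix<float, 6, 6>> es(JtJ);

    Eigen::Matrix<float, 6, 1> keep = Eigen::Matrix<float, 6, 1>::Ones();
    for (int i = 0; i < 6; ++i)
        if (es.eigenvalues()(i) < eigThreshold)   // small eigenvalue => unobservable direction
            keep(i) = 0.0f;

    // V * diag(keep) * V^T projects onto the well-constrained subspace
    const Eigen::Matrix<float, 6, 6> V = es.eigenvectors();
    return V * keep.asDiagonal() * V.transpose() * dx;
}
// ------------------------------------------------------------------------------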
    /**
     * Find a rotation and translation that best aligns the current frame with the local map
     * (the same idea as ICP, which also estimates a rotation and translation for registration)
     * 1. build the corner and surface residuals and their gradients: cornerOptimization(); surfOptimization();
     * 2. merge them into one set of constraints via combineOptimizationCoeffs();
     * 3. iterate the hand-written Gauss-Newton solver LMOptimization() until convergence;
     * 4. fuse the result with the IMU attitude in transformUpdate()
     */ 
    void scan2MapOptimization()
    {   
        // 如果没有关键帧,那也没办法做当前帧到局部地图的匹配,则直接return
        if (cloudKeyPoses3D->points.empty())
            return;
        // edgeFeatureMinValidNum = 10,  surfFeatureMinValidNum = 100
        // 判断当前帧的角点数和面点数是否足够, 默认 角点要求10 面点要求100
        if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum)
        {
            // the body of this if-block is the scan-to-map registration
            
            // 分别把角点和面点 局部地图构建 kdtree
            kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);  // extractCloud() 降采样之后的 特征点地图
            kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);

            /**
             * 迭代求解,迭代30次
             * 里面是手写的优化器
             * lio-sam是继承了 loam 和 lego-loam 中的 高斯牛顿的 优化方法
             */ 
            for (int iterCount = 0; iterCount < 30; iterCount++)
            {
                laserCloudOri->clear();
                coeffSel->clear();

                // 角点优化
                cornerOptimization();
                // 平面点优化
                surfOptimization();
                // 通过下面这个函数将角点和面点的残差及梯度统一到一起
                // 保存到:laserCloudOri  和 coeffSel  
                combineOptimizationCoeffs();

                // 当数据准备好之后,即可开始下面的优化
                // 执行优化求解
                // 虽然函数名写的是LM优化算法,但是函数内部是高斯牛顿下降优化算法
                //
                if (LMOptimization(iterCount) == true)
                    break;              
            }
            
            // 求解出来优化结果后 把结果和imu进行一次加权融合
            transformUpdate();
        } 
        else { //角点或者面点的数量不够,则终端提示信息
            ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);
        }
    }
    // slerp-fuse the scan-matched roll/pitch with the 9-axis IMU orientation (weight imuRPYWeight), then clamp roll, pitch and z with constant thresholds
    void transformUpdate()
    {
        // 判断 可以获取九轴imu的世界坐标系下的姿态
        if (cloudInfo.imuAvailable == true)
        {
            // 角度没有很大
            if (std::abs(cloudInfo.imuPitchInit) < 1.4)
            {
                //imu数据的权重约束
                double imuWeight = imuRPYWeight;
                tf::Quaternion imuQuaternion;
                tf::Quaternion transformQuaternion;
                double rollMid, pitchMid, yawMid;

                //lidar 匹配获得的roll角转成四元数
                //imu 获得的roll角转成四元数
                //使用四元的球面插值
                //插值的结果作为roll的最终结果
                // slerp roll
                transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
                imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                transformTobeMapped[0] = rollMid;

                // slerp pitch  同理pitch 角度
                transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
                imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                transformTobeMapped[1] = pitchMid;
            }
        }
        // 对roll pitch 和z进行一些约束,主要针对室内2d场景下,已知2d先验可以加上这些约束
        transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
        transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
        transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);
        // 最终的结果也可以转成eigen的结构, 当前帧位姿
        incrementalOdometryAffineBack = trans2Affine3f(transformTobeMapped);
    }
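// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the roll/pitch fusion in
// transformUpdate() is a spherical interpolation between the scan-matched angle and
// the 9-axis IMU angle, weighted by imuRPYWeight. The same one-axis fusion as a
// standalone helper, using the tf API already used above (function name is ours):
#include <tf/transform_datatypes.h>

// weight = 0 keeps the scan-matching angle, weight = 1 fully trusts the IMU
inline double fuseAngleWithImu(double lidarAngle, double imuAngle, double weight)
{
    tf::Quaternion qLidar, qImu;
    qLidar.setRPY(lidarAngle, 0, 0);   // place the angle on a single axis
    qImu.setRPY(imuAngle, 0, 0);

    double roll, pitch, yaw;
    tf::Matrix3x3(qLidar.slerp(qImu, weight)).getRPY(roll, pitch, yaw);
    return roll;                       // the interpolated angle
}
// ------------------------------------------------------------------------------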

    float constraintTransformation(float value, float limit)
    {
        if (value < -limit)
            value = -limit;
        if (value > limit)
            value = limit;

        return value;
    }

    /**
     * Keyframe decision:
     * compute the pose change between the current frame and the previous keyframe; if the motion
     * is too small, do not create a keyframe (and nothing is added to the factor graph),
     * otherwise mark the current frame as a keyframe
    */
    bool saveFrame()
    {   
        // 如果没有关键帧,就直接认为是关键帧
        if (cloudKeyPoses3D->points.empty())
            return true;

        // if (sensor == SensorType::LIVOX)
        // {
        //     if (timeLaserInfoCur - cloudKeyPoses6D->back().time > 1.0)
        //         return true;
        // }

        // 前一帧位姿(上一个关键帧位姿)
        Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());
        // 当前帧位姿
        Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], 
                                                            transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
        // 位姿变换增量  两个位姿之间的 delta pose
        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
        float x, y, z, roll, pitch, yaw;
        // 通过变换矩阵的到 欧拉角(旋转)和位置(平移)
        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);
        // 旋转和平移量都较小,当前帧不设为关键帧
        if (abs(roll)  < surroundingkeyframeAddingAngleThreshold &&
                        abs(pitch) < surroundingkeyframeAddingAngleThreshold && 
                        abs(yaw)   < surroundingkeyframeAddingAngleThreshold &&
                        sqrt(x*x + y*y + z*z) < surroundingkeyframeAddingDistThreshold)
            return false;

        return true;
    }
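// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the keyframe decision above
// reduces to "how much did we move since the last keyframe". A standalone helper with
// hypothetical thresholds of 0.2 rad and 1.0 m (names and defaults are ours):
#include <pcl/common/eigen.h>
#include <Eigen/Geometry>
#include <cmath>

inline bool isNewKeyframe(const Eigen::Affine3f& lastKeyPose,
                          const Eigen::Affine3f& currentPose,
                          float angleThresh = 0.2f,   // rad
                          float distThresh  = 1.0f)   // m
{
    // delta pose between the last keyframe and the current estimate
    const Eigen::Affine3f delta = lastKeyPose.inverse() * currentPose;

    float x, y, z, roll, pitch, yaw;
    pcl::getTranslationAndEulerAngles(delta, x, y, z, roll, pitch, yaw);

    return std::fabs(roll)  >= angleThresh ||
           std::fabs(pitch) >= angleThresh ||
           std::fabs(yaw)   >= angleThresh ||
           std::sqrt(x * x + y * y + z * z) >= distThresh;
}
// ------------------------------------------------------------------------------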

    /**
     * 添加激光雷达帧间里程计因子
     */ 
    void addOdomFactor()
    {
        // 如果是第一帧 关键帧
        if (cloudKeyPoses3D->points.empty())
        {
            // 置信度就设置差一点,尤其是不可观的平移和yaw角
            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, M_PI*M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter
            // 增加先验约束 , 对第 0 个节点增加约束 
            gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));
            // 加入节点信息 初始值
            initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));
        }
        // 如果不是第一帧,就增加帧间约束
        else{
            // 这时帧间约束置信度就设置高一些
            noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
            // 上一关键帧 位姿 转成 gtsam的 格式
            gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());
            // 当前关键帧 位姿 转成 gtsam的 格式
            gtsam::Pose3 poseTo   = trans2gtsamPose(transformTobeMapped);
            // 这是 一个 帧间 约束 ,分别 输入两个 节点 的 id,帧间约束大小 以及 置信度
            gtSAMgraph.add(BetweenFactor<Pose3>(cloudKeyPoses3D->size()-1, cloudKeyPoses3D->size(), poseFrom.between(poseTo), odometryNoise));
            // 加入节点信息 先验位姿
            initialEstimate.insert(cloudKeyPoses3D->size(), poseTo);
        }
    }
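// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): a minimal, self-contained
// version of what addOdomFactor() does with GTSAM — a loose prior anchoring node 0
// plus a tight between-factor linking consecutive keyframes. All numbers are made up.
#include <cmath>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

void buildTinyOdomGraph(gtsam::NonlinearFactorGraph& graph, gtsam::Values& initial)
{
    using namespace gtsam;

    Pose3 p0;                                                          // identity: first keyframe
    Pose3 p1(Rot3::RzRyRx(0.0, 0.0, 0.05), Point3(1.0, 0.0, 0.0));     // moved 1 m, yawed slightly

    // loose prior (large variance) on yaw and translation of the first node
    auto priorNoise = noiseModel::Diagonal::Variances(
        (Vector(6) << 1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8).finished());
    // tight noise for the relative (odometry) constraint
    auto odomNoise = noiseModel::Diagonal::Variances(
        (Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());

    graph.add(PriorFactor<Pose3>(0, p0, priorNoise));                   // anchor node 0
    graph.add(BetweenFactor<Pose3>(0, 1, p0.between(p1), odomNoise));   // relative pose 0 -> 1
    initial.insert(0, p0);                                              // initial guesses
    initial.insert(1, p1);
}
// ------------------------------------------------------------------------------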
/// not used in this setup (no GPS input)
    void addGPSFactor()
    {
        // 如果没有gps信息就算了  
        if (gpsQueue.empty())
            return;

        // wait for system initialized and settles down
        // 如果有gps但是没有关键帧信息也算了  因为gps 是给关键帧提供约束的
        if (cloudKeyPoses3D->points.empty())
            return;
        else
        {
            // 第一个关键帧和最后一个关键帧相差很近,也就算了,要么刚起步,要么会触发回环
            if (pointDistance(cloudKeyPoses3D->front(), cloudKeyPoses3D->back()) < 5.0)
                return;
        }

        // pose covariance small, no need to correct
        // gtsam 反馈的 当前 x、y 的置信度,如果置信度比较高 也不需要 gps来进行 优化
        if (poseCovariance(3,3) < poseCovThreshold && poseCovariance(4,4) < poseCovThreshold)
            return;

        // last gps position
        static PointType lastGPSPoint;

        while (!gpsQueue.empty())
        {
            // 把距离当前帧比较早的帧都抛弃
            if (gpsQueue.front().header.stamp.toSec() < timeLaserInfoCur - 0.2)
            {
                // message too old
                gpsQueue.pop_front();
            }
            else if (gpsQueue.front().header.stamp.toSec() > timeLaserInfoCur + 0.2)
            {
                // message too new
                break;
            }
            else
            {
                nav_msgs::Odometry thisGPS = gpsQueue.front();
                gpsQueue.pop_front();

                // GPS too noisy, skip
                float noise_x = thisGPS.pose.covariance[0];
                float noise_y = thisGPS.pose.covariance[7];
                float noise_z = thisGPS.pose.covariance[14];
                if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold)
                    continue;

                float gps_x = thisGPS.pose.pose.position.x;
                float gps_y = thisGPS.pose.pose.position.y;
                float gps_z = thisGPS.pose.pose.position.z;
                if (!useGpsElevation)
                {
                    gps_z = transformTobeMapped[5];
                    noise_z = 0.01;
                }

                // GPS not properly initialized (0,0,0)
                if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
                    continue;

                // Add GPS every a few meters
                PointType curGPSPoint;
                curGPSPoint.x = gps_x;
                curGPSPoint.y = gps_y;
                curGPSPoint.z = gps_z;
                if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
                    continue;
                else
                    lastGPSPoint = curGPSPoint;

                gtsam::Vector Vector3(3);
                Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
                noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);
                gtsam::GPSFactor gps_factor(cloudKeyPoses3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);
                gtSAMgraph.add(gps_factor);

                aLoopIsClosed = true;
                break;
            }
        }
    }
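// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): gtsam::GPSFactor constrains
// only the translation of a single pose node. A minimal example with made-up numbers,
// mirroring the noise floor max(noise, 1.0) used above (function name is ours):
#include <algorithm>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/navigation/GPSFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

void addTinyGpsFactor(gtsam::NonlinearFactorGraph& graph, size_t keyframeIndex)
{
    using namespace gtsam;

    double gps_x = 12.3, gps_y = -4.5, gps_z = 0.8;        // measured position (made up)
    double noise_x = 0.25, noise_y = 0.25, noise_z = 4.0;  // reported variances (made up)

    // never let the GPS claim to be more accurate than 1 m^2 of variance
    Vector3 var(std::max(noise_x, 1.0), std::max(noise_y, 1.0), std::max(noise_z, 1.0));
    auto gpsNoise = noiseModel::Diagonal::Variances(var);

    graph.add(GPSFactor(keyframeIndex, Point3(gps_x, gps_y, gps_z), gpsNoise));
}
// ------------------------------------------------------------------------------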

    void addLoopFactor()
    {
        // 有一个专门的回环检测线程会检测回环,检测到就会给这个队列塞入回环约束
        if (loopIndexQueue.empty())
            return;
        // add every loop-closure constraint currently in the queue to the factor graph
        for (int i = 0; i < (int)loopIndexQueue.size(); ++i)
        {   
            // 当前帧 回环帧 索引
            int indexFrom = loopIndexQueue[i].first; 
            int indexTo = loopIndexQueue[i].second;
            // 帧间约束
            gtsam::Pose3 poseBetween = loopPoseQueue[i];
            // 回环的置信度就是icp的得分
            gtsam::noiseModel::Diagonal::shared_ptr noiseBetween = loopNoiseQueue[i];
            // 加入约束
            gtSAMgraph.add(BetweenFactor<Pose3>(indexFrom, indexTo, poseBetween, noiseBetween));
        }
        // 清空回环相关队列
        loopIndexQueue.clear();
        loopPoseQueue.clear();
        loopNoiseQueue.clear();
        // set the loop-closed flag to true
        aLoopIsClosed = true;
    }
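// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): a loop-closure constraint is
// just another BetweenFactor; its noise is typically derived from the ICP fitness
// score of the alignment, so a worse match yields a weaker constraint (names are ours):
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>

void addTinyLoopFactor(gtsam::NonlinearFactorGraph& graph,
                       size_t curKey, size_t loopKey,
                       const gtsam::Pose3& poseBetween,   // relative pose from the ICP alignment
                       double icpFitnessScore)            // e.g. the ICP mean squared error
{
    using namespace gtsam;

    Vector6 var;
    var << icpFitnessScore, icpFitnessScore, icpFitnessScore,
           icpFitnessScore, icpFitnessScore, icpFitnessScore;

    auto loopNoise = noiseModel::Diagonal::Variances(var);
    graph.add(BetweenFactor<Pose3>(curKey, loopKey, poseBetween, loopNoise));
}
// ------------------------------------------------------------------------------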
/**
 * Fuse the three sources (lidar odometry, loop closures, GPS) in a single factor graph
 * to perform global pose optimization
 */ 
    void saveKeyFramesAndFactor()
    {
        if (saveFrame() == false)
            return;
    // 如果是关键帧的话就给isam增加因子

        // odom factor
        addOdomFactor();

        // gps factor
        addGPSFactor();

        // loop factor
        addLoopFactor();

        // cout << "****************************************************" << endl;
        // gtSAMgraph.print("GTSAM Graph:\n");

        // update iSAM  
        // 所有因子加完了,就调用isam 接口,更新图模型
        isam->update(gtSAMgraph, initialEstimate);
        isam->update();

        // 如果加入了gps约束或者回环约束,isam需要进行更多次的优化
        if (aLoopIsClosed == true)
        {
            isam->update();
            isam->update();
            isam->update();
            isam->update();
            isam->update();
        }
        // 将约束和节点信息清空,他们已经被加入到isam中去了,因此这里清空不会影响整个优化
        gtSAMgraph.resize(0);
        initialEstimate.clear();

        //save key poses
        PointType thisPose3D;
        PointTypePose thisPose6D;
        Pose3 latestEstimate;

        // query iSAM for the current estimate of all variables
        isamCurrentEstimate = isam->calculateEstimate();
        // 取出优化后的最新关键帧位姿
        latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);
        // cout << "****************************************************" << endl;
        // isamCurrentEstimate.print("Current estimate: ");

        // store the translation into cloudKeyPoses3D; the keyframe index is kept in the intensity field
        thisPose3D.x = latestEstimate.translation().x();
        thisPose3D.y = latestEstimate.translation().y();
        thisPose3D.z = latestEstimate.translation().z();
        thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index
        cloudKeyPoses3D->push_back(thisPose3D);

        // 6D姿态同样保留下来
        thisPose6D.x = thisPose3D.x;
        thisPose6D.y = thisPose3D.y;
        thisPose6D.z = thisPose3D.z;
        thisPose6D.intensity = thisPose3D.intensity ; // this can be used as index
        thisPose6D.roll  = latestEstimate.rotation().roll();
        thisPose6D.pitch = latestEstimate.rotation().pitch();
        thisPose6D.yaw   = latestEstimate.rotation().yaw();
        thisPose6D.time = timeLaserInfoCur;
        cloudKeyPoses6D->push_back(thisPose6D);

        // cout << "****************************************************" << endl;
        // cout << "Pose covariance:" << endl;
        // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;

        // 保存当前位姿的置信度 用于是否使用gps的判断
        poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size()-1);

        // save updated transform
        // 将优化后的位姿更新到transformTobeMapped数组中,作为当前最佳估计值
        transformTobeMapped[0] = latestEstimate.rotation().roll();
        transformTobeMapped[1] = latestEstimate.rotation().pitch();
        transformTobeMapped[2] = latestEstimate.rotation().yaw();
        transformTobeMapped[3] = latestEstimate.translation().x();
        transformTobeMapped[4] = latestEstimate.translation().y();
        transformTobeMapped[5] = latestEstimate.translation().z();

        // save all the received edge and surf points
        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
        // 当前帧的点云的角点和面点 分别拷贝一下
        pcl::copyPointCloud(*laserCloudCornerLastDS,  *thisCornerKeyFrame);
        pcl::copyPointCloud(*laserCloudSurfLastDS,    *thisSurfKeyFrame);

        // save key frame cloud
        // 关键帧的点云保存下来
        cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
        surfCloudKeyFrames.push_back(thisSurfKeyFrame);

        // save path for visualization
        // 根据当前最新位姿更新rviz可视化
        updatePath(thisPose6D);
    }
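// ------------------------------------------------------------------------------
// Illustrative sketch (NOT part of mapOptimization.cpp): the ISAM2 usage pattern in
// saveKeyFramesAndFactor() — feed the new factors and initial guesses, update,
// optionally iterate a few more times after a loop closure / GPS factor, then clear
// the local containers because ISAM2 keeps its own copy (function name is ours):
#include <gtsam/geometry/Pose3.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>

gtsam::Pose3 incrementalUpdate(gtsam::ISAM2& isam,
                               gtsam::NonlinearFactorGraph& newFactors,
                               gtsam::Values& newValues,
                               bool loopClosed)
{
    isam.update(newFactors, newValues);      // one incremental optimization step
    isam.update();                           // an extra relinearization pass
    if (loopClosed)
        for (int k = 0; k < 5; ++k)          // more passes when the graph changed a lot
            isam.update();

    newFactors.resize(0);                    // safe: ISAM2 already absorbed them
    newValues.clear();

    gtsam::Values estimate = isam.calculateEstimate();
    return estimate.at<gtsam::Pose3>(estimate.size() - 1);   // latest keyframe pose (keys are 0..n-1 here)
}
// ------------------------------------------------------------------------------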

    /**
     * 调整全局轨迹
     */ 
    void correctPoses()
    {
        if (cloudKeyPoses3D->points.empty())
            return;
        // only loop-closure or GPS factors trigger a global adjustment of the trajectory
        if (aLoopIsClosed == true)
        {
            // clear the map cache (keyframe clouds already transformed into the map frame);
            // it must be rebuilt with the corrected poses
            laserCloudMapContainer.clear();
            // clear path  清空path
            globalPath.poses.clear();
            // update key poses  更新所有的位姿
            int numPoses = isamCurrentEstimate.size();
            for (int i = 0; i < numPoses; ++i)
            {
                // 3D
                cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at<Pose3>(i).translation().x();
                cloudKeyPoses3D->points[i].y = isamCurrentEstimate.at<Pose3>(i).translation().y();
                cloudKeyPoses3D->points[i].z = isamCurrentEstimate.at<Pose3>(i).translation().z();
                // 6D 
                cloudKeyPoses6D->points[i].x = cloudKeyPoses3D->points[i].x;
                cloudKeyPoses6D->points[i].y = cloudKeyPoses3D->points[i].y;
                cloudKeyPoses6D->points[i].z = cloudKeyPoses3D->points[i].z;
                cloudKeyPoses6D->points[i].roll  = isamCurrentEstimate.at<Pose3>(i).rotation().roll();
                cloudKeyPoses6D->points[i].pitch = isamCurrentEstimate.at<Pose3>(i).rotation().pitch();
                cloudKeyPoses6D->points[i].yaw   = isamCurrentEstimate.at<Pose3>(i).rotation().yaw();

                updatePath(cloudKeyPoses6D->points[i]);
            }

            aLoopIsClosed = false;
        }
    }

    void updatePath(const PointTypePose& pose_in)
    {
        geometry_msgs::PoseStamped pose_stamped;
        pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);
        pose_stamped.header.frame_id = odometryFrame;
        pose_stamped.pose.position.x = pose_in.x;
        pose_stamped.pose.position.y = pose_in.y;
        pose_stamped.pose.position.z = pose_in.z;
        tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
        pose_stamped.pose.orientation.x = q.x();
        pose_stamped.pose.orientation.y = q.y();
        pose_stamped.pose.orientation.z = q.z();
        pose_stamped.pose.orientation.w = q.w();

        globalPath.poses.push_back(pose_stamped);
    }

    void publishOdometry()
    {
        // Publish odometry for ROS (global)
        nav_msgs::Odometry laserOdometryROS;
        laserOdometryROS.header.stamp = timeLaserInfoStamp;
        laserOdometryROS.header.frame_id = odometryFrame;
        laserOdometryROS.child_frame_id = "odom_mapping";
        laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];
        laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];
        laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];
        laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
        pubLaserOdometryGlobal.publish(laserOdometryROS);
        
        // Publish TF
        static tf::TransformBroadcaster br;
        tf::Transform t_odom_to_lidar = tf::Transform(tf::createQuaternionFromRPY(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]),
                                                      tf::Vector3(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5]));
        tf::StampedTransform trans_odom_to_lidar = tf::StampedTransform(t_odom_to_lidar, timeLaserInfoStamp, odometryFrame, "lidar_link");
        br.sendTransform(trans_odom_to_lidar);

        // Publish odometry for ROS (incremental)
        static bool lastIncreOdomPubFlag = false;
        static nav_msgs::Odometry laserOdomIncremental; // incremental odometry msg
        static Eigen::Affine3f increOdomAffine; // incremental odometry in affine
        if (lastIncreOdomPubFlag == false)
        {
            lastIncreOdomPubFlag = true;
            laserOdomIncremental = laserOdometryROS;
            increOdomAffine = trans2Affine3f(transformTobeMapped);
        } else {
            Eigen::Affine3f affineIncre = incrementalOdometryAffineFront.inverse() * incrementalOdometryAffineBack;
            increOdomAffine = increOdomAffine * affineIncre;
            float x, y, z, roll, pitch, yaw;
            pcl::getTranslationAndEulerAngles (increOdomAffine, x, y, z, roll, pitch, yaw);
            if (cloudInfo.imuAvailable == true)
            {
                if (std::abs(cloudInfo.imuPitchInit) < 1.4)
                {
                    double imuWeight = 0.1;
                    tf::Quaternion imuQuaternion;
                    tf::Quaternion transformQuaternion;
                    double rollMid, pitchMid, yawMid;

                    // slerp roll
                    transformQuaternion.setRPY(roll, 0, 0);
                    imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                    roll = rollMid;

                    // slerp pitch
                    transformQuaternion.setRPY(0, pitch, 0);
                    imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
                    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
                    pitch = pitchMid;
                }
            }
            laserOdomIncremental.header.stamp = timeLaserInfoStamp;
            laserOdomIncremental.header.frame_id = odometryFrame;
            laserOdomIncremental.child_frame_id = "odom_mapping";
            laserOdomIncremental.pose.pose.position.x = x;
            laserOdomIncremental.pose.pose.position.y = y;
            laserOdomIncremental.pose.pose.position.z = z;
            laserOdomIncremental.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
            if (isDegenerate)
                laserOdomIncremental.pose.covariance[0] = 1;
            else
                laserOdomIncremental.pose.covariance[0] = 0;
        }
        pubLaserOdometryIncremental.publish(laserOdomIncremental);
    }

    void publishFrames()
    {
        if (cloudKeyPoses3D->points.empty())
            return;
        // publish key poses
        publishCloud(pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, odometryFrame);
        // Publish surrounding key frames
        publishCloud(pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, odometryFrame);
        // publish registered key frame
        if (pubRecentKeyFrame.getNumSubscribers() != 0)
        {
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
            *cloudOut += *transformPointCloud(laserCloudCornerLastDS,  &thisPose6D);
            *cloudOut += *transformPointCloud(laserCloudSurfLastDS,    &thisPose6D);
            publishCloud(pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, odometryFrame);
        }
        // publish registered high-res raw cloud
        if (pubCloudRegisteredRaw.getNumSubscribers() != 0)
        {
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
            *cloudOut = *transformPointCloud(cloudOut,  &thisPose6D);
            publishCloud(pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, odometryFrame);
        }
        // publish path
        if (pubPath.getNumSubscribers() != 0)
        {
            globalPath.header.stamp = timeLaserInfoStamp;
            globalPath.header.frame_id = odometryFrame;
            pubPath.publish(globalPath);
        }
        // publish SLAM information for 3rd-party usage
        static int lastSLAMInfoPubSize = -1;
        if (pubSLAMInfo.getNumSubscribers() != 0)
        {
            if (lastSLAMInfoPubSize != cloudKeyPoses6D->size())
            {
                lio_sam::cloud_info slamInfo;
                slamInfo.header.stamp = timeLaserInfoStamp;
                pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
                *cloudOut += *laserCloudCornerLastDS;
                *cloudOut += *laserCloudSurfLastDS;
                slamInfo.key_frame_cloud = publishCloud(ros::Publisher(), cloudOut, timeLaserInfoStamp, lidarFrame);
                slamInfo.key_frame_poses = publishCloud(ros::Publisher(), cloudKeyPoses6D, timeLaserInfoStamp, odometryFrame);
                pcl::PointCloud<PointType>::Ptr localMapOut(new pcl::PointCloud<PointType>());
                *localMapOut += *laserCloudCornerFromMapDS;
                *localMapOut += *laserCloudSurfFromMapDS;
                slamInfo.key_frame_map = publishCloud(ros::Publisher(), localMapOut, timeLaserInfoStamp, odometryFrame);
                pubSLAMInfo.publish(slamInfo);
                lastSLAMInfoPubSize = cloudKeyPoses6D->size();
            }
        }
    }
};


int main(int argc, char** argv)
{
    ros::init(argc, argv, "lio_sam");
    
    // 类对象 MO
    mapOptimization MO;

    ROS_INFO("\033[1;32m----> Map Optimization Started.\033[0m");
    
    /**
     * The loopClosureThread() thread does two things: perform loop-closure detection and
     * visualize the detected loop edges in RViz
     * performLoopClosure();
     * visualizeLoopClosure();
     */ 
    std::thread loopthread(&mapOptimization::loopClosureThread, &MO);
    /**
     * visualizeGlobalMapThread() 线程主要两部分: 发布全局地图以及保存点云PCD文件
     * publishGlobalMap();
     */ 
    std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);

    ros::spin();

    loopthread.join();
    visualizeMapThread.join();

    return 0;
}
