From Scratch: Running Real-Time ORB-SLAM on the ROS Platform (Tutorial and Demo)

This article contains a large amount of code and prose; please excuse any remaining layout or typographical slips.

Estimated reading time: 30 minutes.

All images and data in this article were captured with the INDEMIND stereo visual-inertial module.

I: Environment

System: Ubuntu 16.04 with ROS installed

ORB-SLAM dependencies: Pangolin, OpenCV, Eigen3, DBoW2, g2o, and ROS

II: Download the SDK and the ORB-SLAM source code

Download links:

  • SDK: http://indemind.cn/sdk.html

  • ORB-SLAM2: https://github.com/raulmur/ORB_SLAM2

III: Modify the source code

1. After downloading the SDK, go to the SDK-Linux/demo_ros/src directory. Put the downloaded ORB-SLAM2 source there and rename the folder to ORB_SLAM.

(screenshot)

2. Go to the ORB_SLAM/Examples/Stereo directory and edit the stereo_euroc.cc file.

(screenshot)

1) Replace the highlighted (orange) region with the following.

(screenshot)

The code is as follows:

#include <iostream>
#include <algorithm>
#include <fstream>
#include <iomanip>
#include <chrono>
#include <unistd.h>
#include <opencv2/core/core.hpp>
#include <System.h>
#include "ros/ros.h"
#include <sensor_msgs/Image.h>
#include <cv_bridge/cv_bridge.h>
#include "std_msgs/String.h"
#include "FileYaml.h"
#include <queue>
#include <mutex>
#include <condition_variable>
#include <thread>

using namespace std;

ros::Subscriber image_l_sub;
ros::Subscriber image_r_sub;

int image_l_count = 0;
queue<cv::Mat> que_image_l;
queue<cv::Mat> que_image_r;
queue<long double> que_stamp;

std::mutex limage_mutex;
std::mutex rimage_mutex;
std::condition_variable left_data_cond;
std::condition_variable right_data_cond;
//    std::lock_guard<std::mutex> lock(rimage_mutex);
//    std::thread show_thread{show_image}; // visualization thread
//    show_thread.detach();

cv::Mat imLeft;
cv::Mat imRight;
ros::Time ros_stamp;
long double stamp;

After the change it looks like this:

(screenshot)

2) After the line long double stamp;, add a timestamp-conversion function:

long double time_tranform(int64_t time)
{
    // keep only the last 13 digits of the nanosecond timestamp
    int b = time / 1e13;
    int64_t temp = b * 1e13;
    int64_t c = time - temp;
    // express them in seconds (9 digits after the decimal point)
    long double d = c / 1e9;
    return d;
}

After the change:

(screenshot)
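To make the conversion concrete, here is a small standalone check (the nanosecond value is made up for illustration). time_tranform keeps only the last 13 digits of the nanosecond count, which is enough to measure the interval between consecutive frames because only a constant offset is discarded, and returns that remainder in seconds:

#include <cstdint>
#include <iostream>

long double time_tranform(int64_t time)   // same function as above
{
    int b = time / 1e13;
    int64_t temp = b * 1e13;
    int64_t c = time - temp;
    long double d = c / 1e9;
    return d;
}

int main()
{
    // hypothetical ros::Time::toNSec() value
    int64_t ns = 1563432100123456789LL;
    std::cout.precision(13);
    // last 13 digits: 2100123456789 ns -> 2100.123456789 s
    std::cout << time_tranform(ns) << std::endl;
    return 0;
}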

3) Delete the LoadImages function declaration near the top of the file.

(screenshot)

4) Between time_tranform and main, add the left-image callback imagelCallback, the right-image callback imagerCallback, and a few variable definitions.

(screenshot)

The code is as follows:

// image_l callback
void imagelCallback(const sensor_msgs::ImageConstPtr& msg)
{
    cv_bridge::CvImagePtr cv_ptr;
    cv_ptr = cv_bridge::toCvCopy(msg, "mono8");
    cv_ptr->image.copyTo(imLeft);
    image_l_count = cv_ptr->header.seq;
    ros_stamp = cv_ptr->header.stamp;
    std::lock_guard<std::mutex> lock_l(limage_mutex);
    stamp = time_tranform(ros_stamp.toNSec());
    // cout << stamp << endl;
    que_image_l.push(imLeft);
    que_stamp.push(stamp);
}

// image_r callback
void imagerCallback(const sensor_msgs::ImageConstPtr& msg)
{
    cv_bridge::CvImagePtr cv_ptr;
    cv_ptr = cv_bridge::toCvCopy(msg, "mono8");
    cv_ptr->image.copyTo(imRight);
    std::lock_guard<std::mutex> lock_r(rimage_mutex);
    que_image_r.push(imRight);
}

Camera_Other_Parameter vecCamerasParam;
cv::Mat M1l, M2l, M1r, M2r;
cv::Mat data_left;
cv::Mat data_right;
long double frame_time;
cv::Mat imLeftRect, imRightRect;
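A caveat worth noting here (an observation about the code above plus a suggested tweak, not part of the original tutorial): cv::Mat headers share their pixel buffer, and copyTo reuses imLeft's buffer when the size and type do not change, so frames already waiting in que_image_l point at the memory the next callback overwrites. If you ever see stale or torn frames, pushing a deep copy is a minimal fix, shown here for the left callback:

void imagelCallback(const sensor_msgs::ImageConstPtr& msg)
{
    cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, "mono8");
    cv_ptr->image.copyTo(imLeft);
    image_l_count = cv_ptr->header.seq;
    ros_stamp = cv_ptr->header.stamp;
    std::lock_guard<std::mutex> lock_l(limage_mutex);
    stamp = time_tranform(ros_stamp.toNSec());
    que_image_l.push(imLeft.clone());   // deep copy: the queued frame owns its own buffer
    que_stamp.push(stamp);
}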

5) After cv::Mat imLeftRect, imRightRect;, add a show_ORB function that pulls image data and timestamps from the queues and runs ORB-SLAM on them.

(screenshot)

The code is as follows:

void show_ORB()
{
    //----------------- ORB_SLAM2 init -----------------
    const string param1_ORBvoc  = "Vocabulary/ORBvoc.txt";
    const string param3_ORByaml = "Examples/Stereo/EuRoC.yaml";
    ORB_SLAM2::System SLAM(param1_ORBvoc, param3_ORByaml, ORB_SLAM2::System::STEREO, true);
    std::this_thread::sleep_for(std::chrono::milliseconds(500));

    while (true)
    {
        std::this_thread::sleep_for(std::chrono::milliseconds(30));

        std::unique_lock<std::mutex> lock_l(limage_mutex);
        data_left  = que_image_l.front();
        frame_time = que_stamp.front();
        que_image_l.pop();
        que_stamp.pop();
        lock_l.unlock();

        std::unique_lock<std::mutex> lock_r(rimage_mutex);
        data_right = que_image_r.front();
        que_image_r.pop();
        lock_r.unlock();

        // cout.precision(13);
        // cout << frame_time << endl;

        cv::resize(data_left,  data_left,  cv::Size(vecCamerasParam.cols, vecCamerasParam.rows));
        cv::resize(data_right, data_right, cv::Size(vecCamerasParam.cols, vecCamerasParam.rows));
        cv::remap(data_left,  imLeftRect,  M1l, M2l, cv::INTER_LINEAR);
        cv::remap(data_right, imRightRect, M1r, M2r, cv::INTER_LINEAR);

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now();
#endif

        SLAM.TrackStereo(imLeftRect, imRightRect, frame_time);

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

        double ttrack = std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

        // Wait to load the next frame
        double T = 0;
        T = que_stamp.front() - frame_time;
        if (ttrack < T)
            usleep((T - ttrack) * 1e6);

        /*
        cv::imshow("left",  data_left);
        cv::imshow("right", data_right);
        cv::waitKey(1);
        */
    }

    // Stop all threads
    SLAM.Shutdown();

    // Save camera trajectory
    SLAM.SaveTrajectoryTUM("CameraTrajectory.txt");
}

After the change:

(screenshot)
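Note that show_ORB as written assumes both queues already contain data whenever it wakes up; if ORB-SLAM starts before the driver begins publishing, calling front() on an empty queue is undefined behaviour. One minimal guard (a sketch, not part of the original code, added to stereo_euroc.cc where the queues and mutexes from steps 1) and 4) are in scope) is a helper checked at the top of the loop:

// Returns true only when every queue has at least one element, so the
// front()/pop() calls in show_ORB are safe to make.
bool frames_available()
{
    std::lock_guard<std::mutex> lock_l(limage_mutex);
    std::lock_guard<std::mutex> lock_r(rimage_mutex);
    return !que_image_l.empty() && !que_stamp.empty() && !que_image_r.empty();
}

// At the top of show_ORB's while (true) loop:
//     if (!frames_available()) continue;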

6) Delete everything inside main and replace it with ROS initialization, reading the configuration file, the fisheye undistortion and rectification setup, and the launch of ORB_SLAM.

(screenshot)

The code is as follows:

    ros::init(argc, argv, "ORB_SLAM2");
    ros::NodeHandle n;
    image_l_sub = n.subscribe("/module/image_left",  100, imagelCallback);
    image_r_sub = n.subscribe("/module/image_right", 100, imagerCallback);

    const char *param2_SDKyaml = "/home/indemind/u/SDK-Linux-ros/lib/1604/headset.yaml";
    readConfig(param2_SDKyaml, vecCamerasParam);

    //----------------- fisheye rectify -----------------
    cv::Mat Q;
    if (vecCamerasParam.K_l.empty() || vecCamerasParam.K_r.empty() || vecCamerasParam.P_l.empty() || vecCamerasParam.P_r.empty() ||
        vecCamerasParam.R_l.empty() || vecCamerasParam.R_r.empty() || vecCamerasParam.D_l.empty() || vecCamerasParam.D_r.empty() ||
        vecCamerasParam.rows == 0 || vecCamerasParam.cols == 0)
    {
        cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
        return -1;
    }

    cv::fisheye::initUndistortRectifyMap(vecCamerasParam.K_l, vecCamerasParam.D_l, vecCamerasParam.R_l,
            vecCamerasParam.P_l.rowRange(0,3).colRange(0,3),
            cv::Size(vecCamerasParam.cols, vecCamerasParam.rows), CV_32FC1, M1l, M2l);
    cv::fisheye::initUndistortRectifyMap(vecCamerasParam.K_r, vecCamerasParam.D_r, vecCamerasParam.R_r,
            vecCamerasParam.P_r.rowRange(0,3).colRange(0,3),
            cv::Size(vecCamerasParam.cols, vecCamerasParam.rows), CV_32FC1, M1r, M2r);

    cout << "the P_l of initUndistortRectifyMap after" << endl;
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
        {
            double *ptr = vecCamerasParam.P_l.ptr<double>(i, j);
            cout << *ptr << " ";
        }
    cout << endl;

    cout << "the P_r of initUndistortRectifyMap after" << endl;
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
        {
            double *ptr = vecCamerasParam.P_r.ptr<double>(i, j);
            cout << *ptr << " ";
        }
    cout << endl;

    cv::stereoRectify(vecCamerasParam.K_l, vecCamerasParam.D_l, vecCamerasParam.K_r, vecCamerasParam.D_r,
            cv::Size(vecCamerasParam.cols, vecCamerasParam.rows),
            vecCamerasParam.R, vecCamerasParam.t,
            vecCamerasParam.R_l, vecCamerasParam.R_r, vecCamerasParam.P_l, vecCamerasParam.P_r, Q,
            cv::CALIB_ZERO_DISPARITY, 0);
    //----------------- fisheye end -----------------

    std::thread show_thread{show_ORB}; // ORB-SLAM / visualization thread
    show_thread.detach();

    ros::spin();
    return 0;

After the change:

(screenshot)
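The values printed here are the ones you will copy into Tracking.cc in the final step: the new camera matrix handed to cv::fisheye::initUndistortRectifyMap is the top-left 3x3 block of P_l, so fx = P_l(0,0), fy = P_l(1,1), cx = P_l(0,2) and cy = P_l(1,2). If you would rather not read them off the console, a small helper like the one below prints them directly (a sketch, not part of the original code; it assumes P_l was loaded as CV_64F doubles, which is the usual element type for calibration matrices read through cv::FileStorage):

#include <iostream>
#include <opencv2/core/core.hpp>

// Print the rectified pinhole intrinsics contained in a projection matrix P.
void printRectifiedIntrinsics(const cv::Mat& P)
{
    std::cout << "fx = " << P.at<double>(0, 0)
              << "  fy = " << P.at<double>(1, 1)
              << "  cx = " << P.at<double>(0, 2)
              << "  cy = " << P.at<double>(1, 2) << std::endl;
}

// Usage inside main(), after readConfig():
//     printRectifiedIntrinsics(vecCamerasParam.P_l);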

7) Delete the LoadImages function definition at the bottom of the file.

(screenshot)

That completes the changes to stereo_euroc.cc.

3. Go to the SDK-Linux/demo_ros/src/ORB_SLAM/include directory, create a new file named FileYaml.h, and fill it with the following code:

#ifndef _FILEYAML_H
#define _FILEYAML_H

#include <iostream>
#include <string>

using namespace std;

#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <Eigen/Core>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <sophus/so3.h>
#include <sophus/se3.h>

using namespace cv;
using namespace Eigen;
using namespace Sophus;

using cv::FileStorage;
using cv::FileNode;

struct Camera
{
    Eigen::Matrix4d T_SC;
    Eigen::Vector2d imageDimension;
    Eigen::VectorXd distortionCoefficients;
    Eigen::Vector2d focalLength;
    Eigen::Vector2d principalPoint;
    std::string distortionType;

    Camera() :
            T_SC(Eigen::Matrix4d::Identity()),
            imageDimension(Eigen::Vector2d(1280, 800)),
            distortionCoefficients(Eigen::Vector2d(0, 0)),
            focalLength(Eigen::Vector2d(700, 700)),
            principalPoint(Eigen::Vector2d(640, 400)),
            distortionType("equidistant")
    {
    }

    void write(FileStorage& fs) const  // Write serialization for this class
    {
        fs << "{:";
        fs << "T_SC";
        fs << "[:";
        for (int i = 0; i < 4; i++)
        {
            for (int j = 0; j < 4; j++)
            {
                fs << T_SC(i, j);
            }
        }
        fs << "]";
        fs << "image_dimension";
        fs << "[:";
        fs << imageDimension(0) << imageDimension(1);
        fs << "]";
        fs << "distortion_coefficients";
        fs << "[:";
        for (int i = 0; i < distortionCoefficients.rows(); i++)
        {
            fs << distortionCoefficients(i);
        }
        fs << "]";
        fs << "distortion_type" << distortionType;
        fs << "focal_length";
        fs << "[:";
        fs << focalLength(0) << focalLength(1);
        fs << "]";
        fs << "principal_point";
        fs << "[:";
        fs << principalPoint(0) << principalPoint(1);
        fs << "]";
        fs << "}";
    }

    void read(const FileNode& node)  // Read serialization for this class
    {
        cv::FileNode T_SC_node = node["T_SC"];
        cv::FileNode imageDimensionNode = node["image_dimension"];
        cv::FileNode distortionCoefficientNode = node["distortion_coefficients"];
        cv::FileNode focalLengthNode = node["focal_length"];
        cv::FileNode principalPointNode = node["principal_point"];

        // extrinsics
        T_SC << T_SC_node[0],  T_SC_node[1],  T_SC_node[2],  T_SC_node[3],
                T_SC_node[4],  T_SC_node[5],  T_SC_node[6],  T_SC_node[7],
                T_SC_node[8],  T_SC_node[9],  T_SC_node[10], T_SC_node[11],
                T_SC_node[12], T_SC_node[13], T_SC_node[14], T_SC_node[15];

        imageDimension << imageDimensionNode[0], imageDimensionNode[1];
        distortionCoefficients.resize(distortionCoefficientNode.size());
        for (size_t i = 0; i < distortionCoefficientNode.size(); i++)
        {
            distortionCoefficients[i] = distortionCoefficientNode[i];
        }
        focalLength << focalLengthNode[0], focalLengthNode[1];
        principalPoint << principalPointNode[0], principalPointNode[1];
        distortionType = (std::string)(node["distortion_type"]);
    }
};

struct Camera_Other_Parameter
{
    Mat K_l;
    Mat K_r;
    Mat D_l;
    Mat D_r;
    Mat R_l;
    Mat R_r;
    Mat P_l;
    Mat P_r;
    Mat R;
    Mat t;
    int cols;    // image width
    int rows;    // image height
};

void readConfig(const char *yamlPath, Camera_Other_Parameter &vecCamerasOtherParam);

#endif

Note: this header declares the configuration-file reader.

4. Go to the SDK-Linux/demo_ros/src/ORB_SLAM/src directory, create a new file named FileYaml.cc, and fill it with the following code:

#include "FileYaml.h"

static void write(FileStorage& fs, const std::string&, const Camera& x)
{
    x.write(fs);
}

static void read(const FileNode& node, Camera& x, const Camera& default_value = Camera())
{
    if (node.empty())
        x = default_value;
    else
        x.read(node);
}

void readConfig(const char *yamlPath, Camera_Other_Parameter &vecCamerasOtherParam)
{
    std::vector<Camera> vecCameras;
    cv::FileStorage fin(yamlPath, cv::FileStorage::READ);
    if (!fin.isOpened())
    {
        cerr << endl << "Failed to load readConfig yamlPath " << endl;
        return;
    }

    cv::FileNode cameras_node = fin["cameras"];

    fin["Rl"] >> vecCamerasOtherParam.R_l;
    fin["Rr"] >> vecCamerasOtherParam.R_r;
    fin["Pl"] >> vecCamerasOtherParam.P_l;
    fin["Pr"] >> vecCamerasOtherParam.P_r;
    fin["Kl"] >> vecCamerasOtherParam.K_l;
    fin["Kr"] >> vecCamerasOtherParam.K_r;
    fin["Dl"] >> vecCamerasOtherParam.D_l;
    fin["Dr"] >> vecCamerasOtherParam.D_r;

    for (cv::FileNodeIterator it = cameras_node.begin(); it != cameras_node.end(); it++)
    {
        Camera camera;
        (*it) >> camera;
        vecCameras.push_back(camera);
    }

    // obtain cols & rows
    vecCamerasOtherParam.cols = vecCameras[0].imageDimension(0);
    vecCamerasOtherParam.rows = vecCameras[0].imageDimension(1);

    // obtain R & t between the two cameras
    Eigen::Matrix4d T_SC_left;
    Eigen::Matrix4d T_SC_right;
    T_SC_left  = vecCameras[0].T_SC;
    T_SC_right = vecCameras[1].T_SC;

    SE3 T_SC_l(T_SC_left.topLeftCorner(3, 3),  T_SC_left.topRightCorner(3, 1));
    SE3 T_SC_r(T_SC_right.topLeftCorner(3, 3), T_SC_right.topRightCorner(3, 1));
    SE3 Tcl_cr = T_SC_l.inverse() * T_SC_r;
    SE3 Tcr_cl = T_SC_r.inverse() * T_SC_l;
    Matrix3d R = Tcr_cl.rotation_matrix();
    Vector3d t = Tcr_cl.translation();

    // copy the Eigen matrices into plain arrays
    double *R_ptr = new double[R.size()];
    double *t_ptr = new double[t.size()];
    Map<MatrixXd>(R_ptr, R.rows(), R.cols()) = R;
    Map<MatrixXd>(t_ptr, t.rows(), t.cols()) = t;
    cout << R << endl;

    double R_matrix[3][3];
    for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
        {
            // transpose
            R_matrix[j][i] = R_ptr[i + j * 3];
            cout << R_matrix[j][i] << " ";
        }
    cout << endl;

    double t_matrix[3];
    for (int i = 0; i < 3; i++)
    {
        t_matrix[i] = t_ptr[i];
        cout << t_matrix[i] << " ";
    }
    cout << endl;

    // clone() so the output Mats own their data after the local arrays go out of scope
    vecCamerasOtherParam.R = cv::Mat(3, 3, CV_64FC1, R_matrix).clone();
    vecCamerasOtherParam.t = cv::Mat(3, 1, CV_64FC1, t_matrix).clone();

    delete[] R_ptr;
    delete[] t_ptr;
}

Note: this is the implementation of the configuration-file reader.
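The two static write()/read() overloads at the top of FileYaml.cc are what make (*it) >> camera work: OpenCV's FileStorage looks for free functions with exactly these signatures when it serializes a user-defined type. Because they are declared static, they are only visible inside FileYaml.cc, so a quick round-trip test would have to live in that file as well. A sketch (the function name and the file name camera_test.yaml are made up for illustration):

// Could be appended to FileYaml.cc for a quick sanity check.
void testCameraRoundTrip()
{
    Camera cam_out;                                            // default-constructed parameters
    cv::FileStorage fs("camera_test.yaml", cv::FileStorage::WRITE);
    fs << "camera" << cam_out;                                 // dispatches to write(FileStorage&, ..., const Camera&)
    fs.release();

    Camera cam_in;
    cv::FileStorage fin("camera_test.yaml", cv::FileStorage::READ);
    fin["camera"] >> cam_in;                                   // dispatches to read(const FileNode&, Camera&, ...)
    fin.release();

    cout << "focal length after round trip: " << cam_in.focalLength.transpose() << endl;
}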

5. Modify CMakeLists.txt

Go to the SDK-Linux/demo_ros/src directory and edit the CMakeLists.txt file.

Merge the relevant parts of the ORB-SLAM2 CMakeLists.txt into the SDK's CMakeLists.txt, as follows.

1) After add_compile_options(-std=c++11), add:

IF(NOT CMAKE_BUILD_TYPE)
  SET(CMAKE_BUILD_TYPE Release)
ENDIF()

MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})

set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall   -O3 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
   add_definitions(-DCOMPILEDWITHC11)
   message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
   add_definitions(-DCOMPILEDWITHC0X)
   message(STATUS "Using flag -std=c++0x.")
else()
   message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()

LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)

find_package(OpenCV 3.0 QUIET)
if(NOT OpenCV_FOUND)
   find_package(OpenCV 2.4.3 QUIET)
   if(NOT OpenCV_FOUND)
      message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
   endif()
endif()

#find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)

After the change:

(screenshot)

2)

(screenshot)

After the highlighted (orange) region, add:

# Eigen
include_directories("/usr/include/eigen3")

# Sophus
find_package(Sophus REQUIRED)
include_directories(${Sophus_INCLUDE_DIRS})

include_directories(
  ${PROJECT_SOURCE_DIR}
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/include
  ${PROJECT_SOURCE_DIR}/ORB_SLAM
  #${EIGEN3_INCLUDE_DIR}
  ${Pangolin_INCLUDE_DIRS}
)

set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib)

3)

(screenshot)

Change the highlighted (orange) region to:

add_library(${PROJECT_NAME} SHARED
  ORB_SLAM/src/FileYaml.cc
  ORB_SLAM/src/System.cc
  ORB_SLAM/src/Tracking.cc
  ORB_SLAM/src/LocalMapping.cc
  ORB_SLAM/src/LoopClosing.cc
  ORB_SLAM/src/ORBextractor.cc
  ORB_SLAM/src/ORBmatcher.cc
  ORB_SLAM/src/FrameDrawer.cc
  ORB_SLAM/src/Converter.cc
  ORB_SLAM/src/MapPoint.cc
  ORB_SLAM/src/KeyFrame.cc
  ORB_SLAM/src/Map.cc
  ORB_SLAM/src/MapDrawer.cc
  ORB_SLAM/src/Optimizer.cc
  ORB_SLAM/src/PnPsolver.cc
  ORB_SLAM/src/Frame.cc
  ORB_SLAM/src/KeyFrameDatabase.cc
  ORB_SLAM/src/Sim3Solver.cc
  ORB_SLAM/src/Initializer.cc
  ORB_SLAM/src/Viewer.cc
)

4)

(screenshot)

Replace it with:

target_link_libraries(${PROJECT_NAME}
  ${OpenCV_LIBS}
  #${EIGEN3_LIBS}
  ${Pangolin_LIBRARIES}
  ${Sophus_LIBRARIES}
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/Thirdparty/DBoW2/lib/libDBoW2.so
  ${PROJECT_SOURCE_DIR}/ORB_SLAM/Thirdparty/g2o/lib/libg2o.so
)

5)

(screenshot)

In the highlighted (orange) region, add:

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/RGB-D)

add_executable(rgbd_tum
  ORB_SLAM/Examples/RGB-D/rgbd_tum.cc)
target_link_libraries(rgbd_tum ${PROJECT_NAME} ${Sophus_LIBRARIES})

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/Stereo)

add_executable(stereo_kitti
  ORB_SLAM/Examples/Stereo/stereo_kitti.cc)
target_link_libraries(stereo_kitti ${PROJECT_NAME} ${Sophus_LIBRARIES})

add_executable(stereo_euroc
  ORB_SLAM/Examples/Stereo/stereo_euroc.cc)
add_dependencies(stereo_euroc ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
target_link_libraries(stereo_euroc ${PROJECT_NAME} ${Sophus_LIBRARIES} ${catkin_LIBRARIES})

add_executable(module_driver src/camera_driver.cpp)
add_dependencies(module_driver ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
target_link_libraries(module_driver
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libindem.so
  ${catkin_LIBRARIES}
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libboost_filesystem.so.1.58.0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libboost_system.so.1.58.0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libg3logger.so.1.3.0-0
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libnanomsg.so.5
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_core.so.3.4
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_imgproc.so.3.4
  ${PROJECT_SOURCE_DIR}/../../lib/1604/libopencv_videoio.so.3.4
  pthread
  stdc++fs
  dl
)

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/ORB_SLAM/Examples/Monocular)

add_executable(mono_tum
  ORB_SLAM/Examples/Monocular/mono_tum.cc)
target_link_libraries(mono_tum ${Sophus_LIBRARIES} ${PROJECT_NAME})

add_executable(mono_kitti
  ORB_SLAM/Examples/Monocular/mono_kitti.cc)
target_link_libraries(mono_kitti ${Sophus_LIBRARIES} ${PROJECT_NAME})

add_executable(mono_euroc
  ORB_SLAM/Examples/Monocular/mono_euroc.cc)
target_link_libraries(mono_euroc ${Sophus_LIBRARIES} ${PROJECT_NAME})

That completes the CMakeLists.txt changes.

6. Build

1) Install the SDK. For the installation walkthrough, see the earlier post 开讲啦丨双目惯性模组运行ORB-SLAM算法示例 (running the ORB-SLAM example with the stereo inertial module).

Then go to the SDK-Linux/demo_ros directory and run catkin_make; the result looks like this:

(screenshot)

7. Run

1) Open a new shell and run roscore, as shown below:

(screenshot)

2) Go to the SDK-Linux/lib/1604 directory and run:

sudo -s
sudo chmod 777 ./run.sh

then run:

./run.sh

as shown below:

(screenshot)

3) Go to the SDK-Linux/demo_ros/src/ORB_SLAM directory and run ./Examples/Stereo/stereo_euroc.

(screenshot)

Note: at this point the rectified camera parameters are printed to the console. Write the following values into SDK-Linux/demo_ros/src/ORB_SLAM/src/Tracking.cc, as shown below:

float fx = 597.935;
float fy = 597.935;
float cx = 476.987;
float cy = 442.158;

(screenshot)
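For context, the stock ORB_SLAM2 Tracking constructor reads these four values from the settings YAML (Camera.fx and so on) and packs them into the member camera matrix mK; hard-coding the rectified values printed by stereo_euroc simply overrides what EuRoC.yaml supplies. Roughly, that part of the constructor ends up looking like this (a sketch based on the upstream Tracking.cc; only the four hard-coded assignments are the tutorial's change):

// In Tracking::Tracking(...), ORB_SLAM/src/Tracking.cc
float fx = 597.935;   // was: float fx = fSettings["Camera.fx"];
float fy = 597.935;   // was: float fy = fSettings["Camera.fy"];
float cx = 476.987;   // was: float cx = fSettings["Camera.cx"];
float cy = 442.158;   // was: float cy = fSettings["Camera.cy"];

cv::Mat K = cv::Mat::eye(3, 3, CV_32F);
K.at<float>(0, 0) = fx;
K.at<float>(1, 1) = fy;
K.at<float>(0, 2) = cx;
K.at<float>(1, 2) = cy;
K.copyTo(mK);   // mK is the camera matrix Tracking uses for projection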

Finally, rebuild and run again, and you get real-time ORB-SLAM.

That concludes the tutorial. The demo is shown below:

(demo screenshot)
