和大神们学习
添加链接描述IMU数据处理
VINS-Fusion代码按执行顺序阅读(一)
VINS-Fusion代码按执行顺序阅读(二)
VINS-Fusion代码按执行顺序阅读(三)
VINS-Fusion代码按执行顺序阅读(四)
使用自己的INDEMIND相机来运行ORBSLAM2单目,双目和深度模式(小觅相机和realsense通用)
INDEMIND 双目相机使用教程
INDEMIND官网SDK
VINS-FUSION 源码 双目 单线程 按执行顺序阅读
VINS-FUSION代码超详细注释(VIO部分)/VIO入门(1)
VINS-FUSION代码超详细注释(VIO部分)/VIO入门(2)
VINS-FUSION代码超详细注释(VIO部分)/VIO入门(3)
VINS-FUSION代码超详细注释(VIO部分)/VIO入门(4)
【泡泡读者来稿】VINS 论文推导及代码解析(一)
【泡泡读者来稿】VINS 论文推导及代码解析(二)
【泡泡读者来稿】VINS 论文推导及代码解析(三)
【泡泡读者来稿】VINS 论文推导及代码解析(四)
VINS代码阅读
vslam的前端采用光流法,因此该算法的天花板颇低。
有两个先天弱点:1,特征点无描述子,导致无法用某一帧在全图中重定位 2,用LK光流跟踪,转弯时容易跟丢
1.程序入口rosNodeTest.cpp
定义了 估计器、 缓存器 、 获取传感器数据的函数 和 一个主函数
初始化数据队列
queue<sensor_msgs::ImuConstPtr> imu_buf;
queue<sensor_msgs::PointCloudConstPtr> feature_buf;
queue<sensor_msgs::ImageConstPtr> img0_buf;
queue<sensor_msgs::ImageConstPtr> img1_buf;
//获得左图图像(message)
void img0_callback(const sensor_msgs::ImageConstPtr &img_msg)
//获得右图图像(message)
void img1_callback(const sensor_msgs::ImageConstPtr &img_msg)
//将msg转为cv::Mat
cv::Mat getImageFromMsg(const sensor_msgs::ImageConstPtr &img_msg)
//获得imu数据,并将imu数据存放到estimator中
void imu_callback(const sensor_msgs::ImuConstPtr &imu_msg)
//若是双目选取时间戳相同的图像放入estimator中,若是单目直接放入estimator
void sync_process()
//接收/feature_tracker/feature节点发送的cur帧的信息,并放在feature_buf中
void feature_callback(const sensor_msgs::PointCloudConstPtr &feature_msg)
// 是否重启estimator,并重新设置参数
void restart_callback(const std_msgs::BoolConstPtr &restart_msg)
//确定是否使用IMU
void imu_switch_callback(const std_msgs::BoolConstPtr &switch_msg)
//确定使用单目还是双目
void cam_switch_callback(const std_msgs::BoolConstPtr &switch_msg)
1.1img0_callback
img0_buf.push(img_msg);
将左图图像数据放入img0_buf
1.2img1_callback
img1_buf.push(img_msg);
将右图图像数据放入img1_buf
1.3cv::Mat getImageFromMsg(const sensor_msgs::ImageConstPtr &img_msg)
ptr = cv_bridge::toCvCopy(img, sensor_msgs::image_encodings::MONO8); 进行图像格式转换
1.4void imu_callback(const sensor_msgs::ImuConstPtr &imu_msg)
double t = imu_msg->header.stamp.toSec();
double dx = imu_msg->linear_acceleration.x;
double dy = imu_msg->linear_acceleration.y;
double dz = imu_msg->linear_acceleration.z;
double rx = imu_msg->angular_velocity.x;
double ry = imu_msg->angular_velocity.y;
double rz = imu_msg->angular_velocity.z;
Vector3d acc(dx, dy, dz);
Vector3d gyr(rx, ry, rz);
estimator.inputIMU(t, acc, gyr);
return;
void Estimator::inputIMU(double t, const Vector3d &linearAcceleration, const Vector3d &angularVelocity))
中accBuf.push(make_pair(t, linearAcceleration));
gyrBuf.push(make_pair(t, angularVelocity));
插入到accBuf与gyrBuf
1.5sync_process
首先判断是否是单目双目
如果是双目,需要检测同步问题。对双目的时间进行判断,时间间隔小于0.003s的话则使用getImageFromMsg
将其输入到image0和image1变量之中。之后estimator.inputImage
如果是单目,则直接estimator.inputImage。
主函数运行到sync_process时转到estimator.inputImage。
1.6feature_callback
map<int, vector<pair<int, Eigen::Matrix<double, 7, 1>>>> featureFrame;
for (unsigned int i = 0; i < feature_msg->points.size(); i++)
{
int feature_id = feature_msg->channels[0].values[i];
int camera_id = feature_msg->channels[1].values[i];
double x = feature_msg->points[i].x;
double y = feature_msg->points[i].y;
double z = feature_msg->points[i].z;
double p_u = feature_msg->channels[2].values[i];
double p_v = feature_msg->channels[3].values[i];
double velocity_x = feature_msg->channels[4].values[i];
double velocity_y = feature_msg->channels[5].values[i];
if(feature_msg->channels.size() > 5)
{
double gx = feature_msg->channels[6].values[i];
double gy = feature_msg->channels[7].values[i];
double gz = feature_msg->channels[8].values[i];
pts_gt[feature_id] = Eigen::Vector3d(gx, gy, gz);
//printf("receive pts gt %d %f %f %f\n", feature_id, gx, gy, gz);
}
ROS_ASSERT(z == 1);
Eigen::Matrix<double, 7, 1> xyz_uv_velocity;
xyz_uv_velocity << x, y, z, p_u, p_v, velocity_x, velocity_y;
featureFrame[feature_id].emplace_back(camera_id, xyz_uv_velocity);
}
double t = feature_msg->header.stamp.toSec();
estimator.inputFeature(t, featureFrame);
return;
接收节点的消息,填充featureFrame
,并把featureFrame
通过inputFeature
输入到estimator
,且填充了featureBuf
2.主函数
1.初始化ROS节点,句柄,日志信息
ros::init(argc, argv, "vins_estimator");
ros::NodeHandle n("~");
ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info);
2.读取参数,设置参数
readParameters(config_file);
estimator.setParameter();
3.发布用于RVIZ显示的Topic,本模块具体发布的内容详见输入输出
registerPub(n);
这个函数定义在utility/visualization.cpp里面:void registerPub(ros::NodeHandle &n)。
4.订阅ROS信息
ros::Subscriber sub_imu = n.subscribe(IMU_TOPIC, 2000, imu_callback, ros::TransportHints().tcpNoDelay());
ros::Subscriber sub_feature = n.subscribe("/feature_tracker/feature", 2000, feature_callback);
ros::Subscriber sub_img0 = n.subscribe(IMAGE0_TOPIC, 100, img0_callback);
ros::Subscriber sub_img1 = n.subscribe(IMAGE1_TOPIC, 100, img1_callback);
ros::Subscriber sub_restart = n.subscribe("/vins_restart", 100, restart_callback);
ros::Subscriber sub_imu_switch = n.subscribe("/vins_imu_switch", 100, imu_switch_callback);
ros::Subscriber sub_cam_switch = n.subscribe("/vins_cam_switch", 100, cam_switch_callback);
3.其他
// Load the Mei (catadioptric) camera intrinsics from a flat parameter
// vector. The expected layout is: xi, k1, k2, p1, p2, gamma1, gamma2,
// u0, v0. A vector of the wrong length is silently ignored so that a
// malformed config cannot clobber a previously valid calibration.
void
CataCamera::readParameters(const std::vector<double>& parameterVec)
{
    // Guard: reject any vector that does not hold exactly the number
    // of parameters this camera model expects.
    if (parameterCount() != static_cast<int>(parameterVec.size()))
    {
        return;
    }

    Parameters params = getParameters();

    // Mirror parameter, radial/tangential distortion, projection
    // gammas, and principal point — in that fixed order.
    params.xi()     = parameterVec.at(0);
    params.k1()     = parameterVec.at(1);
    params.k2()     = parameterVec.at(2);
    params.p1()     = parameterVec.at(3);
    params.p2()     = parameterVec.at(4);
    params.gamma1() = parameterVec.at(5);
    params.gamma2() = parameterVec.at(6);
    params.u0()     = parameterVec.at(7);
    params.v0()     = parameterVec.at(8);

    setParameters(params);
}
// Install the configuration loaded by readParameters() into the
// estimator: camera-IMU extrinsics, the sqrt-information matrices of the
// projection factors, the camera-IMU time offset, gravity, and the
// feature-tracker intrinsics. On the first call in multi-threaded mode
// it also spawns the measurement-processing thread. The whole routine
// is guarded by mProcess so it cannot race the processing thread.
void Estimator::setParameter()
{
    mProcess.lock();
    for (int i = 0; i < NUM_OF_CAM; i++)
    {
        // Copy the camera-to-IMU extrinsic calibration (translation TIC,
        // rotation RIC) from the globals filled by readParameters().
        tic[i] = TIC[i];
        ric[i] = RIC[i];
        // Fixed typo in the diagnostic output: "exitrinsic" -> "extrinsic".
        cout << " extrinsic cam " << i << endl << ric[i] << endl << tic[i].transpose() << endl;
    }
    f_manager.setRic(ric);
    // sqrt information for the visual residuals: a fixed 1.5-pixel
    // measurement noise scaled by the virtual focal length.
    ProjectionTwoFrameOneCamFactor::sqrt_info = FOCAL_LENGTH / 1.5 * Matrix2d::Identity();
    ProjectionTwoFrameTwoCamFactor::sqrt_info = FOCAL_LENGTH / 1.5 * Matrix2d::Identity();
    ProjectionOneFrameTwoCamFactor::sqrt_info = FOCAL_LENGTH / 1.5 * Matrix2d::Identity();
    td = TD; // camera-IMU time offset (seconds)
    g = G;   // gravity vector
    cout << "set g " << g.transpose() << endl;
    featureTracker.readIntrinsicParameter(CAM_NAMES);
    std::cout << "MULTIPLE_THREAD is " << MULTIPLE_THREAD << '\n';
    if (MULTIPLE_THREAD && !initThreadFlag)
    {
        // Launch the background thread exactly once; setParameter() may
        // be called again (e.g. on restart) and must not spawn a second.
        initThreadFlag = true;
        processThread = std::thread(&Estimator::processMeasurements, this);
    }
    mProcess.unlock();
}