This node mainly handles SLAM system creation, IMU and camera data acquisition, timestamp alignment, image rectification (undistortion), and histogram equalization.
IMU data arrives as sensor_msgs::ImuConstPtr messages, while image data arrives as sensor_msgs::ImageConstPtr messages and is converted to cv::Mat before being handed to the tracker.
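As a quick reference, the sketch below (standalone code, not part of the node; the helper name ToGrayMat is made up for illustration) shows how a sensor_msgs image message is converted to cv::Mat with cv_bridge, which is essentially what the node's GetImage member does further down.

#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/core.hpp>

cv::Mat ToGrayMat(const sensor_msgs::ImageConstPtr &msg)
{
    // toCvShare avoids copying when the encoding already matches MONO8
    cv_bridge::CvImageConstPtr cv_ptr =
        cv_bridge::toCvShare(msg, sensor_msgs::image_encodings::MONO8);
    // clone so the returned Mat stays valid after the message is released
    return cv_ptr->image.clone();
}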
class ImuGrabber
{
public:
    ImuGrabber(){};
    void GrabImu(const sensor_msgs::ImuConstPtr &imu_msg);

    // queue that stores the incoming IMU messages
    queue<sensor_msgs::ImuConstPtr> imuBuf;
    // mutex protecting imuBuf, which is filled and consumed by different threads
    std::mutex mBufMutex;
};
class ImageGrabber
{
public:
    // constructor: ORB-SLAM3 system, IMU grabber, rectification flag, histogram-equalization flag
    ImageGrabber(ORB_SLAM3::System* pSLAM, ImuGrabber *pImuGb, const bool bRect, const bool bClahe): mpSLAM(pSLAM), mpImuGb(pImuGb), do_rectify(bRect), mbClahe(bClahe){}

    void GrabImageLeft(const sensor_msgs::ImageConstPtr& msg);
    void GrabImageRight(const sensor_msgs::ImageConstPtr& msg);
    cv::Mat GetImage(const sensor_msgs::ImageConstPtr &img_msg);
    // synchronizes the stereo images with the IMU measurements and feeds the tracker
    void SyncWithImu();

    queue<sensor_msgs::ImageConstPtr> imgLeftBuf, imgRightBuf;
    std::mutex mBufMutexLeft,mBufMutexRight;

    ORB_SLAM3::System* mpSLAM;
    ImuGrabber *mpImuGb;

    const bool do_rectify;
    cv::Mat M1l,M2l,M1r,M2r;

    // histogram equalization (CLAHE)
    const bool mbClahe;
    cv::Ptr<cv::CLAHE> mClahe = cv::createCLAHE(3.0, cv::Size(8, 8));
};
int main(int argc, char **argv)
{
    ros::init(argc, argv, "Stereo_Inertial");
    ros::NodeHandle n("~");
    // set the ROS logging level; the five levels are DEBUG, INFO, WARN, ERROR and FATAL
    ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Info);
    bool bEqual = false;// flag: apply histogram equalization to the images
    // check the number of arguments
    if(argc < 4 || argc > 5)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM3 Stereo_Inertial path_to_vocabulary path_to_settings do_rectify [do_equalize]" << endl;
        ros::shutdown();
        return 1;
    }
    std::string sbRect(argv[3]);// whether to rectify the stereo images
    if(argc==5)
    {
        std::string sbEqual(argv[4]);// whether to apply histogram equalization
        if(sbEqual == "true")
            bEqual = true;
    }
    // Create the SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO,true);
    ImuGrabber imugb;
    ImageGrabber igb(&SLAM,&imugb,sbRect == "true",bEqual);
    // if stereo rectification is requested, load the corresponding parameters from the settings file
    if(igb.do_rectify)
    {
        // Load settings related to stereo calibration
        cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ);// read-only
        // cv::FileStorage can read matrices directly from the settings file
        if(!fsSettings.isOpened())
        {
            cerr << "ERROR: Wrong path to settings" << endl;
            return -1;
        }
        cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
        fsSettings["LEFT.K"] >> K_l;// camera intrinsics
        fsSettings["RIGHT.K"] >> K_r;
        fsSettings["LEFT.P"] >> P_l;// projection matrix after rectification (the camera is rectified towards this projection)
        fsSettings["RIGHT.P"] >> P_r;
        fsSettings["LEFT.R"] >> R_l;// stereo rectification rotation,
        fsSettings["RIGHT.R"] >> R_r;// makes the two image planes coplanar and row-aligned
        fsSettings["LEFT.D"] >> D_l;// distortion coefficients
        fsSettings["RIGHT.D"] >> D_r;
        int rows_l = fsSettings["LEFT.height"];
        int cols_l = fsSettings["LEFT.width"];
        int rows_r = fsSettings["RIGHT.height"];
        int cols_r = fsSettings["RIGHT.width"];
        if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty() ||
                rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0)
        {
            cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
            return -1;
        }
        // from these parameters compute the undistortion and rectification maps; initUndistortRectifyMap is OpenCV's built-in function for this
        cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l);
        cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r);
    }
    // subscribe to the left/right images and the IMU data
    ros::Subscriber sub_imu = n.subscribe("/imu", 1000, &ImuGrabber::GrabImu, &imugb);
    ros::Subscriber sub_img_left = n.subscribe("/camera/left/image_raw", 100, &ImageGrabber::GrabImageLeft,&igb);
    ros::Subscriber sub_img_right = n.subscribe("/camera/right/image_raw", 100, &ImageGrabber::GrabImageRight,&igb);
    // start a thread that synchronizes the images with the IMU data
    std::thread sync_thread(&ImageGrabber::SyncWithImu,&igb);
    ros::spin();
    return 0;
}
// member callback: keep only the most recent image message in the buffer
void ImageGrabber::GrabImageLeft(const sensor_msgs::ImageConstPtr &img_msg)
{
    mBufMutexLeft.lock();
    if (!imgLeftBuf.empty())
        imgLeftBuf.pop();
    imgLeftBuf.push(img_msg);
    mBufMutexLeft.unlock();
}

void ImageGrabber::GrabImageRight(const sensor_msgs::ImageConstPtr &img_msg)
{
    mBufMutexRight.lock();
    if (!imgRightBuf.empty())
        imgRightBuf.pop();
    imgRightBuf.push(img_msg);
    mBufMutexRight.unlock();
}
// IMU callback: push every incoming IMU message into the buffer
void ImuGrabber::GrabImu(const sensor_msgs::ImuConstPtr &imu_msg)
{
    mBufMutex.lock();
    imuBuf.push(imu_msg);
    mBufMutex.unlock();
    return;
}
cv::Mat ImageGrabber::GetImage(const sensor_msgs::ImageConstPtr &img_msg)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr cv_ptr;
    try
    {
        cv_ptr = cv_bridge::toCvShare(img_msg, sensor_msgs::image_encodings::MONO8);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
    }
    // image.type()==0 means the element type is 8-bit unsigned; the mapping is:
    //#define CV_8U 0
    //#define CV_8S 1
    //#define CV_16U 2
    //#define CV_16S 3
    //#define CV_32S 4
    //#define CV_32F 5
    //#define CV_64F 6
    //#define CV_USRTYPE1 7
    if(cv_ptr->image.type()==0)
    {
        return cv_ptr->image.clone();
    }
    else
    {
        std::cout << "Error type" << std::endl;
        return cv_ptr->image.clone();
    }
}
The initUndistortRectifyMap function
Description:
This function computes the undistortion and rectification transformation maps.
Prototype:
C++:
void initUndistortRectifyMap( InputArray cameraMatrix, InputArray distCoeffs,
                              InputArray R, InputArray newCameraMatrix, Size size, int m1type, OutputArray map1, OutputArray map2 );
Python:
map1, map2 = cv.initUndistortRectifyMap(cameraMatrix, distCoeffs, R, newCameraMatrix, size, m1type[, map1[, map2]])
Parameters:
cameraMatrix - input camera intrinsic matrix (3x3)
distCoeffs - input distortion coefficients (e.g. a 5x1 vector)
R - rectification rotation (e.g. R1/R2 from stereoRectify), bringing the original camera coordinate system to the rectified one
newCameraMatrix - 3x3 camera matrix of the rectified image
size - size of the undistorted (rectified) image
m1type - type of map1, CV_32FC1 or CV_16SC2
map1 - output map of x coordinates
map2 - output map of y coordinates
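A minimal, self-contained sketch of calling initUndistortRectifyMap is shown below; the intrinsics, distortion coefficients and projection matrix are placeholder values, not taken from any real calibration.

#include <opencv2/opencv.hpp>

int main()
{
    // placeholder calibration values for a single (left) camera
    cv::Mat K = (cv::Mat_<double>(3,3) << 458.0, 0.0, 367.0,
                                          0.0, 457.0, 248.0,
                                          0.0, 0.0, 1.0);
    cv::Mat D = (cv::Mat_<double>(1,5) << -0.28, 0.07, 0.0002, 0.00002, 0.0);
    cv::Mat R = cv::Mat::eye(3, 3, CV_64F);   // rectification rotation (identity here)
    cv::Mat P = (cv::Mat_<double>(3,4) << 435.0, 0.0, 367.0, 0.0,
                                          0.0, 435.0, 252.0, 0.0,
                                          0.0, 0.0, 1.0, 0.0);

    cv::Mat map1, map2;
    cv::initUndistortRectifyMap(K, D, R, P.rowRange(0,3).colRange(0,3),
                                cv::Size(752, 480), CV_32F, map1, map2);
    // map1/map2 now tell, for each rectified pixel, where to sample in the raw image
    return 0;
}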
The remap() function
Description: applies a generic geometric transformation, i.e. for every pixel of the destination image it samples the source image at the location given by the maps.
Prototype:
C++:
void remap(InputArray src, OutputArray dst, InputArray map1, InputArray map2,
           int interpolation, int borderMode = BORDER_CONSTANT,
           const Scalar& borderValue = Scalar())
Python:
dst = cv.remap(src, map1, map2, interpolation[, dst[, borderMode[, borderValue]]])
Parameters:
src - input (source) image
dst (C++) - output (destination) image; same size and type as the source
map1 - either the first map holding (x,y) point pairs, or the map of x coordinates only, of type CV_16SC2, CV_32FC1 or CV_32FC2
map2 - either empty (when map1 already holds the (x,y) pairs) or the map of y coordinates, of type CV_16UC1 or CV_32FC1
interpolation - interpolation method, one of:
(1) INTER_NEAREST - nearest-neighbour interpolation
(2) INTER_LINEAR - bilinear interpolation (the default)
(3) INTER_CUBIC - bicubic interpolation
(4) INTER_LANCZOS4 - Lanczos interpolation over an 8x8 neighbourhood
borderMode - border mode, BORDER_CONSTANT by default
borderValue - border colour, black Scalar() by default
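Below is a minimal sketch (placeholder file names, identity maps) of applying precomputed maps with cv::remap; in the node, the maps M1l/M2l and M1r/M2r produced by initUndistortRectifyMap play this role.

#include <opencv2/opencv.hpp>

int main()
{
    // hypothetical input file; any grayscale image will do
    cv::Mat raw = cv::imread("left_raw.png", cv::IMREAD_GRAYSCALE);
    if (raw.empty()) return -1;

    // identity maps, used here only to keep the example self-contained;
    // in the node these would be the maps computed by initUndistortRectifyMap
    cv::Mat map1(raw.size(), CV_32FC1), map2(raw.size(), CV_32FC1);
    for (int y = 0; y < raw.rows; ++y)
        for (int x = 0; x < raw.cols; ++x)
        {
            map1.at<float>(y, x) = static_cast<float>(x); // x coordinate to sample
            map2.at<float>(y, x) = static_cast<float>(y); // y coordinate to sample
        }

    cv::Mat rectified;
    cv::remap(raw, rectified, map1, map2, cv::INTER_LINEAR);
    cv::imwrite("left_rect.png", rectified);
    return 0;
}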
IMU synchronization and the entry point to tracking
void ImageGrabber::SyncWithImu()
{
    const double maxTimeDiff = 0.01;// maximum allowed time difference between the synchronized left and right images
    while(1)
    {
        cv::Mat imLeft, imRight;
        double tImLeft = 0, tImRight = 0;
        // only proceed when the left image, right image and IMU buffers are all non-empty
        if (!imgLeftBuf.empty()&&!imgRightBuf.empty()&&!mpImuGb->imuBuf.empty())
        {
            tImLeft = imgLeftBuf.front()->header.stamp.toSec();// left image timestamp
            tImRight = imgRightBuf.front()->header.stamp.toSec();// right image timestamp

            this->mBufMutexRight.lock();
            // if the left image is newer than the right one by more than 0.01 s, the pair is out of sync;
            // while imgRightBuf holds more than one frame, drop the oldest one until the time difference is
            // small enough or only one frame remains
            // (in practice each image buffer only ever holds one frame, because the image callbacks keep only the latest message)
            while((tImLeft-tImRight)>maxTimeDiff && imgRightBuf.size()>1)
            {
                imgRightBuf.pop();
                tImRight = imgRightBuf.front()->header.stamp.toSec();
            }
            this->mBufMutexRight.unlock();

            this->mBufMutexLeft.lock();
            while((tImRight-tImLeft)>maxTimeDiff && imgLeftBuf.size()>1)
            {
                imgLeftBuf.pop();
                tImLeft = imgLeftBuf.front()->header.stamp.toSec();
            }
            this->mBufMutexLeft.unlock();

            // at this point each image buffer holds a single frame; if the time difference is still too large, skip this pair
            if((tImLeft-tImRight)>maxTimeDiff || (tImRight-tImLeft)>maxTimeDiff)
            {
                continue;
            }
            // if the left image is newer than the latest IMU measurement, the IMU data is not ready yet; wait for it
            if(tImLeft>mpImuGb->imuBuf.back()->header.stamp.toSec())
                continue;

            this->mBufMutexLeft.lock();
            imLeft = GetImage(imgLeftBuf.front());// convert the ROS image message to an OpenCV cv::Mat
            imgLeftBuf.pop();
            this->mBufMutexLeft.unlock();

            this->mBufMutexRight.lock();
            imRight = GetImage(imgRightBuf.front());
            imgRightBuf.pop();
            this->mBufMutexRight.unlock();

            vector<ORB_SLAM3::IMU::Point> vImuMeas;
            mpImuGb->mBufMutex.lock();
            if(!mpImuGb->imuBuf.empty())
            {
                // Load imu measurements from buffer
                vImuMeas.clear();
                // collect all IMU measurements up to (and including) the current left image timestamp
                while(!mpImuGb->imuBuf.empty() && mpImuGb->imuBuf.front()->header.stamp.toSec()<=tImLeft)
                {
                    double t = mpImuGb->imuBuf.front()->header.stamp.toSec();
                    cv::Point3f acc(mpImuGb->imuBuf.front()->linear_acceleration.x, mpImuGb->imuBuf.front()->linear_acceleration.y, mpImuGb->imuBuf.front()->linear_acceleration.z);
                    cv::Point3f gyr(mpImuGb->imuBuf.front()->angular_velocity.x, mpImuGb->imuBuf.front()->angular_velocity.y, mpImuGb->imuBuf.front()->angular_velocity.z);
                    vImuMeas.push_back(ORB_SLAM3::IMU::Point(acc,gyr,t));
                    mpImuGb->imuBuf.pop();
                }
            }
            mpImuGb->mBufMutex.unlock();

            // histogram equalization of both images; mbClahe is set in the ImageGrabber constructor
            if(mbClahe)
            {
                mClahe->apply(imLeft,imLeft);
                mClahe->apply(imRight,imRight);
            }
            // apply the precomputed maps to rectify the images; see https://blog.csdn.net/sss_369/article/details/52983123
            if(do_rectify)
            {
                cv::remap(imLeft,imLeft,M1l,M2l,cv::INTER_LINEAR);
                cv::remap(imRight,imRight,M1r,M2r,cv::INTER_LINEAR);
            }
            // start tracking
            mpSLAM->TrackStereo(imLeft,imRight,tImLeft,vImuMeas);

            std::chrono::milliseconds tSleep(1);
            std::this_thread::sleep_for(tSleep);
        }
    }
}