《视觉SLAM十四讲》(14 Lectures on Visual SLAM): modifying slambook/ch7/pose_estimation_3d3d to use only Eigen, without nonlinear optimization

How to run the original example:
siat@hzt:~/Documents/slambook/ch7$ ./build/pose_estimation_3d3d 1.png 2.png 1_depth.png 2_depth.png

Because my particular problem is estimating the camera pose from a 3-point-to-3-point ICP match, the closed-form SVD solution is enough and nonlinear optimization is not needed (the nonlinear part depends on the g2o library, which would be yet another fairly large download that I preferred not to use). I removed the nonlinear-optimization code from this file myself and successfully compiled and ran the SVD version (this assumes OpenCV and the header-only Eigen library are installed). Create a new file pose_estimation_3_3d3d.cpp with the following content (a stripped-down, Eigen-only sketch of just the SVD step is given right after the full listing):
#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <Eigen/SVD>
#include <g2o/core/base_vertex.h>
#include <g2o/core/base_unary_edge.h>
#include <g2o/core/block_solver.h>
#include <g2o/core/optimization_algorithm_gauss_newton.h>
#include <g2o/solvers/eigen/linear_solver_eigen.h>
#include <g2o/types/sba/types_six_dof_expmap.h>
#include <chrono>

using namespace std;
using namespace cv;

void pose_estimation_3d3d (
const vector<Point3f>& pts1,
const vector<Point3f>& pts2,
Mat& R, Mat& t
);

void bundleAdjustment(
const vector<Point3f>& points_3d,
const vector<Point3f>& points_2d,
Mat& R, Mat& t
);

// g2o edge
class EdgeProjectXYZRGBDPoseOnly : public g2o::BaseUnaryEdge<3, Eigen::Vector3d, g2o::VertexSE3Expmap>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW;
EdgeProjectXYZRGBDPoseOnly( const Eigen::Vector3d& point ) : _point(point) {}

virtual void computeError()
{
    const g2o::VertexSE3Expmap* pose = static_cast<const g2o::VertexSE3Expmap*> ( _vertices[0] );
    // measurement is p, point is p'
    _error = _measurement - pose->estimate().map( _point );
}

virtual void linearizeOplus()
{
    g2o::VertexSE3Expmap* pose = static_cast<g2o::VertexSE3Expmap *>(_vertices[0]);
    g2o::SE3Quat T(pose->estimate());
    Eigen::Vector3d xyz_trans = T.map(_point);
    double x = xyz_trans[0];
    double y = xyz_trans[1];
    double z = xyz_trans[2];

    // Columns 0-2: derivative of the error w.r.t. the rotational part of the
    // se(3) increment (a skew-symmetric matrix built from the transformed point);
    // columns 3-5: derivative w.r.t. the translational part (-Identity).
    _jacobianOplusXi(0,0) = 0;
    _jacobianOplusXi(0,1) = -z;
    _jacobianOplusXi(0,2) = y;
    _jacobianOplusXi(0,3) = -1;
    _jacobianOplusXi(0,4) = 0;
    _jacobianOplusXi(0,5) = 0;

    _jacobianOplusXi(1,0) = z;
    _jacobianOplusXi(1,1) = 0;
    _jacobianOplusXi(1,2) = -x;
    _jacobianOplusXi(1,3) = 0;
    _jacobianOplusXi(1,4) = -1;
    _jacobianOplusXi(1,5) = 0;

    _jacobianOplusXi(2,0) = -y;
    _jacobianOplusXi(2,1) = x;
    _jacobianOplusXi(2,2) = 0;
    _jacobianOplusXi(2,3) = 0;
    _jacobianOplusXi(2,4) = 0;
    _jacobianOplusXi(2,5) = -1;
}

bool read ( istream& in ) { return false; }
bool write ( ostream& out ) const { return false; }

protected:
Eigen::Vector3d _point;
};

int main ( int argc, char** argv )
{

vector<Point3f> pts1, pts2;

float x1=43,y1=7,z1=404;
float x2=-38,y2=6,z2=384;
float x3=-52,y3=2,z3=382;
float xo1=78,yo1=6,zo1=416;
float xo2=6,yo2=7,zo2=372;
float xo3=-7,yo3=2,zo3=367;
pts1.push_back ( Point3f (x1, y1, z1));
pts2.push_back ( Point3f (xo1, yo1,zo1));
pts1.push_back ( Point3f (x2, y2, z2));
pts2.push_back ( Point3f (xo2, yo2,zo2));
pts1.push_back ( Point3f (x3, y3, z3));
pts2.push_back ( Point3f (xo3, yo3,zo3));
cout<<"3d-3d pairs: "<<pts1.size() <<endl;
Mat R, t;
pose_estimation_3d3d ( pts1, pts2, R, t );
cout<<"ICP via SVD results: "<<endl;
cout<<"R = "<<R<<endl;
cout<<"t = "<<t<<endl;
cout<<"R_inv = "<<R.t() <<endl;
cout<<"t_inv = "<<-R.t() *t<<endl;

cout<<"calling bundle adjustment"<<endl;

bundleAdjustment( pts1, pts2, R, t );

// verify p1 = R*p2 + t
for ( int i=0; i<3; i++ )
{
    cout<<"p1 = "<<pts1[i]<<endl;
    cout<<"p2 = "<<pts2[i]<<endl;
    cout<<"(R*p2+t) = "<<
        R * (Mat_<double>(3,1)<<pts2[i].x, pts2[i].y, pts2[i].z) + t
        <<endl;
    cout<<endl;
}

}

// Closed-form ICP via SVD: de-mean both point sets around their centroids p1, p2,
// accumulate W = sum_i q1_i * q2_i^T, take the SVD W = U * S * V^T, then
// R = U * V^T and t = p1 - R * p2.
void pose_estimation_3d3d (
const vector<Point3f>& pts1,
const vector<Point3f>& pts2,
Mat& R, Mat& t
)
{
Point3f p1, p2; // center of mass
int N = pts1.size();
for ( int i=0; i<N; i++ )
{
p1 += pts1[i];
p2 += pts2[i];
}
p1 = Point3f( Vec3f(p1) / N);
p2 = Point3f( Vec3f(p2) / N);
vector<Point3f> q1 ( N ), q2 ( N ); // remove the center
for ( int i=0; i<N; i++ )
{
q1[i] = pts1[i] - p1;
q2[i] = pts2[i] - p2;
}

// compute q1*q2^T
Eigen::Matrix3d W = Eigen::Matrix3d::Zero();
for ( int i=0; i<N; i++ )
{
    W += Eigen::Vector3d ( q1[i].x, q1[i].y, q1[i].z ) * Eigen::Vector3d ( q2[i].x, q2[i].y, q2[i].z ).transpose();
}
cout<<"W="<<W<<endl;

// SVD on W
Eigen::JacobiSVD<Eigen::Matrix3d> svd ( W, Eigen::ComputeFullU|Eigen::ComputeFullV );
Eigen::Matrix3d U = svd.matrixU();
Eigen::Matrix3d V = svd.matrixV();

// If det(U) * det(V) < 0, U * V^T would be a reflection (determinant -1)
// rather than a rotation, so flip the sign of U's last column.
if (U.determinant() * V.determinant() < 0)
{
    for (int x = 0; x < 3; ++x)
    {
        U(x, 2) *= -1;
    }
}

cout<<"U="<<U<<endl;
cout<<"V="<<V<<endl;

Eigen::Matrix3d R_ = U* ( V.transpose() );
Eigen::Vector3d t_ = Eigen::Vector3d ( p1.x, p1.y, p1.z ) - R_ * Eigen::Vector3d ( p2.x, p2.y, p2.z );

// convert to cv::Mat
R = ( Mat_<double> ( 3,3 ) <<
      R_ ( 0,0 ), R_ ( 0,1 ), R_ ( 0,2 ),
      R_ ( 1,0 ), R_ ( 1,1 ), R_ ( 1,2 ),
      R_ ( 2,0 ), R_ ( 2,1 ), R_ ( 2,2 )
    );
t = ( Mat_<double> ( 3,1 ) << t_ ( 0,0 ), t_ ( 1,0 ), t_ ( 2,0 ) );

}

void bundleAdjustment (
const vector< Point3f >& pts1,
const vector< Point3f >& pts2,
Mat& R, Mat& t )
{
// initialize g2o
typedef g2o::BlockSolver< g2o::BlockSolverTraits<6,3> > Block; // pose dimension 6, landmark dimension 3
Block::LinearSolverType* linearSolver = new g2o::LinearSolverEigen<Block::PoseMatrixType>(); // linear equation solver
Block* solver_ptr = new Block( linearSolver ); // block solver
g2o::OptimizationAlgorithmGaussNewton* solver = new g2o::OptimizationAlgorithmGaussNewton( solver_ptr );
g2o::SparseOptimizer optimizer;
optimizer.setAlgorithm( solver );

// vertex
g2o::VertexSE3Expmap* pose = new g2o::VertexSE3Expmap(); // camera pose
pose->setId(0);
pose->setEstimate( g2o::SE3Quat(
    Eigen::Matrix3d::Identity(),
    Eigen::Vector3d( 0,0,0 )
) );
optimizer.addVertex( pose );

// edges
int index = 1;
vector<EdgeProjectXYZRGBDPoseOnly*> edges;
for ( size_t i=0; i<pts1.size(); i++ )
{
    EdgeProjectXYZRGBDPoseOnly* edge = new EdgeProjectXYZRGBDPoseOnly(
        Eigen::Vector3d(pts2[i].x, pts2[i].y, pts2[i].z) );
    edge->setId( index );
    edge->setVertex( 0, dynamic_cast<g2o::VertexSE3Expmap*> (pose) );
    edge->setMeasurement( Eigen::Vector3d(
        pts1[i].x, pts1[i].y, pts1[i].z) );
    edge->setInformation( Eigen::Matrix3d::Identity()*1e4 );
    optimizer.addEdge(edge);
    index++;
    edges.push_back(edge);
}

chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
optimizer.setVerbose( true );
optimizer.initializeOptimization();
optimizer.optimize(10);
chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2-t1);
cout<<"optimization costs time: "<<time_used.count()<<" seconds."<<endl;

cout<<endl<<"after optimization:"<<endl;
cout<<"T="<<endl<<Eigen::Isometry3d( pose->estimate() ).matrix()<<endl;

}
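For comparison, here is the Eigen-only sketch mentioned above. The SVD step by itself needs neither OpenCV nor g2o, and since it already minimizes the same point-to-point error that the g2o edges encode, the nonlinear refinement is expected to give essentially the same pose for these three pairs. This is only an illustration, not part of the slambook project: the file name icp_svd_eigen.cpp and the use of std::vector<Eigen::Vector3d> in place of cv::Point3f are my own choices.

// icp_svd_eigen.cpp -- minimal closed-form ICP using only Eigen (illustrative sketch).
#include <iostream>
#include <vector>
#include <Eigen/Core>
#include <Eigen/SVD>

int main()
{
    // The same three 3D-3D point pairs as in the full example above.
    std::vector<Eigen::Vector3d> pts1 = { { 43, 7, 404 }, { -38, 6, 384 }, { -52, 2, 382 } };
    std::vector<Eigen::Vector3d> pts2 = { { 78, 6, 416 }, {   6, 7, 372 }, {  -7, 2, 367 } };
    const int N = static_cast<int>( pts1.size() );

    // Centroids of the two point sets.
    Eigen::Vector3d p1 = Eigen::Vector3d::Zero(), p2 = Eigen::Vector3d::Zero();
    for ( int i = 0; i < N; i++ ) { p1 += pts1[i]; p2 += pts2[i]; }
    p1 /= N;
    p2 /= N;

    // W = sum_i q1_i * q2_i^T over the de-meaned points.
    Eigen::Matrix3d W = Eigen::Matrix3d::Zero();
    for ( int i = 0; i < N; i++ )
        W += ( pts1[i] - p1 ) * ( pts2[i] - p2 ).transpose();

    // SVD of W; R = U * V^T, flipping a sign if that product would be a reflection.
    Eigen::JacobiSVD<Eigen::Matrix3d> svd( W, Eigen::ComputeFullU | Eigen::ComputeFullV );
    Eigen::Matrix3d U = svd.matrixU();
    Eigen::Matrix3d V = svd.matrixV();
    if ( U.determinant() * V.determinant() < 0 )
        U.col(2) *= -1;

    Eigen::Matrix3d R = U * V.transpose();
    Eigen::Vector3d t = p1 - R * p2;

    std::cout << "R =\n" << R << "\nt = " << t.transpose() << std::endl;

    // Sanity check: each pts1[i] should be close to R * pts2[i] + t.
    for ( int i = 0; i < N; i++ )
        std::cout << "residual " << i << ": "
                  << ( pts1[i] - ( R * pts2[i] + t ) ).transpose() << std::endl;
    return 0;
}

Assuming Eigen lives in the usual Ubuntu location, this sketch can be built with nothing more than, e.g., g++ -std=c++11 -I /usr/include/eigen3 icp_svd_eigen.cpp -o icp_svd_eigen.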
Back to the main file: append the following build rule to the end of the CMakeLists.txt in the same directory (a simplified rule for a g2o-free build is sketched at the end of this post):
add_executable( pose_estimation_3_3d3d pose_estimation_3_3d3d.cpp )
target_link_libraries( pose_estimation_3_3d3d
${OpenCV_LIBS}
g2o_core g2o_stuff g2o_types_sba g2o_csparse_extension
${CSPARSE_LIBRARY} )
Then enter the build directory and compile:
siat@hzt:~/Documents/slambook/ch7$ cd build
siat@hzt:~/Documents/slambook/ch7/build$ cmake ..    (note the space before "..")
siat@hzt:~/Documents/slambook/ch7/build$ make
After a few minutes the build finishes and produces the executable pose_estimation_3_3d3d, which can then be run:
siat@hzt:~/Documents/slambook/ch7/build$ ./pose_estimation_3_3d3d
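As an aside: if the g2o-dependent code (the EdgeProjectXYZRGBDPoseOnly class and bundleAdjustment) is actually deleted as described at the top of this post, then g2o and CSPARSE no longer need to be linked at all. Assuming the project's CMakeLists.txt already finds OpenCV and adds the Eigen include directory (Eigen is header-only, so there is nothing to link for it), a rule along these lines should be enough:

add_executable( pose_estimation_3_3d3d pose_estimation_3_3d3d.cpp )
target_link_libraries( pose_estimation_3_3d3d ${OpenCV_LIBS} )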