F-LOAM中的点线,点面残差手动推导
F-LOAM中使用ceres进行优化,残差通过evaluate函数手动推导。现学习一下:
点线残差:
代码:
// Analytic point-to-line (edge) residual for F-LOAM scan matching.
// parameters[0] stores the pose as [qx, qy, qz, qw, tx, ty, tz]:
// a quaternion (Eigen storage order) followed by a translation.
// residuals[0] = distance from the transformed point to the line through
// last_point_a and last_point_b. Returns true (Ceres convention).
bool EdgeAnalyticCostFunction::Evaluate(double const *const *parameters, double *residuals, double **jacobians) const
{
    // --- residual ---
    Eigen::Map<const Eigen::Quaterniond> q_last_curr(parameters[0]);
    Eigen::Map<const Eigen::Vector3d> t_last_curr(parameters[0] + 4);
    // Transform the current-frame point into the last frame.
    Eigen::Vector3d lp = q_last_curr * curr_point + t_last_curr;
    // nu = (p - a) x (p - b); |nu| is twice the area of triangle (p, a, b),
    // so dividing by the base length |a - b| gives the point-to-line distance.
    Eigen::Vector3d nu = (lp - last_point_a).cross(lp - last_point_b);
    Eigen::Vector3d de = last_point_a - last_point_b;
    double de_norm = de.norm();
    residuals[0] = nu.norm() / de_norm;

    // --- Jacobian ---
    if (jacobians != nullptr)
    {
        if (jacobians[0] != nullptr)
        {
            // d(lp)/d(se3) under the left perturbation model:
            // rotation part = -lp^ (skew-symmetric), translation part = I.
            // NOTE(review): whether I goes in the first or last 3 columns depends
            // on the [rot | trans] ordering of the local parameterization's delta —
            // confirm against the project's PoseSE3Parameterization.
            Eigen::Matrix3d skew_lp = skew(lp);
            Eigen::Matrix<double, 3, 6> dp_by_se3;
            dp_by_se3.block<3,3>(0,0) = -skew_lp;
            (dp_by_se3.block<3,3>(0, 3)).setIdentity();
            Eigen::Map<Eigen::Matrix<double, 1, 7, Eigen::RowMajor> > J_se3(jacobians[0]);
            J_se3.setZero();
            // Chain rule:
            //   d(residual)/d(se3) = (nu^T / |nu|) * d(nu)/d(lp) * d(lp)/d(se3) / |a - b|,
            // with d(nu)/d(lp) = -(a - b)^ = -skew_de (hence the leading minus sign).
            Eigen::Matrix3d skew_de = skew(de);
            J_se3.block<1,6>(0,0) = - nu.transpose() / nu.norm() * skew_de * dp_by_se3 / de_norm;
        }
    }
    return true;
}
雅各比推导:
残差计算:
$$d_{\epsilon}=\frac{\|(x^w-a)\times(x^w-b)\|}{\|a-b\|}=\frac{\|\vec{n}\|}{\|a-b\|}$$
$$x^w=R\cdot x^l+t=Tx^l$$
其中 $R$ 以四元数形式存储。
$$\frac{\partial x^w}{\partial\xi}=\begin{bmatrix}I & -{x^w}^{\wedge}\end{bmatrix}\quad(note1)$$
雅各比计算:
$$\frac{\partial d_{\epsilon}}{\partial\xi}=\frac{\partial d_{\epsilon}}{\partial\vec{n}}\cdot\frac{\partial\vec{n}}{\partial x^w}\cdot\frac{\partial x^w}{\partial\xi}$$
其中:
$$\frac{\partial d_{\epsilon}}{\partial\vec{n}}=\frac{\vec{n}^T}{\|a-b\|\cdot\|\vec{n}\|}\quad(note2)$$
$$\frac{\partial\vec{n}}{\partial x^w}=-(a-b)^{\wedge}\quad(note3)$$
点面残差:
代码:
// Analytic point-to-plane (surface) residual for F-LOAM scan matching.
// parameters[0] stores the pose as [qx, qy, qz, qw, tx, ty, tz].
// residuals[0] = signed distance of the transformed point to the plane
// n·x + d = 0, where plane_unit_norm is n and negative_OA_dot_norm is d.
// Returns true (Ceres convention).
bool SurfNormAnalyticCostFunction::Evaluate(double const *const *parameters, double *residuals, double **jacobians) const
{
    Eigen::Map<const Eigen::Quaterniond> q_w_curr(parameters[0]);
    Eigen::Map<const Eigen::Vector3d> t_w_curr(parameters[0] + 4);
    // Transform the current point into the world frame.
    Eigen::Vector3d point_w = q_w_curr * curr_point + t_w_curr;
    // Signed point-to-plane distance.
    residuals[0] = plane_unit_norm.dot(point_w) + negative_OA_dot_norm;

    if (jacobians != nullptr)
    {
        if (jacobians[0] != nullptr)
        {
            // d(point_w)/d(se3) under the left perturbation model:
            // rotation part = -point_w^ (skew-symmetric), translation part = I.
            Eigen::Matrix3d skew_point_w = skew(point_w);
            Eigen::Matrix<double, 3, 6> dp_by_se3;
            dp_by_se3.block<3,3>(0,0) = -skew_point_w;
            (dp_by_se3.block<3,3>(0, 3)).setIdentity();
            Eigen::Map<Eigen::Matrix<double, 1, 7, Eigen::RowMajor> > J_se3(jacobians[0]);
            J_se3.setZero();
            // Chain rule: d(residual)/d(se3) = n^T * d(point_w)/d(se3).
            J_se3.block<1,6>(0,0) = plane_unit_norm.transpose() * dp_by_se3;
        }
    }
    return true;
}
雅各比推导:
平面方程为:
$$AX+BY+CZ+1=0$$
令 $\vec{n}=\{A,B,C\}$,$norm=1/\sqrt{A^2+B^2+C^2}$
残差:
$$d_s=\vec{n}\cdot x^w+norm$$
求导:
$$\frac{\partial d_s}{\partial\xi}=\frac{\partial d_s}{\partial x^w}\cdot\frac{\partial x^w}{\partial\xi}=\vec{n}^T\cdot\frac{\partial x^w}{\partial\xi}$$
Note
先验知识:
note1:
SE(3) 的左扰动模型(《视觉SLAM十四讲》4.3.5)。
note2:
$$\frac{\partial\|n\|}{\partial n}=\frac{n^T}{\|n\|}$$
例如:
$$n=\begin{bmatrix}a&b&c\end{bmatrix}^T,\quad \|n\|=\sqrt{a^2+b^2+c^2}$$
$$\frac{\partial\|n\|}{\partial n}=\begin{bmatrix}\frac{\partial\|n\|}{\partial a}&\frac{\partial\|n\|}{\partial b}&\frac{\partial\|n\|}{\partial c}\end{bmatrix}=\frac{1}{\|n\|}\begin{bmatrix}a&b&c\end{bmatrix}=\frac{n^T}{\|n\|}$$
note3:
$$\frac{\partial\,(x-a)\times(x-b)}{\partial x}=-(a-b)^{\wedge}$$
推导:
$$(x-a)\times(x-b)=\begin{bmatrix}(x_2-a_2)(x_3-b_3)-(x_3-a_3)(x_2-b_2)\\(x_3-a_3)(x_1-b_1)-(x_1-a_1)(x_3-b_3)\\(x_1-a_1)(x_2-b_2)-(x_1-b_1)(x_2-a_2)\end{bmatrix}$$
$$\frac{\partial\,(x-a)\times(x-b)}{\partial x}=\begin{bmatrix}\frac{\partial\,(x-a)\times(x-b)}{\partial x_1}&\frac{\partial\,(x-a)\times(x-b)}{\partial x_2}&\frac{\partial\,(x-a)\times(x-b)}{\partial x_3}\end{bmatrix}=\begin{bmatrix}0&a_3-b_3&-a_2+b_2\\-a_3+b_3&0&a_1-b_1\\a_2-b_2&-a_1+b_1&0\end{bmatrix}=-(a-b)^{\wedge}$$