# Manual lidar-to-lidar calibration (手动标定 lidar 到 lidar)

import argparse
import json
import os
import math

import numpy as np
import yaml
from transforms3d.quaternions import quat2mat
from transforms3d.affines import compose
from tqdm import tqdm
###############################
def read_pcd(pcd_path):
    """Read an ASCII .pcd file whose data lines hold x, y, z, intensity.

    Args:
        pcd_path: path to a PCD v0.7 ASCII file.

    Returns:
        List of ``(point, intensity)`` pairs, where ``point`` is a
        homogeneous ``np.array([x, y, z, 1.0])``.

    Raises:
        ValueError: if the header has no POINTS entry, or a data line does
            not contain exactly four floats.
    """
    lines = []
    num_points = None

    with open(pcd_path, 'r') as f:
        for line in f:
            lines.append(line.strip())
            if line.startswith('POINTS'):
                num_points = int(line.split()[-1])

    # Explicit error instead of `assert`: validation must survive `python -O`.
    if num_points is None:
        raise ValueError('No POINTS entry in PCD header: {}'.format(pcd_path))

    points = []
    # The data section is the last `num_points` lines of the file.
    for line in lines[-num_points:]:
        x, y, z, intensity = map(float, line.split())
        points.append((np.array([x, y, z, 1.0]), intensity))

    return points

###############################
# ASCII PCD v0.7 header template; both placeholders receive the point count.
HEADER = '''\
# .PCD v0.7 - Point Cloud Data file format
VERSION 0.7
FIELDS x y z intensity
SIZE 4 4 4 4
TYPE F F F F
COUNT 1 1 1 1
WIDTH {}
HEIGHT 1
VIEWPOINT 0 0 0 1 0 0 0
POINTS {}
DATA ascii
'''

def write_pcd(points, intensities, pcd_path):
    """Write an ASCII .pcd file from (x, y, z) triples plus intensities.

    The number of rows written is driven by ``len(intensities)``; each row
    pairs ``points[k]`` with ``intensities[k]``.
    """
    total = len(intensities)
    rows = []
    for idx in range(total):
        px, py, pz = points[idx]
        rows.append('{:.6f} {:.6f} {:.6f} {}'.format(px, py, pz,
                                                     intensities[idx]))
    # Assemble the full payload first, then write it in one shot.
    payload = HEADER.format(total, total) + '\n'.join(rows)
    with open(pcd_path, 'w') as f:
        f.write(payload)

###############################
def load_yaml_file(file_path):
    """Parse a YAML file and return the resulting Python object."""
    # NOTE(review): FullLoader instantiates a wide set of Python-native tags;
    # acceptable for trusted calibration files, not for untrusted input.
    with open(file_path, 'r') as f:
        contents = f.read()
    return yaml.load(contents, Loader=yaml.FullLoader)

def load_yaml_rt(yaml_path):
    """Build a 4x4 homogeneous transform from an extrinsics YAML file.

    Expects the YAML to carry ``transform.rotation`` (quaternion w/x/y/z)
    and ``transform.translation`` (x/y/z) entries.
    """
    params = load_yaml_file(yaml_path)
    rot = params['transform']['rotation']
    trans = params['transform']['translation']
    rot_mat = quat2mat([rot['w'], rot['x'], rot['y'], rot['z']])
    offset = [trans['x'], trans['y'], trans['z']]
    return compose(offset, rot_mat, [1.0, 1.0, 1.0])

def eural_qua(x_t, y_t, z_t, r, p, y):
    """Compose a 4x4 homogeneous transform from a translation and Euler angles.

    Args:
        x_t, y_t, z_t: translation components.
        r, p, y: roll, pitch, yaw in degrees (rotation applied in X, Y, Z
            order — see the caller's note about the converter website).

    Returns:
        4x4 affine matrix combining the rotation and translation (unit scale).
    """
    sin_r = math.sin(math.radians(r / 2))
    sin_p = math.sin(math.radians(p / 2))
    sin_y = math.sin(math.radians(y / 2))

    cos_r = math.cos(math.radians(r / 2))
    cos_p = math.cos(math.radians(p / 2))
    cos_y = math.cos(math.radians(y / 2))

    # Quaternion components get their own names: the original code reused
    # the yaw parameter `y` for the quaternion y-component, shadowing it.
    qw = cos_r * cos_p * cos_y + sin_r * sin_p * sin_y
    qx = sin_r * cos_p * cos_y - cos_r * sin_p * sin_y
    qy = cos_r * sin_p * cos_y + sin_r * cos_p * sin_y
    qz = cos_r * cos_p * sin_y - sin_r * sin_p * cos_y

    # Print the hand-tuned result so it can be copied back into the YAML.
    print("Translation (x, y, z): ", x_t, y_t, z_t)
    print("Quaternion (w, x , y, z): ", qw, qx, qy, qz)
    translation = [x_t, y_t, z_t]
    rotation = quat2mat([qw, qx, qy, qz])
    return compose(translation, rotation, [1.0, 1.0, 1.0])


###############################
def main():
    """Merge slave clouds (transformed into the master frame) with master
    clouds and write a single .pcd for visual inspection of the calibration.

    Slave points are tagged intensity 0, master points intensity 1, so the
    two sensors can be told apart in a point-cloud viewer.
    """
    slave_dir = "R40"
    master_dir = 'L40'
    extrinsics_path = 'byd_pandar40R_pandar40L_extrinsics.yaml'
    result_path = 'result/check.pcd'

    pcd_slave_path_list = [os.path.join(slave_dir, name)
                           for name in os.listdir(slave_dir)
                           if name.endswith(".pcd")]
    pcd_master_path_list = [os.path.join(master_dir, name)
                            for name in os.listdir(master_dir)
                            if name.endswith(".pcd")]

    ###### manual calibration ######
    x_t = 0.103329
    y_t = -2.57224
    z_t = 0.018346
    # Rotation from source to target frame is applied in X, Y, Z order
    # (left-handed).  Initial Euler angles (degrees) can be obtained from
    # https://www.andre-gaschler.com/rotationconverter/ — set the
    # "Euler angles (degrees)" order to Z Y X there.
    x_eural = -0.4297041
    y_eural = 0.45
    z_eural = -0.5205246
    slave_to_mater_transform = eural_qua(x_t, y_t, z_t,
                                         x_eural, y_eural, z_eural)

    # To double-check: paste the printed translation/quaternion into the
    # YAML file and reload it here instead of the hand-tuned values.
    # slave_to_mater_transform = load_yaml_rt(extrinsics_path)

    all_points = []
    all_intensities = []

    # Transform every slave cloud into the master frame.
    for pcd in pcd_slave_path_list:
        points = read_pcd(pcd)
        np_points = np.stack([p for (p, _) in points]).transpose()  # 4 x n
        np_points = slave_to_mater_transform.dot(np_points)
        all_points.append(np_points[:-1, :])  # drop homogeneous row -> 3 x n
        all_intensities += [0] * len(points)  # 0 marks slave points

    # Append master clouds unchanged.
    for pcd in pcd_master_path_list:
        points = read_pcd(pcd)
        np_points = np.stack([p for (p, _) in points]).transpose()  # 4 x n
        all_points.append(np_points[:-1, :])
        all_intensities += [1] * len(points)  # 1 marks master points

    ###########
    all_points = np.concatenate(all_points, axis=-1)

    # Ensure the output directory exists before writing (the original
    # crashed when ./result was missing).
    out_dir = os.path.dirname(result_path)
    if out_dir:
        os.makedirs(out_dir, exist_ok=True)

    write_pcd(all_points.astype(np.float32).transpose().tolist(),
              all_intensities,
              result_path)


if __name__ == "__main__":
    main()


byd_pandar40R_pandar40L_extrinsics.yaml

header:
  stamp:
    secs: 1422601952
    nsecs: 288805456
  seq: 0
  frame_id: velodyne32
transform: 
  rotation:
    w: 0.9999750088664674
    x: -0.0037319614245119323
    y: 0.003943945933493147
    z: -0.004527625968218702
  translation:
    x: 0.103329
    y: -2.57224
    z: 0.018346
child_frame_id: pandar40R


#Translation (x, y, z):  0.103329 -2.57224 0.018346
#Quaternion (w, x , y, z):  0.9999750088664674 -0.0037319614245119323 0.003943945933493147 -0.004527625968218702

check.pcd
(image placeholder: screenshot of the merged check.pcd point cloud — the figure was lost in the export)

IMU和LiDAR是两种不同的传感器技术,通过标定可以增加它们之间的配准准确性,提高它们在定位、导航和感知方面的性能。 IMU(惯性测量单元)是基于陀螺仪和加速度计等组件构成的传感器系统,可以测量和记录物体的加速度、角速度和姿态等状态信息。LiDAR(激光雷达)则是通过发送激光束来测量物体距离和位置的传感器。 IMU和LiDAR之间的标定过程主要包括两个方面:外部标定和内部标定。 外部标定主要是通过手动或自动方法,将IMU和LiDAR物理安装在同一个坐标系中,使它们的位置、姿态和对齐得到准确的估计。常见的方式是使用高精度的测量工具(如测距仪)来测量它们之间的距离和角度,进而计算出其准确的相对位置关系。 内部标定主要是通过采集IMU和LiDAR输出的数据,使用数学方法来估计它们之间的转换矩阵,通过这个矩阵可以将两个传感器的数据在同一个坐标系下进行融合和配准。内部标定的过程通常需要在不同的位置和姿态下进行采集和计算,以获得更准确和鲁棒的标定结果。 IMU与LiDAR标定可以提高它们在多传感器融合中的配准精度,从而提高定位、导航和感知等应用中的性能。通过标定,我们可以更准确地得到物体的位置、运动状态和环境信息,为机器人、自动驾驶车辆等智能设备的应用提供更可靠的数据支持。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值