Multi-Sensor Fusion Series: Hands-On Post-Fusion for Perception

Overview: a post-fusion perception algorithm for LiDAR and camera built on the Kalman filter. The inputs are LiDAR detection results and camera detection results; the detection algorithms follow Apollo 6.0, i.e. PointPillars for LiDAR and YOLO for the camera.
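For orientation, here is a minimal sketch of the container types the code below passes around. The type names are taken from the code; the exact field lists are assumptions.

// Sketch only: type names from the code below; field lists are assumptions.
#include <memory>
#include <vector>

namespace kit { namespace perception { namespace fusion {

struct CameraObject;  // a single detection (x, y, velo_*, label, ... assumed)
struct LiDARObject;
using CameraObjectPtr = std::shared_ptr<CameraObject>;
using LiDARObjectPtr  = std::shared_ptr<LiDARObject>;

struct CameraObjectList { double time_ns; std::vector<CameraObjectPtr> objs; };
struct LiDARObjectList  { double time_ns; std::vector<LiDARObjectPtr> objs; };
using CameraObjectListPtr = std::shared_ptr<CameraObjectList>;
using LiDARObjectListPtr  = std::shared_ptr<LiDARObjectList>;

// one processing cycle's worth of detections handed to the fusion
struct Frame {
    CameraObjectListPtr camera_objs;  // null when there is no camera data this cycle
    LiDARObjectListPtr  lidar_objs;   // null when there is no lidar data this cycle
};

}}}  // namespace kit::perception::fusion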

fusion_node.cpp
Reads the detection results into a frame, runs the Kalman-filter-based fusion, and finally retrieves the fusion result.

int main(int argc, char** argv)
{
    // argv[1] config, argv[2] location, argv[3] camera, argv[4] lidar
    ros::init(argc, argv, "fusion_node");
    if (argc != 5)
    {
        std::cout << "Need 4 args!\n";
        return 0;
    }

    std::vector<std::tuple<double, proto_input::FileTag, std::string> >&& data_frame_seq = proto_input::mergeInputSequence({
        proto_input::buildSensorInputSequence(argv[2], "Location", proto_input::FileTag::LOCATION),
        proto_input::buildSensorInputSequence(argv[3], "Camera", proto_input::FileTag::CAMERA),
        proto_input::buildSensorInputSequence(argv[4], "Lidar", proto_input::FileTag::LIDAR),
    });

    ros::NodeHandle nh;
    ros::Publisher lidar_pub = nh.advertise<sensor_msgs::PointCloud>("lidar_det", 50);
    ros::Publisher res_pub = nh.advertise<sensor_msgs::PointCloud>("fusion_result", 50);
    ros::Rate r(2);

    ezcfg::Interpreter itp;
    kit::perception::fusion::FusionWrapper fusion{ argv[1] };
    for (auto it = data_frame_seq.begin(); it != data_frame_seq.end();)
    {
        itp.loadFile(std::get<2>(*it));
        proto_input::Location pose;
        itp.lex.matchID("header");
        itp.parse(pose.header);
        itp.lex.matchID("pose");
        itp.parse(pose.pose);
        ++it;

        std::shared_ptr<kit::perception::fusion::Frame> frame = std::make_shared<kit::perception::fusion::Frame>();
        //get camera detection results
        if (std::get<1>(*it) == proto_input::FileTag::CAMERA)
        {
            proto_input::CameraObject raw_co;
            kit::perception::fusion::CameraObjectListPtr co_list = std::make_shared<kit::perception::fusion::CameraObjectList>();
            itp.loadFile(std::get<2>(*it));
            while (itp.lex.getToken() == ezcfg::Token::ID && itp.lex.getTokenText() == "perception_obstacle")
            {
                itp.lex.next();
                itp.parse(raw_co);
                co_list->objs.push_back(makeCameraObjectPtr(raw_co, std::get<0>(*it)));
            }
            co_list->time_ns = std::get<0>(*it);
            frame->camera_objs = std::move(co_list);
            ++it;
        }
        //get lidar detection results
        if (std::get<1>(*it) == proto_input::FileTag::LIDAR)
        {
            proto_input::LidarObject raw_lo;
            kit::perception::fusion::LiDARObjectListPtr lo_list = std::make_shared<kit::perception::fusion::LiDARObjectList>();
            itp.loadFile(std::get<2>(*it));
            while (itp.lex.getToken() == ezcfg::Token::ID && itp.lex.getTokenText() == "perception_obstacle")
            {
                itp.lex.next();
                itp.parse(raw_lo);
                // shift the detections from global coordinates into the ego (pose-relative) frame
                raw_lo.anchor_point.x -= pose.pose.position.x;
                raw_lo.anchor_point.y -= pose.pose.position.y;
                raw_lo.anchor_point.z -= pose.pose.position.z;
                lo_list->objs.push_back(makeLiDARObjectPtr(raw_lo, std::get<0>(*it)));
            }
            lidar_pub.publish(transToPointCloud(*lo_list));
            lo_list->time_ns = std::get<0>(*it);
            frame->lidar_objs = std::move(lo_list);
            ++it;
        }

        // fusion start
        fusion.Update(frame);
        kit::perception::fusion::FusionObjectListPtr fusion_res = std::make_shared<kit::perception::fusion::FusionObjectList>();
        fusion.GetFusionResult(fusion_res);
        
        // result output
        std::cout << "********************************************************************\n";
        for (size_t i = 0; i < fusion_res->objs.size(); i++)
            std::cout << fusion_res->objs[i]->ToString() << std::endl;
        res_pub.publish(transToPointCloud(*fusion_res));
        std::cout << "********************************************************************\n";
        r.sleep();
    }

    ros::shutdown();
    return 0;
}

FusionWrapper::Update

bool FusionWrapper::Update(const FramePtr &frame) {

    fusion::FusionObjectListPtr global_obj_list =
        std::make_shared<fusion::FusionObjectList>();

    // fuse lidar measurement
    if (frame->lidar_objs && !frame->lidar_objs->objs.empty()) {
        std::cout << "updating with lidar objects..." << std::endl;
        tracker_->GetGlobalObjects(global_obj_list);
        if (!predictor_->Predict(global_obj_list, frame->lidar_objs->time_ns)) {
            std::cout << "predict error for lidar measurement." << std::endl;
            return false; 
        }
        // map of lidar obj idx and global obj idx
        std::map<size_t, int> local_global_map;
        if (!matcher_->Match(frame->lidar_objs, global_obj_list, local_global_map)) {
            std::cout << "match failed for lidar and global." << std::endl;
            return false;
        }
        // update global object with lidar measurements
        tracker_->Update(frame->lidar_objs, local_global_map);
    }

    // fuse camera measurement
    if (frame->camera_objs && !frame->camera_objs->objs.empty()) {
        std::cout << "updating with camera objects..." << std::endl;
        tracker_->GetGlobalObjects(global_obj_list);
        if (!predictor_->Predict(global_obj_list, frame->camera_objs->time_ns)) {
            std::cout << "predict error for camera measurement." << std::endl;
            return false; 
        }
        // map of camera obj idx and global obj idx
        std::map<size_t, int> local_global_map;
        if (!matcher_->Match(frame->camera_objs, global_obj_list, local_global_map)) {
            std::cout << "match failed for camera and global." << std::endl;
            return false;
        }
        tracker_->Update(frame->camera_objs, local_global_map);
    }

    return true;
}
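The matcher invoked above (matcher_->Match) is not listed in the post. For reference, here is a minimal nearest-neighbor sketch of that interface; the gating distance and everything inside the function body are assumptions, not the post's actual implementation:

// Hypothetical nearest-neighbor association: for each local (per-sensor) object,
// pick the closest predicted global object within a gate, or -1 for "no match".
template <typename LocalListPtr>
bool Match(const LocalListPtr &local_list,
           const FusionObjectListPtr &global_list,
           std::map<size_t, int> &local_global_map) {
    const double gate = 2.0;  // association gate in meters (assumed)
    for (size_t i = 0; i < local_list->objs.size(); ++i) {
        int best = -1;
        double best_d2 = gate * gate;
        for (size_t j = 0; j < global_list->objs.size(); ++j) {
            const double dx = local_list->objs[i]->x - global_list->objs[j]->x;
            const double dy = local_list->objs[i]->y - global_list->objs[j]->y;
            const double d2 = dx * dx + dy * dy;
            if (d2 < best_d2) {
                best_d2 = d2;
                best = static_cast<int>(j);
            }
        }
        local_global_map[i] = best;  // -1 makes the tracker spawn a new global object
    }
    return true;
}

Note that this greedy version can assign two local objects to the same global track; a production matcher would enforce a one-to-one assignment, e.g. with the Hungarian algorithm, similar to Apollo's gated Hungarian matcher.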

Tracker::Update

bool Tracker::Update(const LiDARObjectListPtr &lidar_obj_list,
        const std::map<size_t, int>& map) {
    for (auto m : map) {
        if (m.second >= 0) {
            Measurement lidar_mea;
            const auto &lidar_obj = lidar_obj_list->objs[m.first];
            const auto &global_obj = global_obj_list_->objs[m.second];
            lidar_mea.sensor_type = SensorType::LIDAR;
            Eigen::VectorXd meas(5);
            meas << lidar_obj->x,
                    lidar_obj->y,
                    lidar_obj->velo_x,
                    lidar_obj->velo_y,
                    lidar_obj->label;
            lidar_mea.time_ns = lidar_obj->time_ns;
            lidar_mea.meas = meas;
            // snapshot of the matched global object's current state
            State state;
            Eigen::VectorXd state_vec(5);
            state_vec << global_obj->x,
                    global_obj->y,
                    global_obj->velo_x,
                    global_obj->velo_y,
                    global_obj->label;
            state.time_ns = global_obj->time_ns;
            state.meas = state_vec;
            // update the global object with the lidar measurement via the Kalman filter
            motion_filters_[m.second]->UpdateMotion(state, lidar_mea);
            const Eigen::VectorXd updated = motion_filters_[m.second]->GetState();
            global_obj->x = updated[0];
            global_obj->y = updated[1];
            global_obj->z = lidar_obj->z;  // z is taken directly from the lidar detection
            global_obj->velo_x = updated[2];
            global_obj->velo_y = updated[3];
            global_obj->label = updated[4];
            global_obj->length = lidar_obj->length;
            global_obj->width = lidar_obj->width;
            global_obj->height = lidar_obj->height;
            // ramp the track's life duration up to the startup threshold
            int dura = global_obj->life_duration;
            global_obj->life_duration = (dura < startup_duration_ ? dura + 1 : startup_duration_);
            std::cout << "updating existing global object: " << global_obj->ToString() << std::endl;
        } else {
            // create a new global object with lidar object
            FusionObjectPtr new_obj = std::make_shared<FusionObject>();
            const auto &lidar_obj = lidar_obj_list->objs[m.first];
            new_obj->time_ns = lidar_obj->time_ns;
            // TODO: we should maintain a global unique id for new object
            new_obj->id = lidar_obj->id;
            new_obj->x = lidar_obj->x;
            new_obj->y = lidar_obj->y;
            new_obj->z = lidar_obj->z;
            new_obj->length = lidar_obj->length;
            new_obj->width = lidar_obj->width;
            new_obj->height = lidar_obj->height;
            new_obj->velo_x = lidar_obj->velo_x;
            new_obj->velo_y = lidar_obj->velo_y;
            new_obj->velo_z = lidar_obj->velo_z;
            new_obj->label = lidar_obj->label;
            new_obj->confidence = lidar_obj->confidence;
            new_obj->life_duration = startup_duration_;
            global_obj_list_->objs.push_back(new_obj);
            std::shared_ptr<FusionEKF> motion_filter = std::make_shared<FusionEKF>();
            motion_filters_.push_back(motion_filter);
            std::cout << "create new global object: " << new_obj->ToString() << std::endl;
        }
    }
    // stamp the global list with the time of this measurement batch
    global_obj_list_->time_ns = lidar_obj_list->time_ns;

    return true;
}

UpdateMotion: updating the state

According to Kalman filter theory, the prediction step propagates the posterior state of step k-1 through the state equation to obtain the prior state and covariance of step k. The state transition matrix F_ models displacement and velocity in the x and y directions. The process noise Q_ scales with the time interval: the larger the interval, the larger the variance.

In the update step, the measurement at step k is combined with the prior state and covariance at step k to produce the posterior state and covariance at step k.
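In standard notation, with \hat{x} the state estimate and P its covariance, the two steps are (this matches KalmanFilter::Predict and KalmanFilter::Update listed further below):

Prediction:
    \hat{x}_k^- = F \hat{x}_{k-1}
    P_k^- = F P_{k-1} F^\top + Q

Update:
    y_k = z_k - H \hat{x}_k^-
    S_k = H P_k^- H^\top + R
    K_k = P_k^- H^\top S_k^{-1}
    \hat{x}_k = \hat{x}_k^- + K_k y_k
    P_k = (I - K_k H) P_k^-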

void FusionEKF::UpdateMotion(const State &state, const Measurement &measurement) {
    /*****************************************************************************
     * Initialization 
     ****************************************************************************/
    motion_filter_.x_ = VectorXd(5);
    motion_filter_.P_ = P_;

    if (measurement.sensor_type == SensorType::RADAR) {
        // Initialize state; radar carries no class information, so class is 0.
        motion_filter_.x_ << state.meas[0], state.meas[1], state.meas[2],
                             state.meas[3], 0;  // x, y, vx, vy, class
        motion_filter_.F_ = F_motion_;
    } else if (measurement.sensor_type == SensorType::LIDAR) {
        // Initialize state.
        motion_filter_.x_ << state.meas[0], state.meas[1], state.meas[2],
                             state.meas[3], state.meas[4];  // x, y, vx, vy, class
        motion_filter_.F_ = F_motion_;
    }

    /*****************************************************************************
     *  Prediction
     ****************************************************************************/

    /**
     * Update the state transition matrix F according to the new elapsed time.
     * Update the process noise covariance matrix.
     */
    // elapsed time between the stored state and the new measurement;
    // the unit must be consistent with the noise parameters used below
    float dt = measurement.time_ns - state.time_ns;

    // Rebuild F so that the elapsed time is integrated (constant-velocity model)
    motion_filter_.F_ = MatrixXd(5, 5);
    motion_filter_.F_ << 1, 0, dt, 0, 0,
                         0, 1, 0, dt, 0,
                         0, 0, 1, 0,  0,
                         0, 0, 0, 1,  0,
                         0, 0, 0, 0,  1;
    
    // set the process covariance matrix Q; variance grows linearly with dt
    motion_filter_.Q_ = MatrixXd(5, 5);
    motion_filter_.Q_ << dt * noise_ax, 0, 0, 0, 0,
                         0, dt * noise_ay, 0, 0, 0,
                         0, 0, dt * noise_ax, 0, 0,
                         0, 0, 0, dt * noise_ay, 0,
                         0, 0, 0, 0, dt * noise_attr_;
    
    motion_filter_.Predict();

    /*****************************************************************************
     *  Update
     ****************************************************************************/

    /**
     * Use the sensor type to perform the update step.
     * Update the state and covariance matrices.
     */
     if (measurement.sensor_type == SensorType::RADAR)
     {
        // Radar updates
        H_radar_jacobian_ = CalculateJacobian(motion_filter_.x_);
        motion_filter_.H_ = H_radar_jacobian_;
        motion_filter_.R_ = R_radar_;
        motion_filter_.UpdateEKF(measurement.meas);
     }
     else
     {
        // Laser updates
        motion_filter_.H_ = H_laser_;
        motion_filter_.R_ = R_laser_;
        motion_filter_.Update(measurement.meas);
     }

}

Predict and Update

void KalmanFilter::Predict() {
    // predict the state
    x_ = F_ * x_;
    MatrixXd Ft = F_.transpose();
    P_ = F_ * P_ * Ft + Q_;
}

void KalmanFilter::Update(const VectorXd &z) {
    // update the state using the Kalman filter equations
    VectorXd z_pred = H_ * x_;

    VectorXd y = z - z_pred;
    MatrixXd Ht = H_.transpose();
    MatrixXd PHt = P_ * Ht;
    MatrixXd S = H_ * PHt + R_;
    MatrixXd Si = S.inverse();
    MatrixXd K = PHt * Si;

    // new estimate
    x_ = x_ + (K * y);
    long x_size = x_.size();
    MatrixXd I = MatrixXd::Identity(x_size, x_size);
    P_ = (I - K * H_) * P_;
}
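To see the two functions in action, here is a small standalone driver, assuming the KalmanFilter class above with public members x_, F_, P_, Q_, H_, R_; all numbers are illustrative:

#include <Eigen/Dense>
#include <iostream>
using Eigen::MatrixXd;
using Eigen::VectorXd;

int main() {
    KalmanFilter kf;                          // the class listed above
    const double dt = 0.1;
    kf.x_ = VectorXd(5);
    kf.x_ << 0, 0, 1, 0, 1;                   // x, y, vx, vy, label
    kf.F_ = MatrixXd::Identity(5, 5);
    kf.F_(0, 2) = dt;                         // x += vx * dt
    kf.F_(1, 3) = dt;                         // y += vy * dt
    kf.P_ = MatrixXd::Identity(5, 5);         // initial uncertainty
    kf.Q_ = MatrixXd::Identity(5, 5) * 0.01;  // process noise
    kf.H_ = MatrixXd::Identity(5, 5);         // we observe the full state
    kf.R_ = MatrixXd::Identity(5, 5) * 0.1;   // measurement noise

    kf.Predict();                             // prior: x advances to ~0.1
    VectorXd z(5);
    z << 0.12, 0.0, 1.0, 0.0, 1.0;            // a noisy full-state measurement
    kf.Update(z);                             // posterior blends prior and z
    std::cout << kf.x_.transpose() << std::endl;
    return 0;
}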

Results

[Figure: multi-sensor fusion result]
