长江 5 号 stereo camera: quick notes on ROS testing

A quick look at the left camera image, the stereo disparity, and the point cloud.
The related software can be downloaded from Baidu Netdisk: https://pan.baidu.com/s/1UbtPUQCIhYQR4vzhNuXWUQ (extraction code: zkhy)
https://github.com/SmarterEye/SmarterEye-SDK
The SDK is open source with the full source code provided, so it can be built and run on any platform.

Build issues
1. The latest version fails to build, presumably due to a Qt version mismatch; use the older release SmarterEye-SDK-0.5.1 instead.

2. Qt Creator / CMake configuration: edit SmarterEye-SDK-0.5.1/cmake/find_qt.cmake and point Qt5_PATH at your local Qt installation:

if (WIN32)
    # set stuff for windows
#    SET(Qt5_PATH /path/to/your/qt/lib)
else()
    # set stuff for other systems
    #SET(Qt5_PATH /opt/Qt5.12.2/5.12.2/gcc_64)
    SET(Qt5_PATH /opt/Qt5.14.0/5.14.0/gcc_64)
endif()

Source code changes:
Remove the OpenCV display window
Rename the coordinate frames
Change the timestamps (switched to ros::Time::now(); an alternative sketch follows this list, and the modified stereo_publisher.cpp comes after it)
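
If you would rather keep the device timestamp than switch to ros::Time::now(), a minimal sketch is below. It assumes the SDK delivers the frame timestamp as milliseconds since the UNIX epoch (verify the unit and epoch against the SDK documentation); toRosTime is a hypothetical helper, not an SDK function.

#include <ros/ros.h>

// Sketch: convert an SDK frame timestamp (assumed to be in milliseconds) to ros::Time.
static ros::Time toRosTime(int64_t timestamp_ms)
{
    ros::Time t;
    t.fromNSec(static_cast<uint64_t>(timestamp_ms) * 1000000ULL);  // ms -> ns
    return t;
}

// usage inside publishFrameCallback():
// img_msg->header.stamp = toRosTime(timestamp);
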

//
// Created by xtp on 2020/3/9.
//

#include "stereo_publisher.h"

#include "framemonitor.h"
//#include "smarter_eye_sdk/stereocamera.h"
//#include "smarter_eye_sdk/frameid.h"
//#include "smarter_eye_sdk/calibrationparams.h"
//#include "smarter_eye_sdk/rotationmatrix.h"

#include "stereocamera.h"
#include "frameid.h"
#include "calibrationparams.h"
#include "rotationmatrix.h"

#include "cv_bridge/cv_bridge.h"
#include "std_msgs/String.h"
#include "sensor_msgs/Imu.h"

#include <cmath>

static const std::string kLeftGrayTopic("/zkhy_stereo/left/gray");
static const std::string kLeftColorTopic("/zkhy_stereo/left/color");

static const std::string kRightGrayTopic("/zkhy_stereo/right/gray");
static const std::string kRightColorTopic("/zkhy_stereo/right/color");

static const std::string kDisparityTopic("/zkhy_stereo/disparity");

static const std::string kImuTopic("/zkhy_stereo/imu");
static const std::string kPointCloudTopic("/zkhy_stereo/points");

static const std::string kCameraParamsService("/zkhy_stereo/get_camera_params");
static const std::string kRotationMatrixService("/zkhy_stereo/get_rotation_matrix");
static const std::string kFrameRateService("/zkhy_stereo/set_frame_rate");

StereoPublisher::StereoPublisher()
    : it_(node_handler_),
      stereo_camera_(StereoCamera::connect("192.168.1.251")),
      frame_monitor_(new FrameMonitor)
{
    bool isConnected = false;
	// frame rate
    float rate = 0.0;
    while (!isConnected) {
        printf("connecting...\n");
        isConnected = stereo_camera_->isConnected();
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
    if(stereo_camera_->getFrameRate(rate))
    {
        std::cout <<"frame rate : "<<rate<<std::endl;
    }
    else
    {
        std::cout << "get frame rate failed" <<std::endl;
    }
    left_gray_pub_ = it_.advertise(kLeftGrayTopic, 1);
    left_color_pub_ = it_.advertise(kLeftColorTopic, 1);

    right_gray_pub_ = it_.advertise(kRightGrayTopic, 1);
    right_color_pub_ = it_.advertise(kRightColorTopic, 1);

    disparity_pub_ = it_.advertise(kDisparityTopic, 1);

    imu_pub_ = node_handler_.advertise<sensor_msgs::Imu>(kImuTopic, 100);
    points_pub_ = node_handler_.advertise<sensor_msgs::PointCloud2>(kPointCloudTopic, 1);

    camera_params_server_ = node_handler_.advertiseService(kCameraParamsService, &StereoPublisher::onCameraParamsRequest, this);
    rotation_matrix_server_ = node_handler_.advertiseService(kRotationMatrixService, &StereoPublisher::onRotationMatrixRequest, this);
    frame_rate_server_ = node_handler_.advertiseService(kFrameRateService, &StereoPublisher::onFrameRateSetRequest, this);

    stereo_camera_->requestFrame(frame_monitor_.get(), FrameId::CalibLeftCamera | FrameId::CalibRightCamera | FrameId::Disparity);
    stereo_camera_->enableMotionData(true);

    StereoCalibrationParameters params {};
    stereo_camera_->requestStereoCameraParameters(params);

    frame_monitor_->setStereoCalibParams(params);
    frame_monitor_->setFrameCallback(std::bind(&StereoPublisher::publishFrameCallback, this,
            std::placeholders::_1, std::placeholders::_2, std::placeholders::_3));
    frame_monitor_->setMotionDataCallback(std::bind(&StereoPublisher::publishImuCallback, this, std::placeholders::_1));
    frame_monitor_->setPointCloudCallback(std::bind(&StereoPublisher::publishPointsCallback, this, std::placeholders::_1));
}

StereoPublisher::~StereoPublisher()
{
    stereo_camera_->disconnectFromServer();
}

bool StereoPublisher::onCameraParamsRequest(zkhy_stereo_d::CameraParams::Request &req,
                                            zkhy_stereo_d::CameraParams::Response &resp)
{
    StereoCalibrationParameters params {};
    bool ok = stereo_camera_->requestStereoCameraParameters(params);
    if (ok) {
        resp.focus = params.focus;
        resp.cx = params.cx;
        resp.cy = params.cy;
        resp.RRoll = params.RRoll;
        resp.RPitch = params.RPitch;
        resp.RYaw = params.RYaw;
        resp.Tx = params.Tx;
        resp.Ty = params.Ty;
        resp.Tz = params.Tz;
    } else {
        std::cout << "Stereo camera params request failed from ADAS.";
    }

    return ok;
}

bool StereoPublisher::onRotationMatrixRequest(zkhy_stereo_d::RotationMatrix::Request &req,
                                              zkhy_stereo_d::RotationMatrix::Response &resp)
{
    RotationMatrix matrix {};
    bool ok = stereo_camera_->requestRotationMatrix(matrix);
    if (ok) {
        for (size_t i = 0; i < resp.real3DToImage.size(); i++) {
            resp.real3DToImage[i] = matrix.real3DToImage[i];
        }
        for (size_t j = 0; j < resp.imageToReal3D.size(); j++) {
            resp.imageToReal3D[j] = matrix.imageToReal3D[j];
        }
    } else {
        std::cout << "Rotation matrix request failed from ADAS.";
    }

    return ok;
}

bool StereoPublisher::onFrameRateSetRequest(zkhy_stereo_d::FrameRate::Request &req,zkhy_stereo_d::FrameRate::Response &resp)
{
    float rate = req.rate;
    std::cout << " request rate :"<<rate<<std::endl;
    bool ok = stereo_camera_->setFrameRate(rate);
    if(ok)
    {
        std::cout << " set frame rate done"<<std::endl;
    }
    else
    {
        std::cout << " set frame rate failed"<<std::endl;
    }
    return ok;
}

void StereoPublisher::publishFrameCallback(int frameId, int64_t timestamp, const cv::Mat &img_mat)
{
    bool is_color = (img_mat.type() == CV_8UC3);
    std::string encoding = is_color ? "rgb8" : "mono8";
    sensor_msgs::ImagePtr img_msg;

    if (frameId == FrameId::CalibLeftCamera || frameId == FrameId::CalibRightCamera) {
        img_msg = cv_bridge::CvImage(std_msgs::Header(), encoding, img_mat).toImageMsg();
        //img_msg->header.stamp = ros::Time(timestamp / 1000.0);
        img_msg->header.stamp = ros::Time::now();
        if (frameId == FrameId::CalibLeftCamera) {
            if (is_color) {
                img_msg->header.frame_id = "zkhy_left_color_frame";
                left_color_pub_.publish(img_msg);
            } else {
                img_msg->header.frame_id = "zkhy_left_gray_frame";
                left_gray_pub_.publish(img_msg);
            }
        }

        if (frameId == FrameId::CalibRightCamera) {
            if (is_color) {
                img_msg->header.frame_id = "zkhy_right_color_frame";
                right_color_pub_.publish(img_msg);
            } else {
                img_msg->header.frame_id = "zkhy_right_gray_frame";
                right_gray_pub_.publish(img_msg);
            }
        }
    }

    if (frameId == FrameId::Disparity) {
        img_msg = cv_bridge::CvImage(std_msgs::Header(), "mono8", img_mat).toImageMsg();
        img_msg->header.stamp = ros::Time::now();
        img_msg->header.frame_id = "zkhy_disparity_frame";
        disparity_pub_.publish(img_msg);
    }
}

void StereoPublisher::publishImuCallback(const MotionData *motionData)
{
    sensor_msgs::Imu imu_msg;
    //imu_msg.header.stamp = ros::Time(motionData->timestamp / 1000.0);
    imu_msg.header.stamp = ros::Time::now();
    imu_msg.header.frame_id = "zkhy_imu_frame";

    imu_msg.linear_acceleration.x = motionData->accelX;
    imu_msg.linear_acceleration.y = motionData->accelY;
    imu_msg.linear_acceleration.z = motionData->accelZ;

    imu_msg.angular_velocity.x = motionData->gyroX * M_PI / 180;
    imu_msg.angular_velocity.y = motionData->gyroY * M_PI / 180;
    imu_msg.angular_velocity.z = motionData->gyroZ * M_PI / 180;

    imu_pub_.publish(imu_msg);
}

void StereoPublisher::publishPointsCallback(sensor_msgs::PointCloud2::Ptr cloud)
{
    cloud->header.frame_id = "stereo_left";
    points_pub_.publish(cloud);
}


int main(int argc, char* argv[])
{
    ros::init(argc, argv, "stereo_publisher_d");

    StereoPublisher stereo_publisher;

    ros::spin();

    return 0;
}
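
The second modified source file, pointcloudgenerator.cpp, builds the PointCloud2 in a worker thread; its timestamp is likewise switched to ros::Time::now():
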
#include "pointcloudgenerator.h"

#include <iostream>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/point_cloud2_iterator.h>

//#include "smarter_eye_sdk/satpext.h"
//#include "smarter_eye_sdk/disparityconvertor.h"
#include "satpext.h"
#include "disparityconvertor.h"
PointCloudGenerator::PointCloudGenerator()
    : width_(0),
      height_(0),
      bit_num_(0),
      running_(false),
      frame_ready_(false),
      disparity_raw_data_(nullptr)
{

}

PointCloudGenerator::~PointCloudGenerator()
{
    stop();
}

void PointCloudGenerator::init(int width, int height,  int dataSize, int bitNum)
{
    width_ = width;
    height_ = height;
    bit_num_ = bitNum;

    disparity_raw_data_.reset((RawImageFrame*)new char[sizeof(RawImageFrame) + dataSize]);

    start();
}

void PointCloudGenerator::push(const RawImageFrame *rawFrame)
{
    if (!prepared()) return;

    if (!running_ || frame_ready_) return;

    {
        std::lock_guard<std::mutex> lock(mutex_);
        if (frame_ready_) return;

        memcpy(disparity_raw_data_.get(), rawFrame, sizeof(RawImageFrame) + rawFrame->dataSize);
        frame_ready_ = true;
    }

    condition_.notify_one();
}

void PointCloudGenerator::start()
{
    if (!prepared()) return;

    if (running_) return;

    {
        std::lock_guard<std::mutex> _(mutex_);
        if (running_) return;
        running_ = true;
    }
    thread_ = std::thread(&PointCloudGenerator::run, this);
}

void PointCloudGenerator::stop()
{
    if (!running_) return;

    {
        std::lock_guard<std::mutex> _(mutex_);
        if (!running_) return;
        running_ = false;
        frame_ready_ = true;
    }
    condition_.notify_one();
    if (thread_.joinable()) {
        thread_.join();
    }
}

// Worker thread: wait for a new disparity frame, convert it to a PointCloud2 and
// hand it to the publish callback.
void PointCloudGenerator::run()
{
    while (running_) {
        {
            std::unique_lock<std::mutex> lock(mutex_);
            condition_.wait(lock, [this]{ return frame_ready_; });
            if (!running_) break;
        }

        sensor_msgs::PointCloud2::Ptr pc(new sensor_msgs::PointCloud2);
        //pc->header.stamp = ros::Time(disparity_raw_data_->time / 1000.0);
        pc->header.stamp = ros::Time::now();
        pc->width = width_;
        pc->height = height_;

        sensor_msgs::PointCloud2Modifier modifier(*pc);
        modifier.setPointCloud2Fields(3,
                                      "x", 1, sensor_msgs::PointField::FLOAT32,
                                      "y", 1, sensor_msgs::PointField::FLOAT32,
                                      "z", 1, sensor_msgs::PointField::FLOAT32);

        sensor_msgs::PointCloud2Iterator<float> iter_x(*pc, "x");
        sensor_msgs::PointCloud2Iterator<float> iter_y(*pc, "y");
        sensor_msgs::PointCloud2Iterator<float> iter_z(*pc, "z");

        // Convert every disparity pixel to an XYZ point; the coordinates come back in
        // millimetres and are divided by 1000 below so the published cloud is in metres.
        for (int posY = 0; posY < height_; posY++) {
            for (int posX = 0; posX < width_; posX++) {
                float x, y, z;
                DisparityConvertor::getPointXYZDistance(disparity_raw_data_->image, width_, height_, bit_num_,
                                                        calib_params_.Tx, calib_params_.focus, calib_params_.cx, calib_params_.cy,
                                                        posX, posY, x, y, z);

                if ((fabs(x) > 200000.0f)||(fabs(y) > 200000.0f)||(fabs(z) > 200000.0f)) {
                    x = 0.0f;
                    y = 0.0f;
                    z = 0.0f;
                }

                *iter_x = x / 1000.f;
                *iter_y = y / 1000.f;
                *iter_z = z / 1000.f;

                ++iter_x; ++iter_y; ++iter_z;
            }
        }

        publish_callback_(pc);

        {
            std::lock_guard<std::mutex> _(mutex_);
            frame_ready_ = false;
        }
    }
}

The official ROS package does not publish camera_info.

Try generating it by converting the parameters returned by the provided services:

#! /usr/bin/env python
# -*-coding:utf-8 -*-
import rospy
from zkhy_stereo_d.srv import CameraParams,FrameRate,RotationMatrix
#zkhy_stereo_d/CameraParams


#static const std::string kCameraParamsService("/zkhy_stereo/get_camera_params");
'''
---
float64 focus
float64 cx
float64 cy
float64 RRoll
float64 RPitch
float64 RYaw
float64 Tx
float64 Ty
float64 Tz
'''
#static const std::string kRotationMatrixService("/zkhy_stereo/get_rotation_matrix");
'''
---
float32[12] real3DToImage
float32[9] imageToReal3D
'''
#static const std::string kFrameRateService("/zkhy_stereo/set_frame_rate");
'''
float64 rate
---
'''

from sensor_msgs.msg import CameraInfo
from sensor_msgs.msg import Image
class ZKHYCamInfoNode():


    def __init__(self):

        rospy.init_node("node_zzztestsrv")
        rospy.on_shutdown(self.shutdown)
        self.rate_hz = rospy.get_param("~rate", 10)
        self.rate = rospy.Rate(self.rate_hz)
        #
        self._srv_camera_params = rospy.get_param("~get_camera_params", "/zkhy_stereo/get_camera_params")
        self._srv_rotation_matrix = rospy.get_param("~get_rotation_matrix", "/zkhy_stereo/get_rotation_matrix")
        self.threshold_s = rospy.get_param("~threshold", 2)
        self._frame = rospy.get_param("~frame_id", "stereo_left")
        self._height = rospy.get_param("~img_height", 720)
        self._width = rospy.get_param("~img_width", 1280)

        self._f = rospy.get_param("~f", 924)

        self._fx = self._f
        self._fy = self._f
        self._cx = self._width/2.0
        self._cy = self._height/2.0
        self._p = [self._fx, 0.0, self._cx, 0.0, 0.0, self._fy, self._cy, 0.0, 0.0, 0.0, 1.0, 0.0]
        self._pub_info = rospy.Publisher('~camera_info', CameraInfo, queue_size=5)

    def refresh(self):
        try:
            client1 = rospy.ServiceProxy(self._srv_camera_params,CameraParams)

            client3 = rospy.ServiceProxy(self._srv_rotation_matrix,RotationMatrix)

            response = client1.call()
            print("/zkhy_stereo/get_camera_params callok")
            print(response)
            self._f=response.focus
            self._fx = self._f
            self._fy = self._f
            self._cx=response.cx
            self._cy=response.cy



            response = client3.call()
            print("/zkhy_stereo/get_rotation_matrix callok")
            print(response)
            self._p=response.real3DToImage
            #print(response.real3DToImage)
        except Exception as e:
            print("call Service err: %s" % e)


    def pubcaminfomsg(self):
        """Build and publish a :obj:`sensor_msgs.CameraInfo` msg.

        
        msg_header = Header()
        msg_header.frame_id = self._frame

        msg_roi = RegionOfInterest()
        msg_roi.x_offset = 0
        msg_roi.y_offset = 0
        msg_roi.height = 0
        msg_roi.width = 0
        msg_roi.do_rectify = 0
        """
        msg = CameraInfo()
        #msg.header = msg_header
        msg.header.frame_id = self._frame
        msg.height = self._height
        msg.width = self._width

        # The distortion model used. Supported models are listed in
        # sensor_msgs/distortion_models.h. For most cameras, "plumb_bob" - a
        # simple model of radial and tangential distortion - is sufficient.
        msg.distortion_model = 'plumb_bob'
        # The distortion parameters, size depending on the distortion model.
        # For "plumb_bob", the 5 parameters are: (k1, k2, t1, t2, k3).
        msg.D = [0.0, 0.0, 0.0, 0.0, 0.0]
        # Intrinsic camera matrix for the raw (distorted) images.
        #     [fx  0 cx]
        # K = [ 0 fy cy]
        #     [ 0  0  1]
        # Projects 3D points in the camera coordinate frame to 2D pixel
        # coordinates using the focal lengths (fx, fy) and principal point
        # (cx, cy).
        msg.K = [self._fx, 0.0, self._cx, 0.0, self._fy, self._cy, 0.0, 0.0, 1.0]

        # Rectification matrix (stereo cameras only)
        # A rotation matrix aligning the camera coordinate system to the ideal
        # stereo image plane so that epipolar lines in both stereo images are
        # parallel.
        msg.R = [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]

        # Projection/camera matrix
        #     [fx'  0  cx' Tx]
        # P = [ 0  fy' cy' Ty]
        #     [ 0   0   1   0]
        # By convention, this matrix specifies the intrinsic (camera) matrix
        #  of the processed (rectified) image. That is, the left 3x3 portion
        #  is the normal camera intrinsic matrix for the rectified image.
        # It projects 3D points in the camera coordinate frame to 2D pixel
        #  coordinates using the focal lengths (fx', fy') and principal point
        #  (cx', cy') - these may differ from the values in K.
        # For monocular cameras, Tx = Ty = 0. Normally, monocular cameras will
        #  also have R = the identity and P[1:3,1:3] = K.
        # For a stereo pair, the fourth column [Tx Ty 0]' is related to the
        #  position of the optical center of the second camera in the first
        #  camera's frame. We assume Tz = 0 so both cameras are in the same
        #  stereo image plane. The first camera always has Tx = Ty = 0. For
        #  the right (second) camera of a horizontal stereo pair, Ty = 0 and
        #  Tx = -fx' * B, where B is the baseline between the cameras.
        # Given a 3D point [X Y Z]', the projection (x, y) of the point onto
        #  the rectified image is given by:
        #  [u v w]' = P * [X Y Z 1]'
        #         x = u / w
        #         y = v / w
        #  This holds for both images of a stereo pair.

        msg.P = self._p
        #[self._fx, 0.0, self._cx, 0.0, 0.0, self._fx, self._cy, 0.0, 0.0, 0.0, 1.0, 0.0]
        msg.binning_x = 0
        msg.binning_y = 0
        #msg.roi = msg_roi
        self._pub_info.publish(msg)
        #return msg


    def shutdown(self):
        pass

    def run(self):
        c = 0
        while not rospy.is_shutdown():
            self.rate.sleep()

            # re-query the camera parameters from the services every 50 cycles
            if c % 50 == 0:
                self.refresh()

            print(c)

            if rospy.is_shutdown():
                break
            self.pubcaminfomsg()

            c = c + 1

if __name__=="__main__":
    '''
    # 初始化ros节点
    rospy.init_node("node_zzztestsrv")
    # 创建客户端对象


    client1 = rospy.ServiceProxy("/zkhy_stereo/get_camera_params",CameraParams)
    client2 = rospy.ServiceProxy("/zkhy_stereo/set_frame_rate",FrameRate)
    client3 = rospy.ServiceProxy("/zkhy_stereo/get_rotation_matrix",RotationMatrix)

    response = client1.call()
    print("/zkhy_stereo/get_camera_params callok")
    print(response)
    cx=response.cx
    cy=response.cy
    response = client2.call(10.0,)
    print("/zkhy_stereo/set_frame_rate callok")
    print(response)


    response = client3.call()
    print("/zkhy_stereo/get_rotation_matrix callok")
    print(response)

    #print(response.real3DToImage)

    '''
    try:

        m=ZKHYCamInfoNode()
        m.run()

    except rospy.ROSInterruptException:
        pass
    #'''
    

(Screenshots) Data after configuring with the calibration board:

/zkhy_stereo/get_camera_params callok
focus: 924.560392664
cx: 652.410320649
cy: 383.193440768
RRoll: 0.0
RPitch: 0.0
RYaw: 0.0
Tx: 120.910602422
Ty: 0.0
Tz: 0.0
/zkhy_stereo/set_frame_rate callok

/zkhy_stereo/get_rotation_matrix callok
real3DToImage: [952.0765991210938, 5.438984394073486, 611.094482421875, 46.22802734375, 16.343040466308594, 927.7232055664062, 374.91796875, 0.0, 0.0435481071472168, 0.008342000655829906, 0.999016523361206, 0.0]

imageToReal3D: [0.00108056899625808, -3.9293402664952737e-07, -0.6608323454856873, 4.758225063961063e-13, 0.0010815574787557125, -0.40589451789855957, -4.710305802291259e-05, -9.014106581162196e-06, 1.0331801176071167]

The published camera_info:

rostopic echo /node_zzztestsrv/camera_info
header: 
  seq: 1
  stamp: 
    secs: 0
    nsecs:         0
  frame_id: "stereo_left"
height: 720
width: 1280
distortion_model: "plumb_bob"
D: [0.0, 0.0, 0.0, 0.0, 0.0]
K: [924.5603926636185, 0.0, 652.4103206490715, 0.0, 924.5603926636185, 383.1934407680866, 0.0, 0.0, 1.0]
R: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
P: [952.0765991210938, 5.438984394073486, 611.094482421875, 46.22802734375, 16.343040466308594, 927.7232055664062, 374.91796875, 0.0, 0.0435481071472168, 0.008342000655829906, 0.999016523361206, 0.0]
binning_x: 0
binning_y: 0
roi: 
  x_offset: 0
  y_offset: 0
  height: 0
  width: 0
  do_rectify: False
---
Overview of the result after configuring the extrinsics of the relevant coordinate frames (a static TF sketch follows). Why is there no point cloud for nearby obstacles?

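For the frame extrinsics, one option is to publish a static TF from the vehicle/base frame to stereo_left (the frame_id set on the point cloud above). A minimal sketch follows; the parent frame base_link and all translation/rotation values are placeholders, not the real calibration.

// Minimal static TF sketch (not part of the SDK): publishes an assumed extrinsic
// from "base_link" to "stereo_left". Replace the placeholder pose with the real one.
#include <ros/ros.h>
#include <tf2_ros/static_transform_broadcaster.h>
#include <geometry_msgs/TransformStamped.h>
#include <tf2/LinearMath/Quaternion.h>

int main(int argc, char *argv[])
{
    ros::init(argc, argv, "zkhy_static_tf");

    tf2_ros::StaticTransformBroadcaster broadcaster;

    geometry_msgs::TransformStamped tf_msg;
    tf_msg.header.stamp = ros::Time::now();
    tf_msg.header.frame_id = "base_link";   // assumed parent frame
    tf_msg.child_frame_id = "stereo_left";  // frame_id used on the published point cloud
    tf_msg.transform.translation.x = 0.0;   // placeholder mounting offsets (metres)
    tf_msg.transform.translation.y = 0.0;
    tf_msg.transform.translation.z = 1.2;

    tf2::Quaternion q;
    q.setRPY(0.0, 0.0, 0.0);                // placeholder mounting orientation (radians)
    tf_msg.transform.rotation.x = q.x();
    tf_msg.transform.rotation.y = q.y();
    tf_msg.transform.rotation.z = q.z();
    tf_msg.transform.rotation.w = q.w();

    broadcaster.sendTransform(tf_msg);
    ros::spin();
    return 0;
}

The same transform can also be published with the tf2_ros static_transform_publisher node from a launch file.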

Corrections are welcome if anything here is wrong.

