Kinect for Windows V2 vs. V1 Development: Acquiring Skeleton Data and Displaying It with OpenCV 2.4.10

This article shows how to open and update skeleton (body) frames on Kinect V1 and V2, and how to draw the skeleton data with OpenCV. It contrasts V1's NuiSkeletonGetNextFrame with V2's BodyFrameSource interface, notes that V2 samples draw skeletons with Direct2D but that OpenCV can be used instead, and ends with a full V2 + VS2012 + OpenCV example that acquires, processes, and displays skeleton data in real time.

If you are interested, feel free to join the Kinect algorithm discussion QQ group: 462964980.


1. Opening the skeleton frame

For V1, skeleton tracking is enabled with the NuiSkeletonTrackingEnable method:

    m_hNextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
    hr = m_PNuiSensor->NuiSkeletonTrackingEnable(
             m_hNextSkeletonEvent,
             NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE //|
             );
    if( FAILED( hr ) )
    {
        cout << "Could not open skeleton stream video" << endl;
        return hr;
    }
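The event handle created above is how V1 signals that new skeleton data is ready. A minimal polling sketch of that pattern (an assumption based on the standard V1 samples, not code from this post):

    // Wait briefly until the runtime signals a new skeleton frame on the event
    if( WAIT_OBJECT_0 == WaitForSingleObject( m_hNextSkeletonEvent, 100 ) )
    {
        NUI_SKELETON_FRAME skeletonFrame = { 0 };
        if( SUCCEEDED( NuiSkeletonGetNextFrame( 0, &skeletonFrame ) ) )
        {
            // process skeletonFrame here (see section 2)
        }
    }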


For V2, the body frame source and its reader are opened through the sensor interface:

    // Initialize the Kinect and get coordinate mapper and the body reader
    IBodyFrameSource* pBodyFrameSource = NULL;

    hr = m_pKinectSensor->Open();

    if (SUCCEEDED(hr))
    {
        // get_CoordinateMapper retrieves the coordinate mapper
        hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
    }

    if (SUCCEEDED(hr))
    {
        // get_BodyFrameSource retrieves the body frame source
        hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource);
    }

    if (SUCCEEDED(hr))
    {
        // OpenReader opens the body frame reader
        hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader);
    }

    SafeRelease(pBodyFrameSource);


2. Updating the skeleton frame

For V1, this is done with the NuiSkeletonGetNextFrame method:

    NUI_SKELETON_FRAME SkeletonFrame;   // the skeleton frame
    bool bFoundSkeleton = false;

    if( SUCCEEDED( NuiSkeletonGetNextFrame( 0, &SkeletonFrame ) ) )   // get the next frame of skeleton data straight from the Kinect
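The snippet stops at the success check. For context, here is a minimal sketch of how such a frame is usually processed in V1 samples (the smoothing call and the loop over the six skeleton slots are assumptions, not code from this post):

    NuiTransformSmooth( &SkeletonFrame, NULL );   // default smoothing parameters
    for( int i = 0; i < NUI_SKELETON_COUNT; i++ )
    {
        if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED )
        {
            bFoundSkeleton = true;
            // SkeletonFrame.SkeletonData[i].SkeletonPositions[] now holds the joints,
            // which can be mapped to image coordinates and drawn with cvLine (see section 3)
        }
    }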


For V2:

    if (!m_pBodyFrameReader)
    {
        return;
    }

    // Update the body frame
    HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    // Update the body data
    hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
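Putting the two calls together, here is a compact sketch of the full V2 acquire-refresh-release cycle (variable names follow the full listing in section 4; treat this as a scaffold rather than the exact code of this post):

    IBodyFrame* pBodyFrame = nullptr;
    HRESULT hResult = m_pBodyFrameReader->AcquireLatestFrame( &pBodyFrame );
    if( SUCCEEDED( hResult ) ){
        IBody* pBody[BODY_COUNT] = { 0 };
        hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
        if( SUCCEEDED( hResult ) ){
            for( int i = 0; i < BODY_COUNT; i++ ){
                BOOLEAN bTracked = FALSE;
                if( SUCCEEDED( pBody[i]->get_IsTracked( &bTracked ) ) && bTracked ){
                    // read joints and hand states here
                }
            }
        }
        for( int i = 0; i < BODY_COUNT; i++ ){
            SafeRelease( pBody[i] );
        }
    }
    SafeRelease( pBodyFrame );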

3. Drawing the skeleton

For V1, drawing is done mainly with OpenCV, using the cvLine function.

For example, the left arm is drawn as follows (a helper that factors out the repeated pattern is sketched after the block):

   

    // Left arm
    if( (pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x != 0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y != 0) &&
        (pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x != 0   || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y != 0) )
        cvLine(SkeletonImage, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], color, 2);
    if( (pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x != 0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y != 0) &&
        (pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x != 0    || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y != 0) )
        cvLine(SkeletonImage, pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], color, 2);
    if( (pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x != 0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y != 0) &&
        (pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x != 0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y != 0) )
        cvLine(SkeletonImage, pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], color, 2);
    if( (pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x != 0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y != 0) &&
        (pointSet[NUI_SKELETON_POSITION_HAND_LEFT].x != 0  || pointSet[NUI_SKELETON_POSITION_HAND_LEFT].y != 0) )
        cvLine(SkeletonImage, pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], pointSet[NUI_SKELETON_POSITION_HAND_LEFT], color, 2);
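The repeated check-and-draw pattern above can be collapsed into a small helper. A minimal sketch, assuming SkeletonImage is a CvArr* (as cvLine expects) and that a point of (0,0) means the joint was not mapped:

    // Hypothetical helper, not part of the original post
    static void DrawBoneV1( CvArr* SkeletonImage, const CvPoint pointSet[],
                            int joint0, int joint1, CvScalar color )
    {
        // skip bones whose endpoints were never mapped to image coordinates
        if( ( pointSet[joint0].x == 0 && pointSet[joint0].y == 0 ) ||
            ( pointSet[joint1].x == 0 && pointSet[joint1].y == 0 ) )
            return;
        cvLine( SkeletonImage, pointSet[joint0], pointSet[joint1], color, 2 );
    }

    // usage:
    // DrawBoneV1( SkeletonImage, pointSet, NUI_SKELETON_POSITION_SHOULDER_CENTER,
    //             NUI_SKELETON_POSITION_SHOULDER_LEFT, color );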


For V2, drawing is mainly done with Direct2D, Microsoft's 2D graphics API (see the official documentation for details). It can of course also be done with OpenCV instead. Below, the line() function from the OpenCV 2.4.10 drawing functions is used:

    line(SkeletonImage, pointSet[joint0], pointSet[joint1], color, 2);


4. V2 + VS2012 + OpenCV code


    #include <Windows.h>
    #include <Kinect.h>
    #include <opencv2/opencv.hpp>

    using namespace std;
    using namespace cv;

    // Releasing the interfaces has to be defined ourselves
    template<class Interface>
    inline void SafeRelease( Interface*& pInterfaceToRelease )
    {
        if( pInterfaceToRelease != NULL ){
            pInterfaceToRelease->Release();
            pInterfaceToRelease = NULL;
        }
    }


    void DrawBone( Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone, JointType joint0, JointType joint1 );

    void drawSkeleton( Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone );


    int main( int argc, char** argv )
    {
        // Enable OpenCV's CPU hardware instruction optimizations
        setUseOptimized( true );

        // Sensor
        IKinectSensor* pSensor;
        HRESULT hResult = S_OK;
        hResult = GetDefaultKinectSensor( &pSensor );
        if( FAILED( hResult ) ){
            std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
            return -1;
        }

        hResult = pSensor->Open();
        if( FAILED( hResult ) ){
            std::cerr << "Error : IKinectSensor::Open()" << std::endl;
            return -1;
        }

        // Source
        IColorFrameSource* pColorSource;
        hResult = pSensor->get_ColorFrameSource( &pColorSource );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
            return -1;
        }

        IBodyFrameSource* pBodySource;
        hResult = pSensor->get_BodyFrameSource( &pBodySource );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
            return -1;
        }

        // Reader
        IColorFrameReader* pColorReader;
        hResult = pColorSource->OpenReader( &pColorReader );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
            return -1;
        }

        IBodyFrameReader* pBodyReader;
        hResult = pBodySource->OpenReader( &pBodyReader );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
            return -1;
        }

        // Description
        IFrameDescription* pDescription;
        hResult = pColorSource->get_FrameDescription( &pDescription );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
            return -1;
        }

        int width = 0;
        int height = 0;
        pDescription->get_Width( &width ); // 1920
        pDescription->get_Height( &height ); // 1080
        unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

        cv::Mat bufferMat( height, width, CV_8UC4 );
        cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
        cv::namedWindow( "Body" );

        // Color Table
        cv::Vec3b color[BODY_COUNT];
        color[0] = cv::Vec3b( 255,   0,   0 );
        color[1] = cv::Vec3b(   0, 255,   0 );
        color[2] = cv::Vec3b(   0,   0, 255 );
        color[3] = cv::Vec3b( 255, 255,   0 );
        color[4] = cv::Vec3b( 255,   0, 255 );
        color[5] = cv::Vec3b(   0, 255, 255 );

        // Coordinate Mapper
        ICoordinateMapper* pCoordinateMapper;
        hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
        if( FAILED( hResult ) ){
            std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
            return -1;
        }

        while( 1 ){
            // Frame
            IColorFrame* pColorFrame = nullptr;
            hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
            if( SUCCEEDED( hResult ) ){
                hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
                if( SUCCEEDED( hResult ) ){
                    cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
                }
            }

            // Update the body frame
            IBodyFrame* pBodyFrame = nullptr;
            hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
            if( SUCCEEDED( hResult ) ){
                IBody* pBody[BODY_COUNT] = { 0 };
                // Update the body data
                hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
                if( SUCCEEDED( hResult ) ){
                    for( int count = 0; count < BODY_COUNT; count++ ){
                        BOOLEAN bTracked = false;
                        hResult = pBody[count]->get_IsTracked( &bTracked );
                        if( SUCCEEDED( hResult ) && bTracked ){
                            Joint joint[JointType::JointType_Count];
                            hResult = pBody[count]->GetJoints( JointType::JointType_Count, joint );
                            if( SUCCEEDED( hResult ) ){
                                // Left Hand State
                                HandState leftHandState = HandState::HandState_Unknown;
                                hResult = pBody[count]->get_HandLeftState( &leftHandState );
                                if( SUCCEEDED( hResult ) ){
                                    ColorSpacePoint colorSpacePoint = { 0 };
                                    hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
                                    if( SUCCEEDED( hResult ) ){
                                        int x = static_cast<int>( colorSpacePoint.X );
                                        int y = static_cast<int>( colorSpacePoint.Y );
                                        if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                            if( leftHandState == HandState::HandState_Open ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
                                            }
                                            else if( leftHandState == HandState::HandState_Closed ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
                                            }
                                            else if( leftHandState == HandState::HandState_Lasso ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
                                            }
                                        }
                                    }
                                }

                                // Right Hand State
                                HandState rightHandState = HandState::HandState_Unknown;
                                hResult = pBody[count]->get_HandRightState( &rightHandState );
                                if( SUCCEEDED( hResult ) ){
                                    ColorSpacePoint colorSpacePoint = { 0 };
                                    hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
                                    if( SUCCEEDED( hResult ) ){
                                        int x = static_cast<int>( colorSpacePoint.X );
                                        int y = static_cast<int>( colorSpacePoint.Y );
                                        if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                            if( rightHandState == HandState::HandState_Open ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
                                            }
                                            else if( rightHandState == HandState::HandState_Closed ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
                                            }
                                            else if( rightHandState == HandState::HandState_Lasso ){
                                                cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
                                            }
                                        }
                                    }
                                }

                                CvPoint skeletonPoint[BODY_COUNT][JointType_Count] = { cvPoint( 0, 0 ) };
                                // Joint
                                for( int type = 0; type < JointType::JointType_Count; type++ ){
                                    ColorSpacePoint colorSpacePoint = { 0 };
                                    pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
                                    int x = static_cast<int>( colorSpacePoint.X );
                                    int y = static_cast<int>( colorSpacePoint.Y );
                                    skeletonPoint[count][type].x = x;
                                    skeletonPoint[count][type].y = y;
                                    if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                        cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast<cv::Scalar>( color[count] ), -1, CV_AA );
                                    }
                                }
                                // Draw the skeleton
                                drawSkeleton( bufferMat, skeletonPoint[count], joint, count );
                            }

                            // Lean
                            //PointF amount;
                            //hResult = pBody[count]->get_Lean( &amount );
                            //if( SUCCEEDED( hResult ) ){
                            //  std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
                            //}
                        }
                    }
                    cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
                }
                for( int count = 0; count < BODY_COUNT; count++ ){
                    SafeRelease( pBody[count] );
                }
            }
            SafeRelease( pColorFrame );
            SafeRelease( pBodyFrame );

            cv::imshow( "Body", bodyMat );
            // Press Esc to quit; otherwise the cleanup below is never reached
            if( cv::waitKey( 30 ) == VK_ESCAPE ){
                break;
            }
        }

        SafeRelease( pColorSource );
        SafeRelease( pColorReader );
        SafeRelease( pDescription );
        SafeRelease( pBodySource );
        // done with body frame reader
        SafeRelease( pBodyReader );
        // done with coordinate mapper
        SafeRelease( pCoordinateMapper );

        if( pSensor ){
            pSensor->Close();
        }
        SafeRelease( pSensor );

        return 0;
    }


    void DrawBone( Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone, JointType joint0, JointType joint1 )
    {
        TrackingState joint0State = pJoints[joint0].TrackingState;
        TrackingState joint1State = pJoints[joint1].TrackingState;

        // If we can't find either of these joints, exit
        if( ( joint0State == TrackingState_NotTracked ) || ( joint1State == TrackingState_NotTracked ) )
        {
            return;
        }

        // Don't draw if both points are inferred
        if( ( joint0State == TrackingState_Inferred ) && ( joint1State == TrackingState_Inferred ) )
        {
            return;
        }

        CvScalar color;
        switch( whichone ) // different colors for different tracked people
        {
        case 0:
            color = cvScalar( 255 );
            break;
        case 1:
            color = cvScalar( 0, 255 );
            break;
        case 2:
            color = cvScalar( 0, 0, 255 );
            break;
        case 3:
            color = cvScalar( 255, 255, 0 );
            break;
        case 4:
            color = cvScalar( 255, 0, 255 );
            break;
        case 5:
            color = cvScalar( 0, 255, 255 );
            break;
        }

        // Draw the bone whether it is fully tracked or partially inferred
        // (the official sample draws inferred bones thinner; here both cases use thickness 2)
        line( SkeletonImage, pointSet[joint0], pointSet[joint1], color, 2 );
    }

    void drawSkeleton( Mat& SkeletonImage, CvPoint pointSet[], const Joint* pJoints, int whichone )
    {
        // Draw the bones

        // Torso
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_Head, JointType_Neck );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_Neck, JointType_SpineShoulder );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_SpineMid );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineMid, JointType_SpineBase );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_ShoulderRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineShoulder, JointType_ShoulderLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineBase, JointType_HipRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_SpineBase, JointType_HipLeft );

        // Right Arm
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_ShoulderRight, JointType_ElbowRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_ElbowRight, JointType_WristRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_WristRight, JointType_HandRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_HandRight, JointType_HandTipRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_WristRight, JointType_ThumbRight );

        // Left Arm
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_ShoulderLeft, JointType_ElbowLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_ElbowLeft, JointType_WristLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_WristLeft, JointType_HandLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_HandLeft, JointType_HandTipLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_WristLeft, JointType_ThumbLeft );

        // Right Leg
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_HipRight, JointType_KneeRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_KneeRight, JointType_AnkleRight );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_AnkleRight, JointType_FootRight );

        // Left Leg
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_HipLeft, JointType_KneeLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_KneeLeft, JointType_AnkleLeft );
        DrawBone( SkeletonImage, pointSet, pJoints, whichone, JointType_AnkleLeft, JointType_FootLeft );
    }

Code notes: a DrawBone() function is defined to draw the bones of the skeleton, and the joint points themselves are drawn with OpenCV's circle() function.
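To build this with VS2012, the Kinect 2.0 SDK and OpenCV 2.4.10 import libraries must be linked. The list below is an assumption about a typical setup, not part of the original post; adapt the names and paths to your installation:

    // Hypothetical linker settings for a release build
    #pragma comment( lib, "Kinect20.lib" )            // Kinect for Windows SDK 2.0
    #pragma comment( lib, "opencv_core2410.lib" )     // OpenCV 2.4.10 core
    #pragma comment( lib, "opencv_highgui2410.lib" )  // namedWindow / imshow / waitKey
    #pragma comment( lib, "opencv_imgproc2410.lib" )  // resize and drawing functions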



The SDK's three built-in hand gestures (the open, closed, and lasso hand states) are what the colored circles in the code above visualize.
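As a small illustration (not part of the original post), the same HandState value can also be overlaid as a text label with OpenCV's putText; the helper below is only a sketch:

    // Hypothetical helper: draw the hand state as text at pixel (x, y)
    void drawHandStateLabel( cv::Mat& image, HandState state, int x, int y )
    {
        const char* label = "Unknown";
        switch( state ){
        case HandState::HandState_Open:   label = "Open";   break;   // open palm
        case HandState::HandState_Closed: label = "Closed"; break;   // fist
        case HandState::HandState_Lasso:  label = "Lasso";  break;   // two fingers extended
        default: break;
        }
        cv::putText( image, label, cv::Point( x, y ), cv::FONT_HERSHEY_SIMPLEX,
                     1.0, cv::Scalar( 255, 255, 255 ), 2, CV_AA );
    }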


Comments and corrections are welcome!
