Recording Video on iOS (Frame-by-Frame Version)

//
#import <UIKit/UIKit.h>

#import <AVFoundation/AVFoundation.h>
#undef PRODUCER_HAS_VIDEO_CAPTURE
#define PRODUCER_HAS_VIDEO_CAPTURE (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000 && TARGET_OS_EMBEDDED)
@protocol CameraHelpDelegate
- (void)getSampleBufferImage:(UIImage *)v_image;
@end

@interface CameraHelp : NSObject
#if PRODUCER_HAS_VIDEO_CAPTURE
<AVCaptureVideoDataOutputSampleBufferDelegate>
#endif
{
@private
      int mWidth;
      int mHeight;
      int mFps;
      BOOL mFrontCamera;
      BOOL mFirstFrame;
      BOOL mStarted;
      UIView* mPreview;
      id<CameraHelpDelegate> outDelegate;
#if PRODUCER_HAS_VIDEO_CAPTURE
      AVCaptureSession* mCaptureSession;
      AVCaptureDevice *mCaptureDevice;
#endif
}
//Singleton accessor
+ (CameraHelp*)shareCameraHelp;
+ (void)closeCamera;
//Switch to the front camera
- (BOOL)setFrontCamera;
//Switch to the back camera
- (BOOL)setBackCamera;
//Set the capture parameters before starting
- (void)prepareVideoCapture:(int)width andHeight:(int)height andFps:(int)fps andFrontCamera:(BOOL)bfront andPreview:(UIView*)view;
//Start capturing
- (void)startVideoCapture;
//Stop capturing
- (void)stopVideoCapture;
//Set the view that the preview is rendered into
- (void)setPreview:(UIView*)preview;
//Set the frame-data output delegate
- (void)setVideoDataOutputBuffer:(id<CameraHelpDelegate>)delegate;
@end
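
A minimal usage sketch of this header (the view controller and everything in it are hypothetical, not part of the original class):

// Hypothetical caller: a view controller that adopts CameraHelpDelegate.
@interface MyViewController : UIViewController <CameraHelpDelegate>
@end

@implementation MyViewController

- (void)viewDidAppear:(BOOL)animated
{
      [super viewDidAppear:animated];
      CameraHelp *camera = [CameraHelp shareCameraHelp];
      // 480x640 at 15 fps, back camera, preview rendered into self.view
      [camera prepareVideoCapture:480 andHeight:640 andFps:15 andFrontCamera:NO andPreview:self.view];
      [camera setVideoDataOutputBuffer:self];
      [camera startVideoCapture];
}

// Delegate callback: receives one UIImage per captured frame
- (void)getSampleBufferImage:(UIImage *)v_image
{
      // e.g. collect the frames here for the save-to-video step further down
}

@end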
-------------------------------------------------------------------------
//
//  CameraHelp.m
// 
//
//  Created by zcx. on 11-6-28.
//  Copyright 2011  . All rights reserved.
//

#import "CameraHelp.h"
//
//      Private
//
@interface CameraHelp (Private)

#if PRODUCER_HAS_VIDEO_CAPTURE
+(AVCaptureDevice *)cameraAtPosition:(AVCaptureDevicePosition)position;
- (void)startPreview;
- (void)stopPreview;
#endif

@end

@implementation CameraHelp (Private)

#if PRODUCER_HAS_VIDEO_CAPTURE
+ (AVCaptureDevice *)cameraAtPosition:(AVCaptureDevicePosition)position{
      NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
      for (AVCaptureDevice *device in cameras){
              if (device.position == position){
                      return device;
              }
      }
      return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}

- (void)startPreview{
      if(mCaptureSession && mPreview && mStarted){
            AVCaptureVideoPreviewLayer* previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:mCaptureSession];
            previewLayer.frame = mPreview.bounds;
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//            if(previewLayer.orientationSupported){
//                  previewLayer.orientation = mOrientation;
//            }
            [mPreview.layer addSublayer: previewLayer];
           
            if(![mCaptureSession isRunning]){
                  [mCaptureSession startRunning];
            }
      }
}

- (void)stopPreview{
      if(mCaptureSession){           
            if([mCaptureSession isRunning]){
                  [mCaptureSession stopRunning];
                 
                  // remove all sublayers
                  if(mPreview){
                        for(CALayer *ly in mPreview.layer.sublayers){
                              if([ly isKindOfClass: [AVCaptureVideoPreviewLayer class]])
                              {
                                    [ly removeFromSuperlayer];
                                    break;
                              }
                        }
                  }
            }
      }
}
#endif
@end

@implementation CameraHelp
static CameraHelp* g_camera = 0;
- (id)init
{
      if(g_camera)
            return g_camera;
      else
      {
            if(self = [super init])
            {
                  self->mWidth = 30;
                  self->mHeight = 30;
                  self->mFps = 60;
                  self->mFrontCamera = NO;
                  self->mStarted = NO;
                  g_camera = self;
                  outDelegate = nil;
            }
            return g_camera;
      }
}
-(void)dealloc
{
#if PRODUCER_HAS_VIDEO_CAPTURE
      [mCaptureSession release];
      [mCaptureDevice release];
      [mPreview release];
#endif
      [super dealloc];
}
+ (CameraHelp*)shareCameraHelp
{
      if(!g_camera)
            g_camera = [[CameraHelp alloc] init];
      return g_camera;
}
+ (void)closeCamera
{
      if(g_camera)
      {
            [g_camera release]; // never call -dealloc directly; release lets the runtime invoke it
            g_camera = nil;
      }
}
- (void)prepareVideoCapture:(int)width andHeight:(int)height andFps:(int)fps andFrontCamera:(BOOL)bfront andPreview:(UIView*)view
{
      self->mWidth = width;
      self->mHeight = height;
      self->mFps = fps;
      self->mFrontCamera = bfront;
      if(view)
            self->mPreview = [view retain];
#if PRODUCER_HAS_VIDEO_CAPTURE     
      if([mCaptureSession isRunning])
      {
            [self stopVideoCapture];
            [self startVideoCapture];
      }
#endif
}
- (void)startVideoCapture
{
#if PRODUCER_HAS_VIDEO_CAPTURE     
      // Keep the screen awake while capturing
      [[UIApplication sharedApplication] setIdleTimerDisabled:YES];
      // Open the camera device and start capturing frames
      //[labelState setText:@"Starting Video stream"];
      if(mCaptureDevice || mCaptureSession)
      {
            NSLog(@"Already capturing");
            return;
      }
     
      // Retain the device: cameraAtPosition: returns an autoreleased object,
      // and stopVideoCapture/dealloc release it
      if((mCaptureDevice = [[CameraHelp cameraAtPosition:mFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack] retain]) == nil)
      {
            NSLog(@"Failed to get valid capture device");
            return;
      }
     
      NSError *error = nil;
      AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:mCaptureDevice error:&error];
      if (!videoInput)
      {
            NSLog(@"Failed to get video input");
            mCaptureDevice = nil;
              return;
      }
     
      mCaptureSession = [[AVCaptureSession alloc] init];

      mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;

      [mCaptureSession addInput:videoInput];
     
      // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
      // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
      // On iPhone 3G, the recommended pixel format choices are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
      //
      AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
     
      NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
      NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
      NSDictionary* settings = [NSDictionary dictionaryWithObject:value forKey:key];
//      NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
//                                //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], //kCVPixelBufferPixelFormatTypeKey,
//                                [NSNumber numberWithInt:mWidth], (id)kCVPixelBufferWidthKey,
//                                [NSNumber numberWithInt:mHeight], (id)kCVPixelBufferHeightKey,
//                                nil];

      avCaptureVideoDataOutput.videoSettings = settings;
      //[settings release];
//      avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, 30); // deprecated; see the frame-rate sketch after this method
      avCaptureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;
     
     
     
      dispatch_queue_t queue = dispatch_queue_create("com.gh.cecall", NULL);
      [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
      [mCaptureSession addOutput:avCaptureVideoDataOutput];
      // settings is autoreleased (dictionaryWithObject:forKey:), so it must not be released here
      [avCaptureVideoDataOutput release];
      dispatch_release(queue);
      mFirstFrame = YES;
      mStarted = YES;
     
      //start preview
      [self startPreview];
     
#endif
}
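
The minFrameDuration property referenced in the commented-out line above was later deprecated; on iOS 7 and newer the frame rate is configured on the capture device instead. A minimal sketch under that assumption, using the mCaptureDevice and mFps ivars (this snippet is not part of the original class):

      // Sketch: cap the capture frame rate via AVCaptureDevice (iOS 7+)
      NSError *fpsError = nil;
      if ([mCaptureDevice lockForConfiguration:&fpsError]) {
            mCaptureDevice.activeVideoMinFrameDuration = CMTimeMake(1, mFps);
            mCaptureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, mFps);
            [mCaptureDevice unlockForConfiguration];
      } else {
            NSLog(@"Could not lock device for fps configuration: %@", fpsError);
      }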
- (void)stopVideoCapture
{
#if PRODUCER_HAS_VIDEO_CAPTURE     
      if(mCaptureSession){
            [mCaptureSession stopRunning];
            [mCaptureSession release], mCaptureSession = nil;
            NSLog(@"Video capture stopped");
      }
      [mCaptureDevice release], mCaptureDevice = nil;
     
      if(mPreview){
            for (UIView *view in mPreview.subviews) {
                  [view removeFromSuperview];
            }
      }
#endif
}



- (BOOL)setFrontCamera
{
      if(mFrontCamera)
            return YES;
      [self stopVideoCapture];
      mFrontCamera = YES;
      [self startVideoCapture];
      return YES;
}

- (BOOL)setBackCamera{
      if(!mFrontCamera)
            return YES;
      [self stopVideoCapture];
      mFrontCamera = NO;
      [self startVideoCapture];
      return YES;
}

- (void) setPreview: (UIView*)preview{
#if PRODUCER_HAS_VIDEO_CAPTURE     
      if(preview == nil){
            // stop preview
            [self stopPreview];
            // remove layers
            if(mPreview){
                  for(CALayer *ly in mPreview.layer.sublayers){
                        if([ly isKindOfClass: [AVCaptureVideoPreviewLayer class]]){
                              [ly removeFromSuperlayer];
                              break;
                        }
                  }
                  [mPreview release], mPreview = nil;
            }
      }
      else {
            //start preview
              if (mPreview) {
                      [mPreview release];
                      mPreview = nil;
              }
            if((mPreview = [preview retain])){
                  [self startPreview];
            }
      }
     
#endif
}
- (void)setVideoDataOutputBuffer:(id<CameraHelpDelegate>)delegate
{
      outDelegate = delegate;
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
#if PRODUCER_HAS_VIDEO_CAPTURE     
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
      UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
      if (outDelegate) {
            [outDelegate getSampleBufferImage:image];
      }
#if 0
      NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
     
      // Process the captured output data however you like
      CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
     
      // Lock once (the original locked twice, leaving one lock unbalanced)
      if(CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
      {
//            void *bufferPtr = CVPixelBufferGetBaseAddress(imageBuffer);
            UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            size_t buffeSize = CVPixelBufferGetDataSize(imageBuffer);
            NSLog(@"%zu", buffeSize);
            if(self->mFirstFrame)
            {
                  // First frame: report bytes-per-row, width/height and pixel format
                  size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
                  size_t width = CVPixelBufferGetWidth(imageBuffer);
                  size_t height = CVPixelBufferGetHeight(imageBuffer);
                  NSNumber *numberRow = [NSNumber numberWithInteger:bytesPerRow];
                  NSNumber *numberWidth = [NSNumber numberWithInteger:width];
                  NSNumber *numberHeight = [NSNumber numberWithInteger:height];
                 
                  NSArray *array = [NSArray arrayWithObjects:numberRow, numberWidth, numberHeight, nil];
                 
                  if (outDelegate) {
                        [outDelegate getVideoSizeInfo:array];
                  }
                  OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
                  switch (pixelFormat) {
                        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                              //engine->srcFormat = VideoFormat_NV12;//PIX_FMT_NV12;
                              NSLog(@"Capture pixel format=NV12");
                              break;
                        case kCVPixelFormatType_422YpCbCr8:
                              //engine->srcFormat = VideoFormat_UYVY;//PIX_FMT_UYVY422;
                              NSLog(@"Capture pixel format=UYVY422");
                              break;
                        default:
                              //engine->srcFormat = VideoFormat_BGR32;//PIX_FMT_RGB32;
                              NSLog(@"Capture pixel format=RGB32");
                  }
                  mFirstFrame = NO;
            }
            //send data
            //engine->SendVideoFrame((unsigned char*)bufferPtr,buffeSize);
            if(outDelegate){
                    [outDelegate videoDataOutputBuffer:(char*)bufferPtr dataSize:buffeSize];
              }
           
              CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
      }
      [pool release];
#endif
}
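
Note that the disabled (#if 0) path above calls getVideoSizeInfo: and videoDataOutputBuffer:dataSize:, neither of which the CameraHelpDelegate protocol in the header declares. If that path is re-enabled, the protocol would need to grow roughly as follows (a sketch inferred from the call sites; the signatures are assumptions):

@protocol CameraHelpDelegate
- (void)getSampleBufferImage:(UIImage *)v_image;
// Assumed additions, required only by the disabled raw-buffer path:
- (void)getVideoSizeInfo:(NSArray *)v_sizeInfo;   // bytesPerRow, width, height
- (void)videoDataOutputBuffer:(char *)buffer dataSize:(size_t)size;
@end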

// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
      // Get a CMSampleBuffer's Core Video image buffer for the media data
      CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
      // Lock the base address of the pixel buffer
      CVPixelBufferLockBaseAddress(imageBuffer, 0);
     
      // Get the base address of the pixel buffer
      void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
     
      // Get the number of bytes per row for the pixel buffer
      size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
      // Get the pixel buffer width and height
      size_t width = CVPixelBufferGetWidth(imageBuffer);
      size_t height = CVPixelBufferGetHeight(imageBuffer);
     
      // Create a device-dependent RGB color space
      CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
     
      // Create a bitmap graphics context with the sample buffer data
      CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                   bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
      // Create a Quartz image from the pixel data in the bitmap graphics context
      CGImageRef quartzImage = CGBitmapContextCreateImage(context);
      // Unlock the pixel buffer
      CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
     
      // Free up the context and color space
      CGContextRelease(context);
      CGColorSpaceRelease(colorSpace);
     
      // Create an image object from the Quartz image
      UIImage *image = [UIImage imageWithCGImage:quartzImage];
     
      // Release the Quartz image
      CGImageRelease(quartzImage);
     
      return (image);
}

#endif
@end
----------------------------- Saving the Images as a Video -----------------------------
- (void) saveVideo {
      NSString *strSpeed = nil;
      NSString *strAgle = nil;
      if (m_saveMutableDict) {
            strSpeed = [m_saveMutableDict objectForKey:SWING_SPEED];
            strAgle = [m_saveMutableDict objectForKey:SWING_ANGLE];
      }
     
      // Define the video dimensions
      CGSize size;
#if isPad
      size = CGSizeMake(480, 640); // 960*640
#else
      size = CGSizeMake(480, 640);
#endif
     
      NSError *error = nil;

      NSString *filePath = [[Utilities getSanBoxPath] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", self.m_strUUID]];
     
      unlink([filePath UTF8String]);
     
      //--initialize the compression engine
      AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:filePath]
                                                             fileType:AVFileTypeQuickTimeMovie
                                                                error:&error];
      NSParameterAssert(videoWriter);
      if(error)
            NSLog(@"error = %@", [error localizedDescription]);
     
      NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                     [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                     [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
      AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
     
      NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
     
      AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                       assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
      NSParameterAssert(writerInput);
      NSParameterAssert([videoWriter canAddInput:writerInput]);
     
      if ([videoWriter canAddInput:writerInput])
            NSLog(@"Writer input can be added");
      else
            NSLog(@"Writer input cannot be added");
     
      [videoWriter addInput:writerInput];
      [videoWriter startWriting];
      [videoWriter startSessionAtSourceTime:kCMTimeZero];
     
      // Composite the images into a single video file
      dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
      int __block frame = 0;
     
      [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
              while ([writerInput isReadyForMoreMediaData])
                  {
                      if(frame >= [m_mutableArrayDatas count]) // incremented at the end of the loop; the original ++frame here skipped image 0
                        {
                              [writerInput markAsFinished];
                              [videoWriter finishWriting];
                              [videoWriter release];
                              dispatch_release(dispatchQueue);
                              [NSThread detachNewThreadSelector:@selector(saveOneImageAndPlist) toTarget:self withObject:nil];
                              break;
                        }
                      CVPixelBufferRef buffer = NULL;
                 
                      int idx = frame;
                      UIImage *imageOld = [m_mutableArrayDatas objectAtIndex:idx];
                  // Report save progress to the delegate
                  if (m_delegate && [m_delegate respondsToSelector:@selector(saveVideoWithProgress:)]) {
                        [m_delegate saveVideoWithProgress:(1.0f*frame/[m_mutableArrayDatas count])];
                  }
                  // Convert the image into a pixel buffer
                      buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[imageOld CGImage] size:size andSpeed:strSpeed andAngle:strAgle];
                      if (buffer)
                        {
                              // RECORD_VIDEO_FPS
                              if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, m_floatFPS)]) {
                                    dispatch_release(dispatchQueue);
                                    [self restoreDefault];
                                    // This branch runs on failure; restore the state
                                    // to what it was before recording started
                                    NSLog(@"Video recording failed");
                              } else
                                    CFRelease(buffer);
                        }
                        frame++;
                  }
      }];
}
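
[videoWriter finishWriting] inside the block above is synchronous and was deprecated in iOS 6. A minimal sketch of the asynchronous replacement for that branch, assuming the same writerInput, videoWriter and filePath variables as above:

      // Sketch: asynchronous finish (iOS 6+), in place of [videoWriter finishWriting]
      [writerInput markAsFinished];
      [videoWriter finishWritingWithCompletionHandler:^{
            if (videoWriter.status == AVAssetWriterStatusCompleted)
                  NSLog(@"Video saved to %@", filePath);
            else
                  NSLog(@"Video writing failed: %@", videoWriter.error);
            [videoWriter release];
      }];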


- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size andSpeed:(NSString *)v_speed andAngle:(NSString*)v_angle
{
      //Impact Speed : = %f , Club Angle
      NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                               [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
      CVPixelBufferRef pxbuffer = NULL;
      CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
     
      NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
     
      CVPixelBufferLockBaseAddress(pxbuffer, 0);
      void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
      NSParameterAssert(pxdata != NULL);
     
      CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
      CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
      NSParameterAssert(context);
      CGContextSaveGState(context);
     
      // Rotate the drawing so the frame ends up in the correct orientation
      CGContextRotateCTM(context, -M_PI_2);
      CGContextTranslateCTM(context, -size.height, 0);
      CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),CGImageGetHeight(image)), image);
      CGContextRestoreGState(context);
      // Draw the watermark logo
      UIImage *imageLogo = [UIImage imageNamed:@"Watermark.png"];
      CGRect rectLogo ;
      //  1280 720
#if isPad
      rectLogo = CGRectMake(size.width-imageLogo.size.width-20.0f, size.height-imageLogo.size.height-170.0f, imageLogo.size.width, imageLogo.size.height);
#else
      rectLogo = CGRectMake(size.width-imageLogo.size.width-50.0f, size.height-imageLogo.size.height-25.0f, imageLogo.size.width, imageLogo.size.height);
#endif
      CGContextDrawImage(context, rectLogo, imageLogo.CGImage);
      // Only draw the stats when a club swing was recorded
      if (m_saveMutableDict) {             
#if isPad     
            MyDrawText(context , CGPointMake(20.0f, size.height-imageLogo.size.height-150.0f),v_speed);
            MyDrawText(context , CGPointMake(20.0f, size.height-imageLogo.size.height-180.0f),v_angle);
#else
            MyDrawText(context , CGPointMake(70.0f, size.height-30.0f),v_speed);
            MyDrawText(context , CGPointMake(70.0f, size.height-53.0f),v_angle);
#endif   
      }
      CGColorSpaceRelease(rgbColorSpace);
      CGContextRelease(context);
     
      CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
     
      return pxbuffer;
}

void MyDrawText (CGContextRef myContext, CGPoint point, NSString *v_strContext) {
      // The iPad and iPhone branches selected the same font, so one call suffices
      CGContextSelectFont (myContext,
                           "Impact",
                           20.0f,
                           kCGEncodingMacRoman);
      //      CGContextTranslateCTM(myContext, 0, 768);
      //      CGContextScaleCTM(myContext, 1, -1);
      CGContextSetCharacterSpacing (myContext, 1);
      CGContextSetTextDrawingMode (myContext, kCGTextFillStroke);
      CGContextSetLineWidth(myContext, 1.0f);
      CGContextSetFillColorWithColor(myContext, [UIColor colorWithRed:251.0f/255.0f green:237.0f/255.0f blue:75.0f/255.0f alpha:1.0f].CGColor);
      CGContextSetStrokeColorWithColor(myContext, [UIColor blackColor].CGColor);
      CGContextShowTextAtPoint (myContext, point.x, point.y, v_strContext.UTF8String, strlen(v_strContext.UTF8String));
      //      [v_strContext drawAtPoint:CGPointMake(100  , 100) withFont:[UIFont fontWithName:@"Helvetica" size:20]];
}
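
CGContextSelectFont and CGContextShowTextAtPoint, used above, were deprecated in iOS 7. A minimal Core Text sketch of an equivalent helper; MyDrawTextCT is a hypothetical name, CoreText.framework must be linked, and the yellowColor fill only approximates the original RGB values:

#import <CoreText/CoreText.h>

// Sketch: Core Text replacement for the deprecated CG text calls above
void MyDrawTextCT (CGContextRef myContext, CGPoint point, NSString *v_strContext) {
      CTFontRef font = CTFontCreateWithName(CFSTR("Impact"), 20.0f, NULL);
      NSDictionary *attrs = [NSDictionary dictionaryWithObjectsAndKeys:
                             (id)font, (id)kCTFontAttributeName,
                             (id)[UIColor yellowColor].CGColor, (id)kCTForegroundColorAttributeName, // approximates the yellow fill above
                             nil];
      NSAttributedString *string = [[NSAttributedString alloc] initWithString:v_strContext attributes:attrs];
      CTLineRef line = CTLineCreateWithAttributedString((CFAttributedStringRef)string);
      CGContextSetTextPosition(myContext, point.x, point.y);
      CTLineDraw(line, myContext); // draws at the current text position
      CFRelease(line);
      [string release];
      CFRelease(font);
}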