AVCapture实现视频采集

我这里将采集到的视频帧转为了 YUV 格式,也可以根据自己的需要自行选择其他像素格式。

#import <UIKit/UIKit.h>
// NOTE(review): file-scope mutable globals shared between the capture
// delegate callback and the C conversion routine in the .m below — fragile;
// consider making these instance state on the controller instead.
FILE *pFile;            // output file handle for the dumped YUV data
uint8_t* baseAddress;   // base address of the Y plane of the most recent frame
size_t width;           // pixel width of the most recent frame
size_t height;          // pixel height of the most recent frame
// Root view controller: configures an AVCaptureSession and dumps camera
// frames as raw YUV.
@interface ViewController : UIViewController


@end

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <stdio.h>
#import <stdlib.h>
// Private class extension: adopts the video-data-output delegate and keeps
// the capture-pipeline objects alive for the lifetime of the controller.
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, strong) AVCaptureSession *session;                // capture pipeline
@property(nonatomic, strong) AVCaptureVideoPreviewLayer *preViewLayer; // live camera preview
@property(nonatomic, assign) int producerFps;                          // target frame rate (set, not yet applied to the device)


@end

@implementation ViewController

// Builds the capture pipeline as soon as the view hierarchy is ready.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupCaptureSession];
}
// Configures the AVCaptureSession: default camera input, an NV12 video-data
// output delivering frames to a private serial queue, and a full-screen
// preview layer — then starts the session.
- (void)setupCaptureSession
{
    NSError *error = nil;
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    // Check the returned object, not the NSError, to decide success.
    if (!input) {
        NSLog(@"error input:%@", error.description);
        return;
    }
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    AVCaptureVideoDataOutput *outPut = [[AVCaptureVideoDataOutput alloc] init];
    // Deliver sample buffers on a dedicated serial queue, off the main thread.
    dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
    [outPut setSampleBufferDelegate:self queue:queue];
    // Request NV12 (4:2:0 bi-planar, full range) frames from the camera.
    outPut.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey :
                                 @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    if ([self.session canAddOutput:outPut]) {
        [self.session addOutput:outPut];
    }

    _preViewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _preViewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _preViewLayer.frame = self.view.layer.bounds;
    [self.view.layer addSublayer:_preViewLayer];
    self.producerFps = 25;

    // BUG FIX: start the session only after all inputs/outputs are attached;
    // the original called -startRunning before -addOutput:, so the first
    // configuration could race with the running session.
    [self.session startRunning];
}

// Delegate callback — one call per captured frame, on the delegate queue.
// Records the Y-plane base address and frame dimensions into the file-scope
// globals consumed by the YUV conversion helper below.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    const int kFlags = 0;
    CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    // BUG FIX: guard against buffers with no image data (e.g. dropped frames).
    if (videoFrame == NULL) {
        return;
    }
    // Plane base addresses are only valid while the buffer is locked.
    if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
        return;
    }
    const int kYPlaneIndex = 0;
    const int kUVPlaneIndex = 1;

    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
    size_t yPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
    size_t yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
    size_t uvPlaneBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
    size_t uvPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
    // Total NV12 payload in bytes; rows may be padded, hence bytes-per-row.
    size_t frameSize = yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
    NSLog(@"%zu", frameSize);

    // Frame dimensions in pixels.
    width = CVPixelBufferGetWidth(videoFrame);
    height = CVPixelBufferGetHeight(videoFrame);

    // Timestamp for logging.
    // BUG FIX: "yyyy" (calendar year), not "YYYY" (ISO week-based year, which
    // is wrong around New Year). The formatter is also created once — building
    // an NSDateFormatter per frame is expensive.
    static NSDateFormatter *formatter = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        formatter = [[NSDateFormatter alloc] init];
        [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];
    });
    NSLog(@"%@", [formatter stringFromDate:[NSDate date]]);
    NSLog(@"%zu   %zu", width, height);

    CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
    // NOTE(review): `baseAddress` saved above points into the pixel buffer
    // and is not guaranteed valid after the unlock — copy the data out while
    // the buffer is locked if it will be read later.
}

// Converts an NV12 (YUV420SP) frame into planar I420 (YUV420P) and appends it
// to Documents/testvideo1.yuv1.
// NOTE(review): despite the name, this converts SP -> P (per the original
// author's comment); the name is kept so existing callers keep linking.
//
// Parameters (each falls back to the file-scope globals when NULL/0, which
// preserves the original call pattern where the globals were always used):
//   yuv420sp — source NV12 buffer (Y plane then interleaved UVUV...)
//   yuv420p  — destination I420 buffer, at least widthp*heightp*3/2 bytes
//   widthp   — frame width in pixels;  heightp — frame height in pixels
//
// BUG FIXES vs. the original:
//  * parameters are honored instead of being unconditionally overwritten by
//    the globals;
//  * the U and V destination planes were swapped (U samples landed at the
//    I420 V-plane offset framesize*5/4 and vice versa);
//  * the write length is the exact frame size (w*h*3/2) rather than strlen()
//    on binary data, which stops at the first zero byte;
//  * the file is opened in binary append mode and closed, so successive
//    frames accumulate instead of each call truncating the file.
void YUV420SPToYUV420SP(uint8_t *yuv420sp, uint8_t *yuv420p, size_t widthp, size_t heightp)
{
    if (yuv420sp == NULL) yuv420sp = baseAddress;
    if (widthp == 0)      widthp = width;
    if (heightp == 0)     heightp = height;
    if (yuv420sp == NULL || yuv420p == NULL)
        return;

    size_t framesize = widthp * heightp;

    // Y plane is laid out identically in both formats — straight copy.
    for (size_t i = 0; i < framesize; i++) {
        yuv420p[i] = yuv420sp[i];
    }

    // De-interleave chroma: NV12 stores U at even and V at odd offsets right
    // after the Y plane; I420 stores all U (framesize/4 bytes), then all V.
    uint8_t *dstU = yuv420p + framesize;
    uint8_t *dstV = yuv420p + framesize + framesize / 4;
    const uint8_t *srcUV = yuv420sp + framesize;
    for (size_t k = 0; k < framesize / 4; k++) {
        dstU[k] = srcUV[2 * k];
        dstV[k] = srcUV[2 * k + 1];
    }

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *filePath = [paths.lastObject stringByAppendingPathComponent:@"testvideo1.yuv1"];
    pFile = fopen([filePath UTF8String], "ab");
    if (pFile != NULL) {
        fwrite(yuv420p, 1, framesize * 3 / 2, pFile);
        fclose(pFile);
        pFile = NULL;
    }
}
// No controller-specific caches to drop; defer entirely to UIKit's default.
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end

参考网址

http://blog.csdn.net/u010180166/article/details/8923134

http://www.cnblogs.com/mfryf/archive/2012/03/10/2389121.html

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值