iOS 获取摄像头视频

#import "ViewController.h"

#import <AVFoundation/AVFoundation.h>

#import <CoreGraphics/CoreGraphics.h>

#import <CoreVideo/CoreVideo.h>

#import <CoreMedia/CoreMedia.h>

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

// NOTE: the file is compiled under ARC ([[... alloc] init] with no release),
// so the legacy `retain` attributes below were replaced with the ARC
// idiom `strong` — identical semantics, modern spelling.

// Unused in this file; kept for source compatibility. TODO(review): fold into
// captureSession or delete once confirmed nothing else references it.
@property (nonatomic, strong) AVCaptureSession *session;

// The video data output attached to captureSession.
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;

// Saved video connection, used by the sample-buffer delegate to tell
// video buffers apart from (future) audio buffers.
@property (nonatomic, strong) AVCaptureConnection *videoConnection;

// The running capture session driving the camera.
@property (nonatomic, strong) AVCaptureSession *captureSession;

// Small preview fed a UIImage snapshot of every captured frame.
@property (nonatomic, strong) UIImageView *imageView;

// Raw CALayer whose contents are set to each frame's CGImage.
@property (nonatomic, strong) CALayer *customLayer;

// Optional AVFoundation-native preview layer (currently disabled in initCapture).
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;

@end

@implementation ViewController

#pragma mark - Init

// Designated initializer. alloc has already zeroed every ivar, so the
// explicit nil assignments the original carried were redundant and were removed.
- (instancetype)init
{
    self = [super init];
    return self;
}

#pragma mark - View lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}

#pragma mark - Capture setup

// Builds the whole capture pipeline: front camera -> BGRA video data output
// delivered to self on a background queue -> on-screen CALayer + UIImageView.
- (void)initCapture {
    // Capture input: prefer the front camera; fall back to the default video
    // device (the original fetched the default and immediately overwrote it).
    AVCaptureDevice *videoDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionFront];
    if (!videoDevice) {
        videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    if (!videoDevice) {
        // e.g. running on the simulator — nothing to capture.
        NSLog(@"initCapture: no video capture device available");
        return;
    }

    // Wrap the device in an input; surface the error instead of passing nil
    // (a nil input would have crashed addInput: below).
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice
                                                                               error:&inputError];
    if (!captureInput) {
        NSLog(@"initCapture: failed to create device input: %@", inputError);
        return;
    }

    // Configure the output that hands us raw video frames.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop frames that arrive while the delegate is still busy rather than queueing them.
    captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver sample buffers on a dedicated serial queue, off the main thread.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];

    // Request BGRA frames so the delegate can feed them straight into CoreGraphics.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    // Assemble the session, guarding each add (addInput:/addOutput: throw on failure).
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canAddInput:captureInput]) {
        [_captureSession addInput:captureInput];
    }
    if ([_captureSession canAddOutput:captureOutput]) {
        [_captureSession addOutput:captureOutput];
    }
    self.videoOutput = captureOutput;

    // BUGFIX: the original discarded this return value, so _videoConnection
    // stayed nil and the delegate's video/audio check never matched. Save the
    // connection BEFORE frames start flowing.
    self.videoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];

    [_captureSession startRunning];

    // Raw CALayer whose contents are replaced with each frame's CGImage.
    _customLayer = [CALayer layer];
    _customLayer.frame = self.view.bounds;
    // Camera buffers arrive in landscape; rotate the layer 90° to compensate.
    _customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    _customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:_customLayer];

    // Small UIImageView fed UIImage snapshots of each frame.
    _imageView = [[UIImageView alloc] init];
    _imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:_imageView];

//    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession: _captureSession];
//    _prevLayer.frame = CGRectMake(100, 0, 100, 100);
//    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//    [self.view.layer addSublayer: self.prevLayer];
}

/**
 *  Returns the capture device at the given position.
 *
 *  NOTE(review): devicesWithMediaType: is deprecated since iOS 10;
 *  AVCaptureDeviceDiscoverySession is the modern replacement — kept as-is
 *  because the file targets the older API throughout.
 *
 *  @param position The desired camera position (front/back).
 *
 *  @return The matching device, or nil if none exists (e.g. simulator).
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Called on the "cameraQueue" background queue for every captured frame.
// Renders the BGRA pixel buffer into a CGImage and pushes it to the UI.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Only video buffers are handled; the connection saved in initCapture
    // distinguishes them (an audio connection could be added later).
    // Hook point: hand the video sampleBuffer to an H.264 encoder here.
    if (connection != self.videoConnection) {
        return;
    }

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }

    // Lock the pixel buffer while CoreGraphics reads its base address.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // BGRA little-endian premultiplied-first matches kCVPixelFormatType_32BGRA.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // waitUntilDone:YES guarantees the layer retains the CGImage before the
    // CGImageRelease below.
    [_customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(__bridge id)newImage waitUntilDone:YES];
    // UIImage retains the CGImage; orientation compensates for the sensor rotation.
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [_imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    // Unlock only after all CoreGraphics work on baseAddress is finished.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

#pragma mark - Memory management

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end



评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值