Getting Camera Data Directly on iOS

Frameworks to add: CoreMedia, CoreVideo, QuartzCore, AVFoundation
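
The approach: build an AVCaptureSession whose input is the device camera and whose output is an AVCaptureVideoDataOutput. The output hands every captured frame to a sample-buffer delegate as a CMSampleBufferRef; the delegate converts the pixel buffer into a CGImage/UIImage and displays it three ways, on a plain CALayer, in a UIImageView, and (for comparison) through an AVCaptureVideoPreviewLayer driven directly by the session.
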
MyAVController.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h>

@interface MyAVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
}

@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;

@end

MyAVController.m:

#import "MyAVController.h"

@implementation MyAVController

@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;

#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}

- (void)initCapture {
    // Input: the default video camera. The error is ignored here as in the original;
    // in production you would check that captureInput is non-nil before using it.
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
        deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                        error:nil];

    // Output: delivers raw frames to the sample-buffer delegate.
    AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);

    // Frames are delivered on a serial background queue.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Ask for BGRA pixel buffers so they can be passed straight to CGBitmapContextCreate.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    // Wire input and output into a session and start capturing.
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];

    // A plain CALayer whose contents are set to each frame's CGImage.
    // The rotation compensates for the camera's landscape orientation.
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    // A UIImageView updated with a UIImage built from each frame.
    self.imageView = [[[UIImageView alloc] init] autorelease];
    self.imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:self.imageView];

    // The standard preview layer, driven directly by the session.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];
}

#pragma mark -
#pragma mark AVCaptureSession delegate
// Called on the capture queue for every frame delivered by the video data output.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // This runs on a background queue, so under manual reference counting it
    // keeps its own autorelease pool.
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    // Lock the pixel buffer and grab its raw BGRA bytes.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the bytes in a bitmap context and turn them into a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
        width, height, 8, bytesPerRow, colorSpace,
        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    // Display the CGImage on the custom layer; UIKit/Core Animation updates must
    // happen on the main thread.
    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject:(id)newImage
                                    waitUntilDone:YES];

    // Also build a UIImage, rotated to compensate for the camera orientation,
    // and push it to the image view.
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                   orientation:UIImageOrientationRight];

    CGImageRelease(newImage);

    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image
                                  waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    [pool drain];
}

#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    [super viewDidUnload];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc {
    [_captureSession release];
    [_imageView release];
    [_customLayer release];
    [_prevLayer release];
    [super dealloc];
}

@end
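
To try it out, MyAVController can be installed as the window's root view controller from the app delegate. Below is a minimal sketch in the same manual-reference-counting style as the rest of the post; the AppDelegate class and its window property are the standard Xcode template pieces and are not part of the original code. Capture only works on a real device, not in the simulator, and on iOS 10 and later the app also needs an NSCameraUsageDescription entry in Info.plist.

AppDelegate.m (sketch):

#import "AppDelegate.h"
#import "MyAVController.h"

@implementation AppDelegate

- (BOOL)application:(UIApplication *)application
    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    self.window = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];
    // Show the capture controller full screen; frames start arriving once
    // viewDidLoad runs and the session has been started.
    MyAVController *controller = [[[MyAVController alloc] init] autorelease];
    self.window.rootViewController = controller;
    [self.window makeKeyAndVisible];
    return YES;
}

@end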