目的:打开、关闭前置摄像头,绘制图像,并获取摄像头的二进制数据。
需要的库
AVFoundation.framework 、CoreVideo.framework 、CoreMedia.framework 、QuartzCore.framework
该摄像头捕捉功能必须编译真机版本,模拟器下编译不了。
函数说明
- ( void )createControl
{
// UI界面控件的创建
}
- (AVCaptureDevice *)getFrontCamera;
获取前置摄像头设备
- (void)startVideoCapture;
打开摄像头并开始捕捉图像
其中代码:
AVCaptureVideoPreviewLayer* previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: self -> avCaptureSession ];
previewLayer. frame = localView . bounds ;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self->localView.layer addSublayer: previewLayer];
为把图片画到UIView里面
- ( void )stopVideoCapture:( id )arg;
关闭摄像头,停止捕捉图像
其中代码:
for ( UIView * view in self -> localView . subviews ) {
[ view removeFromSuperview ];
}
为移除摄像头图像的View
详情见代码,代码拷过去可以直接使用 Over!!!!
代码:
头文件:
//
// AVCallController.h
// Pxlinstall
//
// Created by Lin Charlie C. on 11-3-24.
// Copyright 2011 xxxx. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// View controller that opens/closes the front camera, renders a live preview
// into a local UIView, and receives raw frame data via the
// AVCaptureVideoDataOutputSampleBufferDelegate callback.
// Requires AVFoundation, CoreVideo, CoreMedia, QuartzCore; device-only (no simulator).
@interface AVCallController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
//UI
UILabel * labelState ;      // status line shown at the top of the screen
UIButton * btnStartVideo ;  // "Start" button; retained by the view hierarchy after -createControl
UIView * localView ;        // container that hosts the camera preview layer
AVCaptureSession* avCaptureSession ;  // capture session; non-nil only while capturing
AVCaptureDevice * avCaptureDevice ;   // front camera device; non-nil only while capturing
BOOL firstFrame ; // whether the next sample-buffer callback is the first frame
int producerFps ; // requested capture frame rate (frames per second)
}
// MRC-era retained properties backed by the ivars above.
@property ( nonatomic , retain ) AVCaptureSession *avCaptureSession;
@property ( nonatomic , retain ) UILabel *labelState;
- ( void )createControl;              // builds the UI programmatically
- (AVCaptureDevice *)getFrontCamera;  // front camera, or the default video device
- ( void )startVideoCapture;          // opens the camera and starts streaming
- ( void )stopVideoCapture:( id )arg; // stops streaming and tears down the preview
@end
/
/
/
实现文件:
//
// AVCallController.m
// Pxlinstall
//
// Created by Lin Charlie C. on 11-3-24.
// Copyright 2011 高鸿移通. All rights reserved.
//
#import "AVCallController.h"
@implementation AVCallController
@synthesize avCaptureSession;
@synthesize labelState;
// The designated initializer. Override if you create the controller programmatically and want to perform customization that is not appropriate for viewDidLoad.
/*
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization.
}
return self;
}
*/
// Designated initializer: seeds the capture defaults used by -startVideoCapture
// and the sample-buffer callback.
-( id )init
{
    self = [super init];
    if (self != nil) {
        firstFrame = YES;   // the next delegate callback will be treated as the first frame
        producerFps = 50;   // requested capture rate, frames per second
    }
    return self;
}
// Implement loadView to create a view hierarchy programmatically, without using a nib.
// Build the view hierarchy in code (no nib); all controls are created in -createControl.
- ( void )loadView {
[ super loadView ];
[ self createControl ];
}
/*
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
}
*/
/*
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations.
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
*/
// Memory-warning hook; this controller caches nothing, so only super is called.
- ( void )didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[ super didReceiveMemoryWarning ];
// Release any cached data, images, etc. that aren't in use.
}
// Called (pre-iOS 6) when the view is purged under memory pressure.
// The ivars here are not retained by this controller, so only super is called.
- ( void )viewDidUnload {
[ super viewDidUnload ];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- ( void )dealloc {
    // MRC cleanup: avCaptureSession is alloc'ed in -startVideoCapture and owned
    // by this object; without this release it leaks if the controller is
    // destroyed while still capturing. (No-op when already nil.)
    // avCaptureDevice is never retained (autoreleased from the device list),
    // so it must NOT be released here.
    [ avCaptureSession release ];
    [ super dealloc ];
}
#pragma mark -
#pragma mark createControl
- ( void )createControl
{
    // Build the UI programmatically (called from -loadView).
    self.view.backgroundColor = [UIColor grayColor];
    // Status label across the top of the screen.
    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];
    [labelState release];   // view hierarchy retains it; the ivar is a non-owning back-reference
    // "Start" button -- begins video capture.
    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    [btnStartVideo setTitle:@"Start" forState:UIControlStateNormal];   // fixed typo: was "Star"
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [btnStartVideo addTarget:self action:@selector(startVideoCapture) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];
    [btnStartVideo release];
    // "Stop" button -- ends video capture.
    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [stop addTarget:self action:@selector(stopVideoCapture:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];
    [stop release];
    // Container view that will host the AVCaptureVideoPreviewLayer.
    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
    [localView release];
}
#pragma mark -
#pragma mark VideoCapture
// Returns the front-facing camera if the device has one, otherwise falls back
// to the default video capture device. The returned object is autoreleased /
// not owned by the caller.
- (AVCaptureDevice *)getFrontCamera
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == AVCaptureDevicePositionFront) {
            return candidate;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
- ( void )startVideoCapture
{
    // Open the front camera, start streaming frames to the delegate queue,
    // and attach a live preview layer to localView.
    [labelState setText:@"Starting Video stream"];
    // Guard against double-start: both ivars are nil when idle.
    if (self->avCaptureDevice || self->avCaptureSession)
    {
        [labelState setText:@"Already capturing"];
        return;
    }
    if ((self->avCaptureDevice = [self getFrontCamera]) == nil)
    {
        [labelState setText:@"Failed to get valid capture device"];   // fixed typo: was "valide"
        return;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:self->avCaptureDevice error:&error];
    if (!videoInput)
    {
        // Surface the failure reason instead of silently dropping the NSError.
        NSLog(@"startVideoCapture: failed to create device input: %@", error);
        [labelState setText:@"Failed to get video input"];
        self->avCaptureDevice = nil;
        return;
    }
    // Ownership of the session transfers to the ivar (MRC); it is balanced by
    // a release in -stopVideoCapture: (or -dealloc).
    self->avCaptureSession = [[AVCaptureSession alloc] init];
    self->avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    [self->avCaptureSession addInput:videoInput];
    // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended pixel format choices are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
                              //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                              [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
                              [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
                              nil];
    avCaptureVideoDataOutput.videoSettings = settings;
    [settings release];
    // NOTE(review): minFrameDuration was deprecated in iOS 5 in favour of
    // AVCaptureConnection.videoMinFrameDuration; kept for this iOS 4-era SDK.
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, self->producerFps);
    /* Serial queue so frames are delivered to the delegate one at a time. */
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    [self->avCaptureSession addOutput:avCaptureVideoDataOutput];
    [avCaptureVideoDataOutput release];
    dispatch_release(queue);
    // Live preview: render the camera feed inside localView (added as a
    // sublayer -- -stopVideoCapture: must remove it from the layer tree).
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self->localView.layer addSublayer:previewLayer];
    self->firstFrame = YES;
    [self->avCaptureSession startRunning];
    [labelState setText:@"Video capture started"];
}
- ( void )stopVideoCapture:( id )arg
{
    // Stop capturing and tear down the session and preview.
    if (self->avCaptureSession) {
        [self->avCaptureSession stopRunning];
        [self->avCaptureSession release];   // balances the alloc in -startVideoCapture (fixes an MRC leak)
        self->avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    self->avCaptureDevice = nil;   // never retained, so no release here
    // The preview is a CALayer added with addSublayer:, not a subview, so it
    // must be removed from the layer tree (the subview loop alone never
    // matched it and the last camera frame stayed frozen on screen).
    for (CALayer *layer in [NSArray arrayWithArray:self->localView.layer.sublayers]) {
        [layer removeFromSuperlayer];
    }
    // Also clear any subviews that were placed inside localView.
    for (UIView *view in [NSArray arrayWithArray:self->localView.subviews]) {
        [view removeFromSuperview];
    }
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureVideoDataOutputSampleBufferDelegate callback: invoked on the serial
// capture queue (NOT the main thread) once per captured frame. Process the raw
// pixel data here however the application needs.
- ( void )captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// Captured-frame output -- handle the data however you need.
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the buffer*/
if ( CVPixelBufferLockBaseAddress (pixelBuffer, 0 ) == kCVReturnSuccess )
{
// Base pointer and byte size of the locked pixel data.
// NOTE(review): both are currently unused -- this is where the bytes
// would be handed to an encoder / network producer.
UInt8 *bufferPtr = ( UInt8 *) CVPixelBufferGetBaseAddress (pixelBuffer);
size_t buffeSize = CVPixelBufferGetDataSize (pixelBuffer);
if ( self -> firstFrame )
{
if ( 1 )
{
// On the first frame only: inspect width, height and pixel format.
// NOTE(review): width/height are logged nowhere and otherwise unused.
int width = CVPixelBufferGetWidth (pixelBuffer);
int height = CVPixelBufferGetHeight (pixelBuffer);
int pixelFormat = CVPixelBufferGetPixelFormatType (pixelBuffer);
switch (pixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
NSLog ( @"Capture pixel format=NV12" );
break ;
case kCVPixelFormatType_422YpCbCr8 :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
NSLog ( @"Capture pixel format=UYUY422" );
break ;
default :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
NSLog ( @"Capture pixel format=RGB32" );
break ;
}
self -> firstFrame = NO ;
}
}
/*We unlock the buffer*/
CVPixelBufferUnlockBaseAddress (pixelBuffer, 0 );
}
/*We create an autorelease pool because as we are not in the main_queue our code is
not executed in the main thread. So we have to create an autorelease pool for the thread we are in*/
// NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
//
// CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// /*Lock the image buffer*/
// CVPixelBufferLockBaseAddress(imageBuffer,0);
// /*Get information about the image*/
// uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
// size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// size_t width = CVPixelBufferGetWidth(imageBuffer);
// size_t height = CVPixelBufferGetHeight(imageBuffer);
//
// /*Create a CGImageRef from the CVImageBufferRef*/
// CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// CGImageRef newImage = CGBitmapContextCreateImage(newContext);
//
// /*We release some components*/
// CGContextRelease(newContext);
// CGColorSpaceRelease(colorSpace);
//
// /*We display the result on the custom layer. All the display stuff must be done in the main thread because
// UIKit is no thread safe, and as we are not in the main thread (remember we didn't use the main_queue)
// we use performSelectorOnMainThread to call our CALayer and tell it to display the CGImage.*/
// [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
//
// /*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly).
// Same thing as for the CALayer we are not in the main thread so ...*/
// UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
//
// /*We relase the CGImageRef*/
// CGImageRelease(newImage);
//
// [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
//
// /*We unlock the image buffer*/
// CVPixelBufferUnlockBaseAddress(imageBuffer,0);
//
// [pool drain];
}
需要的库
AVFoundation.framework 、CoreVideo.framework 、CoreMedia.framework 、QuartzCore.framework
该摄像头捕捉功能必须编译真机版本,模拟器下编译不了。
函数说明
- ( void )createControl
{
// UI界面控件的创建
}
- (AVCaptureDevice *)getFrontCamera;
获取前置摄像头设备
- (void)startVideoCapture;
打开摄像头并开始捕捉图像
其中代码:
AVCaptureVideoPreviewLayer* previewLayer = [AVCaptureVideoPreviewLayer layerWithSession: self -> avCaptureSession ];
previewLayer. frame = localView . bounds ;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self->localView.layer addSublayer: previewLayer];
为把图片画到UIView里面
- ( void )stopVideoCapture:( id )arg;
关闭摄像头,停止捕捉图像
其中代码:
for ( UIView * view in self -> localView . subviews ) {
[ view removeFromSuperview ];
}
为移除摄像头图像的View
详情见代码,代码拷过去可以直接使用 Over!!!!
代码:
头文件:
//
// AVCallController.h
// Pxlinstall
//
// Created by Lin Charlie C. on 11-3-24.
// Copyright 2011 xxxx. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
// View controller that opens/closes the front camera, renders a live preview
// into a local UIView, and receives raw frame data via the
// AVCaptureVideoDataOutputSampleBufferDelegate callback.
// Requires AVFoundation, CoreVideo, CoreMedia, QuartzCore; device-only (no simulator).
@interface AVCallController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
//UI
UILabel * labelState ;      // status line shown at the top of the screen
UIButton * btnStartVideo ;  // "Start" button; retained by the view hierarchy after -createControl
UIView * localView ;        // container that hosts the camera preview layer
AVCaptureSession* avCaptureSession ;  // capture session; non-nil only while capturing
AVCaptureDevice * avCaptureDevice ;   // front camera device; non-nil only while capturing
BOOL firstFrame ; // whether the next sample-buffer callback is the first frame
int producerFps ; // requested capture frame rate (frames per second)
}
// MRC-era retained properties backed by the ivars above.
@property ( nonatomic , retain ) AVCaptureSession *avCaptureSession;
@property ( nonatomic , retain ) UILabel *labelState;
- ( void )createControl;              // builds the UI programmatically
- (AVCaptureDevice *)getFrontCamera;  // front camera, or the default video device
- ( void )startVideoCapture;          // opens the camera and starts streaming
- ( void )stopVideoCapture:( id )arg; // stops streaming and tears down the preview
@end
/
/
/
实现文件:
//
// AVCallController.m
// Pxlinstall
//
// Created by Lin Charlie C. on 11-3-24.
// Copyright 2011 高鸿移通. All rights reserved.
//
#import "AVCallController.h"
@implementation AVCallController
@synthesize avCaptureSession;
@synthesize labelState;
// The designated initializer. Override if you create the controller programmatically and want to perform customization that is not appropriate for viewDidLoad.
/*
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization.
}
return self;
}
*/
// Designated initializer: seeds the capture defaults used by -startVideoCapture
// and the sample-buffer callback.
-( id )init
{
    self = [super init];
    if (self != nil) {
        firstFrame = YES;   // the next delegate callback will be treated as the first frame
        producerFps = 50;   // requested capture rate, frames per second
    }
    return self;
}
// Implement loadView to create a view hierarchy programmatically, without using a nib.
// Build the view hierarchy in code (no nib); all controls are created in -createControl.
- ( void )loadView {
[ super loadView ];
[ self createControl ];
}
/*
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
}
*/
/*
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations.
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
*/
// Memory-warning hook; this controller caches nothing, so only super is called.
- ( void )didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[ super didReceiveMemoryWarning ];
// Release any cached data, images, etc. that aren't in use.
}
// Called (pre-iOS 6) when the view is purged under memory pressure.
// The ivars here are not retained by this controller, so only super is called.
- ( void )viewDidUnload {
[ super viewDidUnload ];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- ( void )dealloc {
    // MRC cleanup: avCaptureSession is alloc'ed in -startVideoCapture and owned
    // by this object; without this release it leaks if the controller is
    // destroyed while still capturing. (No-op when already nil.)
    // avCaptureDevice is never retained (autoreleased from the device list),
    // so it must NOT be released here.
    [ avCaptureSession release ];
    [ super dealloc ];
}
#pragma mark -
#pragma mark createControl
- ( void )createControl
{
    // Build the UI programmatically (called from -loadView).
    self.view.backgroundColor = [UIColor grayColor];
    // Status label across the top of the screen.
    labelState = [[UILabel alloc] initWithFrame:CGRectMake(10, 20, 220, 30)];
    labelState.backgroundColor = [UIColor clearColor];
    [self.view addSubview:labelState];
    [labelState release];   // view hierarchy retains it; the ivar is a non-owning back-reference
    // "Start" button -- begins video capture.
    btnStartVideo = [[UIButton alloc] initWithFrame:CGRectMake(20, 350, 80, 50)];
    [btnStartVideo setTitle:@"Start" forState:UIControlStateNormal];   // fixed typo: was "Star"
    [btnStartVideo setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [btnStartVideo addTarget:self action:@selector(startVideoCapture) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:btnStartVideo];
    [btnStartVideo release];
    // "Stop" button -- ends video capture.
    UIButton *stop = [[UIButton alloc] initWithFrame:CGRectMake(120, 350, 80, 50)];
    [stop setTitle:@"Stop" forState:UIControlStateNormal];
    [stop setBackgroundImage:[UIImage imageNamed:@"Images/button.png"] forState:UIControlStateNormal];
    [stop addTarget:self action:@selector(stopVideoCapture:) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:stop];
    [stop release];
    // Container view that will host the AVCaptureVideoPreviewLayer.
    localView = [[UIView alloc] initWithFrame:CGRectMake(40, 50, 200, 300)];
    [self.view addSubview:localView];
    [localView release];
}
#pragma mark -
#pragma mark VideoCapture
// Returns the front-facing camera if the device has one, otherwise falls back
// to the default video capture device. The returned object is autoreleased /
// not owned by the caller.
- (AVCaptureDevice *)getFrontCamera
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == AVCaptureDevicePositionFront) {
            return candidate;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
- ( void )startVideoCapture
{
    // Open the front camera, start streaming frames to the delegate queue,
    // and attach a live preview layer to localView.
    [labelState setText:@"Starting Video stream"];
    // Guard against double-start: both ivars are nil when idle.
    if (self->avCaptureDevice || self->avCaptureSession)
    {
        [labelState setText:@"Already capturing"];
        return;
    }
    if ((self->avCaptureDevice = [self getFrontCamera]) == nil)
    {
        [labelState setText:@"Failed to get valid capture device"];   // fixed typo: was "valide"
        return;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:self->avCaptureDevice error:&error];
    if (!videoInput)
    {
        // Surface the failure reason instead of silently dropping the NSError.
        NSLog(@"startVideoCapture: failed to create device input: %@", error);
        [labelState setText:@"Failed to get video input"];
        self->avCaptureDevice = nil;
        return;
    }
    // Ownership of the session transfers to the ivar (MRC); it is balanced by
    // a release in -stopVideoCapture: (or -dealloc).
    self->avCaptureSession = [[AVCaptureSession alloc] init];
    self->avCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
    [self->avCaptureSession addInput:videoInput];
    // Currently, the only supported key is kCVPixelBufferPixelFormatTypeKey. Recommended pixel format choices are
    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange or kCVPixelFormatType_32BGRA.
    // On iPhone 3G, the recommended pixel format choices are kCVPixelFormatType_422YpCbCr8 or kCVPixelFormatType_32BGRA.
    AVCaptureVideoDataOutput *avCaptureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
                              //[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
                              [NSNumber numberWithInt:240], (id)kCVPixelBufferWidthKey,
                              [NSNumber numberWithInt:320], (id)kCVPixelBufferHeightKey,
                              nil];
    avCaptureVideoDataOutput.videoSettings = settings;
    [settings release];
    // NOTE(review): minFrameDuration was deprecated in iOS 5 in favour of
    // AVCaptureConnection.videoMinFrameDuration; kept for this iOS 4-era SDK.
    avCaptureVideoDataOutput.minFrameDuration = CMTimeMake(1, self->producerFps);
    /* Serial queue so frames are delivered to the delegate one at a time. */
    dispatch_queue_t queue = dispatch_queue_create("org.doubango.idoubs", NULL);
    [avCaptureVideoDataOutput setSampleBufferDelegate:self queue:queue];
    [self->avCaptureSession addOutput:avCaptureVideoDataOutput];
    [avCaptureVideoDataOutput release];
    dispatch_release(queue);
    // Live preview: render the camera feed inside localView (added as a
    // sublayer -- -stopVideoCapture: must remove it from the layer tree).
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self->avCaptureSession];
    previewLayer.frame = localView.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self->localView.layer addSublayer:previewLayer];
    self->firstFrame = YES;
    [self->avCaptureSession startRunning];
    [labelState setText:@"Video capture started"];
}
- ( void )stopVideoCapture:( id )arg
{
    // Stop capturing and tear down the session and preview.
    if (self->avCaptureSession) {
        [self->avCaptureSession stopRunning];
        [self->avCaptureSession release];   // balances the alloc in -startVideoCapture (fixes an MRC leak)
        self->avCaptureSession = nil;
        [labelState setText:@"Video capture stopped"];
    }
    self->avCaptureDevice = nil;   // never retained, so no release here
    // The preview is a CALayer added with addSublayer:, not a subview, so it
    // must be removed from the layer tree (the subview loop alone never
    // matched it and the last camera frame stayed frozen on screen).
    for (CALayer *layer in [NSArray arrayWithArray:self->localView.layer.sublayers]) {
        [layer removeFromSuperlayer];
    }
    // Also clear any subviews that were placed inside localView.
    for (UIView *view in [NSArray arrayWithArray:self->localView.subviews]) {
        [view removeFromSuperview];
    }
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureVideoDataOutputSampleBufferDelegate callback: invoked on the serial
// capture queue (NOT the main thread) once per captured frame. Process the raw
// pixel data here however the application needs.
- ( void )captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// Captured-frame output -- handle the data however you need.
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the buffer*/
if ( CVPixelBufferLockBaseAddress (pixelBuffer, 0 ) == kCVReturnSuccess )
{
// Base pointer and byte size of the locked pixel data.
// NOTE(review): both are currently unused -- this is where the bytes
// would be handed to an encoder / network producer.
UInt8 *bufferPtr = ( UInt8 *) CVPixelBufferGetBaseAddress (pixelBuffer);
size_t buffeSize = CVPixelBufferGetDataSize (pixelBuffer);
if ( self -> firstFrame )
{
if ( 1 )
{
// On the first frame only: inspect width, height and pixel format.
// NOTE(review): width/height are logged nowhere and otherwise unused.
int width = CVPixelBufferGetWidth (pixelBuffer);
int height = CVPixelBufferGetHeight (pixelBuffer);
int pixelFormat = CVPixelBufferGetPixelFormatType (pixelBuffer);
switch (pixelFormat) {
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_nv12; // iPhone 3GS or 4
NSLog ( @"Capture pixel format=NV12" );
break ;
case kCVPixelFormatType_422YpCbCr8 :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_uyvy422; // iPhone 3
NSLog ( @"Capture pixel format=UYUY422" );
break ;
default :
//TMEDIA_PRODUCER(producer)->video.chroma = tmedia_rgb32;
NSLog ( @"Capture pixel format=RGB32" );
break ;
}
self -> firstFrame = NO ;
}
}
/*We unlock the buffer*/
CVPixelBufferUnlockBaseAddress (pixelBuffer, 0 );
}
/*We create an autorelease pool because as we are not in the main_queue our code is
not executed in the main thread. So we have to create an autorelease pool for the thread we are in*/
// NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
//
// CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// /*Lock the image buffer*/
// CVPixelBufferLockBaseAddress(imageBuffer,0);
// /*Get information about the image*/
// uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
// size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// size_t width = CVPixelBufferGetWidth(imageBuffer);
// size_t height = CVPixelBufferGetHeight(imageBuffer);
//
// /*Create a CGImageRef from the CVImageBufferRef*/
// CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// CGImageRef newImage = CGBitmapContextCreateImage(newContext);
//
// /*We release some components*/
// CGContextRelease(newContext);
// CGColorSpaceRelease(colorSpace);
//
// /*We display the result on the custom layer. All the display stuff must be done in the main thread because
// UIKit is no thread safe, and as we are not in the main thread (remember we didn't use the main_queue)
// we use performSelectorOnMainThread to call our CALayer and tell it to display the CGImage.*/
// [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
//
// /*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly).
// Same thing as for the CALayer we are not in the main thread so ...*/
// UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
//
// /*We relase the CGImageRef*/
// CGImageRelease(newImage);
//
// [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
//
// /*We unlock the image buffer*/
// CVPixelBufferUnlockBaseAddress(imageBuffer,0);
//
// [pool drain];
}
@end
原文链接:http://www.cocoachina.com/bbs/read.php?tid=51754&page=1
http://www.cocoachina.com/bbs/read.php?tid=66400