//
// Camera.h
// VideoPlayback
//
// Created by Mac on 14-10-31.
// Copyright (c) 2014 fly. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface Camera : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureDevice *avCaptureDevice;
    AVCaptureSession *_session;
}
- (void)setupCaptureSession;
- (void)stopcamera;
@end
//
// Camera.m
// VideoPlayback
//
// Created by Mac on 14-10-31.
// Copyright (c) 2014 fly. All rights reserved.
//
#import "Camera.h"
@implementation Camera
- (AVCaptureDevice *)getFrontCamera
{
    // Find the front-facing camera device; fall back to the default video device.
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in cameras)
    {
        if (device.position == AVCaptureDevicePositionFront)
            return device;
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
// Create and configure a capture session and start it running
- (void)setupCaptureSession
{
    //-- Create CVOpenGLESTextureCacheRef for optimal CVImageBufferRef to GLES texture conversion.
    /*#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, glContext, NULL, &_videoTextureCache);
    #else
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)glContext, NULL, &_videoTextureCache);
    #endif
    if (err)
    {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }*/
    //-- Set up the capture session.
    _session = [[AVCaptureSession alloc] init];
    [_session beginConfiguration];
    //-- Set the preset session size.
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    //-- Create a video device and an input from that device. Add the input to the capture session.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoDevice == nil)
        assert(0);
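    // Note: the getFrontCamera helper above is never called, so this always uses the
    // default (back) camera. If the front camera was intended, the device could be
    // obtained with [self getFrontCamera] instead (an assumption, not in the original).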
    //-- Add the device to the session.
    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (error)
        assert(0);
    [_session addInput:input];
    //-- Create the output for the capture session.
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; // Probably want to set this to NO when recording
    //-- Set the output pixel format to YUV420 (bi-planar, full range).
    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // Necessary for manual preview
    // Deliver sample buffers on the main queue so OpenGL can work with the data directly.
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    [_session addOutput:dataOutput];
    [_session commitConfiguration];
    [_session startRunning];
}
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Called on the main queue for every captured frame.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    NSLog(@"captureOutput: %zu x %zu", width, height);
}
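// The delegate above only logs the frame size. A minimal sketch of how the
// kCVPixelFormatType_420YpCbCr8BiPlanarFullRange buffer could be read is shown
// below; the helper name processPixelBuffer: is an assumption and is not called
// anywhere in the original code.
- (void)processPixelBuffer:(CVImageBufferRef)pixelBuffer
{
    // Lock the buffer before touching its memory.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    // Plane 0 holds the full-resolution luma (Y) samples; plane 1 holds interleaved CbCr.
    uint8_t *yPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t rows = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    // ... inspect samples or upload them to a GL texture here ...
    (void)yPlane; (void)bytesPerRow; (void)rows;
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}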
- (void)stopcamera
{
    [_session stopRunning];
}
@end
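// ---------------------------------------------------------------------------
// Usage sketch (an assumption, not part of the original files): a view
// controller could own a Camera instance and drive its lifecycle like this.
// The class name ViewController and the chosen lifecycle methods are
// illustrative only.
// ---------------------------------------------------------------------------
#import <UIKit/UIKit.h>
#import "Camera.h"

@interface ViewController : UIViewController
@property (nonatomic, strong) Camera *camera;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    self.camera = [[Camera alloc] init];
    [self.camera setupCaptureSession]; // configures and starts the capture session
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [self.camera stopcamera]; // stop capture when leaving the screen
}

@end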