引用的文件:
#import <AVFoundation/AVFoundation.h>
1.首先进行设备权限的判断和申请
第一次使用需要向用户申请设备权限。iOS 10 之后还必须在 Info.plist 文件中添加对应的用途描述(usage description),否则 App 在访问设备时会直接崩溃。
- 麦克风权限:Privacy - Microphone Usage Description
- 相机权限: Privacy - Camera Usage Description
// Query the current camera authorization state and react accordingly.
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
switch (status) {
    case AVAuthorizationStatusNotDetermined: {
        // Not asked yet — trigger the system permission prompt.
        // NOTE: the completion handler is not guaranteed to run on the main thread.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
        }];
        break;
    }
    case AVAuthorizationStatusAuthorized: {
        // Permission already granted — safe to start capturing.
        break;
    }
    case AVAuthorizationStatusDenied:
    case AVAuthorizationStatusRestricted:
        // Denied by the user, or restricted (e.g. parental controls):
        // prompt the user to change it in the Settings app.
        break;
    default:
        break;
}
2.设置设备并启动
/// Configures the capture session's video input and data output, and
/// records the video connection for later use (e.g. orientation changes).
/// Assumes _session has already been created; audio setup is analogous.
- (void)startVideoDevice {
    NSError *error = nil;
    // Default video capture device (typically the back camera).
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Build the input. Per Cocoa convention, check the nil return value,
    // not the error pointer — the error may be stale on success.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (!videoInput) {
        NSLog(@"Error getting video input device: %@", error.description);
        return; // without an input there is nothing useful to configure
    }
    if ([_session canAddInput:videoInput]) {
        [_session addInput:videoInput]; // attach the camera input to the session
    }
    // Serial queue so sample buffers are delivered to the delegate in order.
    _videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_videoOutput setSampleBufferDelegate:self queue:_videoQueue];
    // Request BGRA pixel buffers — convenient for CoreGraphics/OpenGL processing.
    NSDictionary *captureSettings = @{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
    _videoOutput.videoSettings = captureSettings;
    // Drop late frames instead of queueing them; avoids growing latency.
    _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    if ([_session canAddOutput:_videoOutput]) {
        [_session addOutput:_videoOutput]; // attach the data output to the session
    }
    _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
}
//这里只列举视频部分的代码了,音频部分是完全一样的,只是关键字变为audio即可。
3.获取音视频数据(在代理中)
<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
/// Sample-buffer delegate callback, invoked on the capture queue for every
/// video frame / audio sample the session produces.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if ([_videoOutput isEqual:captureOutput]) {
        // Video sample buffer — encode, save, or transmit here.
        return;
    }
    if ([_audioOutput isEqual:captureOutput]) {
        // Audio sample buffer — encode, save, or transmit here.
    }
}
4.关于切换摄像头
/// Returns the built-in camera at the given position, or nil if none exists.
/// @param position AVCaptureDevicePositionFront or AVCaptureDevicePositionBack.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    if (@available(iOS 10.0, *)) {
        // +devicesWithMediaType: is deprecated since iOS 10; use the
        // discovery-session API, which filters by position for us.
        AVCaptureDeviceDiscoverySession *discovery =
            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                                   mediaType:AVMediaTypeVideo
                                                                    position:position];
        return discovery.devices.firstObject;
    }
    // Pre-iOS 10 fallback: linear scan over all video devices.
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
/// Swaps the session's video input between the front and back cameras.
/// If the replacement camera/input cannot be obtained or added, the original
/// input is left (or restored) so the session never loses its video feed.
- (void)swapFrontAndBackCameras {
    NSArray *inputs = self.session.inputs;
    for (AVCaptureDeviceInput *input in inputs) {
        AVCaptureDevice *device = input.device;
        if (![device hasMediaType:AVMediaTypeVideo]) {
            continue; // skip the audio input
        }
        // Pick the opposite position from the currently active camera.
        AVCaptureDevicePosition target =
            (device.position == AVCaptureDevicePositionFront)
                ? AVCaptureDevicePositionBack
                : AVCaptureDevicePositionFront;
        AVCaptureDevice *newCamera = [self cameraWithPosition:target];
        if (!newCamera) {
            return; // no camera at the opposite position (e.g. simulator)
        }
        NSError *error = nil;
        AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
        if (!newInput) {
            // Adding a nil input would raise; bail out and keep the old camera.
            NSLog(@"Error creating camera input: %@", error.description);
            return;
        }
        [self.session beginConfiguration];
        [self.session removeInput:input];
        if ([self.session canAddInput:newInput]) {
            [self.session addInput:newInput];
        } else {
            // Roll back so the session still has a working video input.
            [self.session addInput:input];
        }
        [self.session commitConfiguration];
        break; // only one video input to swap
    }
}
5.全局属性(为了方便懒癌患者)
// Coordinates the flow of data from capture inputs to outputs.
@property (nonatomic,strong)AVCaptureSession *session;
// Serial queue on which video sample buffers are delivered to the delegate.
@property (nonatomic,strong)dispatch_queue_t videoQueue;
// Serial queue on which audio sample buffers are delivered to the delegate.
@property (nonatomic,strong)dispatch_queue_t audioQueue;
// Emits uncompressed video frames via the sample-buffer delegate.
@property (nonatomic,strong)AVCaptureVideoDataOutput * videoOutput;
// Emits audio sample buffers via the sample-buffer delegate.
@property (nonatomic,strong)AVCaptureAudioDataOutput * audioOutput;
// Video connection of videoOutput (useful for orientation/mirroring).
@property (nonatomic,strong)AVCaptureConnection * videoConnection;
// Audio connection of audioOutput.
@property (nonatomic,strong)AVCaptureConnection * audioConnection;