//=============================
//@property(nonatomic, readonly) BOOL hasFlash;//是否有闪光灯
//@property(nonatomic) AVCaptureTorchMode torchMode;//手电筒
//@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused NS_AVAILABLE(10_7, NA);//是否处于暂停
//- (void)pauseRecording NS_AVAILABLE(10_7, NA);//暂停录制
//
//- (void)resumeRecording NS_AVAILABLE(10_7, NA);//继续录制
//
//@property(nonatomic, readonly) CMTime recordedDuration;//当前录制的时长
//
//@property(nonatomic, readonly) int64_t recordedFileSize;//当前录制文件的大小
//@property(nonatomic) CMTime maxRecordedDuration;//停止时最大录制时长
//
//@property(nonatomic) int64_t maxRecordedFileSize;//停止时最大录制文件的大小
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController () <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>

// Inputs: device-input managers that capture picture and sound.
@property (nonatomic, strong) AVCaptureDeviceInput *inputPitcure;
@property (nonatomic, strong) AVCaptureDeviceInput *inputAudio;

// Outputs: turn the captured frames into a movie file / raw video data.
@property (nonatomic, strong) AVCaptureMovieFileOutput *output;       // movie-file output
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;  // raw video-data output

// Session that ties the input devices, outputs and preview layer together.
@property (nonatomic, strong) AVCaptureSession *session;

// Special layer used to display what the input device is capturing.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *layer;

@property (nonatomic, strong) UILabel *lbl;  // shows the current recording time
@property (nonatomic, strong) dispatch_queue_t captureQueue;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) AVAssetWriter *writer;          // media writer
@property (nonatomic, strong) AVAssetWriterInput *videoInput; // video writer input
@end
@implementation ViewController
// Target for the on-screen flash button; forwards to the full handler with a
// nil sender (the handler must therefore tolerate sender == nil).
-(void)flash{
[self flashButtonClick:nil];
}
// Turns the torch on. Configuring an AVCaptureDevice requires calling
// lockForConfiguration: first and unlockForConfiguration afterwards, otherwise
// the app crashes. Hardware support must also be verified — some iOS devices
// have no flash/torch, and setting torchMode on them would crash as well.
// sender: the tapped bar button, or nil when invoked programmatically.
- (void)flashButtonClick:(UIBarButtonItem *)sender {
    NSLog(@"flashButtonClick");
    // Media types: AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (device == nil) {
        // No camera at all (e.g. simulator).
        NSLog(@"设备不支持闪光灯");
        return;
    }
    NSError *error = nil;
    // Must lock before mutating any configuration; bail out if the lock fails.
    if (![device lockForConfiguration:&error]) {
        NSLog(@"lockForConfiguration failed: %@", error);
        return;
    }
    // hasTorch is the capability that matches torchMode (hasFlash only covers
    // the still-photo flash); checking it prevents a crash on torch-less hardware.
    if ([device hasTorch]) {
        device.torchMode = AVCaptureTorchModeOn; // turn the torch on
        // Title reflects the state we just set (the original code set all three
        // titles in sequence, leaving a title that contradicted the torch state).
        [sender setTitle:@"flashOn"];
    } else {
        NSLog(@"设备不支持闪光灯");
    }
    [device unlockForConfiguration];
}
// Lazily-created serial queue on which sample buffers are delivered.
- (dispatch_queue_t)captureQueue {
    if (_captureQueue == nil) {
        // Serial so that frames are processed strictly in capture order.
        _captureQueue = dispatch_queue_create("cn.qiuyouqun.im.wclrecordengine.capture", DISPATCH_QUEUE_SERIAL);
    }
    return _captureQueue;
}
// Lazily-created video-data output that delivers sample buffers to self
// (AVCaptureVideoDataOutputSampleBufferDelegate) on captureQueue.
- (AVCaptureVideoDataOutput *)videoOutput {
    if (_videoOutput == nil) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
        // Request bi-planar 4:2:0 video-range pixel buffers; the CF key needs
        // an (id) cast to sit in an NSDictionary literal.
        _videoOutput.videoSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
        };
    }
    return _videoOutput;
}
// Creates the H.264 video AVAssetWriterInput at the requested pixel size and
// attaches it to the writer.
// cy: output height in pixels; cx: output width in pixels.
- (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx {
    // Encoder configuration: codec plus output resolution.
    NSDictionary *settings = @{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @(cx),
        AVVideoHeightKey : @(cy),
    };
    _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
    // Real-time source: the writer must not stall the live capture pipeline.
    _videoInput.expectsMediaDataInRealTime = YES;
    // Guard the add: addInput: raises if the input is incompatible with the writer.
    if ([_writer canAddInput:_videoInput]) {
        [_writer addInput:_videoInput];
    }
}
// Initializer for the recording pipeline.
// path:  destination file path; when nil/empty, falls back to the historical
//        default Documents/hellllll.m4v (the original code redeclared a local
//        `path` over this parameter — a compile error — and ignored it).
// cy/cx: requested output height/width in pixels (previously hard-coded 200x200).
// ch:    audio channel count; rate: audio sample rate. Audio input is only
//        configured when both are non-zero.
- (instancetype)initPath:(NSString *)path Height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate {
    self = [super init];
    if (self) {
        NSString *outputPath = path;
        if (outputPath.length == 0) {
            outputPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"hellllll.m4v"];
        }
        // Delete any stale file first so the recording always starts fresh.
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
        NSURL *url = [NSURL fileURLWithPath:outputPath];
        // Write an MPEG-4 container; surface writer-creation failures instead
        // of silently continuing with a nil writer.
        NSError *error = nil;
        _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:&error];
        if (_writer == nil) {
            NSLog(@"AVAssetWriter creation failed: %@", error);
            return nil;
        }
        // Interleave samples so playback can begin before the file fully loads.
        _writer.shouldOptimizeForNetworkUse = YES;
        // Use the caller's requested dimensions.
        [self initVideoInputHeight:cy width:cx];
        // Only set up audio once valid channel/rate values were captured.
        if (rate != 0 && ch != 0) {
            // [self initAudioInputChannels:ch samples:rate];
        }
    }
    return self;
}
// Builds the minimal UI (torch button + duration label) and starts the
// capture session.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Torch/flash toggle button.
    UIButton *captureBtn = [[UIButton alloc] initWithFrame:CGRectMake(30, 300, 60, 30)];
    captureBtn.backgroundColor = [UIColor blueColor];
    // setTitleColor:forState: is the supported way to color a UIButton title;
    // assigning titleLabel.textColor directly is overwritten by the button.
    [captureBtn setTitleColor:[UIColor greenColor] forState:UIControlStateNormal];
    [captureBtn setTitle:@"闪光灯" forState:UIControlStateNormal];
    [captureBtn addTarget:self action:@selector(flash) forControlEvents:UIControlEventTouchDown];
    [self.view addSubview:captureBtn];
    // Label that shows the current recording time.
    UILabel *lbl = [[UILabel alloc] initWithFrame:CGRectMake(30, 350, 60, 30)];
    self.lbl = lbl;
    lbl.backgroundColor = [UIColor blueColor];
    [self.view addSubview:lbl];
    // Wire up inputs, output and preview layer, then start the session.
    [self setsession];
}
// AVCaptureSession is the hub of AVFoundation capture: create the session, add
// the appropriate inputs (camera/microphone wrapped in AVCaptureDeviceInput)
// and outputs (here AVCaptureVideoDataOutput), then use startRunning /
// stopRunning to control the data flow. sessionPreset can tune quality/bitrate.
- (void)setsession {
    // Roles: 1. inputs (camera + microphone)  2. output (video data)
    //        3. session tying them together   4. preview layer for the camera feed.
    AVCaptureDevice *devicePitcure = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *deviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    // Wrap the devices in input objects the session can consume.
    self.inputPitcure = [AVCaptureDeviceInput deviceInputWithDevice:devicePitcure error:nil];
    self.inputAudio = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:nil];
    // Session that links inputs and outputs.
    self.session = [[AVCaptureSession alloc] init];
    // Preview layer (CALayer subclass) showing the live camera feed.
    self.layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.layer.videoGravity = AVLayerVideoGravityResize;
    self.layer.frame = self.view.bounds;
    // Insert at index 0 so it sits behind the buttons/labels.
    [self.view.layer insertSublayer:self.layer atIndex:0];
    // Attach inputs.
    if ([self.session canAddInput:self.inputPitcure]) {
        [self.session addInput:self.inputPitcure];
    }
    if ([self.session canAddInput:self.inputAudio]) {
        [self.session addInput:self.inputAudio];
    }
    // Attach the video-data output (lazily created by the getter).
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }
    // A connection only exists AFTER the output has been added to the session;
    // the original code queried it first, so stabilization and orientation were
    // silently applied to a nil connection. Configure it here instead.
    self.videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([self.videoConnection isVideoStabilizationSupported]) {
        // Let the system pick the best stabilization mode.
        self.videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
    // Record in portrait orientation.
    self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    // Start the flow of data from inputs to outputs.
    [self.session startRunning];
}
// Toggles "recording" by starting/stopping the capture session. While the
// session runs, captureOutput:didOutputSampleBuffer:fromConnection: is called
// continuously with video frames.
// (An earlier variant used AVCaptureMovieFileOutput instead, toggling
// startRecordingToOutputFileURL:recordingDelegate: / stopRecording on
// self.output; that path is superseded by the AVCaptureVideoDataOutput flow.)
- (IBAction)startRecord:(UIButton *)sender {
    if ([self.session isRunning]) {
        [self.session stopRunning];
    } else {
        [self.session startRunning];
    }
}
// AVCaptureVideoDataOutputSampleBufferDelegate callback. While the session is
// running this fires repeatedly (on self.captureQueue, as configured in the
// videoOutput getter) with every captured video frame; this is where the
// sample buffers would be written to disk / the AVAssetWriter. Currently it
// only logs.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
NSLog(@"写入数据001");
}
@end