根据摄像头位置（前置/后置）获取对应的 AVCaptureDevice
/// Returns the capture device (camera) at the requested position
/// (front/back), or nil when no camera matches.
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Stop at the first device whose position matches.
    NSUInteger matchIndex = [videoDevices indexOfObjectPassingTest:^BOOL(AVCaptureDevice *device, NSUInteger idx, BOOL *stop) {
        return device.position == position;
    }];
    return (matchIndex == NSNotFound) ? nil : videoDevices[matchIndex];
}
一、录制视频（audio 输入 + AVCaptureMovieFileOutput 文件输出）
//设置AVFoundation
/// Configures the capture session for movie recording (audio + video input,
/// AVCaptureMovieFileOutput, preview layer) and starts recording to a file
/// in the temporary directory. `self` is the recording delegate.
- (void)setAVFoundationMovie {
    self.session = [[AVCaptureSession alloc] init];

    // Audio input. The original implementation added ONLY audio, which means
    // the recorded movie had sound but no picture — the video input below
    // fixes that.
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
    if (self.input && [self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }

    // Video input — required for AVCaptureMovieFileOutput to record picture.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *videoError = nil;
    AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&videoError];
    if (videoInput && [self.session canAddInput:videoInput]) {
        [self.session addInput:videoInput];
    }

    self.outputMovie = [[AVCaptureMovieFileOutput alloc] init];
    if ([self.session canAddOutput:self.outputMovie]) {
        [self.session addOutput:self.outputMovie];
        // A connection only exists AFTER addOutput: (the original queried it
        // before, always getting nil). Stabilization is a video property, so
        // ask for the video connection, not the audio one.
        AVCaptureConnection *connection = [self.outputMovie connectionWithMediaType:AVMediaTypeVideo];
        if ([connection isVideoStabilizationSupported]) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
    }

    // Preview layer. The original assigned frame twice; only the second
    // assignment took effect, so keep that one.
    self.layerShow = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.layerShow.frame = CGRectMake(200, 0, 500, 1000);
    self.layerShow.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.layerShow.backgroundColor = [UIColor orangeColor].CGColor;
    [self.view.layer addSublayer:self.layerShow];

    // The session must be running before recording can begin — the original
    // never called startRunning, so the preview stayed blank and recording
    // never actually started.
    [self.session startRunning];

    // stringByAppendingPathComponent: guarantees a correct path separator
    // regardless of whether NSTemporaryDirectory() ends with "/".
    NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
    NSLog(@"save path is :%@", outputFilePath);
    NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
    NSLog(@"fileUrl:%@", fileUrl);
    [self.outputMovie startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
}
// Local logging stub.
// NOTE(review): despite the name, this is NOT an
// AVCaptureFileOutputRecordingDelegate callback — those are
// captureOutput:didStartRecordingToOutputFileURL:fromConnections: and
// captureOutput:didFinishRecordingToOutputFileURL:fromConnections:error:.
// Since `self` is passed as recordingDelegate when recording starts, confirm
// the required didFinishRecording... delegate method is implemented elsewhere
// in this class; otherwise stopping a recording will raise at runtime.
- (void)startRecordingToOutputFileURL:(NSURL*)outputFileURL recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate {
NSLog(@"CCC---:startRecordingToOutputFileURL");
}
二、直播视频（video 输入 + AVCaptureVideoDataOutput 逐帧回调）
//直播
/// Configures the capture session for live video frames: camera input,
/// AVCaptureVideoDataOutput delivering 320x240 BGRA sample buffers to
/// `self` (captureOutput:didOutputSampleBuffer:fromConnection:), plus an
/// on-screen preview layer. Starts the session when setup is complete.
- (void)setAVFoundationVideo {
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self.session = [[AVCaptureSession alloc] init];
    NSError *error = nil;
    self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
    if (self.input && [self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }

    self.outputVideo = [[AVCaptureVideoDataOutput alloc] init];
    // Configure the output (pixel format + delegate) before starting the
    // session so the first delivered frames already use these settings.
    self.outputVideo.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey : @320,
        (id)kCVPixelBufferHeightKey : @240,
    };
    // Main queue preserved from the original; a dedicated serial queue is
    // generally recommended so frame callbacks don't contend with UI work.
    [self.outputVideo setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if ([self.session canAddOutput:self.outputVideo]) {
        [self.session addOutput:self.outputVideo];
        // A connection only exists AFTER addOutput: (the original queried it
        // before, getting nil), and stabilization lives on the VIDEO
        // connection — the original mistakenly asked for AVMediaTypeAudio.
        AVCaptureConnection *connection = [self.outputVideo connectionWithMediaType:AVMediaTypeVideo];
        if ([connection isVideoStabilizationSupported]) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
    }

    // Preview layer. The original assigned frame twice; only the second
    // assignment took effect, so keep that one.
    self.layerShow = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.layerShow.frame = CGRectMake(200, 0, 500, 1000);
    self.layerShow.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.layerShow.backgroundColor = [UIColor orangeColor].CGColor;
    [self.view.layer addSublayer:self.layerShow];

    // Without startRunning (commented out in the original) the preview stays
    // blank and no sample buffers are ever delivered.
    [self.session startRunning];
}
// AVCaptureVideoDataOutputSampleBufferDelegate callback: invoked (on the
// queue passed to setSampleBufferDelegate:queue: — here the main queue) for
// every captured video frame. Currently only logs; frame processing
// (e.g. converting the CMSampleBufferRef to an image) would go here.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
NSLog(@"CCC---:didOutputSampleBuffer");
}