Reposted from: http://flhs-wdw.blog.sohu.com/207300574.html
The iPhone's AVFoundation framework makes it straightforward to work with the device's multimedia hardware, and its AVAssetWriter class can combine captured frames and audio into a complete movie file. Even recording everything an app draws on screen is not particularly difficult.
This post first covers how to write the captured video into a specified file.
Start by setting up the AVCaptureSession; once recording begins, the sample buffer delegate callbacks fire for every frame of audio and video data.
- NSError * error;
- session = [[AVCaptureSession alloc] init];
- [session beginConfiguration];
- [session setSessionPreset:AVCaptureSessionPreset640x480];
- [self initVideoAudioWriter];
- AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
- AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
- AVCaptureDevice * audioDevice1 = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
- AVCaptureDeviceInput *audioInput1 = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice1 error:&error];
- videoOutput = [[AVCaptureVideoDataOutput alloc] init];
- [videoOutput setAlwaysDiscardsLateVideoFrames:YES];
- [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- [videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
- audioOutput = [[AVCaptureAudioDataOutput alloc] init];
- [audioOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
- [session addInput:videoInput];
- [session addInput:audioInput1];
- [session addOutput:videoOutput];
- [session addOutput:audioOutput];
- [session commitConfiguration];
- [session startRunning];
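One note on the snippet above: it delivers the sample buffers on the main queue, which is the simplest setup but can drop frames when the UI is busy. A common variation, not part of the original post, is to hand both data outputs a dedicated serial dispatch queue; the queue label below is just an example:
- dispatch_queue_t captureQueue = dispatch_queue_create("capture.output.queue", NULL); // serial queue; under the non-ARC code of this era, release it later with dispatch_release
- [videoOutput setSampleBufferDelegate:self queue:captureQueue];
- [audioOutput setSampleBufferDelegate:self queue:captureQueue];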
The delegate callback:
- -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
- //CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
- NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
- static int frame = 0;
- CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
- if( frame == 0 && videoWriter.status != AVAssetWriterStatusWriting )
- {
- [videoWriter startWriting];
- [videoWriter startSessionAtSourceTime:lastSampleTime];
- }
- if (captureOutput == videoOutput)
- {
- if( videoWriter.status > AVAssetWriterStatusWriting )
- {
- NSLog(@"Warning: writer status is %d", (int)videoWriter.status);
- if( videoWriter.status == AVAssetWriterStatusFailed )
- NSLog(@"Error: %@", videoWriter.error);
- [pool drain]; // don't leak the autorelease pool on the early return
- return;
- }
- if ([videoWriterInput isReadyForMoreMediaData])
- {
- if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
- NSLog(@"Unable to write to video input");
- else
- NSLog(@"video sample appended");
- }
- }
- else if (captureOutput == audioOutput)
- {
- if( videoWriter.status > AVAssetWriterStatusWriting )
- {
- NSLog(@"Warning: writer status is %d", (int)videoWriter.status);
- if( videoWriter.status == AVAssetWriterStatusFailed )
- NSLog(@"Error: %@", videoWriter.error);
- [pool drain]; // don't leak the autorelease pool on the early return
- return;
- }
- if ([audioWriterInput isReadyForMoreMediaData])
- {
- if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
- NSLog(@"Unable to write to audio input");
- else
- NSLog(@"audio sample appended");
- }
- }
- if (frame == FrameCount) // FrameCount: the total number of frames to record, defined elsewhere in the original project
- {
- [self closeVideoWriter];
- }
- frame ++;
- [pool drain];
- }
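The closeVideoWriter method called above is not shown in the original post. A minimal sketch, assuming the same session and writer-input ivars used in the rest of this code, might look like this:
- -(void) closeVideoWriter
- {
- [session stopRunning]; // stop delivering further sample buffers
- [videoWriterInput markAsFinished];
- [audioWriterInput markAsFinished];
- if ([videoWriter finishWriting]) // synchronous finish, as was typical in the iOS 4.x era of this post
- NSLog(@"Movie written to %@", videoWriter.outputURL);
- else
- NSLog(@"Error finishing movie: %@", videoWriter.error);
- }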
What remains is to initialize the AVAssetWriter, including the audio and video writer inputs:
- -(void) initVideoAudioWriter
- {
- CGSize size = CGSizeMake(480, 320);
- NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
- NSError *error = nil;
- unlink([betaCompressionDirectory UTF8String]);
- //----initialize compression engine
- self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
- fileType:AVFileTypeQuickTimeMovie
- error:&error];
- NSParameterAssert(videoWriter);
- if(error)
- NSLog(@"error = %@", [error localizedDescription]);
- NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
- nil ];
- NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
- [NSNumber numberWithInt:size.width], AVVideoWidthKey,
- [NSNumber numberWithInt:size.height], AVVideoHeightKey,
- videoCompressionProps, AVVideoCompressionPropertiesKey, nil];
- self.videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
- NSParameterAssert(videoWriterInput);
- videoWriterInput.expectsMediaDataInRealTime = YES;
- NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
- self.adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
- sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
- NSParameterAssert(videoWriterInput);
- NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
- if ([videoWriter canAddInput:videoWriterInput])
- NSLog(@"I can add this input");
- else
- NSLog(@"i can't add this input");
- // Add the audio input
- AudioChannelLayout acl;
- bzero( &acl, sizeof(acl));
- acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
- NSDictionary* audioOutputSettings = nil;
- // audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
- // [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
- // [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
- // [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
- // [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
- // [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
- // nil ];
- audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
- [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
- [ NSNumber numberWithInt:64000], AVEncoderBitRateKey,
- [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
- [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
- [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
- nil ];
- audioWriterInput = [[AVAssetWriterInput
- assetWriterInputWithMediaType: AVMediaTypeAudio
- outputSettings: audioOutputSettings ] retain];
- audioWriterInput.expectsMediaDataInRealTime = YES;
- // add input
- [videoWriter addInput:audioWriterInput];
- [videoWriter addInput:videoWriterInput];
- }
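One small mismatch worth noting: the output path ends in .mp4, but the writer is created with AVFileTypeQuickTimeMovie, so what actually gets written is a QuickTime container. If a real MP4 file is wanted, the writer could instead be created with AVFileTypeMPEG4 (H.264 video and AAC audio are both valid in that container):
- self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
- fileType:AVFileTypeMPEG4
- error:&error];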
Pay close attention to the audio settings here: if they are wrong, the recording may end up with no sound, a problem that cost me quite a bit of time. With everything in place, once the delegate callback starts firing, the video and audio writing code runs and the movie is written to the specified path. If you want to add filter effects, all you need is a good image-processing algorithm applied to each video frame.
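If you do want to experiment with per-frame processing, one possible approach (a sketch, not from the original post) is to pull the CVPixelBuffer out of the video sample buffer in the delegate callback, modify its 32BGRA bytes, and append it through the pixel buffer adaptor created in initVideoAudioWriter instead of appending the sample buffer directly. The "zero the blue channel" loop below is only a toy filter, and you would likely want the adaptor's sourcePixelBufferAttributes above to use kCVPixelFormatType_32BGRA so they match the capture format:
- // In the video branch of the callback, instead of appendSampleBuffer:
- CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- CVPixelBufferLockBaseAddress(pixelBuffer, 0);
- unsigned char *pixels = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
- size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
- size_t width = CVPixelBufferGetWidth(pixelBuffer);
- size_t height = CVPixelBufferGetHeight(pixelBuffer);
- for (size_t row = 0; row < height; row++) {
- unsigned char *p = pixels + row * bytesPerRow;
- for (size_t col = 0; col < width; col++)
- p[col * 4] = 0; // byte 0 of each 32BGRA pixel is blue
- }
- CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
- if (adaptor.assetWriterInput.readyForMoreMediaData)
- [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:lastSampleTime];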