Here I convert the captured video to raw YUV; pick the output format that suits your own needs.
#import <UIKit/UIKit.h>
FILE *pFile;
uint8_t* baseAddress;
size_t width;
size_t height;
@interface ViewController : UIViewController
@end
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <stdio.h>
#import <stdlib.h>
#import <string.h>
// Prototype for the NV12 -> I420 conversion defined below.
void YUV420SPToYUV420P(uint8_t *yuv420sp, uint8_t *yuv420p, size_t widthp, size_t heightp);
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, strong) AVCaptureSession *session;
@property(nonatomic, strong) AVCaptureVideoPreviewLayer *preViewLayer;
@property(nonatomic, assign) int producerFps;
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self setupCaptureSession];
}
- (void)setupCaptureSession
{
NSError *error = nil;
self.session = [[AVCaptureSession alloc] init];
self.session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (error) {
NSLog(@"error input:%@", error.description);
}
if ([_session canAddInput:input]) {
[self.session addInput:input];
}
AVCaptureVideoDataOutput *outPut = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[outPut setSampleBufferDelegate:self queue:queue];
outPut.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
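// kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the NV12 layout: plane 0
// is Y at full resolution, plane 1 is interleaved Cb/Cr (U byte first, then V)
// at half resolution in each direction.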
// NSLog(@"%@", outPut.videoSettings);
if ([_session canAddOutput:outPut]) {
[self.session addOutput:outPut];
}
_preViewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preViewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_preViewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:_preViewLayer];
self.producerFps = 25;
// Start the session only after all inputs and outputs are attached.
[self.session startRunning];
}
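// producerFps above is stored but never applied. A minimal sketch, assuming
// iOS 7+ where activeVideoMinFrameDuration is available, of how the value
// could be pushed to the capture device; applyFrameRate:toDevice: is a
// hypothetical helper, not part of the original code.
- (void)applyFrameRate:(int)fps toDevice:(AVCaptureDevice *)device
{
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        // Pin min and max frame duration to 1/fps for a fixed capture rate.
        device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
        device.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
        [device unlockForConfiguration];
    } else {
        NSLog(@"error locking device:%@", error.description);
    }
}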
// Delegate callback: do your own processing here; I convert the frame to YUV.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
const int kFlags = 0;
CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
NSLog(@"wwwwwwwwwwwwww%@", videoFrame);
if (CVPixelBufferLockBaseAddress(videoFrame, kFlags) != kCVReturnSuccess) {
return;
}
const int kYPlaneIndex = 0;
const int kUVPlaneIndex = 1;
baseAddress =
(uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, kYPlaneIndex);
size_t yPlaneBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kYPlaneIndex);
size_t yPlaneHeight = CVPixelBufferGetHeightOfPlane(videoFrame, kYPlaneIndex);
size_t uvPlaneBytesPerRow =
CVPixelBufferGetBytesPerRowOfPlane(videoFrame, kUVPlaneIndex);
size_t uvPlaneHeight =
CVPixelBufferGetHeightOfPlane(videoFrame, kUVPlaneIndex);
size_t frameSize =
yPlaneBytesPerRow * yPlaneHeight + uvPlaneBytesPerRow * uvPlaneHeight;
NSLog(@"%zu",frameSize);
// Get the width and height of each frame.
width = CVPixelBufferGetWidth(videoFrame);
height = CVPixelBufferGetHeight(videoFrame);
// Timestamp ("yyyy" is the calendar year; "YYYY" is week-based and wrong around New Year).
NSDate *dateNow = [NSDate date];
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
[formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];
NSString *nowtime = [formatter stringFromDate:dateNow];
NSLog(@"%@", nowtime);
NSLog(@"%zu %zu", width, height);
CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
// To dump the raw frame here instead, do it BEFORE CVPixelBufferUnlockBaseAddress,
// and write frameSize bytes (strlen() is meaningless on binary YUV data):
// NSArray *path = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
// NSString *Pathes = path.lastObject;
// NSString *filePath = [Pathes stringByAppendingPathComponent:@"testvideo.yuv1"];
// const char *str = [filePath UTF8String];
// pFile = fopen(str, "ab");
// fwrite(baseAddress, frameSize, 1, pFile);
// fflush(pFile);
}
// Convert YUV420SP (NV12, interleaved UV) to YUV420P (I420, planar U then V).
// The frame comes from the function arguments rather than the globals, so the
// caller passes the packed NV12 buffer plus its dimensions.
void YUV420SPToYUV420P(uint8_t *yuv420sp, uint8_t *yuv420p, size_t widthp, size_t heightp)
{
if (yuv420sp == NULL || yuv420p == NULL)
return;
size_t framesize = widthp * heightp;
size_t i = 0, j = 0;
// Copy the Y plane unchanged.
for (i = 0; i < framesize; i++)
{
*(yuv420p + i) = *(yuv420sp + i);
}
i = 0;
// De-interleave U: even bytes of the NV12 UV plane go to the I420 U plane.
for (j = 0; j < framesize/2; j += 2)
{
*(yuv420p + (i + framesize)) = *(yuv420sp + (j + framesize));
i++;
}
i = 0;
// De-interleave V: odd bytes of the NV12 UV plane go to the I420 V plane.
for (j = 1; j < framesize/2; j += 2)
{
*(yuv420p + (i + framesize*5/4)) = *(yuv420sp + (j + framesize));
i++;
}
NSArray *path = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *Pathes = path.lastObject;
NSString *filePath = [Pathes stringByAppendingPathComponent:@"testvideo1.yuv1"];
const char *str = [filePath UTF8String];
// Append each converted frame; a full I420 frame is width*height*3/2 bytes
// (strlen() would stop at the first zero byte in binary data).
pFile = fopen(str, "ab");
if (pFile != NULL) {
fwrite(yuv420p, framesize * 3 / 2, 1, pFile);
fclose(pFile);
}
}
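// The dumped file is raw I420, so it can be checked with ffplay (assuming the
// medium preset's 480x360; substitute the width/height actually logged above):
//   ffplay -f rawvideo -pixel_format yuv420p -video_size 480x360 testvideo1.yuv1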
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
References
http://blog.csdn.net/u010180166/article/details/8923134
http://www.cnblogs.com/mfryf/archive/2012/03/10/2389121.html