iOS音视频实时采集硬件编码(H264+AAC)
做 iOS开发已经好几年了,从来没写博客的习惯,今天就试试写点东西
最近在做音视频方面的东西,发现国内关于硬编码的资料特别少,网上搜到的几乎全是软编码方案,于是翻墙查资料,发现外国人很有分享精神,研究了一个星期终于整理出一套 iOS 实时硬编码方案,大致实现了以下功能:
- 使用 AVCaptureSession进行实时采集音视频
- 通过AVCaptureVideoDataOutputSampleBufferDelegate获取到音视频buffer数据
- 分别对音视频原始数据进行编码
- 传输
//
// ViewController.h
// H264AACEncode
//
// Created by ZhangWen on 15/10/14.
// Copyright © 2015 Zhangwen. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "AACEncoder.h"
#import "H264Encoder.h"
/// Captures live audio and video with an AVCaptureSession and hands the raw
/// CMSampleBuffers to hardware encoders (H.264 for video, AAC for audio).
/// One object conforms to both capture-output delegate protocols so it
/// receives audio and video buffers, plus H264EncoderDelegate — the
/// project-defined callback protocol declared in H264Encoder.h — for
/// receiving encoded output.
@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate,H264EncoderDelegate>
@end
//
// ViewController.m
// H264AACEncode
//
// Created by ZhangWen on 15/10/14.
// Copyright © 2015年 Zhangwen. All rights reserved.
//
#import "ViewController.h"
#define CAPTURE_FRAMES_PER_SECOND 20
#define SAMPLE_RATE 44100
#define VideoWidth 480
#define VideoHeight 640
@interface ViewController ()
{
UIButton *startBtn; // start/stop toggle button (created in initStartBtn)
bool startCalled; // true => next tap starts capture, false => next tap stops it
H264Encoder *h264Encoder; // project hardware H.264 encoder (see H264Encoder.h)
AACEncoder *aacEncoder; // project AAC encoder for audio buffers (see AACEncoder.h)
AVCaptureSession *captureSession; // shared session carrying both audio and video I/O
dispatch_queue_t _audioQueue; // serial queue on which audio sample buffers are delivered
AVCaptureConnection* _audioConnection; // used to tell audio and video buffers apart in the shared delegate callback
AVCaptureConnection* _videoConnection;
NSMutableData *_data; // scratch buffer for encoded bytes
NSString *h264File; // Documents/test.h264 output path
NSFileHandle *fileHandle; // open write handle onto h264File
}
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Build the long-lived pieces up front — capture session, encoded-data
    // buffer, start/stop button. Actual capture begins only when the user
    // taps the button.
    captureSession = [[AVCaptureSession alloc] init];
    _data = [NSMutableData data];
    startCalled = true;
    [self initStartBtn];
}
#pragma mark
#pragma mark - 设置音频 capture
/// Wires the microphone into the shared capture session and creates the AAC
/// encoder. Audio sample buffers are delivered to this object on a private
/// serial queue via AVCaptureAudioDataOutputSampleBufferDelegate;
/// _audioConnection lets the shared delegate callback identify audio buffers.
- (void) setupAudioCapture {
    aacEncoder = [[AACEncoder alloc] init];
    /*
     * Create audio connection
     */
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
    // BUG FIX: check the returned object, not the NSError out-parameter —
    // per Cocoa convention the error is only meaningful when init returns nil,
    // and the original went on to add a possibly-nil input to the session.
    if (!audioInput) {
        NSLog(@"Error getting audio input device: %@", error.description);
        return; // no microphone input — nothing to wire up
    }
    if ([captureSession canAddInput:audioInput]) {
        [captureSession addInput:audioInput];
    }
    _audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
    AVCaptureAudioDataOutput* audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    [audioOutput setSampleBufferDelegate:self queue:_audioQueue];
    if ([captureSession canAddOutput:audioOutput]) {
        [captureSession addOutput:audioOutput];
    }
    _audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
}
/// Returns the first video-capture device at the given position
/// (front/back), or nil when no such camera exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    AVCaptureDevice *match = nil;
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            match = candidate;
            break;
        }
    }
    return match;
}
#pragma mark
#pragma mark - 设置视频 capture
/// Builds the video half of the pipeline: camera input pinned to
/// CAPTURE_FRAMES_PER_SECOND, 420f (NV12) frames delivered to this object on
/// the main queue, session preset 640x480, landscape-right orientation when
/// supported, and a fresh Documents/test.h264 file for the encoded bitstream.
/// NOTE(review): the selector spelling "Caprure" is a typo, kept so existing
/// callers keep working.
- (void) setupVideoCaprure
{
    // FIXME(review): the result of -initWithConfiguration is discarded; if it
    // returns instancetype this should be
    // h264Encoder = [[H264Encoder alloc] initWithConfiguration];
    // — confirm against H264Encoder.h before changing.
    h264Encoder = [H264Encoder alloc];
    [h264Encoder initWithConfiguration];
    NSError *deviceError;
    AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&deviceError];
    // BUG FIX: check the returned input instead of ignoring deviceError —
    // the original would proceed and silently end up with no video input.
    if (!inputDevice) {
        NSLog(@"Error getting video input device: %@", deviceError.description);
        return;
    }
    // Output delivering uncompressed 420f (NV12) frames — presumably the
    // pixel format H264Encoder expects; confirm against its implementation.
    AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
    outputDevice.videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
    [outputDevice setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    // Pin capture to a fixed frame rate. BUG FIX: test lockForConfiguration:'s
    // BOOL return instead of the error pointer, and only unlock after a
    // successful lock (the original unlocked unconditionally).
    NSError *error;
    if ([cameraDevice lockForConfiguration:&error]) {
        NSLog(@"cameraDevice.activeFormat.videoSupportedFrameRateRanges IS %@",[cameraDevice.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]);
        if (cameraDevice.activeFormat.videoSupportedFrameRateRanges) {
            [cameraDevice setActiveVideoMinFrameDuration:CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND)];
            [cameraDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND)];
        }
        [cameraDevice unlockForConfiguration];
    } else {
        NSLog(@"Error locking camera for configuration: %@", error.description);
    }
    // BUG FIX: batch session mutations inside begin/commitConfiguration —
    // within this method the original called beginConfiguration but never
    // commitConfiguration, so the batched changes were never applied.
    [captureSession beginConfiguration];
    if ([captureSession canAddInput:inputDevice]) {
        [captureSession addInput:inputDevice];
    }
    if ([captureSession canAddOutput:outputDevice]) {
        [captureSession addOutput:outputDevice];
    }
    // Use the constant directly — wrapping it in stringWithString: was a no-op.
    [captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    [captureSession commitConfiguration];
    _videoConnection = [outputDevice connectionWithMediaType:AVMediaTypeVideo];
    // Force landscape-right frames when the connection supports orientation.
    if ([_videoConnection isVideoOrientationSupported])
    {
        [_videoConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
    }
    // Recreate Documents/test.h264 so each run starts with an empty file.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    h264File = [documentsDirectory stringByAppendingPathComponent:@"test.h264"];
    [fileManager removeItemAtPath:h264File error:nil];
    [fileManager createFileAtPath:h264File contents:nil attributes:nil];
    fileHandle = [NSFileHandle fileHandleForWritingAtPath:h264File];
    [h264Encoder initEncode:VideoWidth height:VideoHeight];
    h264Encoder.delegate = self;
}
#pragma mark
#pragma mark - sampleBuffer 数据
-(void) captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
double dPTS = (double)(pts.value) / pts