iOS Video and Audio Capture and Preview
This article shows how to capture video and audio on iOS, and how to preview the camera. The preview uses the system-provided AVCaptureVideoPreviewLayer attached to a UIView, video capture uses AVCaptureSession, and audio capture uses AudioQueue, because configuring the audio parameters through AVCaptureSession is cumbersome. A short preview sketch comes first, then the capture code.
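The preview wiring is not part of the capture class listed below, so here is a minimal sketch; session and previewView are illustrative names, not identifiers from this project. It attaches an AVCaptureVideoPreviewLayer, backed by the running capture session, to a hosting UIView:

//attach a preview layer backed by the capture session to a hosting UIView
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; //fill the view, cropping as needed
previewLayer.frame = previewView.bounds;
[previewView.layer addSublayer:previewLayer];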
//
// Lvs_Ios_Device_Collection.h
// LvsIos
//
// Created by mx on 16/9/5.
// Copyright © 2016 lvs.zwg All rights reserved.
//
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "Lvs_Info.h"
//audio capture uses AudioQueue
//adopt the video/audio data output delegate protocols (fixed protocol names and callback methods)
@interface Lvs_Ios_Device_Collection : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
//number of audio capture buffers
#define kNumberBuffers 3
//audio capture state struct (AudioQueue); see the setup sketch after this header
typedef struct AQRecorderState {
    AudioStreamBasicDescription mDataFormat;      //format
    AudioQueueRef mQueue;                         //audio queue
    AudioQueueBufferRef mBuffers[kNumberBuffers]; //buffer cache
    UInt32 bufferByteSize;                        //size of each buffer
    Float64 audio_seconde_time;                   //duration of each capture buffer (must be long enough for downstream processing or data is dropped; tentatively 0.5 s)
    Module_StreamInfo * ModuleStreamInfo_Out_Audio_data; //output data info struct (used to fetch the captured data)
} AQRecorderState;
//delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
//init
- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength;
//uninit
- (void)device_uinit: (Module_Info *) ModuleInfo_Current;
//write
- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
    andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
    andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int) NextLength_audio;
@end
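Before the implementation, here is a minimal sketch of how a struct like AQRecorderState is typically wired to an input AudioQueue. The PCM format values and the empty callback body are assumptions for illustration, not this project's actual device_init; the call sequence (AudioQueueNewInput, allocate and enqueue kNumberBuffers buffers, AudioQueueStart) is the standard AudioQueue recording pattern:

//minimal AudioQueue input sketch (illustrative; format values are assumed, not this project's)
static void HandleInputBuffer(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime, UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc)
{
    //inBuffer->mAudioData / inBuffer->mAudioDataByteSize hold the captured PCM;
    //hand the data off here, then re-enqueue the buffer so the queue can refill it
    AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
}
static void StartRecording(AQRecorderState *aq)
{
    //44.1 kHz, 16-bit, 2-channel interleaved linear PCM (assumed values)
    aq->mDataFormat.mSampleRate       = 44100.0;
    aq->mDataFormat.mFormatID         = kAudioFormatLinearPCM;
    aq->mDataFormat.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
    aq->mDataFormat.mChannelsPerFrame = 2;
    aq->mDataFormat.mBitsPerChannel   = 16;
    aq->mDataFormat.mBytesPerFrame    = 4; //2 channels * 2 bytes
    aq->mDataFormat.mFramesPerPacket  = 1;
    aq->mDataFormat.mBytesPerPacket   = 4;
    AudioQueueNewInput(&aq->mDataFormat, HandleInputBuffer, aq, NULL, kCFRunLoopCommonModes, 0, &aq->mQueue);
    //0.5 s per buffer, per the comment on audio_seconde_time in the header
    aq->audio_seconde_time = 0.5;
    aq->bufferByteSize = (UInt32)(aq->mDataFormat.mSampleRate * aq->mDataFormat.mBytesPerFrame * aq->audio_seconde_time);
    for (int i = 0; i < kNumberBuffers; ++i)
    {
        AudioQueueAllocateBuffer(aq->mQueue, aq->bufferByteSize, &aq->mBuffers[i]);
        AudioQueueEnqueueBuffer(aq->mQueue, aq->mBuffers[i], 0, NULL);
    }
    AudioQueueStart(aq->mQueue, NULL);
}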
//
// Lvs_Ios_Device_Collection.m
// LvsIos
//
// Created by mx on 16/9/5.
// Copyright © 2016 lvs. All rights reserved.
//
#import "Lvs_Ios_Device_Collection.h"
@implementation Lvs_Ios_Device_Collection
int m_stream_num = 3; //only three streams here: front camera, back camera, microphone (audio actually uses AudioQueue; its slot is just reserved)
//video
std::map<int,AVCaptureSession *> m_list_capture_session; //sessions keyed by stream id (std::map means this file must be compiled as Objective-C++, i.e. as a .mm file)
AVCaptureConnection *videoCaptureConnection_back = nil;  //connection delivering back-camera video
AVCaptureConnection *videoCaptureConnection_front = nil; //connection delivering front-camera video
AVCaptureConnection *audioCaptureConnection = nil;       //connection delivering audio
long long up_time_video_front = 0; //timestamp of the previous front-camera frame
long long up_time_video_back = 0;  //timestamp of the previous back-camera frame
//audio
static AQRecorderState m_aqData = {0}; //audio capture state struct (AudioQueue)
long long up_time_audio = 0; //timestamp of the previous audio frame
//addresses used to fetch the captured data
Module_Info * m_device_module_info_collection; //0: back camera 1: front camera 2: microphone
int m_device_module_length_collection;
//get the session for a given stream_id
-(AVCaptureSession *)get_list_session: (int)stream_id
{
    return m_list_capture_session[stream_id];
}
-(AQRecorderState *)get_AQRecorderState
{
    return &m_aqData;
}
//get a connection (type 0->video 1->audio) (backorfront 0->back 1->front)
-(AVCaptureConnection *)get_connection: (int)type andbackorfront: (int) backorfront
{
    //video
    if (type == 0)
    {
        if (backorfront == 0)
        {
            return videoCaptureConnection_back;
        }
        else if (backorfront == 1)
        {
            return videoCaptureConnection_front;
        }
    }
    //audio
    else if (type == 1)
    {
        return audioCaptureConnection;
    }
    return nil;
}
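//(Sketch for context: the delegate below receives frames from an AVCaptureVideoDataOutput.
// The wiring shown here is illustrative, not this project's actual device_init; it is the
// standard pattern that produces the NV12 two-plane buffers handled in captureOutput:)
//
//    AVCaptureSession *session = [[AVCaptureSession alloc] init];
//    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
//    if ([session canAddInput:input]) [session addInput:input];
//    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
//    //request NV12 (bi-planar YUV) to match the two-plane copy below
//    output.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
//                                  @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) };
//    [output setSampleBufferDelegate:self queue:dispatch_queue_create("video.capture", DISPATCH_QUEUE_SERIAL)];
//    if ([session canAddOutput:output]) [session addOutput:output];
//    videoCaptureConnection_back = [output connectionWithMediaType:AVMediaTypeVideo];
//    [session startRunning];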
//delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"delegate");
    if (connection == videoCaptureConnection_back) //0
    {
        //NSLog(@"videoCaptureConnection_back");
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        //LOCK the pixel buffer before touching its base addresses
        if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //plane base addresses (NV12: plane 0 = Y, plane 1 = interleaved UV)
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //buffer size
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer); //this value is unreliable; compute the size yourself
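            //(Note: CVPixelBufferGetDataSize includes row padding, so a tight NV12 size is better
            // computed per plane from the stride, e.g.
            // CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0) * CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
            // for Y, and the same with plane index 1 for UV)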
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //plane count
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %zu",buffSize);
            //NSLog(@"width %zu",width);
            //NSLog(@"height %zu",height);
            //NSLog(@"PlaneCount %zu",PlaneCount);
            //milliseconds since 1970 (wall clock)
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
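            //(Note: CMSampleBufferGetPresentationTimeStamp(sampleBuffer) would give the capture-device
            // PTS; wall-clock time is used here, presumably so video and AudioQueue samples share one timebase)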
            if (theTime > up_time_video_back)
            {
                //copy the data out
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = width * height + width * height / 2;
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Ou