iOS 视频和音频采集

本文介绍了在iOS平台上进行视频和音频采集以及预览的方法。使用AVCaptureSession进行视频采集,AVCaptureVideoPreviewLayer和UIView实现预览。音频采集部分提到使用AudioQueue,但指出使用AVCaptureSession配置可能较为复杂。提供了相关的QQ交流群和QQ号以便讨论。
摘要由CSDN通过智能技术生成

iOS 视频和音频采集以及预览

本文将说明如何用 iOS 做视频和音频的采集,以及预览。预览采用的是系统自带的 AVCaptureVideoPreviewLayer 和 UIView,视频采集用 AVCaptureSession,音频采集用 AudioQueue(音频采集如果用 AVCaptureSession,设置参数比较麻烦)。下面是具体代码:

//
//  Lvs_Ios_Device_Collection.h
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016年 lvs.zwg All rights reserved.
//

#include <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "Lvs_Info.h"

// Audio capture uses AudioQueue (see AQRecorderState below).

// Capture front/back camera video and microphone audio.
// Adopts the AVFoundation video and audio sample-buffer delegate protocols,
// whose method names are fixed by the framework
// (captureOutput:didOutputSampleBuffer:fromConnection:).
@interface Lvs_Ios_Device_Collection : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>


// Number of audio capture buffers kept in flight.
#define kNumberBuffers 3

// State for AudioQueue-based audio capture.
typedef struct AQRecorderState {
    AudioStreamBasicDescription  mDataFormat;                          // audio stream format
    AudioQueueRef                mQueue;                               // recording audio queue
    AudioQueueBufferRef          mBuffers[kNumberBuffers];             // in-flight capture buffers
    UInt32                       bufferByteSize;                       // size in bytes of each buffer
    Float64                      audio_seconde_time;                   // capture buffer duration in seconds (must be long enough for downstream processing, otherwise data is dropped; tentatively 0.5 s)
    Module_StreamInfo *          ModuleStreamInfo_Out_Audio_data;      // output stream-info structure through which captured audio is fetched
} AQRecorderState;


// Delegate callback that receives captured video/audio sample buffers.
// The signature is dictated by the AVCapture*DataOutputSampleBufferDelegate protocols.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;

// Initialize capture; returns module info for the available streams via
// ModuleInfo_Current and the stream count via CurrentLength.
// NOTE(review): Module_Info is declared in Lvs_Info.h (not visible here) — exact ownership of the out-pointer should be confirmed there.
- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength;
// Tear down capture for the given module info.
- (void)device_uinit: (Module_Info *) ModuleInfo_Current;

// Pump captured data downstream: forwards the current module's output to the
// next video and audio modules. Returns an int status code.
- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
              andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
              andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int)NextLength_audio;


@end


//
//  Lvs_Ios_Device_Collection.m
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016年 lvs. All rights reserved.
//

#import "Lvs_Ios_Device_Collection.h"


@implementation Lvs_Ios_Device_Collection

int m_stream_num = 3;                                        // only three streams here: back camera, front camera, microphone (audio really uses AudioQueue; the slot is just reserved)
//video
std::map<int,AVCaptureSession *> m_list_capture_session;     // capture session per stream id
AVCaptureConnection *videoCaptureConnection_back = nil;      // connection delivering back-camera video output
AVCaptureConnection *videoCaptureConnection_front = nil;     // connection delivering front-camera video output
AVCaptureConnection *audioCaptureConnection = nil;           // connection delivering audio output
long long up_time_video_front = 0;                           // timestamp (ms since 1970) of the previous front-camera frame
long long up_time_video_back = 0;                            // timestamp (ms since 1970) of the previous back-camera frame
//audio
static AQRecorderState m_aqData = {0};                       // AudioQueue-based audio capture state
long long up_time_audio = 0;                                 // timestamp of the previous audio frame

// Addresses used for fetching captured data.
// NOTE(review): file-scope globals make this class effectively a singleton — confirm only one instance is ever created.
Module_Info * m_device_module_info_collection;               // index 0: back camera, 1: front camera, 2: microphone
int m_device_module_length_collection;

// Returns the AVCaptureSession registered for stream_id, or nil if none.
// Uses find() instead of operator[]: operator[] would silently insert a
// default (nil) entry into the map for every unknown stream_id queried,
// growing the map as a side effect of a read.
-(AVCaptureSession *)get_list_session: (int)stream_id
{
    std::map<int,AVCaptureSession *>::const_iterator it = m_list_capture_session.find(stream_id);
    return (it != m_list_capture_session.end()) ? it->second : nil;
}

// Returns a pointer to the file-scope AudioQueue recorder state.
// Callers share this single static instance; it is never nil.
-(AQRecorderState *)get_AQRecorderState
{
    return &m_aqData;
}

// Returns the capture connection for the requested stream.
// type: 0 = video, 1 = audio. backorfront (video only): 0 = back camera, 1 = front camera.
// Any unrecognized combination yields nil.
-(AVCaptureConnection *)get_connection: (int)type andbackorfront: (int) backorfront
{
    switch (type)
    {
        case 0: // video: pick the camera-specific connection
            switch (backorfront)
            {
                case 0:  return videoCaptureConnection_back;
                case 1:  return videoCaptureConnection_front;
                default: break;
            }
            break;
        case 1: // audio: single microphone connection
            return audioCaptureConnection;
        default:
            break;
    }
    return nil;
}

//用于视频数据输出的代理方法
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"delegate");
    if (connection == videoCaptureConnection_back) //0
    {
        //NSLog(@"videoCaptureConnection_back");
        
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        
        //LOCK
        if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //buf
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //bufsize
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer);   //这个值不准要自己算
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //PlaneCount
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %d",buffSize);
            //NSLog(@"width %d",width);
            //NSLog(@"height %d",height);
            //NSLog(@"PlaneCount %d",PlaneCount);
            
            //获取1970到现在的时间毫秒
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
            
            if(theTime > up_time_video_back)
            {
                //拷贝数据
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = width * height + width * height /2;
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Ou
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值