iOS RTMP Video Live Streaming

This post gives a quick overview of camera capture on iOS.

First, initialize an AVCaptureSession:

// Initialize the AVCaptureSession
_session = [[AVCaptureSession alloc] init];
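The post never sets a capture preset; if you need to control the capture resolution, a common addition (the 1280x720 preset here is only an example, not part of the original code) looks like this:

// Optionally choose a capture preset to control resolution
if ([_session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _session.sessionPreset = AVCaptureSessionPreset1280x720;
}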

Next, configure the capture formats for video and audio. The two are configured separately, which means you can also capture video only.

// Configure the capture input (camera)
NSError *error = nil;

// Get a capture device, e.g. the front or back camera
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

// Create a capture input from the device
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice
                                                                          error:&error];
if (error) {
    NSLog(@"Error getting video input device: %@", error.description);
}
if ([_session canAddInput:videoInput]) {
    [_session addInput:videoInput];  // Add to the session
}

// Configure the capture output, i.e. the interface through which we receive video frames
_videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[_videoOutput setSampleBufferDelegate:self queue:_videoQueue];

// Configure the output pixel format
NSDictionary *captureSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
_videoOutput.videoSettings = captureSettings;
_videoOutput.alwaysDiscardsLateVideoFrames = YES;
if ([_session canAddOutput:_videoOutput]) {
    [_session addOutput:_videoOutput];  // Add to the session
}

// Keep the connection so the sample buffer delegate can tell whether a buffer is video or audio
_videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
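The code above captures video only. For completeness, here is a minimal sketch of the matching audio path; the `_audioQueue` and `_audioOutput` ivars are assumptions for illustration, and `_audioConnection` is what the delegate below checks against:

// Audio input (microphone)
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if ([_session canAddInput:audioInput]) {
    [_session addInput:audioInput];
}

// Audio output; delivers CMSampleBuffers of PCM data to the same delegate
_audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[_audioOutput setSampleBufferDelegate:self queue:_audioQueue];
if ([_session canAddOutput:_audioOutput]) {
    [_session addOutput:_audioOutput];
}

// Keep the audio connection so the delegate can identify audio buffers
_audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];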

Implement the sample buffer delegate (AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // sampleBuffer is the captured data; use the connection to tell whether it is video or audio
    if (connection == _videoConnection) {  // Video
        /*
        // Query the current frame dimensions
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        NSLog(@"video width: %zu  height: %zu", width, height);
        */
        NSLog(@"Got a video sampleBuffer here; process it further (encode to H.264)");
    } else if (connection == _audioConnection) {  // Audio
        NSLog(@"Got an audio sampleBuffer here; process it further (encode to AAC)");
    }
}
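The original post stops at the NSLog placeholders. As a rough idea of what the "encode to H.264" step can look like, here is a minimal VideoToolbox sketch; the `_compressionSession` ivar, the 1280x720 dimensions, and the callback/method names are assumptions for illustration, not part of the original code:

#import <VideoToolbox/VideoToolbox.h>

// Called by VideoToolbox for each encoded frame; sampleBuffer contains H.264 data
static void compressionOutputCallback(void *refCon, void *sourceFrameRefCon,
                                      OSStatus status, VTEncodeInfoFlags infoFlags,
                                      CMSampleBufferRef sampleBuffer) {
    if (status != noErr || sampleBuffer == NULL) return;
    // Extract SPS/PPS and NAL units here and hand them to the RTMP packetizer
}

// One-time setup, e.g. after the capture session has been configured
// (_compressionSession is an assumed VTCompressionSessionRef ivar)
- (void)setupVideoEncoder {
    VTCompressionSessionCreate(kCFAllocatorDefault, 1280, 720, kCMVideoCodecType_H264,
                               NULL, NULL, NULL,
                               compressionOutputCallback, (__bridge void *)self,
                               &_compressionSession);
    VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel,
                         kVTProfileLevel_H264_Baseline_AutoLevel);
    VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
}

// Call this from the video branch of the delegate above
- (void)encodeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    VTCompressionSessionEncodeFrame(_compressionSession, imageBuffer, pts,
                                    kCMTimeInvalid, NULL, NULL, NULL);
}

The encoded sample buffers arriving in the callback would then be packetized and sent over RTMP, which is beyond the scope of this capture walkthrough.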

With configuration complete, start the session:

// Start the session
[_session startRunning];
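Note that -startRunning blocks until the session actually starts (or fails), so it is usually kicked off away from the main thread; a minimal sketch:

// -startRunning is blocking; keep it off the main thread
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [_session startRunning];
});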

1.1 Extra task: display the captured video frames on screen

This part is simple: on the sending side, just use AVFoundation's own AVCaptureVideoPreviewLayer for the on-screen preview.

_previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;  // How the preview scales the video
[[_previewLayer connection] setVideoOrientation:AVCaptureVideoOrientationPortrait];  // Video orientation
_previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:_previewLayer];

Adding this layer to the view hierarchy is all it takes to show the preview.

Full implementation:

#import "MyAVController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>

@interface MyAVController()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
- (void)initCapture;

@end


@implementation MyAVController
{
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
    AVCaptureConnection *_videoConnection;
    AVCaptureConnection *_audioConnection;
}


#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}

- (void)initCapture {
    // Configure the capture input (camera)
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Create a capture input from the device
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 10);
    
    // Configure the capture output, i.e. the interface through which we receive video frames
    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    // Configure the output pixel format
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary
                                   dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    self.captureSession = [[[AVCaptureSession alloc] init] autorelease];  // MRC: avoid a double retain
    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:captureOutput];
    [self.captureSession startRunning];
    
    // Keep the connection so the sample buffer delegate can tell whether a buffer is video or audio
    _videoConnection = [captureOutput
                        connectionWithMediaType:AVMediaTypeVideo];
    [captureOutput release];  // the session retains the output
    
    //view
    self.customLayer = [CALayer layer];
    self.customLayer.frame = self.view.bounds;
    self.customLayer.transform = CATransform3DRotate(
                                                     CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
    self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];
    self.imageView = [[[UIImageView alloc] init] autorelease];
    self.imageView.frame = CGRectMake(0, 0, 100, 100);
    [self.view addSubview:self.imageView];
    self.prevLayer = [AVCaptureVideoPreviewLayer
                      layerWithSession: self.captureSession];
    self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer: self.prevLayer];
}

#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    
    // sampleBuffer is the captured data; use the connection to tell whether it is video or audio
    if (connection == _videoConnection) {  // Video
        /*
         // Query the current frame dimensions
         CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
         size_t width = CVPixelBufferGetWidth(pixelBuffer);
         size_t height = CVPixelBufferGetHeight(pixelBuffer);
         NSLog(@"video width: %zu  height: %zu", width, height);
         */
        NSLog(@"Got a video sampleBuffer here; process it further (encode to H.264)");
    } else if (connection == _audioConnection) {  // Audio
        NSLog(@"Got an audio sampleBuffer here; process it further (encode to AAC)");
    }
    
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    // Below: convert the pixel buffer into a CGImage/UIImage so it can be shown in customLayer and imageView
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    
    [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                       withObject: (__bridge id) newImage waitUntilDone:YES];
    
    UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0
                                  orientation:UIImageOrientationRight];
    
    CGImageRelease(newImage);
    
    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image waitUntilDone:YES];
    
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    [pool drain];
}

#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
}

- (void)dealloc {
    [_captureSession release];
    [super dealloc];
}

@end
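One thing the sample never does is stop capturing; when the controller goes away you would normally stop the session before teardown. A small sketch, not part of the original code:

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];  // stop delivering sample buffers before teardown
}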

 

Reposted from: https://my.oschina.net/u/1763048/blog/714490
