iOS8系统H264视频硬件解码Demo

H文件:HardwareDecompressVideo.h

//
//  HardwareDecompressVideo.h
//  H264HardwareCodec
//
//  Created by scott on 11/26/15.
//  Copyright © 2015 com.KirogiYi. All rights reserved.
//

#include "AVFrameData.h"
#include "YUVData.h"
#import <VideoToolbox/VideoToolbox.h>
#import <Foundation/Foundation.h>

// One H.264 NAL unit located inside a larger Annex-B buffer. `data` points at
// the unit's 4-byte start code within the caller's buffer (a borrowed pointer,
// not a copy); `size` counts the start-code prefix plus payload.
typedef struct _NALUnit{
    unsigned int type;    // NAL unit type (low 5 bits of the header byte)
    unsigned int size;    // Total bytes, including the 4-byte start-code prefix
    unsigned char *data;  // Borrowed pointer into the source buffer
}NALUnit;

// H.264 NAL unit types this decoder handles (ISO/IEC 14496-10, Table 7-1).
// NS_ENUM gives an explicit underlying type and switch-exhaustiveness warnings.
typedef NS_ENUM(NSUInteger, NALUType) {
    NALUTypeBPFrame = 0x01, // Coded slice of a non-IDR (P/B) picture
    NALUTypeIFrame  = 0x05, // Coded slice of an IDR picture (key frame)
    NALUTypeSPS     = 0x07, // Sequence parameter set
    NALUTypePPS     = 0x08  // Picture parameter set
};

/// Hardware H.264 decoder built on VideoToolbox. Feed Annex-B encoded frame
/// data via -deCompressedCMSampleBufferWithData:andOffset:; call -takePicture:
/// to save the next decoded frame as a JPEG.
@interface HardwareDecompressVideo : NSObject

- (instancetype)init;

/// Blocks the calling thread until the next decoded frame has been written as
/// a JPEG to fileName by the decode path. Must not be called on the thread
/// that drives decoding, or it never returns.
/// @param fileName Destination path for the JPEG file.
/// @return true once the image has been saved.
- (BOOL)takePicture:(NSString *)fileName;

/// Decodes the first decodable NAL unit found in frameData at/after offset.
/// @return A +1 retained pixel buffer (caller must CVPixelBufferRelease), or NULL.
- (CVPixelBufferRef)deCompressedCMSampleBufferWithData:(AVFrameData*)frameData andOffset:(int)offset;

@end


M文件:HardwareDecompressedVideoFrame.m:

//
//  HardwareDecompressedVideoFrame.m
//  H264HardwareCodec
//
//  Created by scott on 11/26/15.
//  Copyright © 2015 com.KirogiYi. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AVFrameData.h"
#import "HardwareDecompressVideo.h"
#include "AVDataBufferManager.h"

@implementation HardwareDecompressVideo{
    uint8_t *_sps;  // Latest SPS payload (start code stripped); malloc'd, freed once the session exists
    uint8_t *_pps;  // Latest PPS payload (start code stripped); malloc'd, freed once the session exists
    
    // Snapshot state: takePicture: spin-waits on _isSaveTakePictureImage while
    // the decode path writes it. NOTE(review): these flags are shared across
    // threads with no volatile/atomic protection — confirm the threading model.
    BOOL _isTakePicture;
    BOOL _isSaveTakePictureImage;
    NSString *_saveTakePicturePath;  // Destination path for the captured JPEG
    
    unsigned int _spsSize;  // Byte length of _sps
    unsigned int _ppsSize;  // Byte length of _pps
    
    int64_t mCurrentVideoSeconds;  // NOTE(review): never referenced in this file
    VTDecompressionSessionRef _decompressionSession;       // Lazily created VideoToolbox decode session
    CMVideoFormatDescriptionRef _decompressionFormatDesc;  // H.264 format description built from SPS/PPS
}

// Designated initializer. The snapshot request flag starts out cleared; every
// other ivar relies on alloc's zero-fill.
-(id)init
{
    self = [super init];
    if (self == nil) {
        return nil;
    }
    
    _isTakePicture = false;
    return self;
}

// Requests a JPEG snapshot of the next decoded frame and blocks the calling
// thread until the decode path (decompressWithNalUint:) has written it.
//
// The decode path observes _isTakePicture, saves the frame to
// _saveTakePicturePath, and sets _isSaveTakePictureImage — which releases the
// wait below.
//
// @param fileName Absolute path the JPEG is written to.
// @return true once the image has been saved.
//
// NOTE(review): this must not run on the thread that drives decoding, or it
// deadlocks — confirm against the caller.
-(BOOL)takePicture:(NSString *)fileName
{
    _isTakePicture = true;
    _isSaveTakePictureImage = false;
    _saveTakePicturePath = fileName;
    
    // Poll instead of spinning flat out: the short sleep keeps the CPU idle,
    // and being an opaque call it forces the ivar to be re-read each pass
    // (a bare empty loop over a plain BOOL may be hoisted by the optimizer
    // into an infinite loop).
    while(_isSaveTakePictureImage == false){
        [NSThread sleepForTimeInterval:0.01];
    }
    
    _isTakePicture = false;
    return true;
}

// Splits frameData into Annex-B NAL units starting at offset and drives the
// hardware decoder. SPS/PPS units are cached; once both are present, an IDR
// frame creates the decode session and is decoded. Returns a +1 retained
// CVPixelBufferRef for the first decodable frame found (caller must
// CVPixelBufferRelease it), or NULL when the buffer holds no decodable frame.
-(CVPixelBufferRef)deCompressedCMSampleBufferWithData:(AVFrameData *)frameData andOffset:(int)offset
{
    NALUnit nalUnit;
    CVPixelBufferRef pixelBufferRef = NULL;
    char *data = (char*)frameData->getData();
    int dataLen = frameData->getDataLen();
    
    if(data == NULL || dataLen == 0){
        return NULL;
    }
    
    while([self nalunitWithData:data andDataLen:dataLen andOffset:offset toNALUnit:&nalUnit])
    {
        // A unit must carry at least the 4-byte start code plus one payload
        // byte. nalUnit.size is unsigned, so the original `size - 4 <= 0`
        // checks could never fire and would wrap to a huge malloc size.
        if(nalUnit.data == NULL || nalUnit.size <= 4){
            return NULL;
        }
        
        pixelBufferRef = NULL;
        // Rewrite the Annex-B start code in place as an AVCC length prefix.
        [self infalteStartCodeWithNalunitData:&nalUnit];
        NSLog(@"NALUint Type: %d.", nalUnit.type);
        
        switch (nalUnit.type) {
            case NALUTypeIFrame://IFrame
                if(_sps && _pps)
                {
                    if([self initH264Decoder]){
                        pixelBufferRef = [self decompressWithNalUint:nalUnit];
                        NSLog(@"NALUint I Frame size:%d", nalUnit.size);
                        
                        // The session has consumed the parameter sets; drop the copies.
                        free(_sps);
                        free(_pps);
                        _pps = NULL;
                        _sps = NULL;
                        return pixelBufferRef;
                    }
                }
                break;
            case NALUTypeSPS://SPS
                // Free any previously cached SPS so repeated parameter sets
                // (common in live streams) do not leak.
                if(_sps){
                    free(_sps);
                    _sps = NULL;
                }
                _spsSize = nalUnit.size - 4;
                _sps = (uint8_t*)malloc(_spsSize);
                if(_sps == NULL){
                    return NULL;
                }
                memcpy(_sps, nalUnit.data + 4, _spsSize);
                NSLog(@"NALUint SPS size:%d", nalUnit.size - 4);
                break;
            case NALUTypePPS://PPS
                if(_pps){
                    free(_pps);
                    _pps = NULL;
                }
                _ppsSize = nalUnit.size - 4;
                _pps = (uint8_t*)malloc(_ppsSize);
                if(_pps == NULL){
                    return NULL;
                }
                memcpy(_pps, nalUnit.data + 4, _ppsSize);
                NSLog(@"NALUint PPS size:%d", nalUnit.size - 4);
                break;
            case NALUTypeBPFrame://B/P Frame
                pixelBufferRef = [self decompressWithNalUint:nalUnit];
                NSLog(@"NALUint B/P Frame size:%d", nalUnit.size);
                return pixelBufferRef;
            default:
                break;
        }
        
        // nalUnit.size includes the start code, so this advances to the byte
        // just past the unit we consumed.
        offset += nalUnit.size;
        if(offset >= dataLen){
            return NULL;
        }
    }
    
    NSLog(@"The AVFrame data size:%d", offset);
    return NULL;
}

// Overwrites the unit's 4-byte Annex-B start code (00 00 00 01) in place with
// the big-endian payload length, converting the buffer to the AVCC framing
// VideoToolbox expects. dataUnit->size must include the 4 prefix bytes.
// (Method name typo "infalte" is historical; kept to avoid breaking callers.)
-(void)infalteStartCodeWithNalunitData:(NALUnit *)dataUnit
{
    //Inflate start code with data length
    unsigned char* data  = dataUnit->data;
    unsigned int dataLen = dataUnit->size - 4;   // payload length excludes the prefix itself
    
    // Store the length big-endian, as the AVCC/MP4 sample layout requires.
    data[0] = (unsigned char)(dataLen >> 24);
    data[1] = (unsigned char)(dataLen >> 16);
    data[2] = (unsigned char)(dataLen >> 8);
    data[3] = (unsigned char)(dataLen & 0xff);
}

// Locates the next Annex-B NAL unit (prefixed by 00 00 00 01) in data,
// starting the search at offset.
//
// On success fills unit: `data` points at the start code inside the caller's
// buffer, `size` counts the start code plus payload (the unit runs up to the
// next start code, or to the end of the buffer for the last unit), and `type`
// holds the low 5 bits of the NAL header byte. Returns 1 on success and 0
// when no complete unit remains.
//
// Fixes vs. the original: (1) "not found" returned -1, which is truthy in the
// caller's while([self nalunitWithData:...]) loop; (2) the chained data[i++]
// comparisons could read past dataLen and skip start codes by advancing up to
// 4 bytes per mismatch; (3) the inner scan could overshoot dataLen so the
// `pos == dataLen` last-unit test failed and size overran the buffer.
-(int)nalunitWithData:(char *)data andDataLen:(int)dataLen andOffset:(int)offset toNALUnit:(NALUnit *)unit
{
    unit->type = 0;
    unit->size = 0;
    unit->data = NULL;
    
    if(data == NULL || offset < 0){
        return 0;
    }
    
    // Find the next 4-byte start code, advancing one byte at a time and never
    // reading past dataLen.
    int start = offset;
    while(start + 4 <= dataLen &&
          !(data[start] == 0x00 && data[start + 1] == 0x00 &&
            data[start + 2] == 0x00 && data[start + 3] == 0x01)){
        start++;
    }
    if(start + 4 > dataLen){
        return 0; // no start code left
    }
    
    int payload = start + 4;
    if(payload >= dataLen){
        return 0; // start code with no payload byte after it
    }
    
    // Scan for the start code of the following unit; the current unit ends there.
    int next = payload;
    while(next + 4 <= dataLen &&
          !(data[next] == 0x00 && data[next + 1] == 0x00 &&
            data[next + 2] == 0x00 && data[next + 3] == 0x01)){
        next++;
    }
    if(next + 4 > dataLen){
        next = dataLen; // last unit runs to the end of the buffer
    }
    
    unit->type = data[payload] & 0x1f;
    unit->size = (unsigned int)(next - start); // includes the 4 prefix bytes
    unit->data = (unsigned char*)&data[start];
    return 1;
}

// Lazily creates the VideoToolbox decompression session from the cached
// SPS/PPS (_sps/_pps must be set). Returns true when a usable session exists.
-(BOOL)initH264Decoder
{
    if(_decompressionSession){
        return true;
    }
    
    // Build the format description directly from the raw SPS/PPS payloads.
    const uint8_t * const parameterSetPointers[2] = {_sps, _pps};
    const size_t parameterSetSizes[2] = {_spsSize, _ppsSize};
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2,//parameter count
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4,//NAL start code size
                                                                          &(_decompressionFormatDesc));
    if(status != noErr){
        NSLog(@"Error code %d:Creates a format description for a video media stream described by H.264 parameter set NAL units.", (int)status);
        return false;
    }
    
    //kCVPixelFormatType_420YpCbCr8Planar is YUV420, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12
    uint32_t biPlanarType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef pixelFormatValue = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &biPlanarType);
    const void *keys[] = {kCVPixelBufferPixelFormatTypeKey};
    const void *values[] = {pixelFormatValue};
    // Use the CFType callbacks so the dictionary retains/releases its contents;
    // the original passed NULL callbacks and never released the CFNumber (leak).
    CFDictionaryRef attributes = CFDictionaryCreate(kCFAllocatorDefault, keys, values, 1,
                                                    &kCFTypeDictionaryKeyCallBacks,
                                                    &kCFTypeDictionaryValueCallBacks);
    CFRelease(pixelFormatValue); // the dictionary holds its own reference now
    
    //Create decompression session
    VTDecompressionOutputCallbackRecord outputCallbackRecord;
    outputCallbackRecord.decompressionOutputRefCon = NULL;
    outputCallbackRecord.decompressionOutputCallback = decompressionOutputCallbackRecord;
    status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                          _decompressionFormatDesc,
                                          NULL, attributes,
                                          &outputCallbackRecord,
                                          &_decompressionSession);
    CFRelease(attributes);
    if(status != noErr){
        // Drop the format description too: keeping it leaked the object and
        // prevented a later retry from rebuilding it with fresh SPS/PPS.
        CFRelease(_decompressionFormatDesc);
        _decompressionFormatDesc = NULL;
        _decompressionSession = NULL;
        return false;
    }
    
    return true;
}

//Callback function:Return data when finished, the data includes decompress data、status and so on.
// VideoToolbox hands the decoded frame back here. sourceFrameRefCon is the
// CVPixelBufferRef* passed per-frame to VTDecompressionSessionDecodeFrame
// (see decompressWithNalUint:). On success the buffer is stored +1 retained —
// the receiver must CVPixelBufferRelease it; on failure NULL is stored.
static void decompressionOutputCallbackRecord(void * CM_NULLABLE decompressionOutputRefCon,
                                              void * CM_NULLABLE sourceFrameRefCon,
                                              OSStatus status,
                                              VTDecodeInfoFlags infoFlags,
                                              CM_NULLABLE CVImageBufferRef imageBuffer,
                                              CMTime presentationTimeStamp,
                                              CMTime presentationDuration ){
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    if(outputPixelBuffer == NULL){
        return; // no destination to write into
    }
    if(status != noErr || imageBuffer == NULL){
        // Decode failed: report "no frame" explicitly. (Original relied on
        // CVPixelBufferRetain(NULL) returning NULL — same observable result.)
        *outputPixelBuffer = NULL;
        return;
    }
    *outputPixelBuffer = CVPixelBufferRetain(imageBuffer);
}

// Wraps one AVCC-framed NAL unit in CoreMedia containers and pushes it through
// the decompression session synchronously. Returns the decoded frame as a +1
// retained CVPixelBufferRef (written by decompressionOutputCallbackRecord via
// the sourceFrameRefCon out-parameter), or NULL/nil on failure; the caller
// must release it. dataUnit.data must already carry the 4-byte length prefix
// written by infalteStartCodeWithNalunitData:.
-(CVPixelBufferRef)decompressWithNalUint:(NALUnit)dataUnit
{
    CMBlockBufferRef blockBufferRef = NULL;
    CVPixelBufferRef outputPixelBufferRef = NULL;
    
    //1.Fetch video data and generate CMBlockBuffer
    // kCFAllocatorNull as the block allocator: the block buffer borrows
    // dataUnit.data rather than copying or freeing it, so dataUnit.data must
    // stay alive for the duration of this call.
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
                                                         dataUnit.data,
                                                         dataUnit.size,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         dataUnit.size,
                                                         0,
                                                         &blockBufferRef);
    //2.Create CMSampleBuffer
    if(status == kCMBlockBufferNoErr){
        CMSampleBufferRef sampleBufferRef = NULL;
        const size_t sampleSizes[] = {dataUnit.size};
        OSStatus createStatus = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBufferRef,
                                           _decompressionFormatDesc,
                                           1,
                                           0,
                                           NULL,
                                           1,
                                           sampleSizes,
                                           &sampleBufferRef);
        
        //3.Create CVPixelBuffer
        if(createStatus == kCMBlockBufferNoErr && sampleBufferRef){
            // Flags 0 => synchronous decode: the output callback runs before
            // VTDecompressionSessionDecodeFrame returns, filling
            // outputPixelBufferRef through the refCon pointer.
            VTDecodeFrameFlags frameFlags = 0;
            VTDecodeInfoFlags infoFlags = 0;
            
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decompressionSession,
                                                                      sampleBufferRef,
                                                                      frameFlags,
                                                                      &outputPixelBufferRef,
                                                                      &infoFlags);
            
            if(decodeStatus != noErr){
                // Decode failed: release the CoreMedia wrappers and report no frame.
                CFRelease(sampleBufferRef);
                CFRelease(blockBufferRef);
                outputPixelBufferRef = nil;
                return outputPixelBufferRef;
            }
            
            
            // Snapshot side path: if takePicture: is waiting, render this frame
            // to a JPEG and flip the flag it spin-waits on. Note this JPEG
            // encode runs on the decode thread and will stall decoding briefly.
            if(_isTakePicture){
                if(!_isSaveTakePictureImage){
                    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:outputPixelBufferRef];
                    CIContext *ciContext = [CIContext contextWithOptions:nil];
                    CGImageRef videoImage = [ciContext
                                             createCGImage:ciImage
                                             fromRect:CGRectMake(0, 0,
                                             CVPixelBufferGetWidth(outputPixelBufferRef),
                                             CVPixelBufferGetHeight(outputPixelBufferRef))];
                    
                    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
                    _isSaveTakePictureImage = [UIImageJPEGRepresentation(uiImage, 1.0) writeToFile:_saveTakePicturePath atomically:YES];
                    CGImageRelease(videoImage);
                }
            }
            CFRelease(sampleBufferRef);
        }
        CFRelease(blockBufferRef);
    }
    // Still +1 retained from the output callback; ownership passes to the caller.
    return outputPixelBufferRef;
}

// Non-memory cleanup: the malloc'd parameter-set buffers and the
// VideoToolbox/CoreMedia objects, none of which ARC manages.
-(void)dealloc
{
    if(_sps){
        free(_sps);
        _sps = NULL;
    }
    
    if(_pps){
        free(_pps);
        _pps = NULL;
    }
    
    if(_decompressionSession){
        // Invalidate before releasing so the session tears down
        // deterministically even if another reference is still outstanding,
        // per the VideoToolbox documentation.
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
        _decompressionSession = NULL;
    }
    
    if(_decompressionFormatDesc){
        CFRelease(_decompressionFormatDesc);
        _decompressionFormatDesc = NULL;
    }
}

@end


  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 11
    评论
评论 11
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值