#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <VideoToolbox/VideoToolbox.h>
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
@protocol H264HwDecoderImplDelegate <NSObject>
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer;
@end
@interface H264HwDecoderImpl : NSObject
@property (nonatomic, weak) id<H264HwDecoderImplDelegate> delegate;
@property (nonatomic, assign) int vheight;
@property (nonatomic, assign) int vwidth;
- (BOOL)initH264Decoder;
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize;
// Stop decoding and tear down the decompression session
- (void)stopDecode;
// Snapshot: convert a decoded pixel buffer into a UIImage
- (UIImage *)getImageCVPixel:(CVPixelBufferRef)pixelBuffer;
@end
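
// A minimal caller-side usage sketch (not part of the class above). The names
// PlayerViewController and imageView are hypothetical, for illustration only.
// The decode callback arrives on VideoToolbox's thread, so UI work is pushed
// to the main queue.
@interface PlayerViewController : UIViewController <H264HwDecoderImplDelegate>
@property (nonatomic, strong) H264HwDecoderImpl *decoder;
@property (nonatomic, strong) UIImageView *imageView;
@end

@implementation PlayerViewController
- (void)displayDecodedFrame:(CVImageBufferRef)imageBuffer {
    // getImageCVPixel: wraps the pixel buffer in a CIImage, which retains it,
    // so the image stays valid after this callback returns
    UIImage *image = [self.decoder getImageCVPixel:imageBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;
    });
}
@end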
#import "H264HwDecoderImpl.h"
#import "config.h"
@interface H264HwDecoderImpl()
{
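// Cached SPS/PPS parameter sets, copied from the stream without start codes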
uint8_t *_sps;
NSInteger _spsSize;
uint8_t *_pps;
NSInteger _ppsSize;
VTDecompressionSessionRef _decoderSession;
CMVideoFormatDescriptionRef _decoderFormatDescription;
}
@end
@implementation H264HwDecoderImpl
// Decompression output callback: invoked by VideoToolbox for each decoded frame
static void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
    // Hand the decoded frame back to the caller through sourceFrameRefCon
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    H264HwDecoderImpl *decoder = (__bridge H264HwDecoderImpl *)decompressionOutputRefCon;
    if (decoder.delegate != nil) {
        [decoder.delegate displayDecodedFrame:pixelBuffer];
    }
}
- (id)init
{
    self = [super init];
    if (self) {
        _vwidth = 640;
        _vheight = 480;
    }
    return self;
}
- (BOOL)initH264Decoder {
    if (_decoderSession) {
        return YES;
    }
    const uint8_t *const parameterSetPointers[2] = {_sps, _pps};
    const size_t parameterSetSizes[2] = {_spsSize, _ppsSize};
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2, // parameter set count (SPS + PPS)
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4, // NAL unit header length: 4-byte AVCC length prefix
                                                                          &_decoderFormatDescription);
    if (status == noErr) {
        // Hardware decoding requires kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
        // or kCVPixelFormatType_420YpCbCr8Planar,
        // because iOS uses NV12 (other platforms use NV21).
        // Note: width and height here are the reverse of the encoder's.
        NSDictionary *destinationPixelBufferAttributes = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
            (id)kCVPixelBufferWidthKey : @(_vwidth),
            (id)kCVPixelBufferHeightKey : @(_vheight),
            (id)kCVPixelBufferOpenGLCompatibilityKey : @(YES)
        };
        VTDecompressionOutputCallbackRecord callBackRecord;
        callBackRecord.decompressionOutputCallback = didDecompress;
        callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
        status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                              _decoderFormatDescription,
                                              NULL,
                                              (__bridge CFDictionaryRef)destinationPixelBufferAttributes,
                                              &callBackRecord,
                                              &_decoderSession);
        if (status == noErr) {
            VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)@(1));
            VTSessionSetProperty(_decoderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
        }
    } else {
        NSLog(@"IOS8VT: create format description failed, status=%d", (int)status);
    }
    return status == noErr;
}
- (CVPixelBufferRef)decode:(uint8_t *)frame withSize:(uint32_t)frameSize
{
    CVPixelBufferRef outputPixelBuffer = NULL;
    CMBlockBufferRef blockBuffer = NULL;
    // Wrap the AVCC-formatted NALU in a block buffer without copying it
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL,
                                                         (void *)frame,
                                                         frameSize,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         frameSize,
                                                         FALSE,
                                                         &blockBuffer);
    if (status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = {frameSize};
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           _decoderFormatDescription,
                                           1, 0, NULL, 1, sampleSizeArray,
                                           &sampleBuffer);
        if (status == noErr && sampleBuffer) {
            VTDecodeFrameFlags flags = 0;
            VTDecodeInfoFlags flagOut = 0;
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decoderSession,
                                                                      sampleBuffer,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);
            if (decodeStatus == kVTInvalidSessionErr) {
                // The session died (e.g. the app went to the background); tear it
                // down so the next keyframe recreates it
                VTDecompressionSessionInvalidate(_decoderSession);
                CFRelease(_decoderSession);
                _decoderSession = NULL;
                NSLog(@"IOS8VT: Invalid session, reset decoder session");
            } else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"IOS8VT: decode failed status=%d (bad data)", (int)decodeStatus);
            } else if (decodeStatus != noErr) {
                NSLog(@"IOS8VT: decode failed status=%d", (int)decodeStatus);
            }
            CFRelease(sampleBuffer);
        }
        CFRelease(blockBuffer);
    }
    return outputPixelBuffer;
}
- (void)decodeNalu:(uint8_t *)frame withSize:(uint32_t)frameSize
{
    int nalu_type = (frame[4] & 0x1F);
    CVPixelBufferRef pixelBuffer = NULL;
    // Replace the 4-byte Annex-B start code with a big-endian NALU length (AVCC format)
    uint32_t nalSize = (uint32_t)(frameSize - 4);
    uint8_t *pNalSize = (uint8_t *)(&nalSize);
    frame[0] = *(pNalSize + 3);
    frame[1] = *(pNalSize + 2);
    frame[2] = *(pNalSize + 1);
    frame[3] = *(pNalSize);
    // During transmission, keyframe data must not be dropped or you get a green
    // screen; B/P frames may be dropped, at the cost of stuttering.
    switch (nalu_type) {
        case 0x05: // IDR (keyframe)
            NSLog(@"nalu_type:%d Nal type is IDR frame", nalu_type);
            if ([self initH264Decoder]) {
                pixelBuffer = [self decode:frame withSize:frameSize];
            }
            break;
        case 0x07: // SPS
            NSLog(@"nalu_type:%d Nal type is SPS", nalu_type);
            _spsSize = frameSize - 4;
            if (_sps) free(_sps); // avoid leaking a previously stored SPS
            _sps = malloc(_spsSize);
            memcpy(_sps, &frame[4], _spsSize);
            break;
        case 0x08: // PPS
            NSLog(@"nalu_type:%d Nal type is PPS", nalu_type);
            _ppsSize = frameSize - 4;
            if (_pps) free(_pps); // avoid leaking a previously stored PPS
            _pps = malloc(_ppsSize);
            memcpy(_pps, &frame[4], _ppsSize);
            break;
        default: // B/P slices and everything else
            NSLog(@"Nal type is B/P frame");
            if ([self initH264Decoder]) {
                pixelBuffer = [self decode:frame withSize:frameSize];
            }
            break;
    }
    // Release the buffer retained through sourceFrameRefCon in the callback,
    // otherwise every decoded frame leaks
    if (pixelBuffer) {
        CVPixelBufferRelease(pixelBuffer);
    }
}
- (void)stopDecode
{
    if (_decoderSession) {
        VTDecompressionSessionInvalidate(_decoderSession);
        CFRelease(_decoderSession);
        _decoderSession = NULL;
    }
}
- (UIImage *)getImageCVPixel:(CVPixelBufferRef)pixelBuffer
{
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    UIImage *uiImage = [UIImage imageWithCIImage:ciImage];
    return uiImage;
}
@end
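
// A sketch of the feeding side, assuming the incoming stream is Annex-B data
// with 4-byte 0x00000001 start codes (3-byte start codes are not handled here).
// decodeNalu: rewrites the start code in place, so each NALU is passed with its
// start code still attached. feedDecoder is an illustrative helper, not part of
// the class above.
static void feedDecoder(H264HwDecoderImpl *decoder, uint8_t *buf, size_t len) {
    size_t start = 0;
    for (size_t i = 4; i + 4 <= len; i++) {
        // The next start code marks the end of the current NALU
        if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 0 && buf[i + 3] == 1) {
            [decoder decodeNalu:buf + start withSize:(uint32_t)(i - start)];
            start = i;
            i += 3; // skip past the start code we just found
        }
    }
    // Flush the last NALU in the buffer
    if (len > start) {
        [decoder decodeNalu:buf + start withSize:(uint32_t)(len - start)];
    }
}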