Import the framework and header file:
#import <VideoToolbox/VideoToolbox.h>
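The code further down also uses CoreMedia, CoreImage/UIKit (for the CVPixelBufferRef-to-UIImage conversion) and, if you take the display-layer route mentioned later, AVFoundation. A set of imports that covers everything below, assuming a UIKit app, might look like this (VideoToolbox.h already pulls in CoreMedia, so that line is optional):
#import <CoreMedia/CoreMedia.h>        // CMBlockBuffer / CMSampleBuffer / CMVideoFormatDescription
#import <CoreImage/CoreImage.h>        // CIImage / CIContext for the pixel-buffer-to-image conversion
#import <UIKit/UIKit.h>                // UIImage
#import <AVFoundation/AVFoundation.h>  // AVSampleBufferDisplayLayer (optional display path)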
{
    // Instance variables used by the decoder
    NSData *spsData;
    NSData *ppsData;
    uint8_t pFrameData[BUFFER_SIZE];   // AVCC-framed NALU buffer; BUFFER_SIZE is assumed to be defined elsewhere, large enough for one frame
    CMVideoFormatDescriptionRef videoFormatDescription;
    VTDecompressionSessionRef decompressionSession;
    VTDecompressionOutputCallback decompressionSessionDecodeFrameCallback;   // either assign this to a C callback, or drop it and use the static function directly (see the sketch after createDecompSession)
}
- (void)createDecompSession {
    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = decompressionSessionDecodeFrameCallback;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
    // attrs is the attribute dictionary handed to the decompression session. It can be built
    // either as an NSDictionary bridged to CFDictionaryRef, or directly with CFDictionaryCreate as below.
    // kCVPixelBufferPixelFormatTypeKey specifies the decoded pixel format; Apple's hardware decoder
    // expects NV12, i.e. kCVPixelFormatType_420YpCbCr8BiPlanarFullRange.
    // (kCVPixelFormatType_420YpCbCr8Planar would be planar YUV420, kCVPixelFormatType_24RGB packed RGB.)
    const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
    uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef pixelFormat = CFNumberCreate(NULL, kCFNumberSInt32Type, &v);
    const void *values[] = { pixelFormat };
    CFDictionaryRef attrs = CFDictionaryCreate(NULL, keys, values, 1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    // Create the decompression session.
    OSStatus status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                                   videoFormatDescription,
                                                   NULL,      // decoder specification
                                                   attrs,     // destination image buffer attributes
                                                   &callBackRecord,
                                                   &decompressionSession);
    if (status != noErr) {
        NSLog(@"VTDecompressionSessionCreate failed: %d", (int)status);
    }
    CFRelease(pixelFormat);
    CFRelease(attrs);
}
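decompressionSessionDecodeFrameCallback is referenced above but never defined in this snippet. It is normally a static C function with the VTDecompressionOutputCallback signature (in which case the ivar declaration above is redundant). A minimal sketch, assuming decodeData: passes &outputPixelBuffer as the sourceFrameRefCon so the decoded frame can be handed back synchronously:
static void decompressionSessionDecodeFrameCallback(void *decompressionOutputRefCon,
                                                    void *sourceFrameRefCon,
                                                    OSStatus status,
                                                    VTDecodeInfoFlags infoFlags,
                                                    CVImageBufferRef imageBuffer,
                                                    CMTime presentationTimeStamp,
                                                    CMTime presentationDuration) {
    if (status != noErr || imageBuffer == NULL) {
        NSLog(@"decompression callback failed: %d", (int)status);
        return;
    }
    // sourceFrameRefCon is the &outputPixelBuffer passed to VTDecompressionSessionDecodeFrame;
    // retain the decoded frame and hand it back to the caller.
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    *outputPixelBuffer = CVPixelBufferRetain(imageBuffer);
}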
- (void)decodeData:(char *)pFromeData Leng:(int)length {
    // Strip the 4-byte Annex-B start code and cache the SPS (NALU type 7) and PPS (NALU type 8).
    int naluType = ((uint8_t)pFromeData[4] & 0x1f);
    if ((naluType == 7 || naluType == 8) && videoFormatDescription == NULL) {
        if (naluType == 7) {
            spsData = [NSData dataWithBytes:pFromeData + 4 length:length - 4];
        }
        if (naluType == 8) {
            ppsData = [NSData dataWithBytes:pFromeData + 4 length:length - 4];
        }
    }
    // Only build the format description once; otherwise every call would create (and leak) a new one.
    if (spsData != nil && ppsData != nil && videoFormatDescription == NULL) {
        const uint8_t * const parameterSetPointers[2] = { (const uint8_t *)[spsData bytes], (const uint8_t *)[ppsData bytes] };
        const size_t parameterSetSizes[2] = { spsData.length, ppsData.length };
        CMVideoFormatDescriptionRef formatDesc = NULL;
        // Build the CMVideoFormatDescriptionRef from the SPS/PPS with
        // CMVideoFormatDescriptionCreateFromH264ParameterSets (NAL unit header length = 4).
        OSStatus formatCreateResult = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, parameterSetPointers, parameterSetSizes, 4, &formatDesc);
        if (formatCreateResult == noErr) {
            videoFormatDescription = formatDesc;
            if (decompressionSession == NULL ||
                VTDecompressionSessionCanAcceptFormatDescription(decompressionSession, formatDesc) == NO) {
                [self createDecompSession];
            }
        }
    }
    if ((naluType == 1 || naluType == 5) && videoFormatDescription) {
        // Convert the NALU from Annex-B to AVCC framing: copy it into pFrameData and
        // overwrite the 4-byte start code with a big-endian length prefix.
        memcpy(pFrameData, pFromeData, length);
        uint32_t dataLength32 = htonl(length - 4);
        memcpy(pFrameData, &dataLength32, sizeof(uint32_t));
        CMBlockBufferRef blockBuffer = NULL;
        // Wrap the frame in a CMBlockBufferRef via CMBlockBufferCreateWithMemoryBlock.
        OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, pFrameData, length, kCFAllocatorNull, NULL, 0, length, kCMBlockBufferAlwaysCopyDataFlag, &blockBuffer);
        if (status == kCMBlockBufferNoErr) {
            const size_t sampleSize = CMBlockBufferGetDataLength(blockBuffer);
            CMSampleBufferRef sampBuf = NULL;
            // Wrap the block buffer in a CMSampleBufferRef (CMSampleBufferCreate with explicit
            // timing information would work as well).
            status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                               blockBuffer,
                                               videoFormatDescription,
                                               1, 0, NULL, 1, &sampleSize,
                                               &sampBuf);
            if (status == noErr) {
                // At this point the CMSampleBufferRef could simply be enqueued on an
                // AVSampleBufferDisplayLayer (see the sketch after this method). Here it is
                // decoded manually instead: VTDecompressionSessionDecodeFrame turns the
                // CMSampleBuffer into a CVPixelBufferRef. No asynchronous flag is set, so the
                // output callback runs before the call returns and fills outputPixelBuffer.
                CVPixelBufferRef outputPixelBuffer = NULL;
                VTDecodeFrameFlags flags = 0;   // e.g. kVTDecodeFrame_EnableTemporalProcessing
                VTDecodeInfoFlags flagOut = 0;  // e.g. kVTDecodeInfo_ImageBufferModifiable
                OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(decompressionSession,
                                                                          sampBuf,
                                                                          flags,
                                                                          &outputPixelBuffer,
                                                                          &flagOut);
                if (decodeStatus == kVTInvalidSessionErr) {
                    NSLog(@"IOS8VT: Invalid session, reset decoder session");
                } else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                    NSLog(@"IOS8VT: decode failed status=%d (bad data)", (int)decodeStatus);
                } else if (decodeStatus != noErr) {
                    NSLog(@"IOS8VT: decode failed status=%d", (int)decodeStatus);
                } else {
                    NSLog(@"decode success--------");
                }
                // Convert the CVPixelBufferRef into a UIImage for display.
                if (outputPixelBuffer) {
                    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:outputPixelBuffer];
                    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
                    CGImageRef videoImage = [temporaryContext
                                             createCGImage:ciImage
                                             fromRect:CGRectMake(0, 0,
                                                                 CVPixelBufferGetWidth(outputPixelBuffer),
                                                                 CVPixelBufferGetHeight(outputPixelBuffer))];
                    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
                    // ...hand uiImage to the UI here (e.g. assign it to a UIImageView on the main thread)...
                    CGImageRelease(videoImage);
                    CVPixelBufferRelease(outputPixelBuffer);   // balances the retain done in the output callback
                }
                CFRelease(sampBuf);
            }
            CFRelease(blockBuffer);
        }
    }
}
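The comment about rendering the buffer refers to AVSampleBufferDisplayLayer: instead of decoding manually and converting to a UIImage, the CMSampleBufferRef built above can be enqueued on such a layer and decoded and displayed by the system. A minimal sketch of that step, assuming a layer ivar named sampleBufferDisplayLayer has already been created and added to the view hierarchy (the ivar name is illustrative):
// Mark the sample for immediate display, since no presentation timestamps were attached.
CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampBuf, YES);
CFMutableDictionaryRef attachmentDict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
CFDictionarySetValue(attachmentDict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
if (sampleBufferDisplayLayer.isReadyForMoreMediaData) {
    [sampleBufferDisplayLayer enqueueSampleBuffer:sampBuf];
}
Note that decodeData:Leng: expects a single Annex-B NALU per call: a buffer that starts with the 00 00 00 01 start code followed by the NALU payload, with length covering both.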