H.264 Video Decoding

//
//  ViewController.m
//  decoder
//
//  Created by targetcloud on 2017/4/1.
//  Copyright © 2017 targetcloud. All rights reserved.
//

#import "ViewController.h"
#import "TGLayer.h"
#import <VideoToolbox/VideoToolbox.h>

const char * startBytes = "\x00\x00\x00\x01";

@interface ViewController (){
    //read
    long inputMaxSize;
    long inputSize;
    uint8_t * inputBuffer;
    //parse
    long packetSize;
    uint8_t * packetBuffer;
    //sps
    long size_SPS;
    uint8_t * p_SPS;
    //pps
    long size_PPS;
    uint8_t * p_PPS;
}

@property(nonatomic,weak) CADisplayLink * link ;
@property(nonatomic,strong) NSInputStream * inputStream;
@property(nonatomic,strong) dispatch_queue_t queue;
@property(nonatomic,assign) VTDecompressionSessionRef decompressionSession;
@property(nonatomic,assign) CMVideoFormatDescriptionRef formatDescription;
@property(nonatomic,weak) TGLayer * glLayer;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    
    CADisplayLink * displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(updateFrame)];
    self.link = displayLink;
    self.link.frameInterval = 2; // 60 / 2 = 30 fps; on iOS 10+ use preferredFramesPerSecond = 30 instead
    [displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    [self.link setPaused:YES];
    
    NSString * filePath = [[NSBundle mainBundle] pathForResource:@"2017-04-01 05:32:45.h264" ofType:nil];
    self.inputStream = [NSInputStream inputStreamWithFileAtPath:filePath];
    
    self.queue = dispatch_get_global_queue(0, 0);
    
    TGLayer * layer = [[TGLayer alloc] initWithFrame:self.view.bounds];
    [self.view.layer insertSublayer:layer atIndex:0];
    self.glLayer = layer;
}


- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)play:(id)sender {
    inputMaxSize = 720 * 1280; // capacity of the raw read buffer, in bytes
    inputSize = 0;
    inputBuffer = malloc(inputMaxSize);
    
    [self.inputStream open];
    
    [self.link setPaused: NO];
}

- (void) updateFrame{
    dispatch_sync(_queue, ^{
        [self readPacket];
    });
    
    if (packetSize == 0 && packetBuffer == NULL){
        [self.link setPaused:YES];
        [self.inputStream close];
        NSLog(@"end<");
        return;
    }
    
    // Overwrite the 4-byte Annex B start code with the NALU length in
    // big-endian byte order (the AVCC layout that VideoToolbox expects).
    uint32_t size_NALU = (uint32_t)(packetSize - 4);
    uint32_t * p_NALU = (uint32_t *)packetBuffer;
    *p_NALU = CFSwapInt32HostToBig(size_NALU);
    
    // NALU header bytes in this stream: SPS 0x27, PPS 0x28, IDR 0x25.
    // The low 5 bits are nal_unit_type, so mask with 0x1F (binary 11111):
    // 0x27 & 0x1F => 0010 0111 & 0001 1111 => 0000 0111 => 0x07 (SPS); likewise 0x08 = PPS, 0x05 = IDR, anything else here is a B/P slice.
    int type_NALU = packetBuffer[4] & 0x1F;
    switch (type_NALU) {
        case 0x07:
            NSLog(@"sps");
            size_SPS = packetSize - 4;
            p_SPS = malloc(size_SPS);
            memcpy(p_SPS, packetBuffer + 4, size_SPS);
            break;
        case 0x08:
            NSLog(@"pps");
            size_PPS = packetSize - 4;
            p_PPS = malloc(size_PPS);
            memcpy(p_PPS, packetBuffer + 4, size_PPS);
            break;
        case 0x05:
            NSLog(@"I");
            // An IDR starts a new GOP; SPS/PPS are cached by now, so the session can be created
            [self initVTDecompressionSession];
            [self decodeFrame];
            break;
        default:
            NSLog(@"B or P");
            [self decodeFrame];
            break;
    }
    NSLog(@"reading...");
}

// Split the next NALU (including its 4-byte start code) out of the Annex B stream
-(void) readPacket{
    if (packetSize || packetBuffer){
        packetSize = 0;
        free(packetBuffer);
        packetBuffer = NULL;
    }
    if(inputSize < inputMaxSize && _inputStream.hasBytesAvailable){
        inputSize += [self.inputStream read:inputBuffer + inputSize maxLength:inputMaxSize - inputSize];
        NSLog(@">inputSize-%ld , inputMaxSize-%ld, inputMaxSize - inputSize %ld",inputSize,inputMaxSize,inputMaxSize - inputSize);
    }
    if (memcmp(inputBuffer, startBytes, 4) == 0){
        uint8_t * pStart = inputBuffer + 4;
        uint8_t * pEnd = inputBuffer + inputSize;
        while (pStart != pEnd) {
            if (memcmp(pStart - 3, startBytes, 4) == 0){
                packetSize = pStart - 3 - inputBuffer;
                packetBuffer = malloc(packetSize);
                memcpy(packetBuffer, inputBuffer, packetSize);
                memmove(inputBuffer, inputBuffer + packetSize, inputSize - packetSize);
                inputSize -= packetSize;
                break;
            }else{
                ++pStart;
            }
        }
    }
}

//initH264Decoder
-(void) initVTDecompressionSession{
    if(_decompressionSession) {
        return;
    }
    // Create a CMVideoFormatDescriptionRef from the cached SPS/PPS
    const uint8_t * p_ParameterSet[] = {p_SPS,p_PPS};
    size_t  p_ParameterSetSizes[] = {size_SPS,size_PPS};
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, p_ParameterSet, p_ParameterSetSizes, 4, &_formatDescription);
    if(status == noErr) {
        /*
        CFDictionaryRef attrs = NULL;
        const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
        // kCVPixelFormatType_420YpCbCr8Planar  YUV420
        // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange  NV12
        uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
        const void *values[] = { CFNumberCreate(NULL, kCFNumberSInt32Type, &v) };
        attrs = CFDictionaryCreate(NULL, keys, values, 1, NULL, NULL);
        */
         
        // Create the VTDecompressionSession
        NSDictionary * attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:
                                          @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
        VTDecompressionOutputCallbackRecord outputCallback;
        outputCallback.decompressionOutputCallback = decodeCallback;
        outputCallback.decompressionOutputRefCon = NULL; // unused here; self is passed per-frame as sourceFrameRefCon
        /*
         typedef void (*VTDecompressionOutputCallback)(
         void * CM_NULLABLE decompressionOutputRefCon,
         void * CM_NULLABLE sourceFrameRefCon,
         OSStatus status,
         VTDecodeInfoFlags infoFlags,
         CM_NULLABLE CVImageBufferRef imageBuffer,
         CMTime presentationTimeStamp,
         CMTime presentationDuration );
         
         struct VTDecompressionOutputCallbackRecord {
         CM_NULLABLE VTDecompressionOutputCallback  decompressionOutputCallback;
         void * CM_NULLABLE                         decompressionOutputRefCon;
         };
         typedef struct VTDecompressionOutputCallbackRecord VTDecompressionOutputCallbackRecord;
         */
        status = VTDecompressionSessionCreate(kCFAllocatorDefault, _formatDescription, NULL, (__bridge CFDictionaryRef)attributes, &outputCallback, &_decompressionSession);
        
        
        /*
         status = VTDecompressionSessionCreate(kCFAllocatorDefault, _formatDescription, NULL, attrs, &outputCallback, &_decompressionSession);
         CFRelease(attrs);
         */
    }else {
        NSLog(@" --- session failed. status=%d --- ", status);
    }
}

-(void)clearH264Decoder {
    if(_decompressionSession) {
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
        _decompressionSession = NULL;
    }
    
    if(_formatDescription) {
        CFRelease(_formatDescription);
        _formatDescription = NULL;
    }
    
    free(p_SPS);
    p_SPS = NULL;
    free(p_PPS);
    p_PPS = NULL;
    size_SPS = size_PPS = 0;
}

void decodeCallback(void * CM_NULLABLE decompressionOutputRefCon,
                    void * CM_NULLABLE sourceFrameRefCon,
                    OSStatus status,
                    VTDecodeInfoFlags infoFlags,
                    CM_NULLABLE CVImageBufferRef imageBuffer,
                    CMTime presentationTimeStamp,
                    CMTime presentationDuration ){
    
    NSLog(@"解码");
    
    ViewController * vc = (__bridge ViewController *)(sourceFrameRefCon);
    vc.glLayer.pixelBuffer = imageBuffer;
}

//decode
-(void) decodeFrame{
    // Wrap the AVCC packet in a CMBlockBuffer without copying (kCFAllocatorNull: Core Media does not free the memory)
    CMBlockBufferRef blockBuffer = NULL;
    CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, (void *)packetBuffer, packetSize, kCFAllocatorNull, NULL, 0, packetSize, 0, &blockBuffer);
    
    CMSampleBufferRef  sampleBuffer = NULL;
    const size_t sampleSizeArray[] = {packetSize};
    CMSampleBufferCreateReady(kCFAllocatorDefault, blockBuffer, _formatDescription, 1, 0, NULL, 1, sampleSizeArray, &sampleBuffer); // 1 sample, no timing entries, 1 sample-size entry
    
    VTDecompressionSessionDecodeFrame(_decompressionSession, sampleBuffer, 0, (__bridge void * _Nullable)(self), NULL);
    
    CFRelease(sampleBuffer);
    CFRelease(blockBuffer);
}
@end


//
//  TGLayer.h
//  decoder
//
//  Created by targetcloud on 2017/4/2.
//  Copyright © 2017 targetcloud. All rights reserved.
//

#include <QuartzCore/QuartzCore.h>
#include <CoreVideo/CoreVideo.h>

@interface TGLayer : CAEAGLLayer
@property CVPixelBufferRef pixelBuffer;
- (id)initWithFrame:(CGRect)frame;
- (void)resetRenderBuffer;
@end

//
//  TGLayer.m
//  decoder
//
//  Created by targetcloud on 2017/4/2.
//  Copyright © 2017 targetcloud. All rights reserved.
//

#import "TGLayer.h"
#import <AVFoundation/AVUtilities.h>
#include <AVFoundation/AVFoundation.h>
#import <mach/mach_time.h>
#import <UIKit/UIScreen.h>
#include <OpenGLES/EAGL.h>
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>

enum{
    UNIFORM_Y,
    UNIFORM_UV,
    UNIFORM_ROTATION_ANGLE,
    UNIFORM_COLOR_CONVERSION_MATRIX,
    NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];

enum{
    ATTRIB_VERTEX,
    ATTRIB_TEXCOORD,
    NUM_ATTRIBUTES
};

static const GLfloat kColorConversion601[] = {
    1.164,  1.164, 1.164,
    0.0, -0.392, 2.017,
    1.596, -0.813,   0.0,
};

static const GLfloat kColorConversion709[] = {
    1.164,  1.164, 1.164,
    0.0, -0.213, 2.112,
    1.793, -0.533,   0.0,
};

@interface TGLayer (){
    GLint _backingWidth;
    GLint _backingHeight;
    EAGLContext *_context;
    CVOpenGLESTextureRef _lumaTexture;
    CVOpenGLESTextureRef _chromaTexture;
    GLuint _frameBufferHandle;
    GLuint _colorBufferHandle;
    const GLfloat *_preferredConversion;
}
@property GLuint program;
@end

@implementation TGLayer
@synthesize pixelBuffer = _pixelBuffer;

-(CVPixelBufferRef) pixelBuffer{
    return _pixelBuffer;
}

- (void)setPixelBuffer:(CVPixelBufferRef)pb{
    if(_pixelBuffer) {
        CVPixelBufferRelease(_pixelBuffer);
    }
    _pixelBuffer = CVPixelBufferRetain(pb);
    int frameWidth = (int)CVPixelBufferGetWidth(_pixelBuffer);
    int frameHeight = (int)CVPixelBufferGetHeight(_pixelBuffer);
    [self displayPixelBuffer:_pixelBuffer width:frameWidth height:frameHeight];
}

- (instancetype)initWithFrame:(CGRect)frame{
    self = [super init];
    if (self) {
        CGFloat scale = [[UIScreen mainScreen] scale];
        self.contentsScale = scale;
        self.opaque = TRUE;
        self.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:YES]};
        [self setFrame:frame];
        _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if (!_context) {
            return nil;
        }
        _preferredConversion = kColorConversion709;
        [self setupGL];
    }
    return self;
}

- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer width:(uint32_t)frameWidth height:(uint32_t)frameHeight{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    if(pixelBuffer == NULL) {
        NSLog(@"Pixel buffer is null");
        return;
    }
    CVReturn err;
    size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
    CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL && CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
        _preferredConversion = kColorConversion601;
    }
    else {
        _preferredConversion = kColorConversion709;
    }
    CVOpenGLESTextureCacheRef _videoTextureCache;
    err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
    if (err != noErr) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }
    glActiveTexture(GL_TEXTURE0);
    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                       _videoTextureCache,
                                                       pixelBuffer,
                                                       NULL,
                                                       GL_TEXTURE_2D,
                                                       GL_RED_EXT,
                                                       frameWidth,
                                                       frameHeight,
                                                       GL_RED_EXT,
                                                       GL_UNSIGNED_BYTE,
                                                       0,
                                                       &_lumaTexture);
    if (err) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }
    glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    if(planeCount == 2) {
        glActiveTexture(GL_TEXTURE1);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _videoTextureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_RG_EXT,
                                                           frameWidth / 2,
                                                           frameHeight / 2,
                                                           GL_RG_EXT,
                                                           GL_UNSIGNED_BYTE,
                                                           1,
                                                           &_chromaTexture);
        if (err) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }
        glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
    glViewport(0, 0, _backingWidth, _backingHeight);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(self.program);
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
    CGRect viewBounds = self.bounds;
    CGSize contentSize = CGSizeMake(frameWidth, frameHeight);
    CGRect vertexSamplingRect = AVMakeRectWithAspectRatioInsideRect(contentSize, viewBounds);
    CGSize normalizedSamplingSize = CGSizeMake(0.0, 0.0);
    CGSize cropScaleAmount = CGSizeMake(vertexSamplingRect.size.width/viewBounds.size.width,
                                        vertexSamplingRect.size.height/viewBounds.size.height);
    if (cropScaleAmount.width > cropScaleAmount.height) {
        normalizedSamplingSize.width = 1.0;
        normalizedSamplingSize.height = cropScaleAmount.height/cropScaleAmount.width;
    }
    else {
        normalizedSamplingSize.width = cropScaleAmount.width/cropScaleAmount.height;
        normalizedSamplingSize.height = 1.0;
    }
    GLfloat quadVertexData [] = {
        -1 * normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
        normalizedSamplingSize.width, -1 * normalizedSamplingSize.height,
        -1 * normalizedSamplingSize.width, normalizedSamplingSize.height,
        normalizedSamplingSize.width, normalizedSamplingSize.height,
    };
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    CGRect textureSamplingRect = CGRectMake(0, 0, 1, 1);
    GLfloat quadTextureData[] =  {
        CGRectGetMinX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
        CGRectGetMaxX(textureSamplingRect), CGRectGetMaxY(textureSamplingRect),
        CGRectGetMinX(textureSamplingRect), CGRectGetMinY(textureSamplingRect),
        CGRectGetMaxX(textureSamplingRect), CGRectGetMinY(textureSamplingRect)
    };
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
    [_context presentRenderbuffer:GL_RENDERBUFFER];
    [self cleanUpTextures];
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
    if(_videoTextureCache) {
        CFRelease(_videoTextureCache);
    }
}


- (void)setupGL{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self setupBuffers];
    [self loadShaders];
    glUseProgram(self.program);
    glUniform1i(uniforms[UNIFORM_Y], 0);
    glUniform1i(uniforms[UNIFORM_UV], 1);
    glUniform1f(uniforms[UNIFORM_ROTATION_ANGLE], 0);
    glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);
}

- (void)setupBuffers{
    glDisable(GL_DEPTH_TEST);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    [self createBuffers];
}

- (void) createBuffers{
    glGenFramebuffers(1, &_frameBufferHandle);
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBufferHandle);
    glGenRenderbuffers(1, &_colorBufferHandle);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorBufferHandle);
    [_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}

- (void) releaseBuffers{
    if(_frameBufferHandle) {
        glDeleteFramebuffers(1, &_frameBufferHandle);
        _frameBufferHandle = 0;
    }
    if(_colorBufferHandle) {
        glDeleteRenderbuffers(1, &_colorBufferHandle);
        _colorBufferHandle = 0;
    }
}

- (void) resetRenderBuffer{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self releaseBuffers];
    [self createBuffers];
}

- (void) cleanUpTextures{
    if (_lumaTexture) {
        CFRelease(_lumaTexture);
        _lumaTexture = NULL;
    }
    if (_chromaTexture) {
        CFRelease(_chromaTexture);
        _chromaTexture = NULL;
    }
}

const GLchar *shader_fsh = (const GLchar*)"varying highp vec2 texCoordVarying;"
"precision mediump float;"
"uniform sampler2D SamplerY;"
"uniform sampler2D SamplerUV;"
"uniform mat3 colorConversionMatrix;"
"void main()"
"{"
"    mediump vec3 yuv;"
"    lowp vec3 rgb;"
"    yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));"
"    yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));"
"    rgb = colorConversionMatrix * yuv;"
"    gl_FragColor = vec4(rgb, 1);"
"}";

const GLchar *shader_vsh = (const GLchar*)"attribute vec4 position;"
"attribute vec2 texCoord;"
"uniform float preferredRotation;"
"varying vec2 texCoordVarying;"
"void main()"
"{"
"    mat4 rotationMatrix = mat4(cos(preferredRotation), -sin(preferredRotation), 0.0, 0.0,"
"                               sin(preferredRotation),  cos(preferredRotation), 0.0, 0.0,"
"                               0.0,					    0.0, 1.0, 0.0,"
"                               0.0,					    0.0, 0.0, 1.0);"
"    gl_Position = position * rotationMatrix;"
"    texCoordVarying = texCoord;"
"}";

- (BOOL)loadShaders{
    GLuint vertShader = 0, fragShader = 0;
    self.program = glCreateProgram();
    if(![self compileShaderString:&vertShader type:GL_VERTEX_SHADER shaderString:shader_vsh]) {
        NSLog(@"Failed to compile vertex shader");
        return NO;
    }
    if(![self compileShaderString:&fragShader type:GL_FRAGMENT_SHADER shaderString:shader_fsh]) {
        NSLog(@"Failed to compile fragment shader");
        return NO;
    }
    glAttachShader(self.program, vertShader);
    glAttachShader(self.program, fragShader);
    glBindAttribLocation(self.program, ATTRIB_VERTEX, "position");
    glBindAttribLocation(self.program, ATTRIB_TEXCOORD, "texCoord");
    if (![self linkProgram:self.program]) {
        NSLog(@"Failed to link program: %d", self.program);
        if (vertShader) {
            glDeleteShader(vertShader);
            vertShader = 0;
        }
        if (fragShader) {
            glDeleteShader(fragShader);
            fragShader = 0;
        }
        if (self.program) {
            glDeleteProgram(self.program);
            self.program = 0;
        }
        return NO;
    }
    uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
    uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
    uniforms[UNIFORM_ROTATION_ANGLE] = glGetUniformLocation(self.program, "preferredRotation");
    uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
    if (vertShader) {
        glDetachShader(self.program, vertShader);
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDetachShader(self.program, fragShader);
        glDeleteShader(fragShader);
    }
    return YES;
}

- (BOOL)compileShaderString:(GLuint *)shader type:(GLenum)type shaderString:(const GLchar*)shaderString{
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &shaderString, NULL);
    glCompileShader(*shader);
#if defined(DEBUG)
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        NSLog(@"Shader compile log:\n%s", log);
        free(log);
    }
#endif
    GLint status = 0;
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        glDeleteShader(*shader);
        return NO;
    }
    return YES;
}

- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL{
    NSError *error;
    NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
    if (sourceString == nil) {
        NSLog(@"Failed to load vertex shader: %@", [error localizedDescription]);
        return NO;
    }
    const GLchar *source = (GLchar *)[sourceString UTF8String];
    return [self compileShaderString:shader type:type shaderString:source];
}

- (BOOL)linkProgram:(GLuint)prog{
    GLint status;
    glLinkProgram(prog);
#if defined(DEBUG)
    GLint logLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program link log:\n%s", log);
        free(log);
    }
#endif
    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}

- (BOOL)validateProgram:(GLuint)prog{
    GLint logLength, status;
    glValidateProgram(prog);
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program validate log:\n%s", log);
        free(log);
    }
    glGetProgramiv(prog, GL_VALIDATE_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    return YES;
}

- (void)dealloc{
    if (!_context || ![EAGLContext setCurrentContext:_context]) {
        return;
    }
    [self cleanUpTextures];
    if(_pixelBuffer) {
        CVPixelBufferRelease(_pixelBuffer);
    }
    if (self.program) {
        glDeleteProgram(self.program);
        self.program = 0;
    }
    if(_context) {
        _context = nil;
    }
}
@end

Notes

1. For NV12 output, Apple uses kCVPixelFormatType_420YpCbCr8BiPlanarFullRange (kCVPixelFormatType_420YpCbCr8Planar is planar YUV420).
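
The two planes of such a buffer can be inspected with the CoreVideo plane APIs. A minimal sketch (the helper name dumpNV12Info is hypothetical): plane 0 is Y at full resolution, plane 1 is interleaved CbCr at half resolution, which is why displayPixelBuffer: above uploads them as GL_RED_EXT and GL_RG_EXT textures respectively.

        static void dumpNV12Info(CVPixelBufferRef pixelBuffer) {
            CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
            for (size_t i = 0; i < CVPixelBufferGetPlaneCount(pixelBuffer); i++) { // 2 planes for NV12
                size_t w   = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
                size_t h   = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
                size_t bpr = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
                // Plane 1 reports width/2 x height/2, each "pixel" being a CbCr byte pair.
                NSLog(@"plane %zu: %zux%zu, bytesPerRow %zu", i, w, h, bpr);
            }
            CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        }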

2. Two ways to build the pixel-format attributes dictionary:

        // Core Foundation
        CFDictionaryRef attrs = NULL;
        const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
        uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
        const void *values[] = { CFNumberCreate(NULL, kCFNumberSInt32Type, &v) };
        attrs = CFDictionaryCreate(NULL, keys, values, 1, NULL, NULL);

        // Foundation, toll-free bridged to CFDictionaryRef
        NSDictionary * attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};

When using them:

        status = VTDecompressionSessionCreate(kCFAllocatorDefault, _formatDescription, NULL, attrs, &outputCallback, &_decompressionSession);
        CFRelease(attrs);

        status = VTDecompressionSessionCreate(kCFAllocatorDefault, _formatDescription, NULL, (__bridge CFDictionaryRef)attributes, &outputCallback, &_decompressionSession);
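
Note that the Core Foundation variant above passes NULL dictionary callbacks and never releases the CFNumber it creates, so the number leaks and is not retained by the dictionary. A minimal corrected sketch, assuming the standard kCFType callbacks so the dictionary retains its contents:

        uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
        CFNumberRef pixelFormat = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &v);
        const void *keys[]   = { kCVPixelBufferPixelFormatTypeKey };
        const void *values[] = { pixelFormat };
        CFDictionaryRef attrs = CFDictionaryCreate(kCFAllocatorDefault, keys, values, 1,
                                                   &kCFTypeDictionaryKeyCallBacks,
                                                   &kCFTypeDictionaryValueCallBacks);
        CFRelease(pixelFormat); // the dictionary now holds its own retain
        // ... pass attrs to VTDecompressionSessionCreate, then CFRelease(attrs)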


3. When testing the NALU type, compare against the low 5 bits (0x07, 0x08, 0x05), not the raw header bytes seen in memory (0x27, 0x28, 0x25).

  Worked example for 0x27:

  binary 0010 = 2, binary 0111 = 7, so 0010 0111 = 0x27

  Keeping only the low 5 bits gives 00111 = 0x07 => SPS. The others follow the same pattern: 0x28 => 0x08 (PPS), 0x25 => 0x05 (IDR/I-frame), anything else here is a B or P frame.

  In code the mask is & 0x1F (decimal 31, binary 11111):

      0010 0111   (0x27)
    & 0001 1111   (0x1F)
    -----------
      0000 0111   =>  7  (SPS)
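
The same mask can be wrapped in a small helper for logging. A minimal sketch (naluTypeName is a hypothetical name), covering the types this demo handles:

        static const char * naluTypeName(uint8_t naluHeader) {
            switch (naluHeader & 0x1F) {   // low 5 bits = nal_unit_type
                case 7:  return "SPS";     // 0x27 & 0x1F == 0x07
                case 8:  return "PPS";     // 0x28 & 0x1F == 0x08
                case 5:  return "IDR";     // 0x25 & 0x1F == 0x05
                default: return "non-IDR slice (P/B) or other";
            }
        }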
