Rendering a Captured Video Stream with OpenGL ES

//
//  ViewController.m
//  OpenGLESVideo02
//
//  Created by kemuchao on 2022/9/26.
//

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "MKView.h"
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate> {
    dispatch_queue_t processQueue;
}
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *captureDeviceOutput;
@property (nonatomic, strong) MKView *faceDetectionView;
@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    
    self.faceDetectionView = [[MKView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.faceDetectionView];
    
    
    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    
    AVCaptureDevice *captureDevice = nil;
    NSArray *captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in captureDevices) {
        if (device.position == AVCaptureDevicePositionBack) {
            captureDevice = device;
            break;
        }
    }
    self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:nil];
    
    if ([self.captureSession canAddInput:self.captureDeviceInput]) {
        [self.captureSession addInput:self.captureDeviceInput];
    }
    
    self.captureDeviceOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.captureDeviceOutput setAlwaysDiscardsLateVideoFrames:YES];
    
    // AVCaptureVideoDataOutput requires a serial queue for its sample buffer delegate;
    // a concurrent global queue may deliver frames out of order.
    processQueue = dispatch_queue_create("videoProcessQueue", DISPATCH_QUEUE_SERIAL);
    [self.captureDeviceOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    
    [self.captureDeviceOutput setSampleBufferDelegate:self queue:processQueue];
    
    if ([self.captureSession canAddOutput:self.captureDeviceOutput]) {
        [self.captureSession addOutput:self.captureDeviceOutput];
    }
    
    AVCaptureConnection *captureConnection = [self.captureDeviceOutput connectionWithMediaType:AVMediaTypeVideo];
    [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    
    [self.captureSession startRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    self.faceDetectionView.isFullYUVRange = YES;
    [self.faceDetectionView displayPixelBuffer:pixelBuffer];
    NSLog(@"%@", @"开始渲染一帧新的图片");
    
}

@end
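
Note that camera capture also requires an NSCameraUsageDescription entry in Info.plist and, since iOS 10, runtime authorization. A minimal sketch of the permission check, assuming the session/input/output code from viewDidLoad is moved into a hypothetical setupCaptureSession method:

// Hypothetical guard before building the capture session.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Camera access denied");
        return;
    }
    dispatch_async(dispatch_get_main_queue(), ^{
        [self setupCaptureSession]; // assumed wrapper around the code in viewDidLoad
    });
}];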

MKView.h

//
//  MKView.h
//  OpenGLESVideo4
//
//  Created by kemuchao on 2022/9/28.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN

@interface MKView : UIView
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
@property (nonatomic , assign) BOOL isFullYUVRange;

@end

NS_ASSUME_NONNULL_END

MKView.m

//
//  MKView.m
//  OpenGLESVideo4
//
//  Created by kemuchao on 2022/9/28.
//

#import "MKView.h"
#import <OpenGLES/ES3/gl.h>
#import <GLKit/GLKit.h>
#import <AVFoundation/AVUtilities.h>

// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)

// BT.601, which is the standard for SDTV.
static const GLfloat kColorConversion601[] = {
        1.164,  1.164, 1.164,
          0.0, -0.392, 2.017,
        1.596, -0.813,   0.0,
};

// BT.709, which is the standard for HDTV.
static const GLfloat kColorConversion709[] = {
        1.164,  1.164, 1.164,
          0.0, -0.213, 2.112,
        1.793, -0.533,   0.0,
};


// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)
const GLfloat kColorConversion601FullRange[] = {
    1.0,    1.0,    1.0,
    0.0,    -0.343, 1.765,
    1.4,    -0.711, 0.0,
};
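
These matrices are stored column-major, matching GLSL's mat3 constructor: the first column multiplies Y, the second Cb, the third Cr. The BT.601 video-range matrix above therefore expands to

R = 1.164 * (Y - 16/255) + 1.596 * (Cr - 0.5)
G = 1.164 * (Y - 16/255) - 0.392 * (Cb - 0.5) - 0.813 * (Cr - 0.5)
B = 1.164 * (Y - 16/255) + 2.017 * (Cb - 0.5)

where Y, Cb, Cr are the normalized 0-1 texture samples. The 1.164 factor stretches the 16-235 video range out to 0-1, which is why kColorConversion601FullRange uses 1.0 instead and drops the 16/255 offset.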


// uniforms: the locations of the uniform variables in the linked program
enum
{
    UNIFORM_Y, // the shader's SamplerY
    UNIFORM_UV, // the shader's SamplerUV
    UNIFORM_ROTATE_MATRIX, // the shader's rotateMatrix
    UNIFORM_TEMP_INPUT_IMG_TEXTURE, // reserved for an extra input texture (unused here)
    NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];

// Attribute index.
enum
{
    ATTRIB_VERTEX,
    ATTRIB_TEXCOORD,
    NUM_ATTRIBUTES
};


@interface MKView() {
    GLint _backingWidth;
    GLint _backingHeight;
    dispatch_semaphore_t _lock;
    CVOpenGLESTextureRef _yTexture;
    CVOpenGLESTextureRef _uvTexture;
    CVOpenGLESTextureCacheRef _textureCache; // texture cache that wraps the video pixel buffers
    const GLfloat *_preferredConversion;
    
    GLint allUniforms; // number of active uniforms in the program
    GLint maxUnifomLen; // maximum uniform name length
    char *uniformName;
    GLint index;
}

//@property(nonatomic, strong)CAEAGLLayer *eagLayer;
@property(nonatomic, strong)EAGLContext *myContext;

// render buffer
@property(nonatomic, assign)GLuint renderBuffer;
// frame buffer
@property(nonatomic, assign)GLuint frameBuffer;

@property(nonatomic, assign)GLuint program;

@property(nonatomic, assign)bool show;
@end

@implementation MKView

-(instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
//        _lock = dispatch_semaphore_create(1);
        [self setup];
        
        if (!_textureCache) {
            CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _myContext, NULL, &_textureCache);
            if (err != kCVReturnSuccess) {
                NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
            }
        }
        _show = true;
    }
    return self;
}


-(void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    if(_show) {
        
        int width = (int)CVPixelBufferGetWidth(pixelBuffer);
        int height = (int)CVPixelBufferGetHeight(pixelBuffer);
        
        if (!_textureCache) {
            NSLog(@"NO Video Texture Cache");
            return;
        }
        [EAGLContext setCurrentContext:self.myContext];
        
        [self cleanUpTexture];
        
        CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
        
        if (colorAttachments == kCVImageBufferYCbCrMatrix_ITU_R_601_4) {
            if (self.isFullYUVRange) {
                _preferredConversion = kColorConversion601FullRange;
            }
            else {
                _preferredConversion = kColorConversion601;
            }
        }
        else {
            _preferredConversion = kColorConversion709;
        }
        // This part follows GPUImage's GPUImageVideoCamera: a YUV frame is split into a
        // luminance texture read as GL_LUMINANCE and a chrominance texture read as GL_LUMINANCE_ALPHA.
        //1. Create the luminance (Y) texture.
        glActiveTexture(GL_TEXTURE0);
        CVReturn err;
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _textureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_LUMINANCE,
                                                           width,
                                                           height,
                                                           GL_LUMINANCE,
                                                           GL_UNSIGNED_BYTE,
                                                           0,
                                                           &_yTexture);
        if (err != kCVReturnSuccess) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage: %d", err);
            return;
        }
        // Bind and configure the Y texture only after a successful create.
        glBindTexture(CVOpenGLESTextureGetTarget(_yTexture), CVOpenGLESTextureGetName(_yTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        
        //2. Create the chrominance (UV) texture, half the luma size for 4:2:0.
        glActiveTexture(GL_TEXTURE1);
        err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           _textureCache,
                                                           pixelBuffer,
                                                           NULL,
                                                           GL_TEXTURE_2D,
                                                           GL_LUMINANCE_ALPHA,
                                                           width / 2,
                                                           height / 2,
                                                           GL_LUMINANCE_ALPHA,
                                                           GL_UNSIGNED_BYTE,
                                                           1,
                                                           &_uvTexture);
        if (err != kCVReturnSuccess) {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage: %d", err);
            return;
        }
        glBindTexture(CVOpenGLESTextureGetTarget(_uvTexture), CVOpenGLESTextureGetName(_uvTexture));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        
        // Bind the framebuffer before drawing (it stays bound from setup, but
        // rebinding each frame keeps this method self-contained).
        glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
        
        // Set the viewport to the entire view.
        glViewport(0, 0, _backingWidth, _backingHeight);
        glClearColor(0.1f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        
        glUseProgram(self.program);
        
        // Point each texture sampler uniform at its texture unit.
        glUniform1i(uniforms[UNIFORM_Y], 0); // UNIFORM_Y -> texture unit 0
        glUniform1i(uniforms[UNIFORM_UV], 1); // UNIFORM_UV -> texture unit 1
        
        // Set the rotation matrix: a pi rotation about X flips the frame vertically so it displays right side up.
        glUniformMatrix4fv(uniforms[UNIFORM_ROTATE_MATRIX], 1, GL_FALSE, GLKMatrix4MakeXRotation(M_PI).m);

        GLfloat quadVertexData[] = {
            -1, -1,
            1, -1 ,
            -1, 1,
            1, 1,
        };
        // Update the vertex data: supply each attribute variable's array.
        glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, quadVertexData);
        glEnableVertexAttribArray(ATTRIB_VERTEX);
        
        GLfloat quadTextureData[] =  { // standard (unflipped) texture coordinates
            0.0, 0.0,
            1.0, 0.0,
            0.0, 1.0,
            1.0, 1.0
        };
        
        
        glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, quadTextureData);
        glEnableVertexAttribArray(ATTRIB_TEXCOORD);
        
       
        glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
        
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        if ([EAGLContext currentContext] == _myContext) {
            [_myContext presentRenderbuffer:GL_RENDERBUFFER];
        }
//        dispatch_semaphore_signal(_lock);
    }
}
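
As written, _preferredConversion is selected per frame but never reaches the GPU: the fragment shader hardcodes its own BT.709 matrix. To honor the selection, one could declare an extra mat3 uniform (the name colorConversionMatrix here is illustrative, not part of the code above) and upload it before drawing. A sketch:

// Assumed additions: a UNIFORM_COLOR_CONVERSION_MATRIX slot in the uniforms enum,
// plus "uniform mediump mat3 colorConversionMatrix;" in the fragment shader.
// In createProgram, after a successful link:
//   uniforms[UNIFORM_COLOR_CONVERSION_MATRIX] = glGetUniformLocation(self.program, "colorConversionMatrix");
// In displayPixelBuffer:, after glUseProgram(self.program):
glUniformMatrix3fv(uniforms[UNIFORM_COLOR_CONVERSION_MATRIX], 1, GL_FALSE, _preferredConversion);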


-(void)setup {

    //1. Create the layer.
    [self setUplayer];
    //2. Create the context.
    [self setContext];
    //3. Delete any stale render/frame buffers.
    [self deleteRenderAndFrameBuffer];

    //4. Set up the framebuffer.
    [self setFrameBuffer];
    //5. Create the vertex shader, fragment shader and program object, and link them.
    [self createProgram];

    [self logInfo];
    _preferredConversion = kColorConversion709;
}

- (void)logInfo {
    glGetProgramiv(self.program, GL_ACTIVE_UNIFORMS, &allUniforms);
    glGetProgramiv(self.program, GL_ACTIVE_UNIFORM_MAX_LENGTH, &maxUnifomLen);
    
    uniformName = malloc(sizeof(char) * maxUnifomLen);
    
    GLint size;
    GLenum type;
    
    for (index = 0; index < allUniforms; index++) {
        glGetActiveUniform(self.program, index, maxUnifomLen, NULL, &size, &type, uniformName);
        // Never pass a runtime string as the printf format.
        printf("%s\n", uniformName);
    }
    free(uniformName);
    uniformName = NULL;
}

- (void)cleanUpTexture {
    if(_yTexture) {
        CFRelease(_yTexture);
        _yTexture = NULL;
    }
    if(_uvTexture) {
        CFRelease(_uvTexture);
        _uvTexture = NULL;
    }
    CVOpenGLESTextureCacheFlush(_textureCache, 0);
}
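
The texture cache, program and buffer objects are never released when the view goes away. A minimal cleanup sketch, assuming nothing else shares these GL objects:

- (void)dealloc {
    [EAGLContext setCurrentContext:_myContext];
    [self cleanUpTexture];
    if (_textureCache) {
        CFRelease(_textureCache);
        _textureCache = NULL;
    }
    [self deleteRenderAndFrameBuffer];
    if (_program) {
        glDeleteProgram(_program);
        _program = 0;
    }
    [EAGLContext setCurrentContext:nil];
}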

-(void)setUplayer {
    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;

    eaglLayer.opaque = TRUE;
    eaglLayer.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking :[NSNumber numberWithBool:NO],
                                      kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8};
}

-(void)setContext {
    EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
    if (!context) {
        NSLog(@"Failed to create EAGLContext");
        return;
    }
    
    if(![EAGLContext setCurrentContext:context]) {
        NSLog(@"setCurrentContext failed");
        return;
    }
    self.myContext = context;
}

-(void)deleteRenderAndFrameBuffer {
    // Delete the old buffer objects so their names can be reused.
    // (Framebuffers and renderbuffers have their own delete calls;
    // glDeleteBuffers is for vertex/index buffers.)
    glDeleteFramebuffers(1, &_frameBuffer);
    _frameBuffer = 0;
    glDeleteRenderbuffers(1, &_renderBuffer);
    _renderBuffer = 0;
}

// Set up the framebuffer and renderbuffer
-(void)setFrameBuffer {
    
    glDisable(GL_DEPTH_TEST);
    // (The vertex attribute arrays are configured per frame in displayPixelBuffer:,
    // so no attribute setup is needed here.)
    
    // Get an unused framebuffer object name and store it in _frameBuffer
    glGenFramebuffers(1, &_frameBuffer);
    // Bind the framebuffer, making it the active framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
    // Get an unused renderbuffer object name
    glGenRenderbuffers(1, &_renderBuffer);
    // Bind the renderbuffer, making it the active renderbuffer
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    
    // Allocate the renderbuffer's storage from the layer through the context
    [_myContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
    
    // Query the drawable's pixel width into _backingWidth
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    // ...and its pixel height into _backingHeight
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
    // Attach the renderbuffer to the framebuffer's color attachment point
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer);
    
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
    }
}
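
Note that the renderbuffer storage is allocated once from the layer's initial size; if the view can be resized, the storage and _backingWidth/_backingHeight go stale. A sketch of the usual fix (real code would also synchronize with the capture queue, which is what the commented-out _lock semaphore hints at):

- (void)layoutSubviews {
    [super layoutSubviews];
    [EAGLContext setCurrentContext:self.myContext];
    // Reallocate the drawable storage to match the layer's new size.
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    [_myContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
}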

// Create the program object, compile and link the shaders
-(void)createProgram {
    // Set the clear color
    glClearColor(0.3f, 0.4f, 0.f, 1);
    // Clear the screen
    glClear(GL_COLOR_BUFFER_BIT);
    //1. Set the viewport (in pixels, so scale by the screen's scale factor;
    //   the viewport origin is relative to the framebuffer, hence 0,0)
    CGFloat scale = [[UIScreen mainScreen] scale];
    glViewport(0, 0, self.frame.size.width * scale, self.frame.size.height * scale);
    
    //2. Locate the vertex and fragment shader sources
    NSString *vertfile = [[NSBundle mainBundle] pathForResource:@"FaceDetectionShader" ofType:@"vsh"];
    NSString *fragFile = [[NSBundle mainBundle] pathForResource:@"FaceDetectionShader" ofType:@"fsh"];
    
    NSLog(@"vsh == %@",vertfile);
    NSLog(@"fsh == %@",fragFile);
    
    //3. Compile the shaders and build the program object
    _program = [self loadSource:vertfile fragFilePath:fragFile];
    
    //4. Link the program
    glLinkProgram(_program);
    
    GLint linkStatus;
    // Check the link status
    glGetProgramiv(_program, GL_LINK_STATUS, &linkStatus);
    if (linkStatus == GL_FALSE) {
        GLchar message[512];
        glGetProgramInfoLog(_program, sizeof(message), 0, &message[0]);
        NSLog(@"Program link error: %s", message);
        return;
    }
    
    // Use the program object
    glUseProgram(_program);
    
    /**
     glGetUniformLocation returns the location of a uniform variable in the program object.
     That location can be passed to glUniform* to set the uniform's value, or to glGetUniform
     to query it. Uniform locations stay fixed until the next link command, and can only be
     queried after a successful link.
     */
    uniforms[UNIFORM_Y] = glGetUniformLocation(self.program, "SamplerY");
    uniforms[UNIFORM_UV] = glGetUniformLocation(self.program, "SamplerUV");
    uniforms[UNIFORM_ROTATE_MATRIX] = glGetUniformLocation(self.program, "rotateMatrix");
}

// Compile the shaders and return the program object
-(GLuint)loadSource:(NSString *)vertfilePath fragFilePath:(NSString *)fragFilePath {
    //1. Create the two shader objects
    GLuint verShader, fragShader;
    
    GLuint program = glCreateProgram();
    
    [self compileShader:&verShader type:GL_VERTEX_SHADER file:vertfilePath];
    [self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragFilePath];
   
    //2. Attach the shaders to the program object
    glAttachShader(program, verShader);
    glAttachShader(program, fragShader);
    
    // Bind each attribute variable's location (must happen before linking)
    glBindAttribLocation(program, ATTRIB_VERTEX, "aPosition");
    glBindAttribLocation(program, ATTRIB_TEXCOORD, "aTexCoordinate");
    
    //3. Mark the shaders for deletion; GL frees them once the program lets go of them
    glDeleteShader(verShader);
    glDeleteShader(fragShader);
    
    return program;
}

// Compile one shader; `shader` is an out-parameter, hence the pointer
- (void)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file{
    // 1. Read the shader source from the file
    NSString *content = [NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil];
    
    NSLog(@"shader content == %@", content);
    
    const GLchar *source = (GLchar *)[content UTF8String];
    // 2. Create a shader object of the given type
    *shader = glCreateShader(type);
    // 3. Attach the source to the shader object
    //    param 1: the shader object to compile
    //    param 2: the number of source strings (1 here)
    //    param 3: the shader source string(s)
    //    param 4: an array of string lengths, or NULL for NUL-terminated strings
    glShaderSource(*shader, 1, &source, NULL);
    
    // 4. Compile the source into object code and check the result
    glCompileShader(*shader);
    
    GLint compileStatus;
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &compileStatus);
    if (compileStatus == GL_FALSE) {
        GLchar message[512];
        glGetShaderInfoLog(*shader, sizeof(message), NULL, message);
        NSLog(@"Shader compile error: %s", message);
    }
}

+ (Class)layerClass {
    return [CAEAGLLayer class];
}

@end

Vertex shader:


// glBindAttribLocation() binds each attribute variable's location;
// glVertexAttribPointer() then supplies each attribute variable's data.

attribute vec4 aPosition;      // attribute variables carry per-vertex data: positions, normals, texture coordinates, colors
attribute vec2 aTexCoordinate; // the per-vertex texture coordinate

// varying variables pass data from the vertex shader to the fragment shader:
// the vertex shader writes the varying, the fragment shader reads it,
// so both declarations must match. The application cannot access varyings.

varying lowp vec2 texCoordVarying;

uniform mat4 rotateMatrix; // set from the application with glUniform*()

void main()
{
    texCoordVarying = aTexCoordinate;
    gl_Position = rotateMatrix * aPosition;
}

Fragment shader:

//yuv420f/v
varying highp vec2 texCoordVarying;
precision mediump float;
uniform sampler2D SamplerY;  // sampler for the luminance (Y) texture, set with glUniform1i()
uniform sampler2D SamplerUV; // sampler for the chrominance (UV) texture, set with glUniform1i()

void main()
{
    mediump vec3 yuv;
    lowp vec3 rgb;
    // BT.709 video-range conversion matrix, hardcoded here (the _preferredConversion
    // matrix chosen on the CPU side is never uploaded to this shader).
    mediump mat3 convert = mat3(1.164,  1.164, 1.164,
                                0.0, -0.213, 2.112,
                                1.793, -0.533,   0.0);
//    A BT.601 full-range variant:
//    mediump mat3 convert = mat3(1.0, 1.0, 1.0,    // column 1
//                           0.0,-0.338,1.732,      // column 2
//                           1.371,-0.698, 0.0);    // column 3
    // For video range one would subtract 16/255 from Y; full-range input needs no offset.
    yuv.x = (texture2D(SamplerY, texCoordVarying).r);// - (16.0/255.0));
    yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));
    
    rgb = convert * yuv;
    
    gl_FragColor = vec4(rgb,1);
}
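
To pair with the glUniformMatrix3fv upload sketched after displayPixelBuffer:, the hardcoded matrix would give way to a uniform; colorConversionMatrix is the assumed name:

uniform mediump mat3 colorConversionMatrix; // assumed uniform, uploaded from _preferredConversion

void main()
{
    mediump vec3 yuv;
    yuv.x = texture2D(SamplerY, texCoordVarying).r;
    yuv.yz = texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5);
    gl_FragColor = vec4(colorConversionMatrix * yuv, 1.0);
}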
