FFmpeg 解码视频流实现yuv播放

14 篇文章 0 订阅
10 篇文章 0 订阅

参考雷霄骅(雷神)的文章:
FFMPEG–裸码流解码AVCodec
最简单的基于FFmpeg的libswscale的示例(YUV转RGB)
视频流测试端口
初始化解码器相关变量

// Lazily initializes the H.264 decoder (codec, context, parser, packet).
// A non-nil decodeFrame means initialization already ran, so this is safe
// to call repeatedly.
-(void)initDecoder{
    if (decodeFrame) {
        return;
    }
    int codec_id = AV_CODEC_ID_H264;
    pCodec = avcodec_find_decoder(codec_id);
    if (!pCodec) {
        perror("find codec err");
        return;
    }
    decodeFrame = av_frame_alloc();
    if (!decodeFrame) {
        perror("alloc frame err");
        return;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    // fix: check the allocation BEFORE dereferencing pCodecCtx; the original
    // assigned pCodecCtx->codec_id first, which crashes if allocation failed.
    if (!pCodecCtx) {
        perror("pcodecx err");
        return;
    }
    pCodecCtx->codec_id = codec_id;
    
    pParserCtx = av_parser_init(codec_id);
    if (!pParserCtx) {
        perror("pParserctx err");
        return;
    }
    av_init_packet(&package);
    int open_ret = avcodec_open2(pCodecCtx, pCodec, NULL);
    if (open_ret<0) {
        perror("open decoder err");
        return;
    }
    
}

解码,buffer就是读取的裸流,len是长度

// Decodes a chunk of raw H.264 bitstream. `buffer` is the raw elementary
// stream, `len` its byte length. The parser splits the chunk into complete
// packets, each of which is sent to the decoder.
-(void)decodeVideo:(char *)buffer length:(int)len{
    if (len<=0) {
        return;
    }
    [self initDecoder];
    int in_len = len;
    uint8_t *in_buffer = (uint8_t*)buffer;
    uint8_t *out_buffer = NULL;
    int out_len = 0;
    while (in_len>0) {
        int lens = av_parser_parse2(pParserCtx, pCodecCtx, &out_buffer, &out_len, in_buffer, in_len, 0, 0, 0);
        if (lens < 0) {
            // fix: the original ignored parser errors and would subtract a
            // negative count from in_len.
            return;
        }
        in_len -= lens;
        in_buffer += lens;
        if (out_len<=0) {
            // Parser needs more input. fix: if it also consumed nothing,
            // `continue` would spin forever here — bail out instead and wait
            // for the caller to feed more data.
            if (lens == 0) {
                return;
            }
            continue;
        }
        // Log the picture type the parser detected for this packet.
        switch (pParserCtx->pict_type) {
            case AV_PICTURE_TYPE_P:
                NSLog(@"====>P");
                break;
            case AV_PICTURE_TYPE_I:
                NSLog(@"====>I");
                break;
            case AV_PICTURE_TYPE_B:
                NSLog(@"====>B");
                break;
            default:
                NSLog(@"other");
                break;
        }
        package.data = out_buffer;
        package.size = out_len;
        // avcodec_send_packet() + avcodec_receive_frame() replace the
        // deprecated avcodec_decode_video2(). One packet may yield zero or
        // more frames, hence the drain loop.
        int send_ret = avcodec_send_packet(pCodecCtx, &package);
        if (send_ret!=0) {
            return;
        }
        while (avcodec_receive_frame(pCodecCtx, decodeFrame)==0) {
             NSLog(@"decode succes");
        }
    }
    
}

解码结束之后释放内存

// Releases all decoder resources once decoding is finished.
-(void)releaseFrames{
    // fix: the parser context was leaked by the original implementation.
    if (pParserCtx) {
        av_parser_close(pParserCtx);
        pParserCtx = NULL;
    }
    if (pCodecCtx) {
        avcodec_close(pCodecCtx);
        avcodec_free_context(&pCodecCtx);
        pCodecCtx = NULL;
    }
    if (decodeFrame) {
        // fix: av_frame_unref() only drops the frame's data buffers;
        // av_frame_free() also frees the AVFrame struct the original leaked.
        av_frame_free(&decodeFrame);
        decodeFrame = NULL;
    }
    av_packet_unref(&package);
    if (buffer) {
        free(buffer);
        buffer = NULL;
    }
    
}

下面提供一个解码rtsp 流的一个demo
ZbOpenGLView 是渲染播放 yuv视频的一个类,后面提供代码
先放读取流和解码的代码

DecodeManager.h

#import <Foundation/Foundation.h>
#import "ZbOpenGLView.h"
NS_ASSUME_NONNULL_BEGIN

/// Demo manager that opens a hard-coded RTSP URL with FFmpeg, decodes its
/// video stream and renders the frames into a ZbOpenGLView.
@interface DecodeManager : NSObject

/// Shared singleton instance.
+(instancetype)sharedInstance;
/// Opens the stream, then blocks while reading/decoding frames and pushing
/// each decoded YUV frame to `displayView`.
-(void)decodeRtspStreams:(ZbOpenGLView*)displayView;
@end

NS_ASSUME_NONNULL_END

DecodeManager.m

#import "DecodeManager.h"
#include <libavcodec/avcodec.h>
#import <libavformat/avformat.h>
#import <libavutil/imgutils.h>
#import <libswscale/swscale.h>
#import <ImageIO/ImageIO.h>
@interface DecodeManager()
@property(nonatomic,strong)ZbOpenGLView *displayView;
@end
@implementation DecodeManager
static DecodeManager *instance=nil;
+(instancetype)sharedInstance{
    // Thread-safe, lazy, one-time creation of the singleton.
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        instance = [[DecodeManager alloc] init];
    });
    return instance;
}
// Opens the hard-coded RTSP URL, decodes its video stream and pushes every
// decoded frame to `displayView`. Blocks until the stream ends or an error
// occurs, then releases all FFmpeg resources.
-(void)decodeRtspStreams:(ZbOpenGLView*)displayView{
    self.displayView = displayView;
    AVFrame *pFrame = NULL;
    av_register_all();
    // Register all codecs.
    avcodec_register_all();
    avformat_network_init();
    NSString *rstpUrl = @"rtsp://184.72.239.149/vod/mp4://BigBuckBunny_175k.mov";
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // Open the input stream.
    int ret = avformat_open_input(&pFormatCtx, [rstpUrl UTF8String], NULL, NULL);
    if (ret<0) {
        NSLog(@"打开 input stream faild");
        return;
    }
    // Probe the stream information.
    ret = avformat_find_stream_info(pFormatCtx, NULL);
    if (ret<0) {
        NSLog(@"can not find stream info");
        avformat_close_input(&pFormatCtx); // fix: original leaked the context on every error path
        return;
    }
    // Locate the first video stream.
    int video_index = -1;
    for (int i=0; i<pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_index = i;
            break;
        }
    }
    if (video_index==-1) {
        NSLog(@"can not find video type");
        avformat_close_input(&pFormatCtx);
        return;
    }
    // Decoder context (deprecated streams[i]->codec API, kept for
    // consistency with the rest of this demo).
    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
    if (pCodecCtx==NULL) {
        NSLog(@"解码器上下文 失败");
        avformat_close_input(&pFormatCtx);
        return;
    }
    // Find a decoder matching the stream's codec id.
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec==NULL) {
        NSLog(@"can not find codec");
        avformat_close_input(&pFormatCtx);
        return;
    }
    // Open the decoder.
    ret = avcodec_open2(pCodecCtx, pCodec, NULL);
    if (ret<0) {
        NSLog(@"open codec faild");
        avformat_close_input(&pFormatCtx);
        return;
    }
    // Container for decoded frames.
    pFrame = av_frame_alloc();
    AVPacket *packet = av_packet_alloc();
    // NOTE: the original also built a pFrameYUV/sws_getContext pair and
    // called sws_scale into pFrameYUV — but pFrameYUV never had a pixel
    // buffer allocated (sws_scale wrote through NULL data pointers) and its
    // output was unused: -showWithFrame: consumes pFrame directly. That
    // broken, dead conversion has been removed.
    while (1) {
        if (av_read_frame(pFormatCtx, packet)<0) {
            break;
        }
        // fix: only feed packets of the video stream to the video decoder.
        if (packet->stream_index != video_index) {
            av_packet_unref(packet);
            continue;
        }
        int got_picture = -1;
        // Decode one packet into (possibly) one picture.
        int decode_ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
        if (decode_ret>=0 && got_picture==1) {
            [self showWithFrame:pFrame];
        }
        av_packet_unref(packet); // fix: original leaked every packet's payload
    }
    av_packet_free(&packet);
    av_frame_free(&pFrame);
    avformat_close_input(&pFormatCtx); // fix: close the demuxer when done

    
}
// Copies a decoded YUV420p frame into one tightly-packed buffer
// (full Y plane, then U, then V) and hands it to the OpenGL view on a
// background queue; the block frees the buffer after display.
-(void)showWithFrame:(AVFrame*)_pFrame{
    int w = _pFrame->width;
    int h = _pFrame->height;
    char *buf = (char *)malloc(w * h * 3 / 2);
    // fix: the original checked `buf == NULL` only AFTER writing through
    // the pointer with memcpy; check before any use.
    if (buf == NULL) {
        return;
    }
    // Plane start offsets inside the packed buffer.
    char *y = buf;
    char *u = y + w * h;
    char *v = u + w * h / 4;
    int i;
    // linesize can be padded wider than the visible width, so copy row by
    // row instead of one big memcpy. (The deprecated AVPicture cast of the
    // original is unnecessary — AVFrame exposes data/linesize directly.)
    for (i=0; i<h; i++)
        memcpy(y + w * i, _pFrame->data[0] + _pFrame->linesize[0] * i, w);
    for (i=0; i<h/2; i++)
        memcpy(u + w / 2 * i, _pFrame->data[1] + _pFrame->linesize[1] * i, w / 2);
    for (i=0; i<h/2; i++)
        memcpy(v + w / 2 * i, _pFrame->data[2] + _pFrame->linesize[2] * i, w / 2);
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        [self.displayView displayYUV420pData:buf width:w height:h];
        free(buf);
    });
}
@end

下面是 ZbOpenGLView渲染类

//
//  ZbOpenGLView.h
//

#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>

#include <sys/time.h>

/// UIView backed by a CAEAGLLayer that renders raw YUV420p frames with
/// OpenGL ES 2.0 (one texture per Y/U/V plane, converted to RGB in the
/// fragment shader).
@interface ZbOpenGLView : UIView
{
	
}
#pragma mark  ----- ----
/// Uploads and displays one tightly-packed YUV420p frame
/// (Y plane w*h bytes, then U w/2*h/2, then V w/2*h/2).
- (void)displayYUV420pData:(void *)data width:(NSInteger)w height:(NSInteger)h;
/// Clears the view to black (only when attached to a window).
- (void)clearFrame;
@end

.m文件

//
//  OpenGLView.m
//  MyTest
//
//  Created by smy on 12/20/11.
//  Copyright (c) 2011 ZY.SYM. All rights reserved.
//

#import "ZbOpenGLView.h"

// Vertex-attribute slots; bound by name in -loadShader before linking.
enum AttribEnum
{
    ATTRIB_VERTEX,
    ATTRIB_TEXTURE
    //ATTRIB_COLOR,
};

// Indices into _textureYUV for the three video planes (Y/U/V).
enum TextureType
{
    TEXY = 0,
    TEXU,
    TEXV
   // TEXC
};

//#define PRINT_CALL 1

@interface ZbOpenGLView()
{
    /**
     OpenGL drawing context
     */
    EAGLContext             *_glContext;
    
    /**
     Frame buffer
     */
    GLuint                  _framebuffer;
    
    /**
     Render buffer
     */
    GLuint                  _renderBuffer;
    
    /**
     Shader program handle
     */
    GLuint                  _program;
    
    /**
     YUV texture names (only the first three, Y/U/V, are used)
     */
    GLuint                  _textureYUV[4];
    
    /**
     Video width
     */
    GLuint                  _videoW;
    
    /**
     Video height
     */
    GLuint                  _videoH;
    
    // Screen scale factor, used to convert points to device pixels.
    GLsizei                 _viewScale;
	   
    //void                    *_pYuvData;
    
#ifdef DEBUG
    // Frame-rate bookkeeping: frames counted within the current second.
    struct timeval      _time;
    NSInteger           _frameRate;
#endif
}



/** 
 Creates and configures the Y/U/V textures.
 */
- (void)setupYUVTexture;

/** 
 Creates the frame/render buffers.
 @return TRUE on success, FALSE on failure
 */
- (BOOL)createFrameAndRenderBuffer;

/** 
 Destroys the frame/render buffers.
 */
- (void)destoryFrameAndRenderBuffer;

//加载着色器
/** 
 Compiles and links the YUV shader program.
 */
- (void)loadShader;

/** 
 Compiles one piece of shader source.
 @param shader        source code
 @param shaderType    shader type
 @return the shader handle on success
 */
- (GLuint)compileShader:(NSString*)shaderCode withType:(GLenum)shaderType;

/** 
 Renders the current textures to screen.
 */
- (void)render;

// Reallocates the textures for a new video size.
- (void)setVideoSize:(GLuint)width height:(GLuint)height;





@end

@implementation ZbOpenGLView

//- (void)debugGlError
//{
//    GLenum r = glGetError();
//    if (r != 0)
//    {
//        printf("%d   \n", r);
//    }
//}
// One-time OpenGL ES 2.0 setup: layer properties, context creation, YUV
// textures, shader program, and the three sampler uniforms mapped to
// texture units 0/1/2. Returns NO if the GL context cannot be created.
- (BOOL)doInit
{
    CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
    // fix: the original assigned `opaque = YES` twice.
    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,
                                    kEAGLColorFormatRGB565, kEAGLDrawablePropertyColorFormat,
                                    nil];
    self.contentScaleFactor = [UIScreen mainScreen].scale;
    _viewScale = [UIScreen mainScreen].scale;
    
    _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    
    if(!_glContext || ![EAGLContext setCurrentContext:_glContext])
    {
        return NO;
    }
	
    [self setupYUVTexture];
    [self loadShader];
    glUseProgram(_program);
    
    // fix: glGetUniformLocation returns GLint (-1 on failure), not GLuint.
    GLint textureUniformY = glGetUniformLocation(_program, "SamplerY");
    GLint textureUniformU = glGetUniformLocation(_program, "SamplerU");
    GLint textureUniformV = glGetUniformLocation(_program, "SamplerV");
    // Bind the samplers to texture units 0/1/2 (see -setupYUVTexture).
    glUniform1i(textureUniformY, 0);
    glUniform1i(textureUniformU, 1);
    glUniform1i(textureUniformV, 2);
    printf("InitGL");
    
    return YES;
}

// Initialization path when the view is loaded from a nib/storyboard;
// fails (returns nil) when GL setup fails.
- (id)initWithCoder:(NSCoder *)aDecoder
{
    if ((self = [super initWithCoder:aDecoder]) && ![self doInit])
    {
        self = nil;
    }
    return self;
}

// Programmatic initialization path; fails (returns nil) when GL setup fails.
- (id)initWithFrame:(CGRect)frame
{
    if ((self = [super initWithFrame:frame]) && ![self doInit])
    {
        self = nil;
    }
    return self;
}

// Rebuilds the frame/render buffers for the new layout on the same global
// queue the renderer uses, under the same lock.
- (void)layoutSubviews
{
    [super layoutSubviews];
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        @synchronized(self)
        {
            [EAGLContext setCurrentContext:_glContext];
            [self destoryFrameAndRenderBuffer];
            [self createFrameAndRenderBuffer];
            // fix: glViewport was issued OUTSIDE @synchronized, racing with
            // the GL calls in -render/-displayYUV420pData:. Viewport is the
            // view size in device pixels, inset 1px on every edge.
            glViewport(1, 1, self.bounds.size.width*_viewScale - 2, self.bounds.size.height*_viewScale - 2);
        }
    });
}

// (Re)creates the Y/U/V textures and gives each identical linear-filtering,
// edge-clamping parameters, bound to texture units 0/1/2 respectively.
- (void)setupYUVTexture
{
    // Drop any previously generated texture names first.
    if (_textureYUV[TEXY])
    {
        glDeleteTextures(4, _textureYUV);
    }
    glGenTextures(4, _textureYUV);
    if (!_textureYUV[TEXY] || !_textureYUV[TEXU] || !_textureYUV[TEXV])
    {
        NSLog(@"<<<<<<<<<<<<纹理创建失败!>>>>>>>>>>>>");
        return;
    }
    
    // TEXY/TEXU/TEXV are 0/1/2, matching GL_TEXTURE0+i.
    for (int plane = TEXY; plane <= TEXV; plane++)
    {
        glActiveTexture(GL_TEXTURE0 + plane);
        glBindTexture(GL_TEXTURE_2D, _textureYUV[plane]);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
}

// Draws the current Y/U/V textures as a full-screen quad and presents the
// renderbuffer. Callers hold @synchronized(self) and have data uploaded.
- (void)render
{
    [EAGLContext setCurrentContext:_glContext];
    CGSize size = self.bounds.size;
    // Viewport in device pixels, inset 1px on every edge.
    glViewport(1, 1, size.width*_viewScale-2, size.height*_viewScale-2);
    // Full-screen quad in normalized device coordinates (triangle-strip order).
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };

    
    // Texture coordinates for the quad (v axis inverted relative to the
    // vertex order, so the image is not upside-down).
    static const GLfloat coordVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f,  0.0f,
        1.0f,  0.0f,
    };

    // Update attribute values
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);
    
    // Draw
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    
    // Log GL errors around the present call (numeric codes only).
    GLenum err = glGetError();
    if (err != GL_NO_ERROR)
    {
        printf("GL_ERROR  11111=======>%d\n", err);
    }
    [_glContext presentRenderbuffer:GL_RENDERBUFFER];
    err = glGetError();
    if (err != GL_NO_ERROR)
    {
        printf("GL_ERROR  22222=======>%d\n", err);
    }
}

#pragma mark - 设置openGL
// Back this view with a CAEAGLLayer so OpenGL ES can render into it.
+ (Class)layerClass
{
    return [CAEAGLLayer class];
}

/**
 Creates the framebuffer and a renderbuffer backed by the view's layer,
 then attaches them. Assumes _glContext is current.
 @return YES on success, NO on any failure.
 */
- (BOOL)createFrameAndRenderBuffer
{
    glGenFramebuffers(1, &_framebuffer);
    glGenRenderbuffers(1, &_renderBuffer);
    
    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    
    
    if (![_glContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer])
    {
        NSLog(@"attach渲染缓冲区失败");
        // fix: the original logged this failure but fell through and
        // reported success anyway.
        return NO;
    }
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        NSLog(@"创建缓冲区错误 0x%x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
        return NO;
    }
    return YES;
}

// Deletes the frame/render buffers (if any) and resets their handles to 0.
- (void)destoryFrameAndRenderBuffer
{
    if (_framebuffer != 0)
    {
        glDeleteFramebuffers(1, &_framebuffer);
        _framebuffer = 0;
    }
    if (_renderBuffer != 0)
    {
        glDeleteRenderbuffers(1, &_renderBuffer);
        _renderBuffer = 0;
    }
}

// Fragment shader: samples the Y/U/V planes (one GL_RED texture each) and
// converts YUV to RGB with a mat3.
#define FSH @"varying lowp vec2 TexCoordOut;\
\
uniform sampler2D SamplerY;\
uniform sampler2D SamplerU;\
uniform sampler2D SamplerV;\
\
void main(void)\
{\
    mediump vec3 yuv;\
    lowp vec3 rgb;\
    \
    yuv.x = texture2D(SamplerY, TexCoordOut).r;\
    yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;\
    yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;\
    \
    rgb = mat3( 1,       1,         1,\
               0,       -0.39465,  2.03211,\
               1.13983, -0.58060,  0) * yuv;\
    \
    gl_FragColor = vec4(rgb, 1);\
    \
}"

// Vertex shader: passthrough position plus texture coordinate.
#define VSH @"attribute vec4 position;\
attribute vec2 TexCoordIn;\
varying vec2 TexCoordOut;\
\
void main(void)\
{\
    gl_Position = position;\
    TexCoordOut = TexCoordIn;\
}"



// NOTE(review): FSH2/VSH2 are unused and their names appear swapped (FSH2
// is vertex-shader code, VSH2 fragment-shader code). Worse, the "//"
// comments inside the string end up on a single GLSL source line, so they
// would comment out the remainder of the shader if ever compiled. Do not
// use these as-is.
#define FSH2 @"attribute vec4 Position; \
attribute vec2 TextureCoords; \
varying vec2 TextureCoordsOut; \
void main(void) \
{ \
//用来展现纹理的多边形顶点 \
gl_Position = Position; \
//表示使用的纹理的范围的顶点,因为是2D纹理,所以用vec2类型 \
TextureCoordsOut = TextureCoords; \
}"

#define VSH2 @"precision mediump float; \
uniform sampler2D Texture; \
varying vec2 TextureCoordsOut; \
void main(void) \
{ \
//获取纹理的像素 \
vec4 mask = texture2D(Texture, TextureCoordsOut); \
gl_FragColor = vec4(mask.rgb, 1.0); \
}"




/**
 加载着色器
 */
/**
 Builds the GL program: compiles both YUV shaders, binds the attribute
 locations, links, and deletes the intermediate shader objects.
 */
- (void)loadShader
{
	/** 
	 1. Compile the vertex and fragment shaders.
	 */
    GLuint vertexShader = [self compileShader:VSH withType:GL_VERTEX_SHADER];
    GLuint fragmentShader = [self compileShader:FSH withType:GL_FRAGMENT_SHADER];

    
    
	/** 
	 2. Create the program and attach both shaders.
	 */
    _program = glCreateProgram();
    glAttachShader(_program, vertexShader);
    glAttachShader(_program, fragmentShader);
    
	/** 
	 Attribute locations must be bound BEFORE linking.
	 */
    glBindAttribLocation(_program, ATTRIB_VERTEX, "position");
    glBindAttribLocation(_program, ATTRIB_TEXTURE, "TexCoordIn");
    
    glLinkProgram(_program);
    
	/** 
	 3. Check the link status and log the program info log on failure.
	 */
    GLint linkSuccess;
    glGetProgramiv(_program, GL_LINK_STATUS, &linkSuccess);
    if (linkSuccess == GL_FALSE) {
        GLchar messages[256];
        glGetProgramInfoLog(_program, sizeof(messages), 0, &messages[0]);
        NSString *messageString = [NSString stringWithUTF8String:messages];
        NSLog(@"<<<<着色器连接失败 %@>>>", messageString);
        //exit(1);
    }
    
    // Shader objects are no longer needed once linked into the program.
    if (vertexShader)
		glDeleteShader(vertexShader);
    if (fragmentShader)
		glDeleteShader(fragmentShader);
}

/**
 Compiles one piece of GLSL source.
 @param shaderString  the GLSL source code
 @param shaderType    GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
 @return the shader handle, or 0 when shaderString is nil.
 Exits the process on a compile error (demo-style error handling, kept).
 */
- (GLuint)compileShader:(NSString*)shaderString withType:(GLenum)shaderType
{
    // fix: the original's empty nil-branch fell through and passed a NULL
    // source pointer to glShaderSource.
    if (!shaderString) {
        return 0;
    }
    
    GLuint shaderHandle = glCreateShader(shaderType);
    
    const char * shaderStringUTF8 = [shaderString UTF8String];
    // fix: [shaderString length] is the UTF-16 code-unit count, not the
    // UTF-8 byte count; passing NULL lets GL read the NUL-terminated string.
    glShaderSource(shaderHandle, 1, &shaderStringUTF8, NULL);
    
    glCompileShader(shaderHandle);
    
    // Check the compile status; log the info log and abort on failure.
    GLint compileSuccess;
    glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
    if (compileSuccess == GL_FALSE) {
        GLchar messages[256];
        glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
        NSString *messageString = [NSString stringWithUTF8String:messages];
        NSLog(@"%@", messageString);
        exit(1);
    }
    return shaderHandle;
}


#pragma mark - 接口
// Uploads one tightly-packed YUV420p frame (Y plane w*h, then U and V at
// quarter size) into the three textures and renders it. Thread-safe via
// @synchronized(self).
- (void)displayYUV420pData:(void *)data width:(NSInteger)w height:(NSInteger)h
{
    // fix: arithmetic on void* is a GCC extension; use a typed pointer for
    // the U/V plane offsets.
    uint8_t *yuv = (uint8_t *)data;
    @synchronized(self)
    {
        // Reallocate the textures when the frame size changes.
        if (w != _videoW || h != _videoH)
        {
            [self setVideoSize:(GLuint)w height:(GLuint)h];
        }
        if (yuv == NULL)
        {
            // fix: the original only logged glGetError here and then
            // uploaded from the NULL pointer anyway.
            return;
        }
        [EAGLContext setCurrentContext:_glContext];
        
        glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, (GLuint)w, (GLuint)h, GL_RED_EXT, GL_UNSIGNED_BYTE, yuv);
        
        glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, (GLuint)w/2, (GLuint)h/2, GL_RED_EXT, GL_UNSIGNED_BYTE, yuv + w * h);
        
        glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, (GLuint)w/2, (GLuint)h/2, GL_RED_EXT, GL_UNSIGNED_BYTE, yuv + w * h * 5 / 4);
        
        [self render];
    }
    
#ifdef DEBUG
    // Simple frame counter: resets every wall-clock second.
    GLenum err = glGetError();
    if (err != GL_NO_ERROR)
    {
        printf("GL_ERROR=======>%d\n", err);
    }
    
    struct timeval nowtime;
    gettimeofday(&nowtime, NULL);
    if (nowtime.tv_sec != _time.tv_sec)
    {
        memcpy(&_time, &nowtime, sizeof(struct timeval));
        _frameRate = 1;
    }
    else
    {
        _frameRate++;
    }
    
#endif
}

// Reallocates the three plane textures for a new video size, initialized
// from a zeroed staging buffer.
- (void)setVideoSize:(GLuint)width height:(GLuint)height
{
    _videoW = width;
    _videoH = height;
    
    // fix: use integer math for the YUV420p size (the original multiplied
    // by the double 1.5), a typed pointer (void* arithmetic is a GCC
    // extension), and bail out instead of doing pointer arithmetic on NULL
    // when the allocation fails.
    size_t frameSize = (size_t)width * height * 3 / 2;
    uint8_t *blackData = malloc(frameSize);
    if (!blackData)
        return;
    memset(blackData, 0x0, frameSize);
    
    [EAGLContext setCurrentContext:_glContext];
    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width, height, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData);
    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width/2, height/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height);
    
    glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width/2, height/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height * 5 / 4);
    free(blackData);
}


// Clears the drawable to (almost fully transparent) black and presents it.
- (void)clearFrame
{
    // Only touch GL when the view is attached to a window.
    if (self.window == nil)
    {
        return;
    }
    [EAGLContext setCurrentContext:_glContext];
    glClearColor(0.0, 0.0, 0.0, 0.1);
    glClear(GL_COLOR_BUFFER_BIT);
    glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
    [_glContext presentRenderbuffer:GL_RENDERBUFFER];
}


#if 0
// NOTE: dead code — this whole method sits inside #if 0. It uploads a
// zeroed 640x360 buffer into a texture created from `img`; the hard-coded
// size and the commented-out immediate-mode calls suggest it was
// experimental scaffolding.
- (void)clearFrame:(UIImage *)img
{
    void *blackData = malloc(640 * 360 * 1.5);
    if(blackData)
        //bzero(blackData, width * height * 1.5);
        memset(blackData, 0x0, 640 * 360 * 1.5);
    
    GLuint texture = [self createOGLTexture:img];
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, 640, 360, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData);
    free(blackData);
    
    //glBegin(GL_QUADS);
    //glTexCoord2d(0.0, 0.0);
    //glVertex2d(-1.0, -1.0);
    //glTexCoord2d(1.0, 0.0); glVertex2d(+1.0, -1.0);
    //glTexCoord2d(1.0, 1.0); glVertex2d(+1.0, +1.0);
    //glTexCoord2d(0.0, 1.0); glVertex2d(-1.0, +1.0);
    //glEnd();
}




// NOTE: dead code — inside #if 0. Draws a UIImage into a CG bitmap context
// and uploads the pixels as an RGBA GL texture, returning the texture name.
// NOTE(review): the glTexParameteri calls below run BEFORE
// glGenTextures/glBindTexture, so they configure whatever texture happened
// to be bound previously, not the new one — reorder before reviving this.
-(GLuint)createOGLTexture:(UIImage *)image
{
    // Convert to CGImage and read its dimensions.
    CGImageRef cgImageRef = [image CGImage];
    GLuint width = (GLuint)CGImageGetWidth(cgImageRef);
    GLuint height = (GLuint)CGImageGetHeight(cgImageRef);
    CGRect rect = CGRectMake(0, 0, width, height);
    // Draw the image into an RGBA bitmap (flipped to GL's row order).
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    void *imageData = malloc(width * height * 4);
    CGContextRef context = CGBitmapContextCreate(imageData, width, height, 8, width * 4, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextTranslateCTM(context, 0, height);
    CGContextScaleCTM(context, 1.0f, -1.0f);
    CGColorSpaceRelease(colorSpace);
    CGContextClearRect(context, rect);
    CGContextDrawImage(context, rect, cgImageRef);
    // Texture sampling parameters (see the ordering note above).
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Generate the texture and upload the bitmap.
    glEnable(GL_TEXTURE_2D);
    GLuint textureID;
    glGenTextures(1, &textureID);
    glBindTexture(GL_TEXTURE_2D, textureID);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
    
    // Unbind.
    glBindTexture(GL_TEXTURE_2D, 0);
    // Release the CG context and staging memory.
    CGContextRelease(context);
    free(imageData);
    return textureID;
}
#endif

@end

  • 0
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值