MediaCodec and OpenGL ES Texture Sharing Approach

 

1. OpenGL generates a texture.

2. Bind that texture to a SurfaceTexture.

3. Create a Surface with the SurfaceTexture as its constructor argument.

4. MediaCodec decodes the video straight into the Surface, and the frames appear on screen (a condensed sketch of the whole chain follows below).
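A condensed sketch of the four steps, assuming it runs on the GL thread and that `videoFormat` is a `MediaFormat` obtained elsewhere (for example from a `MediaExtractor`); the names here are illustrative and not part of the renderer code below:

```java
// 1. OpenGL generates a texture (on the GL thread)
int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);

// 2. Bind the texture id to a SurfaceTexture
SurfaceTexture surfaceTexture = new SurfaceTexture(tex[0]);

// 3. Wrap the SurfaceTexture in a Surface
Surface surface = new Surface(surfaceTexture);

// 4. Configure MediaCodec to decode into that Surface
MediaCodec codec = MediaCodec.createDecoderByType("video/avc");
codec.configure(videoFormat, surface, null, 0); // videoFormat: assumed MediaFormat, e.g. from a MediaExtractor
codec.start();
```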

Straight to the code.

vertex_shader.glsl

attribute vec4 av_Position;
attribute vec2 af_Position;
varying vec2 v_texPosition;
void main() {
    v_texPosition = af_Position;
    gl_Position = av_Position;
}

fragment_mediacodec.glsl

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 v_texPosition;
//samplerExternalOES samples the decoded video frames from the SurfaceTexture
uniform samplerExternalOES sTexture;

void main() {
    gl_FragColor = texture2D(sTexture, v_texPosition);
}

VideoRender.java

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class VideoRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

    private Context context;

    private final float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f

    };

    private final float[] textureData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f
    };

    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;


    // MediaCodec-related handles and objects
    private int program_mediacodec;
    private int avPosition_mediacodec;
    private int afPosition_mediacodec;
    private int samplerOES_mediacodec;
    private int textureId_mediacodec;
    private SurfaceTexture surfaceTexture;
    private Surface surface;

    private OnSurfaceCreateListener onSurfaceCreateListener;
    private OnRenderListener onRenderListener;

    public VideoRender(Context context) {
        this.context = context;
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);
    }


    public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
        this.onSurfaceCreateListener = onSurfaceCreateListener;
    }

    public void setOnRenderListener(OnRenderListener onRenderListener) {
        this.onRenderListener = onRenderListener;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        initRenderMediacodec();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        renderMediacodec();
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        if (onRenderListener != null) {
            // Forward onFrameAvailable to the GLSurfaceView, which calls requestRender() to trigger onDrawFrame()
            onRenderListener.onRender();
        }
    }

    private void initRenderMediacodec() {
        String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_mediacodec);
        program_mediacodec = ShaderUtil.createProgram(vertexSource, fragmentSource);

        avPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "av_Position");
        afPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "af_Position");
        samplerOES_mediacodec = GLES20.glGetUniformLocation(program_mediacodec, "sTexture");

        int[] textureids = new int[1];
        GLES20.glGenTextures(1, textureids, 0);
        textureId_mediacodec = textureids[0];
            
        // The external texture must be bound before setting its parameters;
        // GL_TEXTURE_EXTERNAL_OES only supports GL_CLAMP_TO_EDGE wrapping
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        surfaceTexture = new SurfaceTexture(textureId_mediacodec);
        surface = new Surface(surfaceTexture);
        surfaceTexture.setOnFrameAvailableListener(this);

        if (onSurfaceCreateListener != null) {
            // Pass the Surface out so MediaCodec can be configured to render into it
            onSurfaceCreateListener.onSurfaceCreate(surface);
        }
    }

    private void renderMediacodec() {
        surfaceTexture.updateTexImage();
        GLES20.glUseProgram(program_mediacodec);

        GLES20.glEnableVertexAttribArray(avPosition_mediacodec);
        GLES20.glVertexAttribPointer(avPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);

        GLES20.glEnableVertexAttribArray(afPosition_mediacodec);
        GLES20.glVertexAttribPointer(afPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
        GLES20.glUniform1i(samplerOES_mediacodec, 0);
    }


    public interface OnSurfaceCreateListener {
        void onSurfaceCreate(Surface surface);
    }

    public interface OnRenderListener {
        void onRender();
    }
}
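VideoRender relies on a `ShaderUtil` helper (`readRawTxt` and `createProgram`) that is not shown in the post; a minimal sketch of what it might look like, assuming the shader sources live in `res/raw`:

```java
import android.content.Context;
import android.opengl.GLES20;

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class ShaderUtil {

    // Read a shader source file from res/raw into a String
    public static String readRawTxt(Context context, int rawId) {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(context.getResources().openRawResource(rawId)))) {
            String line;
            while ((line = reader.readLine()) != null) {
                sb.append(line).append("\n");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return sb.toString();
    }

    // Compile a shader of the given type, returning 0 on failure
    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    // Link the vertex and fragment shaders into a program
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        return program;
    }
}
```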

VideoGLSurfaceView.java

import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;

public class VideoGLSurfaceView extends GLSurfaceView {

    private VideoRender render;

    public VideoGLSurfaceView(Context context) {
        this(context, null);
    }

    public VideoGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        render = new VideoRender(context);
        setRenderer(render);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

        render.setOnRenderListener(new VideoRender.OnRenderListener() {
            @Override
            public void onRender() {
                requestRender();
            }
        });
    }

    public VideoRender getWlRender() {
        return render;
    }
}
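To wire everything up, the hosting Activity retrieves the Surface through `OnSurfaceCreateListener` and hands it to the decoder. A minimal sketch, assuming a layout id `video_gl_surface_view` and a hypothetical `startDecoder` helper that configures MediaCodec with the received Surface on its own thread:

```java
VideoGLSurfaceView glSurfaceView = findViewById(R.id.video_gl_surface_view); // assumed layout id
glSurfaceView.getWlRender().setOnSurfaceCreateListener(new VideoRender.OnSurfaceCreateListener() {
    @Override
    public void onSurfaceCreate(Surface surface) {
        // Called once on the GL thread after the SurfaceTexture/Surface pair exists;
        // hand the Surface to the decoder, e.g. codec.configure(format, surface, null, 0)
        startDecoder(surface); // hypothetical helper, not part of the original code
    }
});
```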

 

Here is a simple example of using OpenGL ES and MediaCodec to decode a video and render it to the screen:

```java
public class VideoRenderer implements SurfaceTexture.OnFrameAvailableListener {

    private static final String TAG = "VideoRenderer";

    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private int mTextureId;
    private MediaCodec mMediaCodec;
    private MediaFormat mMediaFormat;
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;
    private int mWidth;
    private int mHeight;
    private boolean mIsRunning = false;

    private EGLDisplay mEGLDisplay;
    private EGLSurface mEGLSurface;
    private EGLContext mEGLContext;

    // GL program, attribute/uniform handles and vertex buffers used in onFrameAvailable
    // (their initialization is not shown in this snippet)
    private int mProgram;
    private int mTransformMatrixHandle;
    private int mPositionHandle;
    private int mTextureHandle;
    private FloatBuffer mVertexBuffer;
    private FloatBuffer mTextureBuffer;

    public VideoRenderer(SurfaceTexture surfaceTexture, int width, int height) {
        mSurfaceTexture = surfaceTexture;
        mWidth = width;
        mHeight = height;
    }

    public void start(String filePath) throws IOException {
        mMediaFormat = MediaFormat.createVideoFormat("video/avc", mWidth, mHeight);
        mMediaCodec = MediaCodec.createDecoderByType("video/avc");
        mMediaCodec.configure(mMediaFormat, mSurface, null, 0);
        mInputBuffers = mMediaCodec.getInputBuffers();
        mOutputBuffers = mMediaCodec.getOutputBuffers();

        File file = new File(filePath);
        FileInputStream inputStream = new FileInputStream(file);
        FileChannel fileChannel = inputStream.getChannel();

        mMediaCodec.start();
        mIsRunning = true;

        while (mIsRunning) {
            // Feed compressed data into the decoder
            int inputBufferIndex = mMediaCodec.dequeueInputBuffer(1000);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = mInputBuffers[inputBufferIndex];
                int sampleSize = fileChannel.read(inputBuffer);
                if (sampleSize < 0) {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                } else {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, 0, 0);
                }
            }

            // Drain decoded frames; render=true sends them to the Surface
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
            while (outputBufferIndex >= 0) {
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        }
    }

    public void stop() {
        mIsRunning = false;

        // Release MediaCodec
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }

        // Destroy EGL context and surface
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
            if (mEGLSurface != null) {
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
            }
            if (mEGLContext != null) {
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            }
            EGL14.eglTerminate(mEGLDisplay);
        }

        // Release SurfaceTexture and Surface
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Update the SurfaceTexture with the latest decoded frame
        surfaceTexture.updateTexImage();

        // Render the frame with OpenGL ES
        float[] transformMatrix = new float[16];
        surfaceTexture.getTransformMatrix(transformMatrix);

        GLES20.glClearColor(0, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glUniformMatrix4fv(mTransformMatrixHandle, 1, false, transformMatrix, 0);

        GLES20.glVertexAttribPointer(mPositionHandle, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        GLES20.glVertexAttribPointer(mTextureHandle, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glEnableVertexAttribArray(mTextureHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureHandle);

        // Swap buffers
        EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }
}
```

In this example, we first create a `SurfaceTexture` and a `Surface` for rendering the video to the screen. Then we create a `MediaCodec` decoder and configure it to output to that `Surface`. We use a `FileInputStream` and `FileChannel` to read video data from a file and feed it into the `MediaCodec` for decoding. Finally, we use OpenGL ES to render the decoded frames to the screen.

Note that in the `onFrameAvailable` callback we render the frame with OpenGL ES and must call `EGL14.eglSwapBuffers` afterwards to swap the front and back buffers. On shutdown, all resources must be released, including the `MediaCodec`, the EGL context, and the `SurfaceTexture`.
