OpenGL ES Example

```java
import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.opengl.GLU;
import android.os.Bundle;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

// Enclosing Activity (the class name is added here so the snippet compiles as a unit).
public class OpenGLDemoActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Use a GLSurfaceView as the content view and attach the cube renderer.
        GLSurfaceView view = new GLSurfaceView(this);
        view.setRenderer(new OpenGLRenderer());
        setContentView(view);
    }

    // Renderer that draws a continuously rotating cube with the fixed-function
    // OpenGL ES 1.0 pipeline.
    class OpenGLRenderer implements GLSurfaceView.Renderer {
        private Cube mCube = new Cube();
        private float mCubeRotation;

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
            gl.glClearDepthf(1.0f);
            gl.glEnable(GL10.GL_DEPTH_TEST);
            gl.glDepthFunc(GL10.GL_LEQUAL);
            gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT,
                    GL10.GL_NICEST);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
            gl.glLoadIdentity();
            gl.glTranslatef(0.0f, 0.0f, -10.0f);
            gl.glRotatef(mCubeRotation, 1.0f, 1.0f, 1.0f);
            mCube.draw(gl);
            gl.glLoadIdentity();
            mCubeRotation -= 0.15f;
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            gl.glViewport(0, 0, width, height);
            gl.glMatrixMode(GL10.GL_PROJECTION);
            gl.glLoadIdentity();
            GLU.gluPerspective(gl, 45.0f, (float) width / (float) height, 0.1f, 100.0f);
            gl.glMatrixMode(GL10.GL_MODELVIEW);
            gl.glLoadIdentity();
        }
    }

    // A unit cube: 8 vertices, one color per vertex, and 36 indices (12 triangles).
    class Cube {
        private FloatBuffer mVertexBuffer;
        private FloatBuffer mColorBuffer;
        private ByteBuffer mIndexBuffer;
        private float vertices[] = {
                -1.0f, -1.0f, -1.0f,
                1.0f, -1.0f, -1.0f,
                1.0f, 1.0f, -1.0f,
                -1.0f, 1.0f, -1.0f,
                -1.0f, -1.0f, 1.0f,
                1.0f, -1.0f, 1.0f,
                1.0f, 1.0f, 1.0f,
                -1.0f, 1.0f, 1.0f
        };
        private float colors[] = {
                0.0f, 1.0f, 0.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,
                1.0f, 0.5f, 0.0f, 1.0f,
                1.0f, 0.5f, 0.0f, 1.0f,
                1.0f, 0.0f, 0.0f, 1.0f,
                1.0f, 0.0f, 0.0f, 1.0f,
                0.0f, 0.0f, 1.0f, 1.0f,
                1.0f, 0.0f, 1.0f, 1.0f
        };
        private byte indices[] = {
                0, 4, 5, 0, 5, 1,
                1, 5, 6, 1, 6, 2,
                2, 6, 7, 2, 7, 3,
                3, 7, 4, 3, 4, 0,
                4, 7, 6, 4, 6, 5,
                3, 0, 1, 3, 1, 2
        };

        public Cube() {
            ByteBuffer byteBuf = ByteBuffer.allocateDirect(vertices.length * 4);
            byteBuf.order(ByteOrder.nativeOrder());
            mVertexBuffer = byteBuf.asFloatBuffer();
            mVertexBuffer.put(vertices);
            mVertexBuffer.position(0);
            byteBuf = ByteBuffer.allocateDirect(colors.length * 4);
            byteBuf.order(ByteOrder.nativeOrder());
            mColorBuffer = byteBuf.asFloatBuffer();
            mColorBuffer.put(colors);
            mColorBuffer.position(0);
            mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
            mIndexBuffer.put(indices);
            mIndexBuffer.position(0);
        }

        public void draw(GL10 gl) {
            gl.glFrontFace(GL10.GL_CW);
            gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
            gl.glColorPointer(4, GL10.GL_FLOAT, 0, mColorBuffer);
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
            gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
            gl.glDrawElements(GL10.GL_TRIANGLES, 36, GL10.GL_UNSIGNED_BYTE,
                    mIndexBuffer);
            gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
            gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
        }
    }
}
```
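One point the Activity above does not handle is the `GLSurfaceView` lifecycle: in a real app the view should be paused and resumed together with the Activity so the rendering thread stops while the Activity is not visible. A minimal sketch, assuming the view created in `onCreate()` is kept in a field (the name `mGLView` is chosen here for illustration):

```java
private GLSurfaceView mGLView;   // assign the view created in onCreate() to this field

@Override
protected void onPause() {
    super.onPause();
    mGLView.onPause();   // pause the GL rendering thread together with the Activity
}

@Override
protected void onResume() {
    super.onResume();
    mGLView.onResume();  // resume rendering when the Activity returns to the foreground
}
```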


Here is a simple example that uses OpenGL ES together with MediaCodec to decode a video and render it to the screen:

```java
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.channels.FileChannel;

public class VideoRenderer implements SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "VideoRenderer";

    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private int mTextureId;
    private MediaCodec mMediaCodec;
    private MediaFormat mMediaFormat;
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;
    private int mWidth;
    private int mHeight;
    private boolean mIsRunning = false;

    private EGLDisplay mEGLDisplay;
    private EGLSurface mEGLSurface;
    private EGLContext mEGLContext;

    // GL program, attribute/uniform handles and vertex data used in onFrameAvailable().
    // Their setup (shader compilation, buffer creation, EGL initialization) is not shown
    // in this example.
    private int mProgram;
    private int mTransformMatrixHandle;
    private int mPositionHandle;
    private int mTextureHandle;
    private FloatBuffer mVertexBuffer;
    private FloatBuffer mTextureBuffer;

    public VideoRenderer(SurfaceTexture surfaceTexture, int width, int height) {
        mSurfaceTexture = surfaceTexture;
        mWidth = width;
        mHeight = height;
        // Wrap the SurfaceTexture in a Surface so MediaCodec can render into it,
        // and listen for decoded frames.
        mSurface = new Surface(surfaceTexture);
        mSurfaceTexture.setOnFrameAvailableListener(this);
    }

    public void start(String filePath) throws IOException {
        mMediaFormat = MediaFormat.createVideoFormat("video/avc", mWidth, mHeight);
        mMediaCodec = MediaCodec.createDecoderByType("video/avc");
        // Decode directly into the Surface backed by our SurfaceTexture.
        mMediaCodec.configure(mMediaFormat, mSurface, null, 0);

        // NOTE: this reads raw bytes straight from the file, which only works for a raw
        // H.264 elementary stream; for container formats such as MP4, MediaExtractor
        // should be used to extract access units.
        File file = new File(filePath);
        FileInputStream inputStream = new FileInputStream(file);
        FileChannel fileChannel = inputStream.getChannel();

        mMediaCodec.start();
        // Input/output buffers must be retrieved after start().
        mInputBuffers = mMediaCodec.getInputBuffers();
        mOutputBuffers = mMediaCodec.getOutputBuffers();

        mIsRunning = true;
        while (mIsRunning) {
            // Feed compressed data into the decoder.
            int inputBufferIndex = mMediaCodec.dequeueInputBuffer(1000);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = mInputBuffers[inputBufferIndex];
                inputBuffer.clear();
                int sampleSize = fileChannel.read(inputBuffer);
                if (sampleSize < 0) {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                } else {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize, 0, 0);
                }
            }

            // Release decoded frames to the Surface; this triggers onFrameAvailable().
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 1000);
            while (outputBufferIndex >= 0) {
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        }
        fileChannel.close();
        inputStream.close();
    }

    public void stop() {
        mIsRunning = false;

        // Release MediaCodec
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }

        // Destroy EGL context and surface
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            if (mEGLSurface != null) {
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
            }
            if (mEGLContext != null) {
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            }
            EGL14.eglTerminate(mEGLDisplay);
        }

        // Release SurfaceTexture and Surface
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Update the SurfaceTexture with the latest decoded frame.
        surfaceTexture.updateTexImage();

        // Render the frame with OpenGL ES.
        float[] transformMatrix = new float[16];
        surfaceTexture.getTransformMatrix(transformMatrix);

        GLES20.glClearColor(0, 0, 0, 1);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glUniformMatrix4fv(mTransformMatrixHandle, 1, false, transformMatrix, 0);

        GLES20.glVertexAttribPointer(mPositionHandle, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
        GLES20.glVertexAttribPointer(mTextureHandle, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glEnableVertexAttribArray(mTextureHandle);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureHandle);

        // Swap buffers
        EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }
}
```

In this example, we first create a `SurfaceTexture` and a `Surface` that the video frames are rendered into. We then create a `MediaCodec` decoder and configure it to output to that `Surface`. A `FileInputStream` and `FileChannel` are used to read the video data from the file and feed it into the `MediaCodec` for decoding, and the decoded frames are finally drawn to the screen with OpenGL ES.

Note that in the `onFrameAvailable` callback we have to render the video frame with OpenGL ES and then call `EGL14.eglSwapBuffers` to swap the front and back buffers. We also have to release all resources when the program exits, including the `MediaCodec`, the EGL context, and the `SurfaceTexture`.
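The renderer above uses an EGL display, surface, and context (`mEGLDisplay`, `mEGLSurface`, `mEGLContext`) and an external OES texture (`mTextureId`), but their creation is not shown. Below is a minimal sketch of what such a setup could look like with `EGL14`, assuming it is added to the `VideoRenderer` class (it needs `android.opengl.EGLConfig` imported; the `windowSurface` parameter, the on-screen `Surface` to draw into, is an assumption for illustration):

```java
// Hypothetical helper, not part of the original example: sets up an EGL14/GLES20
// rendering context on the given on-screen Surface and creates the external OES
// texture that the SurfaceTexture will deliver decoded frames into.
private void initEgl(Surface windowSurface) {
    mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    int[] version = new int[2];
    EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1);

    // Ask for an RGBA8888 config that supports OpenGL ES 2.0 rendering.
    int[] configAttribs = {
            EGL14.EGL_RED_SIZE, 8,
            EGL14.EGL_GREEN_SIZE, 8,
            EGL14.EGL_BLUE_SIZE, 8,
            EGL14.EGL_ALPHA_SIZE, 8,
            EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
            EGL14.EGL_NONE
    };
    EGLConfig[] configs = new EGLConfig[1];
    int[] numConfigs = new int[1];
    EGL14.eglChooseConfig(mEGLDisplay, configAttribs, 0, configs, 0, 1, numConfigs, 0);

    // Create an ES 2.0 context and a window surface, then make them current.
    int[] contextAttribs = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE };
    mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0],
            EGL14.EGL_NO_CONTEXT, contextAttribs, 0);
    int[] surfaceAttribs = { EGL14.EGL_NONE };
    mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0],
            windowSurface, surfaceAttribs, 0);
    EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

    // Generate the external OES texture backing the SurfaceTexture.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureId = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
}
```

The shader program (`mProgram`) and the position/texture-coordinate buffers used in `onFrameAvailable` would be created in a similar way once the context is current; that setup is likewise omitted in the original example.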