Android使用MediaCodec解码视频并用OpenGL ES进行渲染的思路

In Android 4.1 (API 16) and above, the MediaCodec API is available. With MediaCodec we can easily decode video without using the Android NDK, for example to build a video playback application.
In game development, video is also needed to make the game environment more realistic, for example when you are playing a racing game with video banners around the track.
Here, I'll show how to decode a video and render it into OpenGL ES.
First, we prepare the components needed to decode the video: a MediaExtractor and a MediaCodec.

private boolean initExtractor() {
    extractor = new MediaExtractor();
    try {
        extractor.setDataSource(mFilePath);
    } catch (IOException e) {
        return false;
    }
 
    // get video track
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("video/")) {
            tracknumb = i;
            break;
        }
    }
 
    if (tracknumb == -1) {
        Log.e("DecodeActivity", "Can't find video track!");
        return false;
    }
 
    // set track to extractor
    extractor.selectTrack(tracknumb);
 
    return true;
}
 
private boolean initDecoder(Surface surface) {
    // get mimetype and format
    MediaFormat format = extractor.getTrackFormat(tracknumb);
    String mime = format.getString(MediaFormat.KEY_MIME);
 
    decoder = MediaCodec.createDecoderByType(mime);
    decoder.configure(format, surface, null, 0);
    if (decoder == null) {
        Log.e("DecodeActivity", "Can't find video info!");
        return false;
    }
 
    decoder.start();
 
    return true;
}
Next, inside the OpenGL ES renderer, prepare the surface that will be handed to MediaCodec.

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {

    // Fragment shader for an external (OES) texture -- decoded video frames
    // arrive via SurfaceTexture as GL_TEXTURE_EXTERNAL_OES, which requires
    // the GL_OES_EGL_image_external extension and samplerExternalOES.
    final String fragmentShaderSourceOES =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;\n" +
    "varying vec2 vTextureCoord;\n" +
    "uniform samplerExternalOES sTexture;\n" +
    "void main() {\n" +
    " vec4 color = texture2D(sTexture, vTextureCoord);\n" +
    " gl_FragColor = color;\n" +
    "}\n";

    // Prepare your shader program here
    mVertexShader = ...
    mPixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderSourceOES); // use fragment shader that support OES

    mProgram = createProgram(mVertexShader, mPixelShader);

    // Prepare texture handler: generate one texture name and bind it as an
    // external OES texture (NOT GL_TEXTURE_2D).
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    // External textures only support CLAMP_TO_EDGE wrapping.
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    // Link the texture handler to surface texture. The frame-available
    // callback only sets a flag; the actual updateTexImage() must happen on
    // the GL thread in onDrawFrame().
    // NOTE(review): synchronizing on `updateSurface` looks unsafe -- the
    // assignment inside the block suggests it is a reassigned Boolean field,
    // so the lock object changes identity between lock acquisitions. A
    // dedicated `private final Object lock = new Object()` plus a plain
    // boolean flag would be the safe pattern -- TODO confirm field type.
    mSurfaceTexture = new SurfaceTexture(mTextureID);
    mSurfaceTexture.setDefaultBufferSize(320, 240);
    mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
         @Override
         public void onFrameAvailable(SurfaceTexture surfaceTexture) {
             synchronized(updateSurface) {
                 updateSurface = true;
             }
         }
    });

    // Create decoder surface: this Surface wraps the SurfaceTexture and is
    // later passed to MediaCodec.configure() as the decoder's output target.
    mDecoderSurface = new Surface(mSurfaceTexture);
}
 
@Override
public void onDrawFrame(GL10 gl) {
    ...

    // Pull the latest decoded frame into the OES texture, but only when the
    // frame-available callback has flagged one (updateTexImage must be
    // called from this GL thread).
    // NOTE(review): same synchronized-on-a-reassigned-Boolean concern as in
    // onSurfaceCreated -- a dedicated final lock object would be safer.
    synchronized(updateSurface) {
         if (updateSurface) {
              mSurfaceTexture.updateTexImage(); // update surfacetexture if available
         updateSurface = false;
        }
    }

    // use program
    GLES20.glUseProgram(mProgram);
    CLGLUtility.checkGlError(TAG, "glUseProgram");

    // bind texture (external OES target, matching the sampler in the shader)
    // NOTE(review): `textureID` here vs `mTextureID` assigned in
    // onSurfaceCreated -- presumably the same field; verify the name.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);

    // set vertices position
    mShape.setPositionArray(maPositionHandle);

    // set vertices texture coordinate
    mShape.setTexCoordArray(maTextureHandle);

    // draw shape
    mShape.drawArrays();
}
Then pass that surface to MediaCodec.

...
initDecoder(mDecoderSurface);
...
Finally, OpenGL ES and MediaCodec are linked together and we can start the game.

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    ...
    // Kick off decoding once the GL surface (and thus mDecoderSurface) exists.
    // NOTE(review): startDecode() contains a blocking loop; presumably it is
    // meant to run on a dedicated thread, not the GL thread -- verify.
    startDecode();
}
 
public void startDecode() {
    // get buffers
    decoderInputBuffers = decoder.getInputBuffers();
    decoderOutputBuffers = decoder.getOutputBuffers();
 
    // start getting buffer
    BufferInfo info = new BufferInfo();
    boolean isEOS = false;
    long startMs = System.currentTimeMillis();
 
    Log.d("DecodeActivity", "BufferInfo: size:"+info.size);
 
    while (!threadIterrupted) {
        // get input buffer (decoder)
        if (!isEOS) {
            isEOS = readDecoderBuffer();
        }
 
        isEOS = checkDecoderBuffer(info, startMs);
        if (isEOS)
            break;
    }
 
    decoder.stop();
    decoder.release();
    extractor.release();
}
 
private boolean readDecoderBuffer() {
    int inIndex = decoder.dequeueInputBuffer(10000);
 
    // index did not get correctly
    if (inIndex < 0)
        return true;
 
    ByteBuffer buffer = decoderInputBuffers[inIndex];
    int sampleSize = extractor.readSampleData(buffer, 0);
 
    if (sampleSize < 0) {
 
        // We shouldn't stop the playback at this point, just pass the EOS
        // flag to decoder, we will get it again from the dequeueOutputBuffer
        Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
 
        return true;
 
    } else {
 
        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
        extractor.advance();
 
    }
 
    return false;
}
 
private boolean checkDecoderBuffer(BufferInfo info, long startMs) {
    // get output buffer, to control the time
    int outIndex = decoder.dequeueOutputBuffer(info, 10000);
    Log.i(TAG , "BufferInfo: size:"+info.size+" presentationTimeUs:"+info.presentationTimeUs+" offset:"+info.offset+" flags:"+info.flags);
 
    switch (outIndex) {
    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
        Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
        decoderOutputBuffers = decoder.getOutputBuffers();
        break;
    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
        break;
    case MediaCodec.INFO_TRY_AGAIN_LATER:
        Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
        break;
    default:
        ByteBuffer buffer = decoderOutputBuffers[outIndex];
        Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
 
        // We use a very simple clock to keep the video FPS, or the video playback will be too fast
        while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
            try {
                sleep(10);
            } catch (InterruptedException e) {
                return false;
            }
        }
 
        decoder.releaseOutputBuffer(outIndex, true);
        break;
    }
 
    // All decoded frames have been rendered, we can stop playing now
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
        return true;
    }
 
    return false;
}

References: (Thanks to Ray)
https://github.com/crossle/MediaPlayerSurface/blob/master/src/me/crossle/demo/surfacetexture/VideoSurfaceView.java
https://vec.io/posts/android-hardware-decoding-with-mediacodec
  • 1
    点赞
  • 9
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值