Android OpenGL ES Basics (14): Recording Camera Video with MediaCodec


Video Encoding

Obtain the MediaCodec input Surface, render the camera frames onto that Surface with OpenGL, and MediaCodec can then encode the video.
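The core of the pipeline fits in a few lines. A minimal sketch (eglHelper is the EGLHelper wrapper built earlier in this series; sharedEglContext is a placeholder for the preview's EGLContext; the full, threaded version is EncoderBase below):

//configure an H.264 encoder that takes its input from a Surface
MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1080, 1920);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, 1080 * 1920 * 4);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

//the encoder hands us a Surface; wrap it in an EGL window surface that
//shares the preview's EGLContext, and every swapBuffers() call then
//submits one frame to the encoder
Surface inputSurface = encoder.createInputSurface();
eglHelper.initEgl(inputSurface, sharedEglContext);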

EncoderBase

package com.zhangyu.myopengl.encoder;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import android.view.Surface;

import com.zhangyu.myopengl.egl.EGLHelper;
import com.zhangyu.myopengl.egl.EGLSurfaceView;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;

import javax.microedition.khronos.egl.EGLContext;

public abstract class EncoderBase {

    private Surface surface;
    private EGLContext eglContext;

    private int width;
    private int height;

    private MediaCodec videoEncodec;
    private MediaFormat videoFormat;
    private MediaCodec.BufferInfo videoBufferinfo;

    private MediaMuxer mediaMuxer;

    private WlEGLMediaThread wlEGLMediaThread;
    private VideoEncodecThread videoEncodecThread;


    private EGLSurfaceView.EGLRender eglRender;

    public enum RenderMode{
        RENDERMODE_WHEN_DIRTY,
        RENDERMODE_CONTINUOUSLY
    }

    private RenderMode mRenderMode = RenderMode.RENDERMODE_CONTINUOUSLY;

    private OnMediaInfoListener onMediaInfoListener;


    public EncoderBase(Context context) {
        //context is currently unused; kept for parity with subclass constructors
    }

    public void setRender(EGLSurfaceView.EGLRender wlGLRender) {
        this.eglRender = wlGLRender;
    }

    public void setmRenderMode(RenderMode mRenderMode) {
        if (eglRender == null) {
            throw new RuntimeException("must call setRender() before setmRenderMode()");
        }
        this.mRenderMode = mRenderMode;
    }

    public void setOnMediaInfoListener(OnMediaInfoListener onMediaInfoListener) {
        this.onMediaInfoListener = onMediaInfoListener;
    }

    public void initEncodec(EGLContext eglContext, String savePath, String mimeType, int width, int height) {
        this.width = width;
        this.height = height;
        this.eglContext = eglContext;
        initMediaEncodec(savePath, mimeType, width, height);
    }

    public void startRecord() {
        if (surface != null && eglContext != null) {
            wlEGLMediaThread = new WlEGLMediaThread(new WeakReference<EncoderBase>(this));
            videoEncodecThread = new VideoEncodecThread(new WeakReference<EncoderBase>(this));
            wlEGLMediaThread.isCreate = true;
            wlEGLMediaThread.isChange = true;
            wlEGLMediaThread.start();
            videoEncodecThread.start();
        }
    }

    public void stopRecord() {
        if (wlEGLMediaThread != null && videoEncodecThread != null) {
            videoEncodecThread.exit();
            wlEGLMediaThread.onDestory();
            videoEncodecThread = null;
            wlEGLMediaThread = null;
        }
    }

    private void initMediaEncodec(String savePath, String mimeType, int width, int height) {
        try {
            mediaMuxer = new MediaMuxer(savePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            initVideoEncodec(mimeType, width, height);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    private void initVideoEncodec(String mimeType, int width, int height) {
        try {
            videoBufferinfo = new MediaCodec.BufferInfo();
            videoFormat = MediaFormat.createVideoFormat(mimeType, width, height);
            videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            //rough bitrate heuristic: 4 bits per pixel per second
            videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4);
            videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

            videoEncodec = MediaCodec.createEncoderByType(mimeType);
            videoEncodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

            surface = videoEncodec.createInputSurface();

        } catch (IOException e) {
            e.printStackTrace();
            videoEncodec = null;
            videoFormat = null;
            videoBufferinfo = null;
        }

    }


    static class WlEGLMediaThread extends Thread {
        private WeakReference<EncoderBase> encoder;
        private EGLHelper eglHelper;
        private Object object;

        private boolean isExit = false;
        private boolean isCreate = false;
        private boolean isChange = false;
        private boolean isStart = false;

        public WlEGLMediaThread(WeakReference<EncoderBase> encoder) {
            this.encoder = encoder;
        }

        @Override
        public void run() {
            super.run();
            isExit = false;
            isStart = false;
            object = new Object();
            eglHelper = new EGLHelper();
            eglHelper.initEgl(encoder.get().surface, encoder.get().eglContext);

            while (true) {
                if (isExit) {
                    release();
                    break;
                }

                if (isStart) {
                    if (encoder.get().mRenderMode == RenderMode.RENDERMODE_WHEN_DIRTY) {
                        //dirty mode: block until requestRender() wakes this thread
                        synchronized (object) {
                            try {
                                object.wait();
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                    } else if (encoder.get().mRenderMode == RenderMode.RENDERMODE_CONTINUOUSLY) {
                        try {
                            //continuous mode: throttle to roughly 60 fps
                            Thread.sleep(1000 / 60);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    } else {
                        throw new RuntimeException("mRenderMode is wrong value");
                    }
                }
                onCreate();
                onChange(encoder.get().width, encoder.get().height);
                onDraw();
                isStart = true;
            }

        }

        private void onCreate() {
            if (isCreate && encoder.get().eglRender != null) {
                isCreate = false;
                encoder.get().eglRender.onSurfaceCreated();
            }
        }

        private void onChange(int width, int height) {
            if (isChange && encoder.get().eglRender != null) {
                isChange = false;
                encoder.get().eglRender.onSurfaceChanged(width, height);
            }
        }

        private void onDraw() {
            if (encoder.get().eglRender != null && eglHelper != null) {
                encoder.get().eglRender.onDrawFrame();
                if (!isStart) {
                    //draw twice on the first frame so both buffers of the
                    //double-buffered surface receive valid content
                    encoder.get().eglRender.onDrawFrame();
                }
                eglHelper.swapBuffers();

            }
        }

        private void requestRender() {
            if (object != null) {
                synchronized (object) {
                    object.notifyAll();
                }
            }
        }

        public void onDestory() {
            isExit = true;
            requestRender();
        }

        public void release() {
            if (eglHelper != null) {
                eglHelper.destoryEgl();
                eglHelper = null;
                object = null;
                encoder = null;
            }
        }
    }

    static class VideoEncodecThread extends Thread {
        private WeakReference<EncoderBase> encoder;

        private boolean isExit;

        private MediaCodec videoEncodec;
        private MediaFormat videoFormat;
        private MediaCodec.BufferInfo videoBufferinfo;
        private MediaMuxer mediaMuxer;

        private int videoTrackIndex;
        private long pts;


        public VideoEncodecThread(WeakReference<EncoderBase> encoder) {
            this.encoder = encoder;
            videoEncodec = encoder.get().videoEncodec;
            videoFormat = encoder.get().videoFormat;
            videoBufferinfo = encoder.get().videoBufferinfo;
            mediaMuxer = encoder.get().mediaMuxer;
        }

        @Override
        public void run() {
            super.run();
            pts = 0;
            videoTrackIndex = -1;
            isExit = false;
            videoEncodec.start();
            while (true) {
                if (isExit) {
                    //stopping here without signalEndOfInputStream() drops any frames
                    //still buffered in the encoder (see the shutdown sketch below)
                    videoEncodec.stop();
                    videoEncodec.release();
                    videoEncodec = null;

                    mediaMuxer.stop();
                    mediaMuxer.release();
                    mediaMuxer = null;


                    Log.d("zhangyu", "录制完成");
                    break;
                }

                //timeout 0 means non-blocking poll; the enclosing loop keeps polling
                int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);

                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    //the muxer may only be started after the encoder reports its
                    //actual output format (which carries the codec-specific data)
                    videoTrackIndex = mediaMuxer.addTrack(videoEncodec.getOutputFormat());
                    mediaMuxer.start();
                } else {
                    while (outputBufferIndex >= 0) {
                        //getOutputBuffers() is deprecated on API 21+; getOutputBuffer(index) is preferred
                        ByteBuffer outputBuffer = videoEncodec.getOutputBuffers()[outputBufferIndex];
                        outputBuffer.position(videoBufferinfo.offset);
                        outputBuffer.limit(videoBufferinfo.offset + videoBufferinfo.size);

                        //rebase timestamps so the first written sample starts at 0
                        if (pts == 0) {
                            pts = videoBufferinfo.presentationTimeUs;
                        }
                        videoBufferinfo.presentationTimeUs = videoBufferinfo.presentationTimeUs - pts;

                        mediaMuxer.writeSampleData(videoTrackIndex, outputBuffer, videoBufferinfo);
                        if (encoder.get().onMediaInfoListener != null) {
                            encoder.get().onMediaInfoListener.onMediaTime((int) (videoBufferinfo.presentationTimeUs / 1000000));
                        }

                        videoEncodec.releaseOutputBuffer(outputBufferIndex, false);
                        outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
                    }
                }
            }

        }

        public void exit() {
            isExit = true;
        }

    }

    public interface OnMediaInfoListener {
        void onMediaTime(int times);
    }


}
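One caveat about shutdown: exit() merely flips isExit, so the codec is stopped without draining the frames still buffered inside it. For a Surface-input encoder, the standard MediaCodec pattern (a sketch of that pattern, not part of the original class) is to signal end-of-stream and drain until the EOS flag appears:

//request a clean end-of-stream (only valid with createInputSurface())
videoEncodec.signalEndOfInputStream();
boolean eos = false;
while (!eos) {
    int index = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 10000);
    if (index >= 0) {
        if ((videoBufferinfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            eos = true; //the encoder has emitted its last sample
        }
        //... write the sample to the muxer exactly as in run() above ...
        videoEncodec.releaseOutputBuffer(index, false);
    }
}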

MyCameraRenderFbo is used for both display and recording; the preview and the encoder share a single FBO texture.
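The FBO itself is created in MyCameraRender (from the earlier camera-preview article) and is not shown here. As a reminder, a minimal sketch of an FBO with a color texture attachment looks like this (the 1080x1920 size and array names are illustrative):

int[] fbos = new int[1];
int[] textures = new int[1];
GLES20.glGenFramebuffers(1, fbos, 0);
GLES20.glGenTextures(1, textures, 0);

GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 1080, 1920, 0,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);

//attach the texture as the FBO's color buffer; everything drawn while the
//FBO is bound ends up in textures[0], which both the on-screen renderer and
//MyCameraRenderFbo below can sample from
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbos[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
        GLES20.GL_TEXTURE_2D, textures[0], 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

The renderer that samples from this texture: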

package com.zhangyu.myopengl.testCamera;

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;

import com.zhangyu.myopengl.R;
import com.zhangyu.myopengl.egl.EGLSurfaceView;
import com.zhangyu.myopengl.egl.EGLUtils;
import com.zhangyu.myopengl.utils.BitmapUtils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

public class MyCameraRenderFbo implements EGLSurfaceView.EGLRender {

    private Context context;

    private float[] vertexData = {
            -1f, -1f,
            1f, -1f,
            -1f, 1f,
            1f, 1f,
            //watermark vertex positions (placeholders; filled in by the constructor)
            0f, 0f,
            0f, 0f,
            0f, 0f,
            0f, 0f
    };
    private FloatBuffer vertexBuffer;

    private float[] fragmentData = {
            0f, 1f,
            1f, 1f,
            0f, 0f,
            1f, 0f

    };
    private FloatBuffer fragmentBuffer;

    private int program;
    private int vPosition;
    private int fPosition;
    private int sampler;

    private int vboId;

    private Bitmap bitmap;
    private int bitmapTextureId;

    private int textureId;

    public void setTextureId(int textureId) {
        this.textureId = textureId;
    }

    public MyCameraRenderFbo(Context context){
        this(context,0);
    }

    public MyCameraRenderFbo(Context context,int textureId) {
        this.context = context;
        this.textureId = textureId;

        bitmap = BitmapUtils.text2Bitmap("你好", 30, "#ff0000", "#00ffffff", 0);
        int width = bitmap.getWidth();
        int height = bitmap.getHeight();
        float ratio = (float) width / height;
        float previewHeight = 0.2f;
        float previewWidth = ratio * previewHeight;
        vertexData[8] = -previewWidth/2;
        vertexData[9] = -previewHeight/2;
        vertexData[10] = previewWidth/2;
        vertexData[11] = -previewHeight/2;
        vertexData[12] = -previewWidth/2;
        vertexData[13] = previewHeight/2;
        vertexData[14] = previewWidth/2;
        vertexData[15] = previewHeight/2;

        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(fragmentData);
        fragmentBuffer.position(0);

    }

    @Override
    public void onSurfaceCreated() {
        //enable blending so the watermark's alpha channel works
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA,GLES20.GL_ONE_MINUS_SRC_ALPHA);

        String vertexSource = EGLUtils.readRawTxt(context, R.raw.vertex_shader);
        String fragmentSource = EGLUtils.readRawTxt(context, R.raw.fragment_shader);

        program = EGLUtils.createProgram(vertexSource, fragmentSource);

        vPosition = GLES20.glGetAttribLocation(program, "av_Position");
        fPosition = GLES20.glGetAttribLocation(program, "af_Position");
        sampler = GLES20.glGetUniformLocation(program, "sTexture");

        int[] vbos = new int[1];
        GLES20.glGenBuffers(1, vbos, 0);
        vboId = vbos[0];

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4, null, GLES20.GL_STATIC_DRAW);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
        GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4, fragmentBuffer);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

        //create the watermark texture
        bitmapTextureId = EGLUtils.createImageTextureId(bitmap);
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame() {
        GLES20.glClearColor(1f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(program);

        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

        //FBO texture (the camera frame)
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        //fetch coordinates from the VBO
        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
                0);
        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
                vertexData.length * 4);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        //watermark texture
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapTextureId);
        //fetch coordinates from the VBO; the watermark vertices start at byte offset 8 * 4
        GLES20.glEnableVertexAttribArray(vPosition);
        GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
                8 * 4);
        GLES20.glEnableVertexAttribArray(fPosition);
        GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
                vertexData.length * 4);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        //unbind
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    }

    public void onDrawFrame(int textureId){
        this.textureId = textureId;
        onDrawFrame();
    }
}


MyCameraEncode, the concrete recorder class

package com.zhangyu.myopengl.testCamera;

import android.content.Context;

import com.zhangyu.myopengl.encoder.EncoderBase;

public class MyCameraEncode extends EncoderBase {

    private MyCameraRenderFbo encodecRender;

    public MyCameraEncode(Context context, int textureId) {
        super(context);
        encodecRender = new MyCameraRenderFbo(context, textureId);
        setRender(encodecRender);
        setmRenderMode(RenderMode.RENDERMODE_CONTINUOUSLY);
    }
}

MyCameraActivity

package com.zhangyu.myopengl.testCamera;

import android.content.Context;
import android.content.Intent;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.widget.Button;

import androidx.appcompat.app.AppCompatActivity;

import com.zhangyu.myopengl.R;
import com.zhangyu.myopengl.encoder.EncoderBase;

public class MyCameraActivity extends AppCompatActivity {

    private static final String TAG = "TestCameraActivity";
    private MyCameraView camerView;
    private Button btRecoder;
    private MyCameraEncode cameraEncoder;

    public static void start(Context context) {
        Intent starter = new Intent(context, MyCameraActivity.class);
        context.startActivity(starter);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test_camera);
        initView();
    }


    @Override
    protected void onDestroy() {
        super.onDestroy();
        camerView.onDestory();
    }

    private void initView() {
        camerView = (MyCameraView) findViewById(R.id.camerView);
        btRecoder = (Button) findViewById(R.id.bt_recoder);
    }

    public void recoder(View view) {
        if (cameraEncoder == null) {
            Log.e(TAG, "camerView.getTextureId(): "+camerView.getTextureId() );
            String path = Environment.getExternalStorageDirectory() + "/1/encode_" + System.currentTimeMillis() + ".mp4";
            cameraEncoder = new MyCameraEncode(this, camerView.getTextureId());
            cameraEncoder.initEncodec(camerView.getEglContext(), path, MediaFormat.MIMETYPE_VIDEO_AVC, 1080, 1920);
            cameraEncoder.setOnMediaInfoListener(new EncoderBase.OnMediaInfoListener() {
                @Override
                public void onMediaTime(int times) {
                    Log.e(TAG, "onMediaTime: " + times);
                }
            });
            cameraEncoder.startRecord();
            btRecoder.setText("正在录制...");
        } else {
            cameraEncoder.stopRecord();
            btRecoder.setText("开始录制");
            cameraEncoder = null;
        }
    }
}
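One practical note: the output path lives under external storage, so on Android 6.0+ both CAMERA and WRITE_EXTERNAL_STORAGE must be granted at runtime, and the /1/ directory must already exist or the MediaMuxer constructor throws an IOException. The sample omits the permission check; a minimal version using the androidx ContextCompat/ActivityCompat helpers (100 is an arbitrary request code) might look like:

String[] perms = {Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE};
if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
        != PackageManager.PERMISSION_GRANTED
        || ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)
        != PackageManager.PERMISSION_GRANTED) {
    ActivityCompat.requestPermissions(this, perms, 100);
}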

MyCameraView

package com.zhangyu.myopengl.testCamera;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import com.zhangyu.myopengl.egl.EGLSurfaceView;

public class MyCameraView extends EGLSurfaceView {
    private static final String TAG = "TestCameraView";

    private MyCamera myCamera;
    private MyCameraRender cameraRender;

    private int textureId = -1;

    public MyCameraView(Context context) {
        this(context, null);
    }

    public MyCameraView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public MyCameraView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        Log.e(TAG, "onCreate: getWidth()=" + getWidth() + "-----getHeight()=" + getHeight());
        cameraRender = new MyCameraRender(context);
        setRender(cameraRender);
        previewAngle(context);
        cameraRender.setOnSurfaceCreateListener(new MyCameraRender.OnSurfaceCreateListener() {
            @Override
            public void onCreate(SurfaceTexture surfaceTexture, int tid) {
                textureId = tid;
                Log.e(TAG, "onCreate: getWidth()=" + getWidth() + "-----getHeight()=" + getHeight());
                myCamera = new MyCamera(surfaceTexture, getWidth(), getHeight());
                myCamera.startPreview(Camera.CameraInfo.CAMERA_FACING_BACK);
            }
        });
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        //hard-coded to the 1080x1920 recording size used in MyCameraActivity
        setMeasuredDimension(1080, 1920);
    }

    public void onDestory() {
        if (myCamera != null) {
            myCamera.stopPreview();
        }
    }

    public void previewAngle(Context context) {
        cameraRender.resetMatrix();
        WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        int angle = windowManager.getDefaultDisplay().getRotation();
        switch (angle) {
            case Surface.ROTATION_0:
                cameraRender.setAngle(-90, 0, 0, 1);
                break;
            case Surface.ROTATION_90:
                break;
            case Surface.ROTATION_180:
                break;
            case Surface.ROTATION_270:
                cameraRender.setAngle(180, 0, 0, 1);
                break;
        }
    }

    public int getTextureId(){
        return textureId;
    }
}
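resetMatrix() and setAngle() belong to MyCameraRender from the earlier preview article and are not shown here; presumably they manipulate the model matrix fed to the vertex shader via android.opengl.Matrix, roughly like this sketch (not the original implementation):

float[] matrix = new float[16];
Matrix.setIdentityM(matrix, 0);             //resetMatrix(): reset the model matrix
Matrix.rotateM(matrix, 0, -90, 0, 0, 1);    //setAngle(-90, 0, 0, 1): rotate -90 degrees about the z-axis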

MyCamera

package com.zhangyu.myopengl.testCamera;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.Log;

import java.io.IOException;
import java.util.List;

public class MyCamera {

    private static final String TAG = "MyCamera";


    private Camera camera;
    private SurfaceTexture surfaceTexture;

    private int width;
    private int height;

    public MyCamera(SurfaceTexture surfaceTexture, int width, int height) {
        this.surfaceTexture = surfaceTexture;
        this.width = width;
        this.height = height;
    }

    public void startPreview(int cameraId) {
        try {
            camera = Camera.open(cameraId);
            camera.setPreviewTexture(surfaceTexture);
            Camera.Parameters parameters = camera.getParameters();
            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
            parameters.setPreviewFormat(ImageFormat.NV21);

            Camera.Size size = getFitSize(parameters.getSupportedPictureSizes());
            parameters.setPictureSize(size.width, size.height);
            Log.e(TAG, "setPictureSize: size.width=" + size.width + "-----size.height=" + size.height);
            size = getFitSize(parameters.getSupportedPreviewSizes());
            parameters.setPreviewSize(size.width, size.height);
            Log.e(TAG, "setPreviewSize: size.width=" + size.width + "-----size.height=" + size.height);

            camera.setParameters(parameters);
            camera.startPreview();
            Log.e(TAG, "camera start preview");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stop the preview and release the camera.
     */
    public void stopPreview() {
        if (camera != null) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    /**
     * Switch to another camera.
     *
     * @param cameraId id of the camera to open
     */
    public void changeCamera(int cameraId) {
        if (camera != null) {
            stopPreview();
        }
        startPreview(cameraId);
    }

    /**
     * Pick a camera size whose aspect ratio matches the Surface.
     * @param sizes supported sizes reported by the camera
     * @return the first size with a matching ratio, or sizes.get(0) as a fallback
     */
    private Camera.Size getFitSize(List<Camera.Size> sizes) {
        if (width < height) {
            int t = height;
            height = width;
            width = t;
        }

        for (Camera.Size size : sizes) {
            if (1.0f * size.width / size.height == 1.0f * width / height) {
                return size;
            }
        }
        return sizes.get(0);
    }

}
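Comparing float ratios with == only succeeds when some supported size matches the surface ratio exactly; otherwise getFitSize silently falls back to sizes.get(0), which may be far from what the layout expects. A more forgiving variant (a sketch, not from the original code) picks the size whose ratio is closest to the target:

private Camera.Size getClosestSize(List<Camera.Size> sizes, int width, int height) {
    //normalize so the ratio is always >= 1, matching landscape camera sizes
    float target = Math.max(width, height) / (float) Math.min(width, height);
    Camera.Size best = sizes.get(0);
    float bestDiff = Float.MAX_VALUE;
    for (Camera.Size size : sizes) {
        float diff = Math.abs(size.width / (float) size.height - target);
        if (diff < bestDiff) {
            bestDiff = diff;
            best = size;
        }
    }
    return best;
}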
