MediaCodec encoding of camera video (Part 1)

Drawing the camera preview with GLSurfaceView + Camera.

Opening the camera

Note: the width and height passed to parameters.setPreviewSize must be a preview size that Camera.Parameters reports as supported; otherwise setParameters will fail. A defensive size check is sketched after the code below.

    static android.hardware.Camera camera;
    public static int width = 1280;
    public static final int height = 720;

    /*
    preview-size-values -> 2592x1168,2376x1080,2160x1080,1920x1080,
    1920x864,1920x822,1600x800,1600x720,1584x720,1440x1080,1280x960,
    1280x768,1024x768,1280x720,1188x540,1200x540,1080x1080,840x360,
    800x400,792x360,720x540,720x480,640x640,640x480,640x360,352x288,320x240,176x144
    */
    public static void openCamera(SurfaceTexture surfaceTexture) {
        camera = android.hardware.Camera.open(0);

        Camera.Parameters parameters = camera.getParameters();
        parameters.setPreviewFormat(ImageFormat.NV21);
//        parameters.setPictureFormat(ImageFormat.JPEG);
//        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        parameters.setPreviewSize(width, height);
        camera.setParameters(parameters);

        try {
            // Feed preview frames into the OES texture behind this SurfaceTexture
            camera.setPreviewTexture(surfaceTexture);
        } catch (IOException e) {
            e.printStackTrace();
        }
        camera.setDisplayOrientation(90);
        camera.startPreview();
    }
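
Since setParameters throws if the requested size is not supported, the size can be validated first. A minimal sketch, using a hypothetical helper that is not in the original post:

    // Hypothetical helper: return the requested preview size if supported,
    // otherwise fall back to the first size the camera reports.
    static Camera.Size choosePreviewSize(Camera.Parameters parameters, int wantW, int wantH) {
        for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
            if (size.width == wantW && size.height == wantH) {
                return size;
            }
        }
        return parameters.getSupportedPreviewSizes().get(0);
    }

Call it before parameters.setPreviewSize(width, height) and use the returned size instead of the hard-coded 1280x720.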

Drawing the camera with GLES

1. Vertex shader: vertex.vsh

attribute vec4 position;
attribute vec2 i_position;
varying vec2 o_position;
void main(){
    o_position = i_position;
    gl_Position = position;
}

2. Fragment shader: fragment.fsh

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 o_position;
uniform samplerExternalOES s_texture;
void main(){
    gl_FragColor = texture2D( s_texture, o_position );
}

3. Rendering

Creating the SurfaceTexture with surfaceTexture = new SurfaceTexture(texture[0]) is equivalent to calling surfaceTexture.attachToGLContext(texture[0]) on an existing detached SurfaceTexture, so only one of the two is needed (hence the commented-out call in onSurfaceCreated below).

public class CameraRemender implements GLSurfaceView.Renderer , SurfaceTexture.OnFrameAvailableListener {
    private GLSurfaceView glSurfaceView;
    private Context context;
    private SurfaceTexture surfaceTexture;
    public CameraRemender(Context context, GLSurfaceView glSurfaceView) {
        this.context = context;
        this.glSurfaceView = glSurfaceView;
    }
  
    private int program;
    private FloatBuffer vexFloatBuffer;
    private float[] squareCoords = {
            // full-screen quad, in triangle-strip order
            -1.0f, -1.0f,  // 1: bottom-left
             1.0f, -1.0f,  // 2: bottom-right
            -1.0f,  1.0f,  // 3: top-left
             1.0f,  1.0f,  // 4: top-right
    };
    private FloatBuffer fragFloatBuffer;
    private final float[] textureVertexData = {
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            0.0f, 0.0f,
    };
    int position;
    int i_position;
    public int [] texture;
    int s_texture;
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {

        program = GLES20.glCreateProgram();
        final ByteBuffer buffer = ByteBuffer.allocateDirect(squareCoords.length*4);
        buffer.order(ByteOrder.nativeOrder());
        vexFloatBuffer = buffer.asFloatBuffer();
        vexFloatBuffer.put(squareCoords);
        vexFloatBuffer.position(0);
        ByteBuffer buffer1 = ByteBuffer.allocateDirect(textureVertexData.length*4);
        buffer1.order(ByteOrder.nativeOrder());
        fragFloatBuffer = buffer1.asFloatBuffer();
        fragFloatBuffer.put(textureVertexData);
        fragFloatBuffer.position(0);
        int verShader = loadShader(GLES20.GL_VERTEX_SHADER,FFmpegUtil.readFileFromRaw(context,R.raw.camera_ver));
        int fragShader = loadShader(GLES20.GL_FRAGMENT_SHADER,FFmpegUtil.readFileFromRaw(context,R.raw.camera_fram));
        GLES20.glAttachShader(program,verShader);
        GLES20.glAttachShader(program,fragShader);
        GLES20.glLinkProgram(program);
        Log.d("tagxxx",GLES20.glGetProgramInfoLog(program));
        position = GLES20.glGetAttribLocation(program,"position");
        i_position = GLES20.glGetAttribLocation(program,"i_position");

        s_texture = GLES20.glGetUniformLocation(program,"s_texture");
        texture = new int[1];
        GLES20.glGenTextures(1,texture,0);
        // glActiveTexture takes a texture-unit enum, not the texture name
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,texture[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        // filtering when the texture is drawn smaller than its source size
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        // texture coordinates outside the (0,0)-(1,1) range are clamped to the edge
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        surfaceTexture = new SurfaceTexture(texture[0]);
//        surfaceTexture.attachToGLContext(texture[0]);
        surfaceTexture.setOnFrameAvailableListener(this);
        CameraHelper.openCamera(surfaceTexture);

    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0,0,width,height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Set the clear color before clearing, or the color takes effect one frame late
        GLES20.glClearColor(0f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        GLES20.glUseProgram(program);

        // Pull the latest camera frame into the OES texture
        surfaceTexture.updateTexImage();

        GLES20.glEnableVertexAttribArray(position);
        GLES20.glVertexAttribPointer(position, 2, GLES20.GL_FLOAT, false, 4 * 2, vexFloatBuffer);
        GLES20.glEnableVertexAttribArray(i_position);
        GLES20.glVertexAttribPointer(i_position, 2, GLES20.GL_FLOAT, false, 4 * 2, fragFloatBuffer);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
        GLES20.glUniform1i(s_texture, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);

    }
    int loadShader(int type, String shaderCode) {
        // Create a vertex or fragment shader depending on type; returns the shader handle
        int shader = GLES20.glCreateShader(type);
        // Attach the source to the shader handle and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        // Check the compile status: 1 means success, 0 means failure
        IntBuffer intBuffer = IntBuffer.allocate(1);
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, intBuffer);
        Log.d("tagxxx", "shader compile status: " + intBuffer.get(0));
        if (intBuffer.get(0) == 0) {
            Log.d("tagxxx", "shader compile error: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
        }
        return shader;
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // A new camera frame is ready; schedule a render pass
        glSurfaceView.requestRender();
    }
}
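
Because onFrameAvailable drives drawing through requestRender(), the hosting GLSurfaceView must render on demand. A minimal wiring sketch, assuming an Activity hosts the view (not shown in the original post):

    // e.g. in an Activity's onCreate (assumed host)
    GLSurfaceView glSurfaceView = new GLSurfaceView(this);
    glSurfaceView.setEGLContextClientVersion(2);  // the shaders above are GLES 2.0
    CameraRemender renderer = new CameraRemender(this, glSurfaceView);
    glSurfaceView.setRenderer(renderer);
    // Draw only when onFrameAvailable calls requestRender()
    glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    setContentView(glSurfaceView);

Note that setRenderMode must be called after setRenderer.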

4. MediaCodec encoding

Configuring the encoder

format.setInteger(MediaFormat.KEY_BIT_RATE, (int) (width * height * frameRate * 0.5));

Bitrate formula: Bitrate = Width * Height * FrameRate * Factor

where Width, Height, and FrameRate are the video's width, height, and frame rate, and Factor is a coefficient used to control the bitrate.
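
For example, under this formula a 1280x720 stream at 30 fps with Factor = 0.2 comes out to 1280 * 720 * 30 * 0.2 ≈ 5.5 Mbps, a plausible streaming bitrate for 720p.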


For network streaming, Factor can usually be set to 0.1~0.2; the resulting file is small and the quality loss is not severe.

For ordinary local playback, Factor can be set to 0.25~0.5, which keeps the visible loss from encoding low, at the cost of a larger file.

For high-definition processing, Factor can be set above 0.5.

Note that the richer the colors and the faster the motion in the picture, the higher the bitrate the encoder needs; for such content, raise the bitrate to preserve clarity.

Also note that most Android devices cap the bitrate they support. If the configured value exceeds that cap, the encoder may throw an exception and the encoding pipeline fails. So when choosing a bitrate, check the supported range through the interfaces exposed by MediaCodecInfo.CodecCapabilities, as sketched below.
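
A minimal sketch of that check for the device's AVC encoder (uses android.media.MediaCodecList / MediaCodecInfo and android.util.Range; the lookup loop is illustrative, not from the original post):

    // Find the AVC encoder and print the bitrate range it supports
    MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    for (MediaCodecInfo codecInfo : codecList.getCodecInfos()) {
        if (!codecInfo.isEncoder()) continue;
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
                Range<Integer> bitrates = codecInfo.getCapabilitiesForType(type)
                        .getVideoCapabilities()
                        .getBitrateRange();
                Log.d("tag", "AVC encoder bitrate range: " + bitrates);
            }
        }
    }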

format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ);

android.media.MediaCodec$CodecException: Error 0x80001001
        at android.media.MediaCodec.native_configure(Native Method)

This exception was thrown because BITRATE_MODE_CQ was set on an encoder that does not support it.
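
Support for a bitrate mode can be checked up front via MediaCodecInfo.EncoderCapabilities. A minimal sketch, assuming codecInfo comes from a MediaCodecList scan like the one above:

    MediaCodecInfo.EncoderCapabilities encoderCaps = codecInfo
            .getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_AVC)
            .getEncoderCapabilities();
    if (encoderCaps.isBitrateModeSupported(MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ)) {
        format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ);
    } else {
        // Fall back to the default VBR mode
        format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
    }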

/**
 * The default bitrate mode is BITRATE_MODE_VBR.
 *
 * BITRATE_MODE_CQ
 * Ignores the user-specified bitrate; the encoder controls the bitrate itself,
 * trying to balance picture quality against bitrate.
 *
 * BITRATE_MODE_CBR
 * Sticks to the user-specified bitrate as closely as possible, regardless of
 * the picture content.
 *
 * BITRATE_MODE_VBR
 * Follows the user-specified bitrate but adjusts it dynamically based on the
 * motion vectors between frames (roughly, how much the picture changes from
 * frame to frame): the bitrate is raised for high-motion segments and lowered
 * when the picture barely changes.
 *
 * So when setting the bitrate, also pay attention to the bitrate mode; different
 * settings affect both the size and the clarity of the resulting video file.
 */
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            e.printStackTrace();
        }
//        Surface surface = mediaCodec.createInputSurface();
        format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        // Bitrate: data encoded per unit time; higher bitrate means better quality and a larger file
        format.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        // Frame rate: frames per second
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
        // Color format: COLOR_FormatYUV420Flexible covers both COLOR_FormatYUV420Planar and COLOR_FormatYUV420SemiPlanar
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        /*
         * Key-frame interval, in seconds between key frames:
         *   positive value - the interval between key frames
         *   negative value - only the first frame is a key frame
         *   zero           - every frame is a key frame
         */
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
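
The configuration is not applied until configure is called. A minimal continuation sketch, assuming ByteBuffer input rather than the commented-out input surface (in the asynchronous mode shown next, setCallback must be called before configure):

    // CONFIGURE_FLAG_ENCODE marks this codec instance as an encoder;
    // no output surface or crypto object is needed here
    mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();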

Asynchronous encoding.

    mediaCodec.setCallback(new MediaCodec.Callback() {
        @Override
        public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
            ByteBuffer inputBuffer = codec.getInputBuffer(index);
            inputBuffer.clear();
            int length = 0;
            byte[] input = queue.poll();
            if (input != null) {
                Log.d("tag", "encoding frame, queue size: " + queue.size());
                length = input.length;
                inputBuffer.put(input);
            }
            // NOTE: presentationTimeUs is passed as 0 here; a real pipeline
            // should pass an increasing timestamp per frame
            codec.queueInputBuffer(index, 0, length, 0, 0);
        }

        @Override
        public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
            ByteBuffer outputBuffer = codec.getOutputBuffer(index);
            MediaFormat bufferFormat = codec.getOutputFormat(index); // option A
            // bufferFormat is equivalent to mOutputFormat
            // outputBuffer is ready to be processed or rendered.
            Log.d("tag", "frame encoded");
            if (info.size > 0) {
                byte[] outData = new byte[info.size];
                outputBuffer.get(outData);
                try {
                    // Append the encoded AVC data (including the codec-config
                    // SPS/PPS buffer) to the output stream
                    outputStream.write(outData, 0, outData.length);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            // render must be false: an encoder's output is not tied to a surface
            codec.releaseOutputBuffer(index, false);
        }

        @Override
        public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
            Log.d("tag", "onError " + e);
        }

        @Override
        public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
            Log.d("tag", "onOutputFormatChanged");
        }
    });
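
Two assumptions in the snippet above: queue is a thread-safe queue of raw NV21 frames (e.g. a ConcurrentLinkedQueue<byte[]> filled from the camera's preview callback; the original post does not show it), and setCallback is called before configure, which asynchronous mode requires. A minimal ordering sketch:

    // Hypothetical frame queue fed by Camera.PreviewCallback (not shown in the post)
    Queue<byte[]> queue = new ConcurrentLinkedQueue<>();

    mediaCodec.setCallback(callback);  // must precede configure() in async mode
    mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();                // callbacks begin firing after start()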

 
