Android Project Notes: Hardware Encoding (YUV420[p|sp] to H.264 with MediaCodec)

Overview

MediaCodec provides hardware-accelerated encoding (much more efficient than software encoding with FFmpeg).
There are plenty of blog posts to reference on this topic; this is just a record of the code I adapted from them, before I forget it.

Three Approaches

(1) Synchronous encoding with ByteBuffer
(2) Asynchronous encoding with ByteBuffer
(3) Synchronous encoding with the deprecated buffer arrays (probably not as efficient as the first two)

Synchronous Encoding with ByteBuffer

A fairly typical example:

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class AvcEncoderOnSynchronous {

    private MediaCodec mediaCodec;
    private int m_width;
    private int m_height;
    private byte[] configByte = null;
    private BufferedOutputStream bos;
    private long generateIndex = 0;

    @SuppressLint("NewApi")
    public AvcEncoderOnSynchronous(int width, int height, int frameRate, int bitRate, String outPath) throws IOException {

        m_width = width;
        m_height = height;
        this.bos = new BufferedOutputStream(new FileOutputStream(new File(outPath), false));
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        // Keyframe interval in seconds; 0 makes every frame a keyframe
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        mediaFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
        mediaFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel51);

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    @SuppressLint("NewApi")
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            bos.flush();
            bos.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * @param input frame data in YUV420p (planar) format
     */
    @SuppressLint("NewApi")
    public void offerEncoder(byte[] input) {
        try {
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
                if (inputBuffer != null) {
                    inputBuffer.clear();
                    // Convert YUV420p to YUV420sp (NV12) before feeding the encoder
                    byte[] data_420sp = new byte[input.length];
                    yuv420pTo420sp(input, data_420sp, m_width, m_height);
                    input = data_420sp;
                    inputBuffer.put(input);
                    System.out.println("Queued input frame " + generateIndex);
                }
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, computePresentationTime(generateIndex++), 0);
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 12000);
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                // Test flags bitwise: a buffer may carry more than one flag at once
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // SPS/PPS header, emitted only once; keep it for later keyframes
                    configByte = outData;
                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                    // Prepend the header to every keyframe so decoding can start at any keyframe
                    byte[] keyframe = new byte[bufferInfo.size + configByte.length];
                    System.arraycopy(configByte, 0, keyframe, 0, configByte.length);
                    System.arraycopy(outData, 0, keyframe, configByte.length, outData.length);
                    bos.write(keyframe, 0, keyframe.length);
                } else {
                    bos.write(outData, 0, outData.length);
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 12000);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    private void yuv420pTo420sp(byte[] yuv420p, byte[] yuv420sp, int width, int height) {
        if (yuv420p == null || yuv420sp == null) return;
        int frameSize = width * height;
        // Y plane is identical in both layouts
        System.arraycopy(yuv420p, 0, yuv420sp, 0, frameSize);
        for (int j = 0; j < frameSize / 4; j++) {
            // U
            yuv420sp[frameSize + 2 * j] = yuv420p[frameSize + j];
            // V
            yuv420sp[frameSize + 2 * j + 1] = yuv420p[frameSize + frameSize / 4 + j];
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / 30;
    }
}

Notes:
(1) The KEY_COLOR_FORMAT values supported differ from device to device, but every device running Android 5.0 or later supports COLOR_FormatYUV420Flexible. The layout used here is YUV with a Y plane of width x height bytes, followed by U and V samples interleaved with a stride of 2 (place one, skip one), each totalling width x height / 4 bytes.
(2) On the two devices tested (Samsung Tab S3, Huawei M5) the input buffer has to be filled with 420sp data. My guess is that the Camera1 API delivers semi-planar frames, so Android's hardware encoders expect semi-planar input as well. (Still to be verified; feeding planar data without converting it produces wrong colors.)
(3) The fourth parameter of queueInputBuffer (the presentation timestamp) only needs to increase from one frame to the next; the computePresentationTime method is borrowed from other blog posts.
(4) The parameter of dequeueInputBuffer is a timeout in microseconds; a negative value blocks until an input buffer becomes available.
(5) The encoder emits a buffer flagged BUFFER_FLAG_CODEC_CONFIG (the SPS/PPS header) exactly once. Save it, and prepend it to every keyframe (BUFFER_FLAG_KEY_FRAME) so the stream can be decoded starting from any keyframe.
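Regarding note (1): one way to check what a particular device actually accepts is to query the encoder capabilities. A minimal sketch, assuming only the AVC encoders are of interest (the class below is illustrative and not part of the original project):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;

// Hypothetical helper: list the color formats each AVC encoder on the device reports,
// to see whether 420sp / Flexible layouts are supported.
public final class AvcColorFormats {

    public static void dump() {
        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : codecList.getCodecInfos()) {
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!MediaFormat.MIMETYPE_VIDEO_AVC.equals(type)) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                System.out.println(info.getName());
                for (int colorFormat : caps.colorFormats) {
                    // 21 = COLOR_FormatYUV420SemiPlanar, 2135033992 (0x7F420888) = COLOR_FormatYUV420Flexible
                    System.out.println("  colorFormat = " + colorFormat);
                }
            }
        }
    }
}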

Asynchronous Encoding with ByteBuffer

The pattern is the same as the synchronous case, except the work moves into callbacks, which may be more efficient:

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import androidx.annotation.NonNull;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;

public class AvcEncoderOnAsynchronous {

    private MediaCodec mediaCodec;
    private int m_width;
    private int m_height;
    private int frameSize;
    private byte[] configByte = null;
    private byte[] yuv420;
    private BufferedOutputStream bos;
    private AtomicInteger index = new AtomicInteger(0);
    private RandomAccessFile randomAccessFile;
    private int totalFrameNum;

    @SuppressLint("NewApi")
    public AvcEncoderOnAsynchronous(String inPath, final int width, final int height, int frameRate, int bitRate, String outPath) throws IOException {

        m_width = width;
        m_height = height;
        frameSize = width * height * 3 / 2;
        yuv420 = new byte[frameSize];
        this.bos = new BufferedOutputStream(new FileOutputStream(new File(outPath), false));
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // Keyframe interval in seconds
        mediaFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
        mediaFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel51);

        File file = new File(inPath);
        randomAccessFile = new RandomAccessFile(file, "r");
        totalFrameNum = (int) (randomAccessFile.length() / frameSize);
        mediaCodec.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(@NonNull MediaCodec codec, int inputBufferId) {
                if (index.get() == totalFrameNum) {
                    return;
                }
                ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
                try {
                    randomAccessFile.seek((long) index.get() * frameSize);
                    randomAccessFile.read(yuv420, 0, yuv420.length);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                // Convert YUV420p to YUV420sp (NV12) before feeding the encoder
                byte[] data_420sp = new byte[yuv420.length];
                yuv420pTo420sp(yuv420, data_420sp, m_width, m_height);
                yuv420 = data_420sp;
                inputBuffer.put(yuv420);
                System.out.println(Thread.currentThread().getId() + ": queued input frame " + index.get());
                mediaCodec.queueInputBuffer(inputBufferId, 0, yuv420.length, computePresentationTime(index.getAndIncrement()), 0);
            }

            @Override
            public void onOutputBufferAvailable(@NonNull MediaCodec codec, int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
                ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                // Test flags bitwise: a buffer may carry more than one flag at once
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // SPS/PPS header, emitted only once; keep it for later keyframes
                    configByte = outData;
                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                    // Prepend the header to every keyframe so decoding can start at any keyframe
                    byte[] keyframe = new byte[bufferInfo.size + configByte.length];
                    System.arraycopy(configByte, 0, keyframe, 0, configByte.length);
                    System.arraycopy(outData, 0, keyframe, configByte.length, outData.length);
                    try {
                        bos.write(keyframe, 0, keyframe.length);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                } else {
                    try {
                        bos.write(outData, 0, outData.length);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                mediaCodec.releaseOutputBuffer(outputBufferId, false);
            }

            @Override
            public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
                System.out.println(e.toString());
            }

            @Override
            public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
                System.out.println(format);
            }
        });

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    }

    @SuppressLint("NewApi")
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            bos.flush();
            bos.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void start() {
        mediaCodec.start();
        // Crude wait: poll until every frame has been queued, then tear down
        while (index.get() != totalFrameNum) {
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        close();
    }

    private void yuv420pTo420sp(byte[] yuv420p, byte[] yuv420sp, int width, int height) {
        if (yuv420p == null || yuv420sp == null) return;
        int frameSize = width * height;
        // Y plane is identical in both layouts
        System.arraycopy(yuv420p, 0, yuv420sp, 0, frameSize);
        for (int j = 0; j < frameSize / 4; j++) {
            // U
            yuv420sp[frameSize + 2 * j] = yuv420p[frameSize + j];
            // V
            yuv420sp[frameSize + 2 * j + 1] = yuv420p[frameSize + frameSize / 4 + j];
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / 30;
    }
}

Tests

    @Test
    public void Test1() {

        try {
            long index = 0;
            int width = 2160;
            int height = 2880;
            int frameSize = (int) (width * height * 1.5);
            byte[] inData = new byte[frameSize];
            File file = new File(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/shen.data/test.yuv");
            RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r");
            int total = (int) (randomAccessFile.length() / frameSize);

            AvcEncoderOnSynchronous avcEncoderOnSynchronous = new AvcEncoderOnSynchronous(width, height, 30, width * height * 5, Environment.getExternalStorageDirectory().getAbsolutePath() + "/shen.data/test.h264");
            while (index < total) {
                randomAccessFile.seek(index++ * frameSize);
                randomAccessFile.read(inData, 0, inData.length);
                avcEncoderOnSynchronous.offerEncoder(inData);
            }
            // Release the encoder and flush the buffered output, otherwise the file is incomplete
            avcEncoderOnSynchronous.close();
            randomAccessFile.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void Test2() {

        try {
            AvcEncoderOnAsynchronous avcEncoderOnAsynchronous = new AvcEncoderOnAsynchronous(Environment.getExternalStorageDirectory().getAbsoluteFile() + "/shen.data/test.yuv",
                    1952, 2592, 30, 2592 * 1952 * 10, Environment.getExternalStorageDirectory().getAbsolutePath() + "/shen.data/test.h264");
            avcEncoderOnAsynchronous.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

Differences

The processing routine turns into callbacks: when an input buffer becomes available the input callback runs, and when encoded output is ready the output callback runs.
This saves part of the time the synchronous version spends blocked waiting for buffers.
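Two related details, sketched below under stated assumptions (neither is in the project above): since API 23, setCallback accepts an explicit Handler, so the callbacks can run on a dedicated background thread; and the last input buffer can carry BUFFER_FLAG_END_OF_STREAM, letting the encoder signal completion instead of start() polling with Thread.sleep().

import android.media.MediaCodec;
import android.os.Handler;
import android.os.HandlerThread;

// Hypothetical helper, not part of the project above: dispatch MediaCodec callbacks
// to a dedicated HandlerThread (requires API 23+). Like the constructor of
// AvcEncoderOnAsynchronous, this must run before configure().
public final class CodecCallbackThread {

    public static HandlerThread attach(MediaCodec codec, MediaCodec.Callback callback) {
        HandlerThread thread = new HandlerThread("EncoderCallbacks");
        thread.start();
        codec.setCallback(callback, new Handler(thread.getLooper()));
        return thread; // caller should quitSafely() after closing the encoder
    }

    // To avoid the Thread.sleep() polling in start(), the last call to queueInputBuffer
    // inside onInputBufferAvailable could pass MediaCodec.BUFFER_FLAG_END_OF_STREAM as the
    // flags argument, and onOutputBufferAvailable could call close() once it sees
    // (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0.
}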

Source code: https://github.com/shen511460468/MediaCodecDemo

