Parsing the VPS/SPS/PPS NAL units from H264/H265 streams encoded by Android MediaCodec


An H.264 stream has no VPS NAL unit; its configuration data starts with the SPS. This article parses the configuration frames produced by the H.264 and H.265 encoders respectively.

Two ways to parse the configuration data

  1. Parse the first frame of the raw bitstream output by the encoder directly; the first output buffer is normally the configuration frame. For H.264 it is the SPS and PPS concatenated into one buffer, in that order; for H.265 it is the VPS, SPS and PPS concatenated in that order. (A small sketch of the byte-level rule follows this list.)
  2. In the onOutputFormatChanged callback, extract the configuration data from the csd-0 and csd-1 buffers of the MediaFormat. For H.264 the SPS is in csd-0 and the PPS is in csd-1; for H.265 the VPS, SPS and PPS are all concatenated, in order, inside csd-0.
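
Both methods ultimately come down to the NAL unit type stored in the byte that follows the 00 00 00 01 start code. A minimal sketch of that byte-level rule (assuming the buffer begins with a 4-byte Annex-B start code, which is what the encoders below produce); it is the same computation used in step 5:

// H.264: the NAL type is the low 5 bits of the byte after the start code (7 = SPS, 8 = PPS)
private static int h264NalType(byte[] frame) {
    return frame[4] & 0x1F;
}

// H.265: the NAL type sits in bits 1..6 of the byte after the start code (32 = VPS, 33 = SPS, 34 = PPS)
private static int h265NalType(byte[] frame) {
    return (frame[4] & 0x7E) >> 1;
}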

The MediaCodec asynchronous encoding flow, step by step:

1. Configure the Camera callback to obtain the raw preview data

// open the back-facing camera
mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
// register the preview callback
mCamera.setPreviewCallback(new CameraPreviewCallback());

class CameraPreviewCallback implements Camera.PreviewCallback {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
//            Log.d(TAG, "onPreviewFrame: ");
            if (frameRawDataListener != null)
                frameRawDataListener.previewDataCallBack(data);
        }
    }

// callback interface that forwards the raw preview frames
public interface FrameRawDataListener{
        public void previewDataCallBack(byte[] frameData);
    }
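
The preview configuration is omitted above. As a hedged sketch (surfaceHolder is a hypothetical preview surface, not defined in the article), the preview size should match the mWidth/mHeight later passed to the encoder, and the legacy Camera API delivers NV21 frames by default:

Camera.Parameters params = mCamera.getParameters();
params.setPreviewSize(mWidth, mHeight);    // must match the encoder resolution
params.setPreviewFormat(ImageFormat.NV21); // default format of the onPreviewFrame data
mCamera.setParameters(params);
mCamera.setPreviewDisplay(surfaceHolder);  // throws IOException; most devices need a preview surface before frames are delivered
mCamera.startPreview();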

2. Configure the encoder and the MediaFormat

public void prepareEncoder(){
        Log.d(TAG, "prepareEncoder: ");
        MediaFormat videoFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, mWidth, mHeight);
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mFps);
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int)(mWidth * mHeight));
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, selectColorFormat(VIDEO_MIME_TYPE));
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL);
        if(VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_AVC)){
            videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
            videoFormat.setInteger(MediaFormat.KEY_PROFILE,MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
            videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
        }else{
            videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
            videoFormat.setInteger(MediaFormat.KEY_PROFILE,MediaCodecInfo.CodecProfileLevel.HEVCProfileMain);
            videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.HEVCHighTierLevel4);
        }

        try {
            mEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
        } catch (IOException e) {
            e.printStackTrace();
        }
        mEncoder.setCallback(mediaCodecCallback);
        mEncoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    }

// pick a color format supported by the encoder
private int selectColorFormat(String type) {
        return selectColorFormat(selectCodec(type), type);
    }

    private int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }

        Log.w(TAG, "Couldn't find color format for " + codecInfo.getName()
                + " / " + mimeType);
        return 0; // no supported color format found
    }

    private boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    private MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }
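
selectCodec walks the codec list by hand. On API 21+ a hedged alternative (not used in the original code) is to let MediaCodecList pick an encoder; the probe format here deliberately omits the frame rate, which findEncoderForFormat does not accept on API 21:

private MediaCodec createEncoderFromList() throws IOException {
        MediaFormat probeFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, mWidth, mHeight);
        String codecName = new MediaCodecList(MediaCodecList.REGULAR_CODECS)
                .findEncoderForFormat(probeFormat);
        // fall back to createEncoderByType if no matching codec was reported
        return codecName != null ? MediaCodec.createByCodecName(codecName)
                                 : MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
    }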

3. Start the encoder

public void startEncodec(){
        mEncoder.start();
        mStartTime = System.currentTimeMillis() * 1000;
        WorkThread workThread = new WorkThread();
        videoEncoderLoop = true;
        workThread.start();
    }
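
The shutdown path is not shown in the article. A minimal sketch of the counterpart (mWorkThread is assumed to be a field keeping the thread started above, instead of the local variable used in startEncodec):

public void stopEncodec(){
        videoEncoderLoop = false;
        mWorkThread.interrupt();           // unblock the take() calls inside WorkThread
        mCamera.setPreviewCallback(null);  // stop feeding new preview frames
        mEncoder.stop();
        mEncoder.release();
    }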

4. Take the preview data and feed it into the encoder

class WorkThread extends Thread{
        @Override
        public void run() {
            while (videoEncoderLoop && !Thread.interrupted()) {
                Log.d(TAG, "run: ");
                int inputBufferIndex = 0;
                try {
                    inputBufferIndex = mInputIndexQueue.take().arg1;
                    Log.d(TAG,"inputBufferIndex="+inputBufferIndex);
                    if (inputBufferIndex >= 0) {
                        byte[] nv21Data = (byte[]) frameDataLinkedBlockingQueue.take();
                        ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex);
                        inputBuffer.put(nv21Data);

                        long pts = System.currentTimeMillis() * 1000 - mStartTime;
                        mEncoder.queueInputBuffer(inputBufferIndex, 0, nv21Data.length, pts, 0);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            mInputIndexQueue.clear();
            frameDataLinkedBlockingQueue.clear();
        }
    }

    @Override
    public void previewDataCallBack(byte[] frameData) {
        try {
            frameDataLinkedBlockingQueue.put(frameData);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
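
One caveat not addressed in the original code: onPreviewFrame delivers NV21 (VU interleaved), while COLOR_FormatYUV420SemiPlanar expects NV12 (UV interleaved), so queueing nv21Data directly can swap the chroma channels. A hedged conversion sketch, to be applied before queueInputBuffer if the selected color format is semi-planar:

private static void nv21ToNv12(byte[] nv21, byte[] nv12, int width, int height) {
        int ySize = width * height;
        System.arraycopy(nv21, 0, nv12, 0, ySize);   // Y plane is identical in both layouts
        for (int i = ySize; i + 1 < nv21.length; i += 2) {
            nv12[i] = nv21[i + 1];     // U
            nv12[i + 1] = nv21[i];     // V
        }
    }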

5. Parse the encoded VPS/SPS/PPS frames in mediaCodecCallback, using either of the two methods

private MediaCodec.Callback mediaCodecCallback = new MediaCodec.Callback() {
        @Override
        public void onInputBufferAvailable(MediaCodec mediaCodec, int i) {
            Message msg = new Message();
            msg.obj = mediaCodec;
            msg.arg1 = i;
            mInputIndexQueue.offer(msg);
        }

        @Override
        public void onOutputBufferAvailable(MediaCodec mediaCodec, int i, MediaCodec.BufferInfo bufferInfo) {
            if (!videoEncoderLoop)
                return;

            ByteBuffer buffer = mediaCodec.getOutputBuffer(i); // fetch the encoded output buffer
            // method 1, start {
            if (VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
                int typeH264 = buffer.get(4) & 0x1F; // NAL unit type of this buffer
                if (typeH264 == 7 || typeH264 == 8) { // 7 = SPS, 8 = PPS; only the first output buffer carries these types (in practice only 7 appears here, since SPS and PPS share one buffer), later buffers are type 1 or 5
                    byte[] configFps = new byte[bufferInfo.size];
                    buffer.get(configFps);
                    Log.d(TAG, "chris buffer.position="+buffer.position()+",H264type= " + typeH264 +",value = "+bytesToHex(configFps));
                    //H264type= 7,value = 000000016764001FACB402802DD2905020206D0A13500000000168EE06E2C0

                    searchSPSandPPSFromH264(ByteBuffer.wrap(configFps),bufferInfo);
                }

            } else if (VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
                int typeH265 = (buffer.get(4) & 0x7e) >> 1;
                Log.d(TAG, "chris H265type= " + typeH265);
                if (typeH265 == 32 || typeH265 == 33 || typeH265 == 34) { // 32 = VPS, 33 = SPS, 34 = PPS; only the first output buffer carries these types (in practice only 32 appears here, since VPS/SPS/PPS share one buffer), later buffers are type 1 or 19
                    byte[] configFps = new byte[bufferInfo.size];
                    buffer.get(configFps);
                    Log.d(TAG, "chris H265type= " + typeH265 +",value = "+bytesToHex(configFps));
                    //H265type= 32, value = 0000000140010C01FFFF016000000300B0000003000003005DAC5900000001420101016000000300B0000003000003005DA00280802E1F1396BB9324BB948281010176850940000000014401C0F1800420

                    searchVpsSpsPpsFromH265(ByteBuffer.wrap(configFps));
                }
            }
            // method 1, end }

            mediaCodec.releaseOutputBuffer(i, false);
        }

        @Override
        public void onError(MediaCodec mediaCodec, MediaCodec.CodecException e) {

        }

        @Override
        public void onOutputFormatChanged(MediaCodec mediaCodec, MediaFormat mediaFormat) {
            // method 2, start {
            if (VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
                searchVpsSpsPpsFromH265(mediaFormat.getByteBuffer("csd-0"));
            } else if (VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
                ByteBuffer sps = mediaFormat.getByteBuffer("csd-0"); //000000016764001FACB402802DD2905020206D0A1350
                ByteBuffer pps = mediaFormat.getByteBuffer("csd-1"); //0000000168EE06E2C0
                Log.d(TAG,"H264 onOutputFormatChanged sps="+bytesToHex(sps.array()) + ",pps=" + bytesToHex(pps.array()));
                //onOutputFormatChanged sps=000000016764001FACB402802DD2905020206D0A1350,pps=0000000168EE06E2C0
            }
            // method 2, end }
        }
    };
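
As a hedged simplification of method 1 (not used in the article), MediaCodec also marks the configuration buffer with BUFFER_FLAG_CODEC_CONFIG, so the NAL-type inspection inside onOutputBufferAvailable could be replaced by a flag check that works for both codecs:

if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    // this buffer holds SPS+PPS (H.264) or VPS+SPS+PPS (H.265), not picture data
    byte[] config = new byte[bufferInfo.size];
    buffer.get(config);
    Log.d(TAG, "codec config frame: " + bytesToHex(config));
}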

6. Parse the value of each parameter set

 // locate the SPS and PPS (the H.265 variant below also handles the VPS)
    public void searchSPSandPPSFromH264(ByteBuffer buffer, MediaCodec.BufferInfo bufferInfo){

        byte[] csd = new byte[128];
        int len = 0, p = 4, q = 4;

        len = bufferInfo.size;
        Log.d(TAG,"len="+len);
        if (len<128) {
            buffer.get(csd,0,len);
            if (len>0 && csd[0]==0 && csd[1]==0 && csd[2]==0 && csd[3]==1) {
                // Parses the SPS and PPS, they could be in two different packets and in a different order
                //depending on the phone so we don't make any assumption about that
                while (p<len) {
                    while (!(csd[p+0]==0 && csd[p+1]==0 && csd[p+2]==0 && csd[p+3]==1) && p+3<len) p++;
                    if (p+3>=len) p=len;
                    if ((csd[q]&0x1F)==7) {
                        byte[] sps = new byte[p-q];
                        System.arraycopy(csd, q, sps, 0, p-q);
                        Log.d(TAG,"chris, searchSPSandPPSFromH264 SPS="+bytesToHex(sps));
                        //chris, searchSPSandPPSFromH264 SPS=6764001FACB402802DD2905020206D0A1350
                    } else {
                        byte[] pps = new byte[p-q];
                        System.arraycopy(csd, q, pps, 0, p-q);
                        Log.d(TAG,"chris, searchSPSandPPSFromH264 PPS="+bytesToHex(pps));
                        //chris, searchSPSandPPSFromH264 PPS=68EE06E2C0
                    }
                    p += 4;
                    q = p;
                }
            }
        }
    }

public void searchVpsSpsPpsFromH265(ByteBuffer csd0byteBuffer) {
    int vpsPosition = -1;
    int spsPosition = -1;
    int ppsPosition = -1;
    int contBufferInitiation = 0;
    byte[] csdArray = csd0byteBuffer.array();
    for (int i = 0; i < csdArray.length; i++) {
        if (contBufferInitiation == 3 && csdArray[i] == 1) {
            if (vpsPosition == -1) {
                vpsPosition = i - 3;
            } else if (spsPosition == -1) {
                spsPosition = i - 3;
            } else {
                ppsPosition = i - 3;
            }
        }
        if (csdArray[i] == 0) {
            contBufferInitiation++;
        } else {
            contBufferInitiation = 0;
        }
    }
    byte[] vps = new byte[spsPosition];
    byte[] sps = new byte[ppsPosition - spsPosition];
    byte[] pps = new byte[csdArray.length - ppsPosition];
    for (int i = 0; i < csdArray.length; i++) {
        if (i < spsPosition) {
            vps[i] = csdArray[i];
        } else if (i < ppsPosition) {
            sps[i - spsPosition] = csdArray[i];
        } else {
            pps[i - ppsPosition] = csdArray[i];
        }
    }

    Log.d(TAG, "searchVpsSpsPpsFromH265: vps="+ bytesToHex(vps)+",sps="+bytesToHex(sps)+",pps="+bytesToHex(pps));
    //vps=0000000140010C01FFFF016000000300B0000003000003005DAC59,sps=00000001420101016000000300B0000003000003005DA00280802E1F1396BB9324BB948281010176850940,pps=000000014401C0F1800420
}
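
A hedged usage sketch (spsWithStartCode, ppsWithStartCode and vpsSpsPpsWithStartCode are hypothetical names for the byte arrays extracted above, start codes included): when configuring a decoder, the parameter sets are written back into the same csd keys that method 2 reads from:

MediaFormat decoderFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, mWidth, mHeight);
if (VIDEO_MIME_TYPE.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
    decoderFormat.setByteBuffer("csd-0", ByteBuffer.wrap(spsWithStartCode));
    decoderFormat.setByteBuffer("csd-1", ByteBuffer.wrap(ppsWithStartCode));
} else {
    decoderFormat.setByteBuffer("csd-0", ByteBuffer.wrap(vpsSpsPpsWithStartCode));
}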

7. Utility: bytesToHex

final protected static char[] hexArray = "0123456789ABCDEF".toCharArray();
public static String bytesToHex(byte[] bytes) {
    char[] hexChars = new char[bytes.length * 2];
    for ( int j = 0; j < bytes.length; j++ ) {
        int v = bytes[j] & 0xFF;
        hexChars[j * 2] = hexArray[v >>> 4];
        hexChars[j * 2 + 1] = hexArray[v & 0x0F];
    }
    return new String(hexChars);
}