Decoding a real-time H.264 video stream to YUV with Android MediaCodec and converting the YUV color format to NV21

Initialize MediaCodec

    //Width and height must match the camera resolution
    private int Width = 1280;
    private int Height = 720;
    private MediaCodec mediaCodec;
    private ByteBuffer[] inputBuffers;

    private void initMediaCodec(Surface surface) {

        try {
            Log.d(TAG, "onGetNetVideoData: ");
            //Create the decoder; the MIME type for H.264 is "video/avc"
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
            //Create the format describing the stream
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", Width, Height);
            //Expected frame rate of the incoming stream, in frames per second
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
            //Ask the decoder to output semi-planar YUV (NV12 on most devices)
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
//            //If SPS/PPS are not carried in-band, they can be supplied via csd-0/csd-1:
//            byte[] headerSps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
//            byte[] headerPps = {0, 0, 0, 1, 104, -54, 67, -56};
//
//            mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(headerSps));
//            mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(headerPps));
            //No Surface is bound here: passing null makes the decoded frames available
            //as YUV ByteBuffers instead of being rendered directly to a Surface
            mediaCodec.configure(mediaFormat, null, null, 0);
            mediaCodec.start();
            inputBuffers = mediaCodec.getInputBuffers();
        } catch (IOException e) {
            e.printStackTrace();
            //Failed to create the decoder
            Log.e(TAG, "Failed to create the decoder");
        }

    }
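
Before deciding which color format to request in configure(), it can help to check what the device's H.264 decoders actually advertise (the commented-out block in onFrame() below lists what a Huawei tablet, a Meizu phone and an rk3588s report). Below is a minimal sketch of such a check, assuming API 21+ and the usual android.media imports; the method name dumpAvcDecoderColorFormats is made up for illustration:

    // Hedged sketch: list the color formats advertised by every H.264 decoder on this device,
    // so we know which layout (planar / semi-planar / flexible) to expect in the output buffers.
    private void dumpAvcDecoderColorFormats() {
        MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : codecList.getCodecInfos()) {
            if (codecInfo.isEncoder()) {
                continue; // only decoders are of interest here
            }
            for (String type : codecInfo.getSupportedTypes()) {
                if (!type.equalsIgnoreCase("video/avc")) {
                    continue;
                }
                MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);
                for (int format : caps.colorFormats) {
                    // 19 = COLOR_FormatYUV420Planar, 21 = COLOR_FormatYUV420SemiPlanar,
                    // 0x7F420888 = COLOR_FormatYUV420Flexible
                    Log.i(TAG, codecInfo.getName() + " supports color format " + format);
                }
            }
        }
    }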

Process the data: decode the H.264 data to YUV

The data passed in here is the real-time H.264 video stream.

    private long mCount = 0;

    private void onFrame(byte[] buf, int offset, int length) {

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        //Wait up to 10,000 microseconds for a free input buffer; returns -1 if none is
        //available within the timeout, otherwise the buffer index (>= 0)
        int inIndex = mediaCodec.dequeueInputBuffer(10000);
        if (inIndex >= 0) {
            //Get the usable buffer for the returned index
            ByteBuffer byteBuffer = inputBuffers[inIndex];
            //Clear the buffer before filling it
            byteBuffer.clear();
            //Copy the H.264 data into the buffer
            byteBuffer.put(buf, offset, length);
            //Queue the filled buffer back to MediaCodec for decoding (timestamp is in microseconds)
            mediaCodec.queueInputBuffer(inIndex, 0, length, mCount * 20, 0);
            mCount++;
        } else {
            Log.i(TAG, "inIndex < 0");
            //No free input buffer yet; wait for the next call
            return;
        }
        //Dequeue a decoded output buffer; returns its index, or a negative status code
        int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
        Log.e(TAG, "decoder output index " + outIndex);
        if (outIndex >= 0) {

            //The decoder's ByteBuffer cannot be handed on directly; copy the data out first
            ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
            //Restrict the view to the valid region of this frame
            byteBuffer.position(info.offset);
            byteBuffer.limit(info.offset + info.size);

            byte[] ba = new byte[byteBuffer.remaining()];
            byteBuffer.get(ba);
            //Pre-allocate a byte array of the same size as the NV12 data
            byte[] yuv = new byte[ba.length];
            //Not sure which color format the decoder uses, so the converters were tried one by one
            //convertI420ToNV21(ba, yuv, Width, Height);
            //convertYV12toNV21(ba, yuv, Width, Height);
            convertNV12toNV21(ba, yuv, Width, Height);
            NV21Data(yuv);
            //Check the color formats supported by the decoder
            //(a standalone version of this check is sketched after initMediaCodec() above)
//            MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
//             for (int i = 0; i < capabilities.colorFormats.length; i++) {
//                int format = capabilities.colorFormats[i];
//
//                //Huawei tablet: COLOR_FormatYUV420SemiPlanar, COLOR_FormatYUV420Planar
//                //Meizu phone: COLOR_FormatYUV420SemiPlanar
//                //rk3588s: COLOR_FormatYUV420Planar, COLOR_FormatYUV420Flexible, COLOR_FormatYUV420PackedSemiPlanar, COLOR_FormatYUV420SemiPlanar
//                switch (format) {
//                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: //corresponds to I420 or YV12
//                        Log.i("COLOR_Format_TAG", "=========COLOR_FormatYUV420Planar");
//                        byte[] convertNv21YUV420Planar = new byte[ba.length];
//                        //Not sure which color format, so each converter was tried
//                        convertI420ToNV21(ba, convertNv21YUV420Planar, Width, Height);
//                        convertYV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
//                        long l1 = System.currentTimeMillis();
//                        convertNV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
//                        Log.i("TimingTest", "conversion to NV21 took: " + (System.currentTimeMillis() - l1));
//                        long l2 = System.currentTimeMillis();
//                        NV21Data(convertNv21YUV420Planar);
//                        Log.i("TimingTest", "recognition took: " + (System.currentTimeMillis() - l2));
//                        continue;
//
//                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: //NV12
//                        Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420SemiPlanar");
//                        byte[] nv21YUV420SemiPlanar = new byte[ba.length];
//                        convertNV12toNV21(ba, nv21YUV420SemiPlanar, Width, Height);
//                        NV21Data(nv21YUV420SemiPlanar);
//
//                        continue;
//                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
//                        Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420PackedSemiPlanar");
//                        byte[] nv21YUV420PackedSemiPlanar = new byte[ba.length];
//                        convertNV12toNV21(ba, nv21YUV420PackedSemiPlanar, Width, Height);
//                        NV21Data(nv21YUV420PackedSemiPlanar);
//                        continue;
//                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible:
//                        byte[] nv21YUV420YUV420Flexible = new byte[ba.length];
//                        convertNV12toNV21(ba, nv21YUV420YUV420Flexible, Width, Height);
//                        NV21Data(nv21YUV420YUV420Flexible);
//                        Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420Flexible");
//                        continue;
//                    default:
//                        continue;
//
//                }
//
//            }

            //Release the output buffer; pass true instead of false to render to a bound Surface
            mediaCodec.releaseOutputBuffer(outIndex, false);
        } else {
            Log.e(TAG, "no decoded output available");
        }
    }
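
Note that dequeueOutputBuffer() can also return negative status codes instead of a buffer index, and many decoders report the color format they really use only through INFO_OUTPUT_FORMAT_CHANGED rather than honoring the configured COLOR_FormatYUV420SemiPlanar. A hedged sketch of how those codes could be handled; the actualColorFormat field and drainOutput() helper are illustrative and not part of the original code:

    private int actualColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;

    // Hedged sketch: dequeue one output buffer and react to the negative status codes.
    private void drainOutput(MediaCodec.BufferInfo info) {
        int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
        if (outIndex >= 0) {
            // ...copy, convert and release exactly as in onFrame() above...
            mediaCodec.releaseOutputBuffer(outIndex, false);
        } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The decoder announces the output format it will use from now on.
            MediaFormat newFormat = mediaCodec.getOutputFormat();
            if (newFormat.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
                actualColorFormat = newFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
            }
            Log.i(TAG, "output format changed: " + newFormat);
        } else if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No decoded frame available within the timeout; try again with the next input.
        }
    }

Based on actualColorFormat, the matching converter (convertI420ToNV21, convertYV12toNV21 or convertNV12toNV21 below) can then be chosen instead of trying them one by one.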

Handle the YUV data in NV21 color format

    private int printImageStatus = 0;

    private void NV21Data(byte[] nv21) {
        //Feed the NV21 stream data into a YuvImage, convert it to a Bitmap and show it in an
        //ImageView, or save it as an image file. If the picture is not grey, not blue-tinted
        //and the red/blue channels are not swapped, the data really is standard NV21.
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, Width, Height, null);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, Width, Height), 100, baos);
        byte[] data = baos.toByteArray();
        Log.i(TAG, "NV21Data-data: " + data.length);

        Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);

        if (bitmap != null) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mIvShowImage.setImageBitmap(bitmap);
                }
            });
            //Save the first frame as a PNG image
            if (printImageStatus == 0) {
                printImageStatus = 1;
                try {
                    File myCaptureFile = new File(Environment.getExternalStorageDirectory(), "img.png");
                    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
                    bitmap.compress(Bitmap.CompressFormat.PNG, 100, bos);
                    bos.flush();
                    bos.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

YUV color format conversion

    public static void convertI420ToNV21(byte[] i420, byte[] nv21, int width, int height) {
        // Copy the Y plane as it is
        System.arraycopy(i420, 0, nv21, 0, width * height);
        int offset = width * height;
        // I420 stores U then V as separate planes; NV21 interleaves them as V,U pairs
        for (int i = 0; i < width * height / 4; i++) {
            nv21[offset + 2 * i] = i420[offset + i + width * height / 4];  // V
            nv21[offset + 2 * i + 1] = i420[offset + i];                   // U
        }
    }

    public static void convertYV12toNV21(byte[] yv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int vOffset = size;
        int uOffset = size + (size / 4);

        // Copy Y channel as it is
        System.arraycopy(yv12, 0, nv21, 0, size);

        for (int i = 0; i < size / 4; i++) {
            nv21[vOffset + (i * 2)] = yv12[vOffset + i];      // V
            nv21[vOffset + (i * 2) + 1] = yv12[uOffset + i];  // U
        }
    }


    public static void convertNV12toNV21(byte[] nv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int offset = size;

        // copy Y channel as it is
        System.arraycopy(nv12, 0, nv21, 0, offset);

        for (int i = 0; i < size / 4; i++) {
            nv21[offset + (i * 2) + 1] = nv12[offset + (i * 2)];       // U
            nv21[offset + (i * 2)] = nv12[offset + (i * 2) + 1];       // V
        }
    }
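
If the decoder only offers COLOR_FormatYUV420Flexible, guessing the raw buffer layout can be avoided entirely: MediaCodec.getOutputImage(outIndex) returns an android.media.Image in YUV_420_888, whose planes carry explicit row and pixel strides. Below is a sketch of converting such an Image to NV21, assuming even width and height; this helper is an illustration and not part of the original project (the Image must be read before releaseOutputBuffer() is called):

    // Hedged sketch: repack a YUV_420_888 Image as NV21 (full Y plane, then interleaved V/U),
    // honoring the per-plane row and pixel strides instead of assuming a fixed layout.
    public static byte[] imageToNV21(Image image) {
        int width = image.getWidth();
        int height = image.getHeight();
        byte[] nv21 = new byte[width * height * 3 / 2];
        Image.Plane[] planes = image.getPlanes();

        // Y plane: pixel stride is guaranteed to be 1, but rows may be padded.
        ByteBuffer yBuf = planes[0].getBuffer();
        int yRowStride = planes[0].getRowStride();
        int pos = 0;
        for (int row = 0; row < height; row++) {
            yBuf.position(row * yRowStride);
            yBuf.get(nv21, pos, width);
            pos += width;
        }

        // Chroma planes: planes[1] is U, planes[2] is V, each subsampled 2x2.
        ByteBuffer uBuf = planes[1].getBuffer();
        ByteBuffer vBuf = planes[2].getBuffer();
        int uRowStride = planes[1].getRowStride();
        int vRowStride = planes[2].getRowStride();
        int uPixStride = planes[1].getPixelStride();
        int vPixStride = planes[2].getPixelStride();
        for (int row = 0; row < height / 2; row++) {
            for (int col = 0; col < width / 2; col++) {
                nv21[pos++] = vBuf.get(row * vRowStride + col * vPixStride); // V comes first in NV21
                nv21[pos++] = uBuf.get(row * uRowStride + col * uPixStride); // then U
            }
        }
        return nv21;
    }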

Source of the real-time H.264 video stream

    @Override
    public void onPacketEvent(byte[] data) {
        onFrame(data, 0, data.length);
        //Optionally dump the H.264 stream to the SD card
        //wirte2file(data, data.length);
    }

Writing the H.264 stream to the SD card

    private BufferedOutputStream BufOs = null;
    private File destfile = null;
    private FileOutputStream destfs = null;
    private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "test.h264";

    private void wirte2file(byte[] buf, int length) {
        if (isStart) {
            if (BufOs == null) {
                destfile = new File(dsetfilePath);
                try {
                    destfs = new FileOutputStream(destfile);
                    BufOs = new BufferedOutputStream(destfs);
                    Log.d(TAG, "wirte2file-new ");
                } catch (FileNotFoundException e) {
                    // TODO: handle exception
                    Log.i("TRACK", "initerro" + e.getMessage());
                    Log.d(TAG, "wirte2file-FileNotFoundException:" + e.getMessage());
                    e.printStackTrace();
                }
            }

            try {
                BufOs.write(buf, 0, length);
                BufOs.flush();
                Log.d(TAG, "wirte2file-write");
            } catch (Exception e) {
                Log.d(TAG, "wirte2file-e: " + e.getMessage());
                // TODO: handle exception
            }

        }
    }

    private boolean isStart;

    public void onStop(View view) {
        isStart = false;
        Toast.makeText(this, "Stopped saving", Toast.LENGTH_SHORT).show();
    }

    public void onStart(View view) {
        isStart = true;
        Toast.makeText(this, "Started saving", Toast.LENGTH_SHORT).show();
    }

Fetching the real-time H.264 video stream over RTSP

public class FFDemuxJava {

    static {
        System.loadLibrary("demux");
    }

    private long m_handle = 0;
    private EventCallback mEventCallback = null;

    public void init(String url) {
        m_handle = native_Init(url);
    }

    public void Start() {
        native_Start(m_handle);
    }

    public void stop() {
        native_Stop(m_handle);
    }

    public void unInit() {
        native_UnInit(m_handle);
    }

    public void addEventCallback(EventCallback callback) {
        mEventCallback = callback;
    }


    private void playerEventCallback(int msgType, float msgValue) {
        if(mEventCallback != null)
            mEventCallback.onMessageEvent(msgType, msgValue);

    }


    private void packetEventCallback(byte[] data) {
        if(mEventCallback != null)
            mEventCallback.onPacketEvent(data);

    }



    private native long native_Init(String url);

    private native void native_Start(long playerHandle);

    private native void native_Stop(long playerHandle);

    private native void native_UnInit(long playerHandle);


    public interface EventCallback {
        void onMessageEvent(int msgType, float msgValue);
        void onPacketEvent(byte[] data);
    }

}
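
For completeness, here is a minimal usage sketch of how the Activity could wire FFDemuxJava to the decoder above. The RTSP URL is a placeholder, and the Activity is assumed to implement FFDemuxJava.EventCallback (its onPacketEvent() override is shown earlier):

    private FFDemuxJava mDemux;

    // Hedged sketch: create the demuxer, register this Activity as the callback and start it.
    // Every H.264 packet then arrives in onPacketEvent(), which feeds it to onFrame().
    private void startRtsp() {
        mDemux = new FFDemuxJava();
        mDemux.init("rtsp://192.168.1.100:554/stream"); // placeholder URL
        mDemux.addEventCallback(this);
        mDemux.Start();
    }

    private void stopRtsp() {
        if (mDemux != null) {
            mDemux.stop();
            mDemux.unInit();
            mDemux = null;
        }
    }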

Writing the C/JNI code that loads the FFmpeg libraries

#include <jni.h>
#include <string>

#include "FFBridge.h"

extern "C"
{
#include <libavutil/time.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/packet.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/opt.h>
};

extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    return env->NewStringUTF(hello.c_str());
}


extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_GetVersion(
        JNIEnv* env,
        jobject /* this */) {
    char strBuffer[1024 * 4] = {0};
    strcat(strBuffer, "libavcodec : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVCODEC_VERSION));
    strcat(strBuffer, "\nlibavformat : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVFORMAT_VERSION));
    strcat(strBuffer, "\nlibavutil : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVUTIL_VERSION));
    strcat(strBuffer, "\nlibavfilter : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVFILTER_VERSION));
    strcat(strBuffer, "\nlibswresample : ");
    strcat(strBuffer, AV_STRINGIFY(LIBSWRESAMPLE_VERSION));
    strcat(strBuffer, "\nlibswscale : ");
    strcat(strBuffer, AV_STRINGIFY(LIBSWSCALE_VERSION));
    strcat(strBuffer, "\navcodec_configure : \n");
    strcat(strBuffer, avcodec_configuration());
    strcat(strBuffer, "\navcodec_license : ");
    strcat(strBuffer, avcodec_license());
    //LOGCATE("GetFFmpegVersion\n%s", strBuffer);
    return env->NewStringUTF(strBuffer);
}


extern "C" JNIEXPORT jlong JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Init
        (JNIEnv *env, jobject obj, jstring jurl)
{
    const char* url = env->GetStringUTFChars(jurl, nullptr);
    FFBridge *bridge = new FFBridge();
    bridge->Init(env, obj, const_cast<char *>(url));
    env->ReleaseStringUTFChars(jurl, url);
    return reinterpret_cast<jlong>(bridge);
}

extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Start
        (JNIEnv *env, jobject obj, jlong handle)
{
    if(handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->Start();
    }

}

extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Stop
        (JNIEnv *env, jobject obj, jlong handle)
{
    if(handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->Stop();
    }
}


extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1UnInit
        (JNIEnv *env, jobject obj, jlong handle)
{
    if(handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->UnInit();
        delete bridge;
    }
}

Source code: https://gitee.com/baipenggui/demux_demo.git
