MediaCodec + mp4v2 usage

MediaCodec + mp4v2

demo: https://github.com/wangzuxing/MyMp4V2H264

Camera preview data on the Java side is encoded by MediaCodec and handed to the JNI layer, which calls the relevant libmp4v2 functions to process the encoded NAL units and write them into an .mp4 video file.

Java side:

1、MainActivity0

    static {
        System.loadLibrary("mp4v2"); 
        // libmp4v2: MP4 container library (it can mux .aac and .264 streams into an .mp4 file)
        System.loadLibrary("mp");
    }

    public native boolean Mp4Start(String pcm); // path of the .mp4 file to create
    public native void Mp4PackV(byte[] array, int length, int keyframe); // passes the SPS/PPS/NALU data output by the MediaCodec encoder to the JNI layer
    public native void Mp4PackA(byte[] array, int length); // passes one ADTS-framed AAC frame to the JNI layer
    public native void Mp4End(); // closes the .mp4 file

    /*
    Packed formats: the Y, U and V values are stored as an array of macro-pixels, similar to how RGB is stored.
    Planar formats: the Y, U and V components are each stored in their own plane.
    COLOR_FormatYUV420Planar:     YUV420P (I420)
    COLOR_FormatYUV420SemiPlanar: YUV420SP (NV12)
    YUV420P: all three components are planar; the two variants are I420 and YV12, which differ only in the order of the U and V planes. In I420 the U plane follows the Y plane and the V plane comes last (Y U V); YV12 is the opposite (Y V U).
    YUV420SP: the Y component is planar and U/V are interleaved, i.e. NV12. NV12 and NV21 are alike, differing only in the order of the interleaved U/V samples.
    I420: YYYYYYYY UU VV    => YUV420P
    YV12: YYYYYYYY VV UU    => YUV420P
    NV12: YYYYYYYY UVUV     => YUV420SP
    NV21: YYYYYYYY VUVU     => YUV420SP
    */

    //YV12 => I420 (YUV420P): reorder the chroma planes from YVU to YUV
    private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)   
    {        
        System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height);  
        System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height,width*height/4);  
        System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4,width*height/4);    
    } 

    //encode the camera preview data
    public void onFrame(byte[] buf, int length) {   

            swapYV12toI420(buf, h264, width, height); // the H.264 encoder only accepts YUV input (here I420, so the YV12 preview frame is reordered first)

            ByteBuffer[] inputBuffers = mediaCodec0.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec0.getOutputBuffers();
            int inputBufferIndex = mediaCodec0.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(h264, 0, length);
                mediaCodec0.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec0.dequeueOutputBuffer(bufferInfo,0);

            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];

                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);

                if(mp4fFlag){
                      if(outData.length==21){ // codec config: SPS + PPS (21 bytes for this particular camera/encoder setup)
                           Log.i("Encoder", "--------- pps sps set---------");
                           /*
                           The SPS/PPS output by the encoder depend on the camera width, height, fps, etc.,
                           so their length and content must be determined empirically. To pause and then start
                           recording a new .mp4 file, save these SPS/PPS and write them directly before the
                           first IDR frame: sps, pps, idr.
                           */

                           //int length = outData.length;
                           for (int ix = 0; ix < 21; ++ix) {
                                System.out.printf("%02x ", outData[ix]);
                           }
                           System.out.println("\n----------");
                           //00 00 00 01 67 42 80 1e e9 01 40 7b 20 00 00 00 01 68 ce 06 e2 
                           byte[] outData0 = new byte[13]; // SPS with its 00 00 00 01 start code
                           byte[] outData1 = new byte[8];  // PPS with its 00 00 00 01 start code
                           System.arraycopy(outData, 0,  outData0, 0, 13);  
                           System.arraycopy(outData, 13, outData1, 0, 8); 

                           Mp4PackV(outData0, 13, keyFrame);
                           Mp4PackV(outData1, 8, keyFrame);
                      }else{
                           // NAL type 5 (0x65 when nal_ref_idc is 3) marks an IDR key frame
                           keyFrame = ((outData[4] & 0x1f) == 5) ? 1 : 0;
                           Mp4PackV(outData, outData.length, keyFrame);
                      }
                }
                mediaCodec0.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec0.dequeueOutputBuffer(bufferInfo, 0);
            }
     }  

JNI side:
2、mp.c
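
mp.c relies on a logging macro, a read-buffer size and a set of file-scope variables (track IDs, timescale, frame rate, etc.) that are not shown in this excerpt. A minimal sketch of what it is assumed to declare near the top of the file; the names are taken from the code below, but the demo's actual types, values and header layout may differ:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <jni.h>
#include <android/log.h>
#include "mp4v2/mp4v2.h"   // header path depends on how the prebuilt libmp4v2 headers are laid out

#define LOG_TAG "mp"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define BUFFER_SIZE (1024*1024)   // read buffer used by WriteH264File(); the size is an assumption

static MP4FileHandle fileHandle = NULL;                  // .mp4 file currently being written
static MP4TrackId    m_videoId  = MP4_INVALID_TRACK_ID;  // H.264 video track
static MP4TrackId    audio      = MP4_INVALID_TRACK_ID;  // AAC audio track
static int m_nWidth  = 640;                              // video width
static int m_nHeight = 480;                              // video height
static int m_nTimeScale = 90000;                         // MP4 timescale (ticks per second)
static int m_nFrameRate = 15;                            // nominal frame rate
static unsigned int m_samplesWritten = 0;                // used by the alternative timestamp scheme
static MP4Duration  m_lastTime = 0, m_thisTime = 0;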

uint8_t MP4AdtsFindSamplingRateIndex(uint32_t samplingRate)
{
    uint8_t i;
    for(i = 0; i < NUM_ADTS_SAMPLING_RATES; i++) {
        if (samplingRate == AdtsSamplingRates[i]) {
            return i;
        }
    }
    return NUM_ADTS_SAMPLING_RATES - 1;
}
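
MP4AdtsFindSamplingRateIndex() maps a sampling rate to its index in the standard ADTS/MPEG-4 sampling-frequency table (ISO/IEC 14496-3). The table itself is not shown in the excerpt; it is assumed to be declared earlier in mp.c roughly as:

static const uint32_t AdtsSamplingRates[] = {
    96000, 88200, 64000, 48000, 44100, 32000, 24000,
    22050, 16000, 12000, 11025, 8000, 7350
};
#define NUM_ADTS_SAMPLING_RATES \
    (sizeof(AdtsSamplingRates) / sizeof(AdtsSamplingRates[0]))

For 44100 Hz the function returns index 4, which is the value used when building the AAC decoder configuration below.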

bool MP4AacGetConfiguration(uint8_t** ppConfig,
                         uint32_t* pConfigLength,
                         uint8_t profile,
                         uint32_t samplingRate,
                         uint8_t channels)
{
    /* create the appropriate decoder config */

    uint8_t* pConfig = (uint8_t*)malloc(2);

    if (pConfig == NULL) {
        return false;
    }

    uint8_t samplingRateIndex = MP4AdtsFindSamplingRateIndex(samplingRate);

    pConfig[0] = ((profile + 1) << 3) | ((samplingRateIndex & 0xe) >> 1);
    pConfig[1] = ((samplingRateIndex & 0x1) << 7) | (channels << 3);

    /* LATER this option is not currently used in MPEG4IP
     if (samplesPerFrame == 960) {
     pConfig[1] |= (1 << 2);
     }
     */

    *ppConfig = pConfig;
    *pConfigLength = 2;

    return true;
}
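
As a worked example: for AAC LC (audioObjectType 2, i.e. ADTS profile 1), 44100 Hz (sampling-rate index 4) and one channel, the two config bytes come out as 0x12 0x08: 5 bits audioObjectType (00010), 4 bits samplingFrequencyIndex (0100), 4 bits channelConfiguration (0001), 3 padding bits (000).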

// Scan the buffer starting at offSet for one NAL unit delimited by 00 00 00 01
// start codes. On success this fills *nalu and returns the number of bytes consumed
// from offSet (any leading bytes + start code + payload); it returns 0 when no
// complete start code is found.
int ReadOneNaluFromBuf(const unsigned char *buffer,
        unsigned int nBufferSize,
        unsigned int offSet,
        MP4ENC_NaluUnit *nalu)
{
    unsigned int i = offSet;
    while(i + 4 <= nBufferSize)
    {
        if(buffer[i] == 0x00 &&
            buffer[i+1] == 0x00 &&
            buffer[i+2] == 0x00 &&
            buffer[i+3] == 0x01)
        {
            unsigned int start = i + 4;  // first byte of the NAL payload
            unsigned int pos = start;
            // look for the start code of the next NAL unit
            while(pos + 4 <= nBufferSize &&
                !(buffer[pos] == 0x00 &&
                  buffer[pos+1] == 0x00 &&
                  buffer[pos+2] == 0x00 &&
                  buffer[pos+3] == 0x01))
            {
                pos++;
            }
            if(pos + 4 > nBufferSize)
            {
                nalu->size = nBufferSize - start; // last NAL unit in the buffer
            }
            else
            {
                nalu->size = pos - start;
            }

            nalu->type = buffer[start]&0x1f;
            nalu->data =(unsigned char*)&buffer[start];
            LOGI("     nalu type = %d, size = %d    ", nalu->type, nalu->size);
            return (nalu->size + start - offSet);
        }
        i++;
    }
    return 0;
}
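
ReadOneNaluFromBuf() fills an MP4ENC_NaluUnit describing a single NAL unit. The struct definition is not included in the excerpt; judging from its usage it is roughly:

typedef struct _MP4ENC_NaluUnit
{
    int type;             // NAL unit type (low 5 bits of the first payload byte): 5 = IDR, 7 = SPS, 8 = PPS
    int size;             // payload size in bytes, not counting the 00 00 00 01 start code
    unsigned char *data;  // points at the first byte after the start code
} MP4ENC_NaluUnit;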

int WriteH264Data(MP4FileHandle hMp4File,const unsigned char* pData,int size)
{
    if(hMp4File == NULL)
    {
        return -1;
    }
    if(pData == NULL)
    {
        return -1;
    }
    MP4ENC_NaluUnit nalu;
    int pos = 0, len = 0;
    while ((len = ReadOneNaluFromBuf(pData, size, pos, &nalu)) > 0)
    {
        if(nalu.type == 0x07) // sps
        {
            // track
            m_videoId = MP4AddH264VideoTrack
                (hMp4File,
                m_nTimeScale,
                m_nTimeScale / m_nFrameRate,
                m_nWidth,     // width
                m_nHeight,    // height
                nalu.data[1], // sps[1] AVCProfileIndication
                nalu.data[2], // sps[2] profile_compat
                nalu.data[3], // sps[3] AVCLevelIndication
                3);           // 4 bytes length before each NAL unit
            if (m_videoId == MP4_INVALID_TRACK_ID)
            {
                printf("add video track failed.\n");
                //MP4Close(mMp4File, 0);
                return 0;
            }
            MP4SetVideoProfileLevel(hMp4File, 1); //  Simple Profile @ Level 3

            MP4AddH264SequenceParameterSet(hMp4File,m_videoId,nalu.data,nalu.size);
            LOGI("              write sps                ");
        }
        else if(nalu.type == 0x08) // pps
        {
            MP4AddH264PictureParameterSet(hMp4File,m_videoId,nalu.data,nalu.size);
            LOGI("              write pps                ");
        }
        else
        {
            int datalen = nalu.size+4;
            unsigned char data[datalen];
            // MP4 Nalu
            data[0] = nalu.size>>24;
            data[1] = nalu.size>>16;
            data[2] = nalu.size>>8;
            data[3] = nalu.size&0xff;
            memcpy(data+4, nalu.data, nalu.size);
            if(!MP4WriteSample(hMp4File, m_videoId, data, datalen,MP4_INVALID_DURATION, 0, 1))
            {
                LOGI("              MP4WriteSample failed, samples written = %d               ",m_samplesWritten);
                // MP4DeleteTrack(mMp4File, video);
                return 0;
            }
        }

        pos += len;
    }
    return pos;
}

MP4FileHandle CreateMP4File(const char *pFileName,int width,int height)
{
    if(pFileName == NULL)
    {
        return MP4_INVALID_FILE_HANDLE;
    }
    // create the mp4 file
    MP4FileHandle hMp4file = MP4Create(pFileName, 0);
    if (hMp4file == MP4_INVALID_FILE_HANDLE)
    {
        //printf("ERROR: open file failed.\n");
        LOGI("              MP4_INVALID_FILE_HANDLE                ");
        return MP4_INVALID_FILE_HANDLE;
    }
    m_nWidth  = width;
    m_nHeight = height;
    m_nTimeScale = 90000;
    m_nFrameRate = 15;
    MP4SetTimeScale(hMp4file, m_nTimeScale);
    return hMp4file;
}

void CloseMP4File(MP4FileHandle hMp4File)
{
    if(hMp4File)
    {
        MP4Close(hMp4File,0);
        hMp4File = NULL; // note: only clears the local copy; the caller's handle is unchanged
    }
}

//mux an existing raw H.264 (Annex-B) .h264 file directly into an .mp4 file
bool WriteH264File(const char* pFile264,const char* pFileMp4)
{
    if(pFile264 == NULL || pFileMp4 == NULL)
    {
        return false;
    }

    MP4FileHandle hMp4File = CreateMP4File(pFileMp4, 640, 480);//240,320);

    if(hMp4File == NULL)
    {
        //printf("ERROR:Create file failed!");
        LOGI("              MP4_INVALID_FILE_HANDLE                ");
        return false;
    }

    FILE *fp = fopen(pFile264, "rb");
    if(!fp)
    {
        //printf("ERROR:open file failed!");
        LOGI("              h264 fopen error                ");
        return false;
    }
    LOGI("              h264 fopen                 ");

    fseek(fp, 0, SEEK_SET);

    unsigned char buffer[BUFFER_SIZE];
    int pos = 0;
    LOGI("       mp4Encoder start %s      ",pFile264);
    while(1)
    {
        int readlen = fread(buffer+pos, sizeof(unsigned char), BUFFER_SIZE-pos, fp);
        if(readlen<=0)
        {
            // EOF: flush whatever is still buffered (the last NAL unit)
            if(pos > 0)
            {
                WriteH264Data(hMp4File, buffer, pos);
            }
            break;
        }
        readlen += pos;

        // scan backwards for the last 00 00 00 01 start code; everything before it
        // consists of complete NAL units and can be written now, while the tail is
        // kept in the buffer until the next read completes it
        int writelen = 0;
        int i;
        for(i = readlen-4; i >= 0; i--)
        {
            if(buffer[i] == 0x00 && buffer[i+1] == 0x00 &&
                buffer[i+2] == 0x00 && buffer[i+3] == 0x01)
            {
                writelen = i;
                break;
            }
        }
        LOGI("          mp4Encoder writelen = %d     ",writelen);
        writelen = WriteH264Data(hMp4File,buffer,writelen);
        if(writelen<=0)
        {
            break;
        }
        // move the unconsumed tail to the front of the buffer (the regions may overlap)
        memmove(buffer, buffer+writelen, readlen-writelen);
        pos = readlen-writelen;
    }
    fclose(fp);
    CloseMP4File(hMp4File);
    LOGI("              mp4Encoder end                ");
    return true;
}
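
A hypothetical call to the function above (the file paths are placeholders, not taken from the demo):

// mux an already recorded Annex-B .h264 stream into an .mp4 container
if(!WriteH264File("/sdcard/test.h264", "/sdcard/test.mp4"))
{
    LOGI("              WriteH264File failed                ");
}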

JNIEXPORT jboolean JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4Start
 (JNIEnv *env, jclass clz, jstring mp4)
{
    const char* mp4_title = (*env)->GetStringUTFChars(env, mp4, NULL);
    if(mp4_title == NULL)
    {
        return JNI_FALSE;
    }

    //video width=640, height=480; if needed, extend Mp4Start(..., width, height) in MainActivity0.java to pass the encoder's actual dimensions
    fileHandle = CreateMP4File(mp4_title, 640, 480);

    if(fileHandle == NULL)
    {
        //printf("ERROR:Create file failed!");
        LOGI("              MP4_INVALID_FILE_HANDLE NULL             ");
        (*env)->ReleaseStringUTFChars(env, mp4, mp4_title);
        return JNI_FALSE;
    }

    uint32_t samplesPerSecond;
    uint8_t profile;
    uint8_t channelConfig;

    samplesPerSecond = 44100;
    profile = 1; // ADTS profile for AAC LC: MP4AacGetConfiguration() adds 1 to obtain the audioObjectType (2 = AAC LC)

    /*
    MPEG-4 audioObjectType values (the helper above expects this value minus 1):
    0: Null
    1: AAC Main
    2: AAC LC (Low Complexity)
    3: AAC SSR (Scalable Sample Rate)
    4: AAC LTP (Long Term Prediction)
    5: SBR (Spectral Band Replication)
    6: AAC Scalable
    */
    channelConfig = 1;

    uint8_t* pConfig = NULL;
    uint32_t configLength = 0;

    //audio = MP4AddAudioTrack(fileHandle, 44100, 1024, MP4_MPEG2_AAC_MAIN_AUDIO_TYPE);
    audio = MP4AddAudioTrack(fileHandle, 44100, 1024, MP4_MPEG2_AAC_LC_AUDIO_TYPE); // or MP4_MPEG4_AUDIO_TYPE; 1024 PCM samples per AAC frame
    if(audio == MP4_INVALID_TRACK_ID)
    {
        MP4Close(fileHandle, 0);
        (*env)->ReleaseStringUTFChars(env, mp4, mp4_title);
        return JNI_FALSE;
    }
    MP4SetAudioProfileLevel(fileHandle, 0x02);
    LOGI("              MP4AddAudioTrack ok                ");

    MP4AacGetConfiguration(&pConfig, &configLength, profile, samplesPerSecond, channelConfig);
    MP4SetTrackESConfiguration(fileHandle, audio, pConfig, configLength);
    free(pConfig); // MP4SetTrackESConfiguration() copies the config, so the malloc'ed buffer can be released

    (*env)->ReleaseStringUTFChars(env, mp4, mp4_title);
    return JNI_TRUE;
}

//JNI entry point: writes one video NAL unit (prefixed with a 00 00 00 01 start code) to the video track
JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4PackV
(JNIEnv *env, jclass clz, jbyteArray data, jint size, jint keyframe)
{
    unsigned char *buf = (unsigned char *)(*env)->GetByteArrayElements(env, data, NULL);

    unsigned char type;
    type = buf[4]&0x1f;
    //LOGI(" 0x%x 0x%x 0x%x 0x%x 0x%x ",buf[0],buf[1],buf[2],buf[3], type);
    if(type == 0x07) // sps
    {
                // track
                m_videoId = MP4AddH264VideoTrack(fileHandle,
                    m_nTimeScale,
                    m_nTimeScale / m_nFrameRate,
                    m_nWidth,     // width
                    m_nHeight,    // height
                    buf[5], // sps[1] AVCProfileIndication
                    buf[6], // sps[2] profile_compat
                    buf[7], // sps[3] AVCLevelIndication
                    3);           // 4 bytes length before each NAL unit
                if (m_videoId == MP4_INVALID_TRACK_ID)
                {
                    printf("add video track failed.\n");
                    //MP4Close(mMp4File, 0);
                    //return 0;
                }else{
                    MP4SetVideoProfileLevel(fileHandle, 0x7F); // iods visual profile-level indication

                    MP4AddH264SequenceParameterSet(fileHandle, m_videoId, &buf[4], size-4);
                    LOGI("              write sps                ");
                }
    }
    else if(type == 0x08) // pps
    {
                MP4AddH264PictureParameterSet(fileHandle, m_videoId, &buf[4], size-4);
                m_samplesWritten = 0;
                m_lastTime = 0;
                LOGI("              write pps                ");
    }
    else
    {
                int nalsize = size-4;
                bool ret = false;
                // replace the 00 00 00 01 start code with a 4-byte big-endian NAL length, as required inside MP4
                buf[0] = (nalsize&0xff000000)>>24;
                buf[1] = (nalsize&0x00ff0000)>>16;
                buf[2] = (nalsize&0x0000ff00)>>8;
                buf[3] = nalsize&0x000000ff;

                /*
                // method 2: explicit timestamp calculation
                m_samplesWritten++;
                double thiscalc;
                thiscalc = m_samplesWritten;
                thiscalc *= m_nTimeScale;
                thiscalc /= m_nFrameRate;

                m_thisTime = (MP4Duration)thiscalc;
                MP4Duration dur;
                dur = m_thisTime - m_lastTime;
                */

                //ret = MP4WriteSample(fileHandle, video, buf, size, dur, 0, keyframe); //MP4_INVALID_DURATION keyframe

                if(keyframe){
                    LOGI("       type = %d, size = %d, %d       ",type, size, keyframe);
                }
                ret = MP4WriteSample(fileHandle, m_videoId, buf, size, MP4_INVALID_DURATION, 0, keyframe); // MP4_INVALID_DURATION => use the track's fixed sample duration (m_nTimeScale / m_nFrameRate)

                //method 2
                //ret = MP4WriteSample(fileHandle, m_videoId, buf, size, dur, 0, keyframe);
                //m_lastTime = m_thisTime;
                if(!ret){
                    //fprintf(stderr,   "can't write video frame %u\n", m_samplesWritten );
                    LOGI("              MP4WriteSample (video) failed               ");
                    //MP4DeleteTrack(fileHandle, m_videoId);
                    //return MP4_INVALID_TRACK_ID;
                }
    }
    (*env)->ReleaseByteArrayElements(env, data, (jbyte *)buf, 0);
}

//JNI entry point: writes one ADTS-framed AAC frame to the audio track
JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4PackA
(JNIEnv *env, jclass clz, jbyteArray data, jint size)
{
    uint8_t *bufaudio = (uint8_t *)(*env)->GetByteArrayElements(env, data, NULL);
    //LOGI("       Mp4PackA = %d       ", size);
    MP4WriteSample(fileHandle, audio, &bufaudio[7], size-7, MP4_INVALID_DURATION, 0, 1);
    /*
    bool MP4WriteSample(
        MP4FileHandle hFile,
        MP4TrackId trackId,
        const u_int8_t* pBytes,
        u_int32_t numBytes,
        MP4Duration duration DEFAULT(MP4_INVALID_DURATION),
        MP4Duration renderingOffset DEFAULT(0),
        bool isSyncSample DEFAULT(true));
        */
    //the first 7 bytes (the ADTS header) are stripped above, hence &bufaudio[7] and size-7
    (*env)->ReleaseByteArrayElements(env, data, (jbyte *)bufaudio, 0);
}
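
Note that the fixed 7-byte offset assumes ADTS frames without a CRC; when protection_absent (the lowest bit of the second header byte) is 0, the header is 9 bytes long. A more defensive sketch of the write call, assuming bufaudio starts with an ADTS sync word:

    // 7-byte ADTS header when protection_absent is set, 9 bytes when a CRC follows the header
    int headerLen = (bufaudio[1] & 0x01) ? 7 : 9;
    MP4WriteSample(fileHandle, audio, bufaudio + headerLen, size - headerLen,
                   MP4_INVALID_DURATION, 0, 1);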

//called when video recording ends
JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4End
(JNIEnv *env, jclass clz)
{
    if(fileHandle != NULL)
    {
        MP4Close(fileHandle, 0);
        fileHandle = NULL;
    }
    LOGI("              mp4close              ");
}

3、Android.mk

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := mp4v2
LOCAL_SRC_FILES := libmp4v2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)

LOCAL_C_INCLUDES += \
    $(LOCAL_PATH) \
    $(LOCAL_PATH)/mp4v2

LOCAL_SHARED_LIBRARIES := mp4v2
LOCAL_MODULE := mp
LOCAL_SRC_FILES := mp.c
LOCAL_LDLIBS += -llog

include $(BUILD_SHARED_LIBRARY)
