x264 / FFmpeg Encoding and Decoding

demo: https://github.com/wangzuxing/MyFFmpegH264H265YUVOpenGL

H.264 encoding:
The camera preview data on the Java side is passed down to JNI, where the x264 library (or FFmpeg) encodes it and writes a .264 file.

H.264 decoding:
The camera preview data on the Java side is first encoded by MediaCodec and then passed to JNI, where FFmpeg decodes it in real time; the decoded YUV data is sent straight back to Java for display (OpenGL YUV rendering).

Java side:
MainActivity0:

static {
    ...
    // load x264 first: libffmpeg is linked against libx264.so
    System.loadLibrary("x264");
    System.loadLibrary("ffmpeg");
    ...
}

//x264 encoding: call the x264 encoder library directly
public native void Mp4CC(String h264_file); // path of the file the encoded stream is written to
public native void Mp4CE(byte[] array, int length); // camera preview data passed into JNI, where x264 encodes it and appends it to the file
public native void Mp4EE();
...

//ffmpeg + x264 encoding: x264 is built as libx264.so and compiled into FFmpeg, so FFmpeg drives the encoder through the unified avcodec API.
// avcodec_register_all, avcodec_find_encoder, avcodec_alloc_context3 and friends build the encoding context directly (AVCodecContext -> AVCodec, operating on AVFrame/AVPacket).
//(Alternatively, one more layer can be added with avformat_alloc_context/avformat_alloc_output_context2 -- AVFormatContext -> AVStream -> AVCodecContext -> AVCodec; see the sketch after these declarations.)
// Choose whichever fits the actual need.
public native void Mp4FpgCC(String h264_file); 
public native void Mp4FpgCE(byte[] array, int length);
public native void Mp4FpgEE();
...
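
A minimal JNI-side sketch of the avformat-based route mentioned above (AVFormatContext -> AVStream -> AVCodecContext -> AVCodec), using the same FFmpeg 2.x-era API as the rest of this post; the output path and dimensions are illustrative:

AVFormatContext *ofmt_ctx = NULL;
AVStream *video_st;
AVCodec *venc;

av_register_all();
avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, "/sdcard/out.mp4");

venc = avcodec_find_encoder(AV_CODEC_ID_H264);
video_st = avformat_new_stream(ofmt_ctx, venc);      // the AVStream owns an AVCodecContext
video_st->codec->pix_fmt   = AV_PIX_FMT_YUV420P;
video_st->codec->width     = 320;
video_st->codec->height    = 240;
video_st->codec->time_base = (AVRational){1, 25};

avcodec_open2(video_st->codec, venc, NULL);          // open the encoder
avio_open(&ofmt_ctx->pb, "/sdcard/out.mp4", AVIO_FLAG_WRITE);
avformat_write_header(ofmt_ctx, NULL);               // write the container header
// ... then av_interleaved_write_frame(ofmt_ctx, &pkt) per encoded packet and
// av_write_trailer(ofmt_ctx) to finalize the file.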

//ffmpeg + x264 decoding:
// calls avcodec_register_all, avcodec_find_decoder and avcodec_alloc_context3 directly

public native void Mp4FpgDCC0(String out_file, String h264_file);
// h264_file: the .264 file to decode; out_file: a YUV file the decoded output can be written to for testing.
// Here Mp4FpgDCC0() only initializes the decoder state and does not touch either file.

public native void Mp4FpgDCE0(byte[] array, int length);
// MediaCodec-encoded camera preview data is passed into JNI, where FFmpeg decodes it in real time;
// the decoded YUV data is sent straight back to Java for display (OpenGL YUV rendering).

public native void Mp4FpgDEE0();

JNI side:
mp.c

// Encoding with the x264 library directly -- the x264 workflow is much the same as x265's

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4CC
 (JNIEnv *env, jclass clz, jstring h264_file)
{
  int m_bitRate;
    const char* h264_file_n = (*env)->GetStringUTFChars(env, h264_file, NULL);
    h264s_file = fopen(h264_file_n, "wb");
    if(h264s_file == NULL){
       LOGI("              Mp4CC Could not open output file             ");
       return;
    }
    yuv_size = (width * height * 3) / 2; // size of one I420 frame

    x264_param_default_preset(&m_param, "fast", "zerolatency");
    //x264_param_default(&m_param);
    //m_param.rc.i_bitrate = (int)m_bitRate/1000;
    m_bitRate = 2000000;
    m_param.rc.i_rc_method = X264_RC_ABR;   // rate-control method: CQP (constant quantizer), CRF (constant rate factor, i.e. constant quality), ABR (average bitrate).
                                            // CQP targets a fixed quantizer, ABR targets output size, CRF targets "visual quality";
                                            // the three modes are illustrated in a sketch after this function.
    m_param.rc.i_vbv_max_bitrate = (int)((m_bitRate * 1.2) / 1000); // peak instantaneous bitrate in ABR mode, default 0 (same as the -B setting)
    m_param.rc.i_bitrate = (int)m_bitRate / 1000; // average bitrate, in kbit/s
    m_param.i_threads = 1;   // number of worker threads
    m_param.i_width   = width;
    m_param.i_height  = height;
    m_param.i_fps_num = fps;
    m_param.i_fps_den = 1;
    m_param.i_bframe  = 10;  // number of B-frames between reference frames

    //m_param.i_csp = (csp == 17) ? X264_CSP_NV12 : csp; // colorspace of the input pictures; this demo only feeds I420
    m_param.i_keyint_max    = 25; // maximum IDR keyframe interval
    m_param.b_intra_refresh = 1;
    m_param.b_annexb  = 1;        // prefix each NAL unit with an Annex-B start code
    //m_param.b_repeat_headers = 1;  // repeat SPS/PPS before every keyframe

    x264_param_apply_profile(&m_param, "baseline"); // profile: baseline, main or high
    encoder = x264_encoder_open(&m_param);

    x264_encoder_parameters(encoder, &m_param);    // read back the parameters the encoder actually uses
    x264_picture_alloc(&pic_in, X264_CSP_I420, width, height); // the encoder input format is I420

    yuv_buffer = (uint8_t *)malloc(yuv_size);

    //pic_in.img.i_csp = X264_CSP_I420;
    //pic_in.img.i_plane = 3;

    // repoint the picture planes at our own I420 buffer (this abandons the
    // planes x264_picture_alloc just allocated; see the note in Mp4EE)
    pic_in.img.plane[0] = yuv_buffer;
    pic_in.img.plane[1] = pic_in.img.plane[0] + width * height;
    pic_in.img.plane[2] = pic_in.img.plane[1] + width * height / 4;

  run_ce = 0;
  i_pts  = 0;

    // maximum number of frames the encoder is allowed to buffer
    int iMaxFrames = x264_encoder_maximum_delayed_frames(encoder);
    LOGI("              Mp4CC iMaxFrames = %d           ", iMaxFrames);

    // number of frames currently buffered inside x264:
    //int iFrames = x264_encoder_delayed_frames(encoder);

    (*env)->ReleaseStringUTFChars(env, h264_file, h264_file_n);
}
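
The rate-control comment in Mp4CC names three modes. A minimal sketch of how each one is selected through the x264 API (the numeric values are illustrative, not from the demo):

x264_param_t p;
x264_param_default_preset(&p, "fast", "zerolatency");

// ABR: target an average bitrate in kbit/s, as Mp4CC does
p.rc.i_rc_method = X264_RC_ABR;
p.rc.i_bitrate   = 2000;             // 2 Mbit/s average

// CRF: target constant perceived quality instead of a bitrate
p.rc.i_rc_method = X264_RC_CRF;
p.rc.f_rf_constant = 23.0f;          // lower = better quality, bigger file

// CQP: lock the quantizer itself
p.rc.i_rc_method = X264_RC_CQP;
p.rc.i_qp_constant = 26;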

int nal_n;
JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4CE
 (JNIEnv *env, jclass clz, jbyteArray data, jint size)
{
    unsigned char *buf = (unsigned char *)(*env)->GetByteArrayElements(env, data, NULL);
    memcpy(yuv_buffer, buf, yuv_size); // assumes the Java side always delivers one full I420 frame

    nnal  = 0;
    nal_n = 0;
    pic_in.i_pts = i_pts++;
    x264_encoder_encode(encoder, &nals, &nnal, &pic_in, &pic_out);

    x264_nal_t *nal;
    for (nal = nals; nal < nals + nnal; nal++) {
          nal_n++;
        fwrite(nal->p_payload, 1, nal->i_payload, h264s_file);
    }
    run_ce++;
    LOGI("              Mp4CE %d  %d           ",run_ce, nnal);
    (*env)->ReleaseByteArrayElements(env, data, (jbyte *)buf, 0);
}
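
Mp4CE copies the preview bytes straight into an I420 buffer, so the Java side is assumed to deliver I420 already. Android preview callbacks default to NV21, so if raw NV21 arrives, a conversion along these lines is needed first (nv21_to_i420 is a hypothetical helper, not part of the demo):

// Convert Android's default NV21 preview layout (Y plane + interleaved V/U)
// to the planar I420 layout x264 expects.
static void nv21_to_i420(const uint8_t *nv21, uint8_t *i420, int w, int h)
{
    int y_size = w * h;
    int uv_pairs = y_size / 4;
    const uint8_t *vu = nv21 + y_size;       // interleaved V,U samples
    uint8_t *u = i420 + y_size;              // U plane
    uint8_t *v = i420 + y_size + uv_pairs;   // V plane
    int i;
    memcpy(i420, nv21, y_size);              // Y plane copies straight across
    for (i = 0; i < uv_pairs; i++) {
        v[i] = vu[2 * i];                    // V comes first in NV21
        u[i] = vu[2 * i + 1];
    }
}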

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4EE
 (JNIEnv *env, jclass clz)
{
    // flush the frames still buffered inside the encoder
    // (a variant bounded by x264_encoder_delayed_frames() follows this function)
    while(1){
        int j;
        int ret = x264_encoder_encode(encoder, &nals, &nnal, NULL, &pic_out);
        if(ret<=0){
            break;
        }
        for (j=0; j < nnal; j++){
            fwrite(nals[j].p_payload, 1, nals[j].i_payload, h264s_file);
        }
    }
    LOGI("              Mp4EE end             ");

  x264_picture_clean(&pic_in);  // note: this frees plane[0], which was repointed at yuv_buffer,
                                // so a free(yuv_buffer) here would be a double free
  //x264_picture_clean(&pic_out);
  x264_encoder_close(encoder);
  fclose(h264s_file);
  //free(yuv_buffer);
}
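
An equivalent flush can be bounded by x264_encoder_delayed_frames(), the counter mentioned in Mp4CC; a sketch using the same globals as Mp4EE:

int j;
while (x264_encoder_delayed_frames(encoder) > 0) {
    int ret = x264_encoder_encode(encoder, &nals, &nnal, NULL, &pic_out);
    if (ret <= 0)
        break;                        // error, or nothing left to emit
    for (j = 0; j < nnal; j++)
        fwrite(nals[j].p_payload, 1, nals[j].i_payload, h264s_file);
}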

// Encoding through FFmpeg's libx264 (driven as a regular avcodec encoder)
AVCodec *ptrcodec;
AVCodecContext *pctx= NULL;
FILE *ptrf;
FILE *ptrfo;
AVFrame *ptrframe;
AVPacket avpkt;
uint8_t endcode[] = { 0, 0, 1, 0xb7 };

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgCC
 (JNIEnv *env, jclass clz, jstring h264_file)
{
        const char* h264_file_n = (*env)->GetStringUTFChars(env, h264_file, NULL); // path of the file the encoded stream is written to
        filename = h264_file_n;
        int ret;
        int codec_id = AV_CODEC_ID_H264;

        // register all codecs
        avcodec_register_all();

        LOGI("              Mp4FpgCC %s             ", filename);

        //look up the AVCodec for this AVCodecID
        ptrcodec = avcodec_find_encoder(codec_id);
        if (!ptrcodec) {
            LOGI("Codec not found\n");
            exit(1);
        }

        //allocate an AVCodecContext for the AVCodec
        pctx = avcodec_alloc_context3(ptrcodec);
        if (!pctx) {
            LOGI("Could not allocate video codec context\n");
            exit(1);
        }

        int bitrate = 1000;
        int br = 1000*1000;
        int fps = 25;
        //pctx->codec_type = AVMEDIA_TYPE_VIDEO;
        /*
        pctx->bit_rate = br;
        pctx->rc_min_rate = br;
        pctx->rc_max_rate = br;
        pctx->bit_rate_tolerance = br;
        pctx->rc_buffer_size = br;
        pctx->rc_initial_buffer_occupancy = pctx->rc_buffer_size*3/4;
        pctx->rc_buffer_aggressivity = (float)1.0;
        pctx->rc_initial_cplx = 0.5f;
        */
        //av_opt_set(pctx->priv_data, "crf", "1", AV_OPT_SEARCH_CHILDREN);

        //pctx->bit_rate = bitrate * 1000;
        //pctx->bit_rate_tolerance = 2000000; // how many bits the stream may drift from the configured rate (CBR or VBR)

        // leaving bit_rate unset makes the encoder fall back to CRF rate control
        pctx->width  = width;
        pctx->height = height;
        pctx->time_base.den = fps;
        pctx->time_base.num = 1;
        pctx->gop_size = fps;    // * 10;
        //pctx->refs = 3;
        pctx->max_b_frames = 3;  // maximum number of B-frames between two non-B frames; 0 disables B-frames; more B-frames, smaller output
        //pctx->trellis = 2;

        //pctx->me_method = 8;
        //pctx->me_range = 64; //16;

        //pctx->me_subpel_quality = 7;
        //pctx->qmin = 10;        // 0~51 for H.264; a smaller quantizer means finer quantization, better image quality and a longer stream
        //pctx->qmax = 51;
        //pctx->rc_initial_buffer_occupancy = 0.9;
        pctx->i_quant_factor = 1.0 / 1.40f; // P/I quantizer ratio; the closer to 1, the sharper the P-frames.
                                            // P quantizer = I quantizer * i_quant_factor + i_quant_offset
        // x4->params.rc.f_ip_factor = 1 / fabs(avctx->i_quant_factor);

        pctx->b_quant_factor = 1.30f;       // B vs I/P quantizer ratio; the larger it is, the worse the B-frames look.
                                            // B quantizer = previous P quantizer * b_quant_factor + b_quant_offset
        //pctx->chromaoffset = 0;

        //pctx->max_qdiff = 4;
        //pctx->qcompress = 0.6f; // 0.0-1.0: how much the quantizer may vary between "easy" and "hard" scenes
        //pctx->qblur = 0.5f;     // 0.0-1.0: how strongly the quantizer average decays over time; 0 means no decay

        pctx->pix_fmt = AV_PIX_FMT_YUV420P;

        //pctx->scenechange_threshold = 40;
        //pctx->flags |= CODEC_FLAG_LOOP_FILTER;
        //pctx->me_cmp = FF_CMP_CHROMA;
        //pctx->flags2 |= CODEC_FLAG_NORMALIZE_AQP;
        //pctx->keyint_min = 25;

        //pctx->rc_qsquish = 1.0; // how the qmin/qmax limits are enforced: 0 = hard clipping, 1 = a smooth continuous function
        //pctx->level = 30;
        //pctx->b_frame_strategy = 2;
        //pctx->codec_tag = 7;
        //pctx->codec_tag = 7;

        /*
        // encoder presets via an AVDictionary
        AVDictionary *dictParam = 0;
        if(pctx->codec_id == AV_CODEC_ID_H264)
        {
           av_dict_set(&dictParam, "preset", "medium", 0);
           av_dict_set(&dictParam, "tune", "zerolatency", 0);
           av_dict_set(&dictParam, "profile", "main", 0);
        }
        */

        if (codec_id == AV_CODEC_ID_H264) {
            //av_opt_set(pctx->priv_data, "preset", "slow", 0);
            av_opt_set(pctx->priv_data, "preset", "ultrafast", 0);
            av_opt_set(pctx->priv_data, "tune", "zerolatency", 0);
            //av_opt_set(pctx->priv_data, "profile", "main", 0);
        }

        /* open it */
        if (avcodec_open2(pctx, ptrcodec, NULL) < 0) {
              LOGI("Could not open codec\n");
            exit(1);
        }

        ptrf = fopen(filename, "wb");
        if (!ptrf) {
              LOGI("Could not open %s\n", filename);
            exit(1);
        }

        ptrframe = av_frame_alloc();
        if (!ptrframe) {
              LOGI("Could not allocate video frame\n");
            exit(1);
        }
        ptrframe->format =  pctx->pix_fmt;
        ptrframe->width  =  pctx->width;
        ptrframe->height =  pctx->height;

        /* the image can be allocated by any means and av_image_alloc() is
         * just the most convenient way if av_malloc() is to be used */
        ret = av_image_alloc(ptrframe->data, ptrframe->linesize, pctx->width, pctx->height,
                pctx->pix_fmt, 32);
        if (ret < 0) {
            LOGI("Could not allocate raw picture buffer\n");
            exit(1);
        }

        // allocate a buffer sized for one I420 frame
        picture_size = pctx->width*pctx->height*3/2;
        picture_buf = (uint8_t *)av_malloc(picture_size);

        int y_size = pctx->width*pctx->height;
        LOGI(" w = %d, h = %d, picture_size= %d, y_size = %d\n", pctx->width, pctx->height, picture_size, y_size);

        // I420 plane layout (note: this repoints data[] away from the buffer
        // av_image_alloc() just filled in, which is then leaked)
        ptrframe->data[0] = picture_buf;                 // Y
        ptrframe->data[1] = picture_buf + y_size;        // U
        ptrframe->data[2] = picture_buf + y_size*5/4;    // V

        av_init_packet(&avpkt);
        avpkt.data = NULL;    // packet data will be allocated by the encoder
        avpkt.size = 0;

        framecnt  = 0;
        ffmpeg_ce = 0;
        frame_pts = 0;
        (*env)->ReleaseStringUTFChars(env, h264_file, h264_file_n);
}

int total_st;
int total_stream;

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgCE
 (JNIEnv *env, jclass clz, jbyteArray data, jint size)
{
        unsigned char *buf = (unsigned char *)(*env)->GetByteArrayElements(env, data, NULL);
        memcpy(picture_buf, buf, picture_size);

        int got_picture=0;

        av_init_packet(&avpkt);
        avpkt.data = NULL;    // packet data will be allocated by the encoder
        avpkt.size = 0;

        // video timestamp options:
        //pkt.pts = inc++ * (1000/fps); // inc starts at 0 and increments after each stamped frame
        //pkt.pts = m_nVideoTimeStamp++ * (pctx->time_base.num * 1000 / pctx->time_base.den);

        //Encode
        int ret = avcodec_encode_video2(pctx, &avpkt, ptrframe, &got_picture);
        if(ret < 0){
            LOGI("              Mp4FpgCE Failed to encode!            ");
            return;
        }
        if (got_picture){
            //if (pctx->coded_frame->pts != AV_NOPTS_VALUE) {
            //    avpkt.pts = av_rescale_q(pctx->coded_frame->pts, pctx->time_base, ost->st->time_base);
            //}
            //pkt.stream_index = video_st0->index;
            //avpkt.pts = frame_pts*1000/25;
            avpkt.pts = frame_pts*(pctx->time_base.num*1000/pctx->time_base.den); // milliseconds; see the av_rescale_q sketch after this function
            frame_pts++;

            fwrite(avpkt.data, 1, avpkt.size, ptrf);
            av_packet_unref(&avpkt);
        }

        ffmpeg_ce++;
        (*env)->ReleaseByteArrayElements(env, data, (jbyte *)buf, 0);
}
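
The hand-rolled pts arithmetic above yields milliseconds. The idiomatic FFmpeg way to move between time bases is av_rescale_q; a sketch with illustrative 25 fps values:

AVRational frame_tb = {1, 25};     // one tick per frame at 25 fps
AVRational ms_tb    = {1, 1000};   // millisecond time base, as used above
int64_t pts_ms = av_rescale_q(frame_pts, frame_tb, ms_tb); // == frame_pts * 40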

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgEE
 (JNIEnv *env, jclass clz)
{
    LOGI("              Mp4FpgEE              ");
    // drain the frames still buffered inside the encoder; a single call is not
    // enough when B-frames delay the output (a send/receive-API version of this
    // loop follows this function)
    int got_output = 0;
    do {
        int ret = avcodec_encode_video2(pctx, &avpkt, NULL, &got_output);
        if (ret < 0) {
            LOGI("Error encoding frame\n");
            break;
        }
        if (got_output) {
            avpkt.pts = frame_pts*(pctx->time_base.num*1000/pctx->time_base.den);
            frame_pts++;

            fwrite(avpkt.data, 1, avpkt.size, ptrf);
            av_packet_unref(&avpkt);
        }
    } while (got_output);
    /* MPEG sequence end code, carried over from FFmpeg's encoding example;
       not strictly needed for a raw H.264 stream */
    fwrite(endcode, 1, sizeof(endcode), ptrf);
    fclose(ptrf);

    avcodec_close(pctx);
    av_free(pctx);
    av_freep(&ptrframe->data[0]);
    av_frame_free(&ptrframe);

}
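
avcodec_encode_video2() is deprecated from FFmpeg 3.1 on. A minimal sketch of the same encode-and-drain logic with the send/receive API, assuming a newer FFmpeg build than the one this post targets:

// Encode one frame (or flush with frame == NULL) and write all resulting packets.
static int encode_and_write(AVCodecContext *ctx, AVFrame *frame, FILE *out)
{
    AVPacket pkt;
    int ret;

    av_init_packet(&pkt);
    pkt.data = NULL;   // the encoder allocates the payload
    pkt.size = 0;

    ret = avcodec_send_frame(ctx, frame);   // frame == NULL starts the flush
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_packet(ctx, &pkt)) == 0) {
        fwrite(pkt.data, 1, pkt.size, out);
        av_packet_unref(&pkt);
    }
    // EAGAIN: the encoder wants more input; EOF: the flush is finished
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret;
}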

// Decoding through FFmpeg's avcodec API (FFmpeg's built-in H.264 decoder -- libx264 itself only encodes)
uint8_t* packet_buf;

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgDCC0
 (JNIEnv *env, jclass clz, jstring outfile_name, jstring h264_file)
{
    const char* h264_file_n = (*env)->GetStringUTFChars(env, h264_file, NULL); // h264_file: the .264 file to decode
    const char* h264_file_o = (*env)->GetStringUTFChars(env, outfile_name, NULL);

    filename = h264_file_n;
    outfilename = h264_file_o;
    int ret, i;
    int codec_id = AV_CODEC_ID_H264;
    avcodec_register_all();

    // find the H.264 video decoder
    ptrcodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!ptrcodec) {
        LOGI("Codec not found\n");
         exit(1);
    }
  pctx = avcodec_alloc_context3(ptrcodec);
  if (!pctx) {
        LOGI("Could not allocate video codec context\n");
        exit(1);
    }
    if (ptrcodec->capabilities & AV_CODEC_CAP_TRUNCATED)
        pctx->flags |= AV_CODEC_FLAG_TRUNCATED; // we do not send complete frames (see the parser sketch after this function)
    /* open it */
    if (avcodec_open2(pctx, ptrcodec, NULL) < 0) {
        LOGI("Could not open codec\n");
        exit(1);
  }  
    ptrframe = av_frame_alloc();
    if (!ptrframe) {
        LOGI("Could not allocate video frame\n");
        exit(1);
    }
    av_init_packet(&avpkt);

    frame_count = 0;
    ffmpeg_ce = 0;
    frame_pts = 0;

    // buffer for one incoming compressed frame, sized for the demo's 320x240
    // stream (a compressed frame is assumed to fit in width*height/2 bytes)
    packet_buf = (unsigned char *)malloc(320*240/2);
    //(uint8_t *)av_malloc(pctx->width*pctx->height/2); //pctx->width*pctx->height*3/2

    LOGI("        Mp4FpgDCC0 end     \n");

    (*env)->ReleaseStringUTFChars(env, h264_file, h264_file_n);
    (*env)->ReleaseStringUTFChars(env, outfile_name, h264_file_o);
}
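
The TRUNCATED flag requested above lets the decoder accept partial frames, but it was removed in newer FFmpeg versions. The long-standing alternative is to cut the incoming Annex-B byte stream into whole packets with the H.264 parser before decoding; a sketch using the same buf/size names as Mp4FpgDCE0:

AVCodecParserContext *parser = av_parser_init(AV_CODEC_ID_H264); // once, at setup

// ...then, for each chunk of input bytes:
uint8_t *frame_data = NULL;
int frame_len = 0;
while (size > 0) {
    int used = av_parser_parse2(parser, pctx, &frame_data, &frame_len,
                                buf, size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
    buf  += used;
    size -= used;
    if (frame_len > 0) {
        // frame_data/frame_len now hold one complete frame's NAL units;
        // feed them to the decoder as pkt.data/pkt.size
    }
}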

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgDCE0
 (JNIEnv *env, jobject obj, jbyteArray data, jint size)
{
    int len;
    unsigned char *buf = (unsigned char *)(*env)->GetByteArrayElements(env, data, NULL);
    memcpy(packet_buf, buf, size); // assumes size never exceeds the packet_buf allocation

    got_frame = 0;

    av_init_packet(&pkt);
    pkt.size = size;
    pkt.data = packet_buf;
    while (pkt.size > 0)
    {
        len = avcodec_decode_video2(pctx, ptrframe, &got_frame, &pkt);
        if (len < 0) {
            LOGI("Error while decoding frame %d\n", frame_count);
            //av_packet_unref(&pkt);
            break;
        }
        // consume the bytes the decoder accepted
        pkt.size -= len;
        pkt.data += len;
        if(got_frame){
            if(frame_size==0){
                frame_size = ptrframe->width*ptrframe->height;
                frame_size_l = frame_size*3/2;
            }
            if(!temp_store_a && frame_size>0){
                temp_store_a = (unsigned char *)malloc(frame_size*3/2);
                LOGI(" Saving frame %d, %d, %d\n", frame_count, frame_size, frame_size_l);
            }

            // pack the three planes into one tight I420 buffer
            // (see the pgm_save2 sketch after this function)
            pgm_save2(ptrframe->data[0], ptrframe->linesize[0], ptrframe->width, ptrframe->height, temp_store_a);
            pgm_save2(ptrframe->data[1], ptrframe->linesize[1], ptrframe->width/2, ptrframe->height/2, temp_store_a+frame_size);
            pgm_save2(ptrframe->data[2], ptrframe->linesize[2], ptrframe->width/2, ptrframe->height/2, temp_store_a+frame_size*5/4);

            if(method1==0){
                //1. find the Java class
                // jclass (*FindClass)(JNIEnv*, const char*);
                jclass dpclazz = (*env)->FindClass(env,"com/example/mymp4v2h264/MainActivity0");
                if(dpclazz==0){
                    return;
                }
                LOGI("find class ");

                //2. look up the callback method on that class
                //jmethodID (*GetMethodID)(JNIEnv*, jclass, const char*, const char*);
                method1 = (*env)->GetMethodID(env,dpclazz,"updateYUV","([BII)V");
                if(method1==0){
                    LOGI("find method1 error");
                    return;
                }
                LOGI("find method1 ");
            }else{
                //3. call the method: hand the decoded frame up to the Java-side
                //   buffer list for display (Java renders it as an OpenGL ES YUV texture)
                // void (*CallVoidMethod)(JNIEnv*, jobject, jmethodID, ...);
                jbyteArray result = (*env)->NewByteArray(env, frame_size_l);
                (*env)->SetByteArrayRegion(env, result, 0, frame_size_l, (jbyte *)temp_store_a);

                (*env)->CallVoidMethod(env, obj, method1, result, ptrframe->width, ptrframe->height);
                (*env)->DeleteLocalRef(env, result); // avoid piling up local refs inside the loop
            }
            frame_count++;
        }
    }

    (*env)->ReleaseByteArrayElements(env, data, (jbyte *)buf, 0);
}
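
pgm_save2() itself is not shown in the post. Judging from the calls above, it presumably copies one plane row by row, dropping the decoder's linesize padding so the destination ends up tightly packed; a sketch of that assumption:

static void pgm_save2(uint8_t *plane, int linesize,
                      int width, int height, unsigned char *dst)
{
    int y;
    for (y = 0; y < height; y++)
        memcpy(dst + y * width, plane + y * linesize, width);
}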

JNIEXPORT void JNICALL Java_com_example_mymp4v2h264_MainActivity0_Mp4FpgDEE0
 (JNIEnv *env, jclass clz)
{
    LOGI("              Mp4FpgDEE0              ");
    avcodec_close(pctx);
    av_free(pctx);
    av_frame_free(&ptrframe);
    free(packet_buf);    // release the buffers allocated in Mp4FpgDCC0/Mp4FpgDCE0
    free(temp_store_a);
}
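
H.264 decoders can also hold frames back for B-frame reordering. A sketch of draining them with the same avcodec_decode_video2() API used in Mp4FpgDCE0, to run before the avcodec_close() above:

pkt.data = NULL;   // a NULL/empty packet asks the decoder to flush
pkt.size = 0;
do {
    got_frame = 0;
    if (avcodec_decode_video2(pctx, ptrframe, &got_frame, &pkt) < 0)
        break;
    // any frame produced here is handed to updateYUV exactly as in Mp4FpgDCE0
} while (got_frame);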