ffmpeg音频视频合并(前面提取出来的)命令行+ C语言

命令行

ffmpeg -i out.h264 -i out.aac -vcodec copy -acodec copy out.mp4

这次就不解释了 这些基本都解释过了

C实现

pts dts 基础知识

if ((err_code = avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_file)) < 0)
再说一次这个函数  创建输出文件类型上下文这个是根据 写好的文件名创建的 .mp4就创建对应MP4 flv就flv 

AVCodecParameters *in_codecpar
avcodec_parameters_copy(out_stream2->codecpar, in_codecpar))
音视频相关参数信息  比如要创建一个新的流来接收旧流中的东西 首先就要把参数导入

out_stream2->codecpar->codec_tag = 0;
置0表示不沿用输入容器的codec_tag，让输出容器的复用器根据编解码器自动选择合适的tag（避免不兼容的tag被原样拷贝到新容器）

//打开输出文件
if(!(ofmt->flags & AVFMT_NOFILE)){
    if((err_code = avio_open(&ofmt_ctx->pb, out_file, AVIO_FLAG_WRITE))<0){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
               "Could not open output file, %s, %d(%s)\n",
               out_file, err_code, errors);
        goto __FAIL;
    }
}

av_compare_ts(cur_pts1, in_stream1->time_base,
                        cur_pts2, in_stream2->time_base); 
 Compare two timestamps each in its own time base. 
 如果返回值是-1，则第一个参数按各自时间基换算后的时间在前；返回1则反之；两者相同则返回0
 av_rescale_q(pkt.duration, in_stream1->time_base, out_stream1->time_base);
	 时间基转换 :av_rescale_q()用于不同时间基的转换,用于将时间值从一种时间基转换为另一种时间基。
	 这里就是将pkt的时间值从in_stream的时间基转换为out_stream的时间基
/**
 * Rescale a 64-bit integer by 2 rational numbers.
 *
 * The operation is mathematically equivalent to `a × bq / cq`.
 *
 * This function is equivalent to av_rescale_q_rnd() with #AV_ROUND_NEAR_INF.
 *
 * @see av_rescale(), av_rescale_rnd(), av_rescale_q_rnd()
 */
完整代码
//
//  main.c
//  mp4_flv
//
//  Created by zyc on 2020/10/22.
//

#include <stdio.h>
#include <stdlib.h>

#include "libavutil/timestamp.h"
#include "libavformat/avformat.h"

#include "libavutil/log.h"


#define ERROR_STR_SIZE 1024

int main(int argc, char *argv[])
{

    int ret = -1;

    int err_code;
    char errors[ERROR_STR_SIZE];

    char *src_file1, *src_file2, *out_file;

    AVFormatContext *ifmt_ctx1 = NULL;
    AVFormatContext *ifmt_ctx2 = NULL;

    AVFormatContext *ofmt_ctx = NULL;
    AVOutputFormat *ofmt = NULL;

    AVStream *in_stream1 = NULL;
    AVStream *in_stream2 = NULL;

    AVStream *out_stream1 = NULL;
    AVStream *out_stream2 = NULL;

    int64_t cur_pts1=0, cur_pts2=0;

    int b_use_video_ts = 1;
    uint32_t packets = 0;
    AVPacket pkt;

    int stream1 = 0, stream2 = 0;

    av_log_set_level(AV_LOG_DEBUG);

//    if(argc < 4){
//        av_log(NULL, AV_LOG_ERROR, "Usage: \n " \
//                            "Command src_file1 src_file2 out_file \n");
//        return ret;
//    }

    src_file1 = "1";
    src_file2 = "2";

    out_file = "3";

    //open first file
    if((err_code = avformat_open_input(&ifmt_ctx1, src_file1, 0, 0)) < 0 ){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
               "Could not open src file, %s, %d(%s)\n",
               src_file1, err_code, errors);
        goto __FAIL;
    }

    if((err_code = avformat_find_stream_info(ifmt_ctx1, 0)) <0){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
               "Failed to retrieve input stream info, %s, %d(%s) \n",
               src_file1, err_code, errors);
        goto __FAIL;
    }

    av_dump_format(ifmt_ctx1, 0, src_file1, 0);

    //open second file
    if((err_code = avformat_open_input(&ifmt_ctx2, src_file2, 0, 0)) < 0 ){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
                "Could not open the second src file, %s, %d(%s)\n",
                src_file2, err_code, errors);
        goto __FAIL;
    }

    if((err_code = avformat_find_stream_info(ifmt_ctx2, 0)) <0){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
                "Failed to retrieve input stream info, %s, %d(%s) \n",
                src_file2, err_code, errors);
        goto __FAIL;
    }

    av_dump_format(ifmt_ctx2, 0, src_file2, 0);
 
    //create out context
    if((err_code = avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_file)) < 0 ){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
                "Failed to create an context of outfile , %d(%s) \n",
                err_code, errors);
    }

    ofmt = ofmt_ctx->oformat;

    //create out stream according to input stream
    if(ifmt_ctx1->nb_streams == 1){
        in_stream1 = ifmt_ctx1->streams[0];
        stream1 = 1;

        AVCodecParameters *in_codecpar = in_stream1->codecpar;

        if(in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
           in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
           in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE){
            av_log(NULL, AV_LOG_ERROR, "The Codec type is invalid!\n");
            goto __FAIL;
        }

        out_stream1 = avformat_new_stream(ofmt_ctx, NULL);
        if(!out_stream1){
            av_log(NULL, AV_LOG_ERROR, "Failed to alloc out stream!\n");
            goto __FAIL;
        }

        if((err_code = avcodec_parameters_copy(out_stream1->codecpar, in_codecpar)) < 0 ){
            av_strerror(err_code, errors, ERROR_STR_SIZE);
            av_log(NULL, AV_LOG_ERROR,
                   "Failed to copy codec parameter, %d(%s)\n",
                   err_code, errors);
        }

        out_stream1->codecpar->codec_tag = 0;

        /*
        if (ofmt->flags & AVFMT_GLOBALHEADER)
            out_stream1->codecpar->flags |= CODEC_FLAG_GLOBAL_HEADER;
            */
    }

    if(ifmt_ctx2->nb_streams == 1){
        in_stream2 = ifmt_ctx2->streams[0];
        stream2 = 1;

        AVCodecParameters *in_codecpar = in_stream2->codecpar;

        if(in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
           in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
           in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE){
            av_log(NULL, AV_LOG_ERROR, "The Codec type is invalid!\n");
            goto __FAIL;
        }

        out_stream2 = avformat_new_stream(ofmt_ctx, NULL);
        if(!out_stream2){
            av_log(NULL, AV_LOG_ERROR, "Failed to alloc out stream!\n");
            goto __FAIL;
        }

        if((err_code = avcodec_parameters_copy(out_stream2->codecpar, in_codecpar)) < 0 ){
            av_strerror(err_code, errors, ERROR_STR_SIZE);
            av_log(NULL, AV_LOG_ERROR,
                   "Failed to copy codec parameter, %d(%s)\n",
                   err_code, errors);
            goto __FAIL;
        }

        out_stream2->codecpar->codec_tag = 0;
        /*
        if (ofmt->flags & AVFMT_GLOBALHEADER)
            out_stream2->codecpar->flags |= CODEC_FLAG_GLOBAL_HEADER;
            */
    }

    av_dump_format(ofmt_ctx, 0, out_file, 1);

    //open out file
    if(!(ofmt->flags & AVFMT_NOFILE)){
        if((err_code = avio_open(&ofmt_ctx->pb, out_file, AVIO_FLAG_WRITE))<0){
            av_strerror(err_code, errors, ERROR_STR_SIZE);
            av_log(NULL, AV_LOG_ERROR,
                   "Could not open output file, %s, %d(%s)\n",
                   out_file, err_code, errors);
            goto __FAIL;
        }
    }

    //write media header
    if((err_code = avformat_write_header(ofmt_ctx, NULL)) < 0){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
               "Error occurred when writing media header!\n");
        goto __FAIL;
    }

    av_init_packet(&pkt);

    while ( stream1 || stream2 ) {
        /* select the stream to encode */
        //stream1 = 1 stream2 = 0; av_compare_ts返回值是1
        ret = av_compare_ts(cur_pts1, in_stream1->time_base,
                            cur_pts2, in_stream2->time_base);
        if (stream1 &&
            ( !stream2 || av_compare_ts(cur_pts1, in_stream1->time_base,
                                            cur_pts2, in_stream2->time_base) <= 0)) {
            ret = av_read_frame(ifmt_ctx1, &pkt);
            if(ret < 0 ){
                stream1 = 0;
                continue;
            }

            //pkt.pts = packets++;
            //in_stream1->time_base = (AVRational){in_stream1->r_frame_rate.den, in_stream1->r_frame_rate.num};

//            if(!b_use_video_ts &&
//                    (in_stream1->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)){
//                pkt.pts = ++packets;
//                in_stream1->time_base = (AVRational){in_stream1->r_frame_rate.den, in_stream1->r_frame_rate.num};
//
//                //pkt.pts = av_rescale_q(pkt.pts, fps, out_stream1->time_base);
//                //pkt.dts = av_rescale_q(pkt.dts, fps, out_stream1->time_base);
//
//                pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream1->time_base, out_stream1->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
//                //pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream1->time_base, out_stream1->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
//                //pkt.duration = av_rescale_q(pkt.duration, fps, out_stream1->time_base);
//                pkt.dts = pkt.pts;
//                av_log(NULL, AV_LOG_DEBUG, "xxxxxxxxx%d, dts=%lld, pts=%lld\n", packets, pkt.dts, pkt.pts);
//            }

            //FIX:No PTS (Example: Raw H.264)
            //Simple Write PTS
            if(pkt.pts==AV_NOPTS_VALUE){
                //Write PTS
                AVRational time_base1 = in_stream1->time_base;
                //Duration between 2 frames (us)
                av_log(NULL, AV_LOG_DEBUG, "AV_TIME_BASE=%d,av_q2d=%d(num=%d, den=%d)\n",
                                        AV_TIME_BASE,
                                        av_q2d(in_stream1->r_frame_rate),
                                        in_stream1->r_frame_rate.num,
                                        in_stream1->r_frame_rate.den);

                int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream1->r_frame_rate);
                //Parameters
                pkt.pts=(double)(packets*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
                pkt.dts=pkt.pts;
                cur_pts1 = pkt.pts;
                pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
                packets++;
            }

            //Convert PTS/DTS
            pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream1->time_base, out_stream1->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt.dts = pkt.pts;
            //pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream1->time_base, out_stream1->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            //pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream1->time_base, out_stream1->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));

            pkt.duration = av_rescale_q(pkt.duration, in_stream1->time_base, out_stream1->time_base);
            pkt.pos = -1;
            pkt.stream_index=0;
            av_log(NULL, AV_LOG_DEBUG, "xxxxxxxxx%d, dts=%lld, pts=%lld\n", packets, pkt.dts, pkt.pts);

            stream1 = !av_interleaved_write_frame(ofmt_ctx, &pkt);
            //stream1 = !av_write_frame(ofmt_ctx, &pkt);
        } else if(stream2){
            ret = av_read_frame(ifmt_ctx2, &pkt);
            if(ret < 0 ){
                stream2 = 0;
                continue;
            }

//            if(!b_use_video_ts &&
//                    (in_stream2->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)){
//                pkt.pts = packets++;
//                pkt.dts = pkt.pts;
//            }


            cur_pts2 = pkt.pts;
            //Convert PTS/DTS
            pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream2->time_base, out_stream2->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt.dts= pkt.pts;
            //pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream2->time_base, out_stream2->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));

            pkt.duration = av_rescale_q(pkt.duration, in_stream2->time_base, out_stream2->time_base);
            pkt.pos = -1;
            pkt.stream_index=1;

            av_log(NULL, AV_LOG_DEBUG, "Write stream2 Packet. size:%5d\tpts:%lld\tdts:%lld\n",pkt.size,pkt.pts, pkt.dts);


            stream2 = !av_interleaved_write_frame(ofmt_ctx, &pkt);
        }

        av_packet_unref(&pkt);
    }

    //write media tailer
    if((err_code = av_write_trailer(ofmt_ctx)) < 0){
        av_strerror(err_code, errors, ERROR_STR_SIZE);
        av_log(NULL, AV_LOG_ERROR,
               "Error occurred when writing media tailer!\n");
        goto __FAIL;
    }

    ret = 0;

__FAIL:

    if(ifmt_ctx1){
        avformat_close_input(&ifmt_ctx1);
    }

    if(ifmt_ctx2){
        avformat_close_input(&ifmt_ctx2);
    }

    if(ofmt_ctx){
        if(!(ofmt->flags & AVFMT_NOFILE)){
            avio_closep(&ofmt_ctx->pb);
        }
        avformat_free_context(ofmt_ctx);
    }


    return ret;
}




  • 3
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值