ffmpeg time_base

ffmpeg has several time bases (time_base), each tied to a different stage (struct) and each with a different concrete value; ffmpeg provides functions for converting between them. Understanding where each time_base comes from matters a lot when reading the ffmpeg source code.

I. time_base
1. AVStream (libavformat/avformat.h)


typedef struct AVStream {
     /**
      * This is the fundamental unit of time (in seconds) in terms
      * of which frame timestamps are represented.
      *
      * decoding: set by libavformat
      * encoding: May be set by the caller before avformat_write_header() to
      *           provide a hint to the muxer about the desired timebase. In
      *           avformat_write_header(), the muxer will overwrite this field
      *           with the timebase that will actually be used for the timestamps
      *           written into the file (which may or may not be related to the
      *           user-provided one, depending on the format).
      */
     AVRational time_base;

     /**
      * Decoding: pts of the first frame of the stream in presentation order, in stream time base.
      * Only set this if you are absolutely 100% sure that the value you set
      * it to really is the pts of the first frame.
      * This may be undefined (AV_NOPTS_VALUE).
      * @note The ASF header does NOT contain a correct start_time the ASF
      * demuxer must NOT set this.
      */
     int64_t start_time;

     /**
      * Decoding: duration of the stream, in stream time base.
      * If a source file does not specify a duration, but does specify
      * a bitrate, this value will be estimated from bitrate and file size.
      */
     int64_t duration;

From the comments above, AVStream->time_base is expressed in seconds.

So what is the actual value of AVStream->time_base? Take the mpegts demuxer as an example:


static int mpegts_set_stream_info(AVStream *st, PESContext *pes,
                                   uint32_t stream_type, uint32_t prog_reg_desc)
 {
 avpriv_set_pts_info(st, 33, 1, 90000);


 void avpriv_set_pts_info(AVStream *s, int pts_wrap_bits,
                          unsigned int pts_num, unsigned int pts_den)
 {
     AVRational new_tb;
     if (av_reduce(&new_tb.num, &new_tb.den, pts_num, pts_den, INT_MAX)) {
         if (new_tb.num != pts_num)
             av_log(NULL, AV_LOG_DEBUG,
                    "st:%d removing common factor %d from timebase\n",
                    s->index, pts_num / new_tb.num);
     } else
         av_log(NULL, AV_LOG_WARNING,
                "st:%d has too large timebase, reducing\n", s->index);

     if (new_tb.num <= 0 || new_tb.den <= 0) {
         av_log(NULL, AV_LOG_ERROR,
                "Ignoring attempt to set invalid timebase %d/%d for st:%d\n",
                new_tb.num, new_tb.den,
                s->index);
         return;
     }
     s->time_base     = new_tb;
     av_codec_set_pkt_timebase(s->codec, new_tb);
     s->pts_wrap_bits = pts_wrap_bits;
 }

The call avpriv_set_pts_info(st, 33, 1, 90000) sets AVStream->time_base to 1/90000. Why 90000? Because MPEG pts and dts values are sampled against a 90 kHz clock, so the sampling interval is 1/90000 of a second.
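As a quick illustration (a minimal sketch, not from the original post), a timestamp expressed in this time base can be turned into seconds with av_q2d():

 /* Hypothetical helper: convert a timestamp given in AVStream->time_base units
  * into seconds. For an MPEG-TS stream st->time_base is 1/90000, so a pts of
  * 180000 corresponds to 2.0 seconds. */
 static double stream_ts_to_seconds(const AVStream *st, int64_t ts)
 {
     return ts * av_q2d(st->time_base);  /* av_q2d() converts the AVRational to a double */
 }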

2. AVCodecContext


typedef struct AVCodecContext {
     /**
      * This is the fundamental unit of time (in seconds) in terms
      * of which frame timestamps are represented. For fixed-fps content,
      * timebase should be 1/framerate and timestamp increments should be
      * identically 1.
      * - encoding: MUST be set by user.
      * - decoding: Set by libavcodec.
      */
     AVRational time_base;

From the comments above, AVCodecContext->time_base is also expressed in seconds, but with coarser granularity than AVStream->time_base: its value is 1/framerate.

Take the ffmpeg transcoding tool (ffmpeg.c) as an example:


static int transcode_init(void)
 {
             if (enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO) {
                 if (ost->filter && !ost->frame_rate.num)
                     ost->frame_rate = av_buffersink_get_frame_rate(ost->filter->filter);
                 if (ist && !ost->frame_rate.num)
                     ost->frame_rate = ist->framerate;
                 if (ist && !ost->frame_rate.num)
                     ost->frame_rate = ist->st->r_frame_rate;
                 if (ist && !ost->frame_rate.num) {
                     ost->frame_rate = (AVRational){25, 1};
                     av_log(NULL, AV_LOG_WARNING,
                            "No information "
                            "about the input framerate is available. Falling "
                            "back to a default value of 25fps for output stream #%d:%d. Use the -r option "
                            "if you want a different framerate.\n",
                            ost->file_index, ost->index);
                 }
//                    ost->frame_rate = ist->st->avg_frame_rate.num ? ist->st->avg_frame_rate : (AVRational){25, 1};
                 if (ost->enc && ost->enc->supported_framerates && !ost->force_fps) {
                     int idx = av_find_nearest_q_idx(ost->frame_rate, ost->enc->supported_framerates);
                     ost->frame_rate = ost->enc->supported_framerates[idx];
                 }
                 if (enc_ctx->codec_id == AV_CODEC_ID_MPEG4) {
                     av_reduce(&ost->frame_rate.num, &ost->frame_rate.den,
                               ost->frame_rate.num, ost->frame_rate.den, 65535);
                 }
             }

             switch (enc_ctx->codec_type) {
             case AVMEDIA_TYPE_VIDEO:

                 enc_ctx->time_base = av_inv_q(ost->frame_rate);
                 if (ost->filter && !(enc_ctx->time_base.num && enc_ctx->time_base.den))
                     enc_ctx->time_base = ost->filter->filter->inputs[0]->time_base;
                 if (   av_q2d(enc_ctx->time_base) < 0.001 && video_sync_method != VSYNC_PASSTHROUGH
                    && (video_sync_method == VSYNC_CFR || video_sync_method == VSYNC_VSCFR || (video_sync_method == VSYNC_AUTO && !(oc->oformat->flags & AVFMT_VARIABLE_FPS)))){
                     av_log(oc, AV_LOG_WARNING, "Frame rate very high for a muxer not efficiently supporting it.\n"
                                                "Please consider specifying a lower framerate, a different muxer or -vsync 2\n");
                 }

The code first determines ost->frame_rate, then computes enc_ctx->time_base = 1/ost->frame_rate (via av_inv_q()).
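For instance (a trivial sketch, assuming a 30 fps output), this amounts to:

 AVRational frame_rate = (AVRational){30, 1};
 enc_ctx->time_base = av_inv_q(frame_rate);   /* {1, 30}: one tick is 1/30 of a second */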

Summary:
AVStream->time_base has finer granularity (a smaller value) than AVCodecContext->time_base. For example, AVStream->time_base might be 1/90000 while AVCodecContext->time_base is 1/30 (assuming a frame_rate of 30). The same pts or dts, expressed in AVStream->time_base units, is numerically larger than when expressed in AVCodecContext->time_base units.
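To make the relationship concrete, here is a sketch (assuming the 1/90000 and 1/30 bases above) that moves the same timestamp between the two bases with av_rescale_q():

 AVRational stream_tb = (AVRational){1, 90000};   /* AVStream->time_base       */
 AVRational codec_tb  = (AVRational){1, 30};      /* AVCodecContext->time_base */

 int64_t pts_stream = 270000;                                        /* 3 seconds in stream units */
 int64_t pts_codec  = av_rescale_q(pts_stream, stream_tb, codec_tb); /* -> 90 (3 s * 30 fps)      */
 int64_t back       = av_rescale_q(pts_codec,  codec_tb, stream_tb); /* -> 270000 again           */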

II. pts and dts
Which time_base do pts and dts use in each of the following structures?

1. AVPacket

typedef struct AVPacket {
     /**
      * Presentation timestamp in AVStream->time_base units; the time at which
      * the decompressed packet will be presented to the user.
      * Can be AV_NOPTS_VALUE if it is not stored in the file.
      * pts MUST be larger or equal to dts as presentation cannot happen before
      * decompression, unless one wants to view hex dumps. Some formats misuse
      * the terms dts and pts/cts to mean something different. Such timestamps
      * must be converted to true pts/dts before they are stored in AVPacket.
      */
     int64_t pts;
     /**
      * Decompression timestamp in AVStream->time_base units; the time at which
      * the packet is decompressed.
      * Can be AV_NOPTS_VALUE if it is not stored in the file.
      */
     int64_t dts;

As the comments show, pts and dts in AVPacket are in AVStream->time_base units (so the values are relatively large). This is easy to understand: per the MPEG specification, the pts and dts attached to compressed (not yet decoded) data are samples of a 90 kHz clock, and that sampling interval is exactly AVStream->time_base.
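For example (a small sketch using the logging helpers from libavutil/timestamp.h, the same ones ffmpeg.c uses later in this article), a packet's timestamps can be printed both as raw ticks and as seconds; st is assumed to be the AVStream the packet belongs to:

 /* av_ts2str()/av_ts2timestr() come from libavutil/timestamp.h */
 if (pkt->pts != AV_NOPTS_VALUE)
     av_log(NULL, AV_LOG_INFO, "pkt pts:%s (%s s) dts:%s (%s s)\n",
            av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, &st->time_base),
            av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, &st->time_base));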

2. AVFrame

typedef struct AVFrame {
     /**
      * Presentation timestamp in time_base units (time when frame should be shown to user).
      */
     int64_t pts;

     /**
      * PTS copied from the AVPacket that was decoded to produce this frame.
      */
     int64_t pkt_pts;

     /**
      * DTS copied from the AVPacket that triggered returning this frame. (if frame threading isn't used)
     * This is also the Presentation time of this AVFrame calculated from
      * only AVPacket.dts values without pts values.
      */
     int64_t pkt_dts;

Note:
pkt_pts and pkt_dts in AVFrame are copied from the AVPacket, so they are also in AVStream->time_base units, whereas pts is prepared for output (display) and is in AVCodecContext->time_base units. //FIXME

3. InputStream

typedef struct InputStream {
     int file_index;
     AVStream *st;
     AVCodecContext *dec_ctx;
     int64_t       start;     /* time when read started */
     /* predicted dts of the next packet read for this stream or (when there are
      * several frames in a packet) of the next frame in current packet (in AV_TIME_BASE units) */
     int64_t       next_dts;
     int64_t       dts;       ///< dts of the last packet read for this stream (in AV_TIME_BASE units)

     int64_t       next_pts;  ///< synthetic pts for the next decode frame (in AV_TIME_BASE units)
     int64_t       pts;       ///< current pts of the decoded frame  (in AV_TIME_BASE units)

As the comments show, pts and dts in InputStream are in AV_TIME_BASE units (microseconds). As to why they are converted to microseconds, it is probably to avoid floating-point arithmetic.
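AV_TIME_BASE is defined in libavutil/avutil.h as 1000000, with AV_TIME_BASE_Q as the matching AVRational, so a value in these units is simply a count of microseconds; a tiny sketch:

 /* AV_TIME_BASE   == 1000000
  * AV_TIME_BASE_Q == (AVRational){1, AV_TIME_BASE}
  * so 1500000 in AV_TIME_BASE units is 1.5 seconds. */
 int64_t us = 1500000;
 double  s  = us * av_q2d(AV_TIME_BASE_Q);   /* 1.5 */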

4. OutputStream


typedef struct OutputStream {
     int file_index;          /* file index */
     int index;               /* stream index in the output file */
     int source_index;        /* InputStream index */
     AVStream *st;            /* stream in the output file */
     int encoding_needed;     /* true if encoding needed for this stream */
     int frame_number;
     /* input pts and corresponding output pts
        for A/V sync */
     struct InputStream *sync_ist; /* input stream to sync against */
     int64_t sync_opts;       /* output frame counter, could be changed to some true timestamp */ // FIXME look at frame_number
     /* pts of the first frame encoded for this stream, used for limiting
      * recording time */
     int64_t first_pts;
     /* dts of the last packet sent to the muxer */
     int64_t last_mux_dts;
     AVBitStreamFilterContext *bitstream_filters;
     AVCodecContext *enc_ctx;
     AVCodec *enc;
     int64_t max_frames;
     AVFrame *filtered_frame;

OutputStream is involved in audio/video synchronization and is structured differently from InputStream; it is only recorded here for now and not analyzed.

III. Converting between the various time_base values

ffmpeg provides av_rescale_q() for converting between time bases. av_rescale_q(a, b, c) is equivalent to computing a * b / c (with rounding and protection against intermediate overflow); by choosing b and c appropriately, a timestamp can easily be moved from one time base to another.
For example:
1. InputStream (AV_TIME_BASE) to AVPacket (AVStream->time_base)


static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output)
 {
 pkt->dts  = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ist->st->time_base);

2. AVPacket (AVStream->time_base) to InputStream (AV_TIME_BASE)

static int process_input_packet(InputStream *ist, const AVPacket *pkt)
 {

     if (pkt->dts != AV_NOPTS_VALUE) {
         ist->next_dts = ist->dts = av_rescale_q(pkt->dts, ist->st->time_base, AV_TIME_BASE_Q); 
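A worked example with made-up numbers (assuming the 1/90000 MPEG-TS stream time base from earlier): pkt->dts = 90000 is 1 second, so the two conversions above behave as follows:

 AVRational stream_tb = (AVRational){1, 90000};                    /* assumed AVStream->time_base */

 /* AVPacket -> InputStream: 90000 ticks of 1/90000 s == 1 s == 1000000 microseconds */
 int64_t in_us = av_rescale_q(90000, stream_tb, AV_TIME_BASE_Q);   /* -> 1000000 */

 /* InputStream -> AVPacket: the inverse conversion */
 int64_t ticks = av_rescale_q(in_us, AV_TIME_BASE_Q, stream_tb);   /* -> 90000 */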

IV. Postscript
Right after decoding/encoding, the values of AVFrame->pts and AVPacket->pts/AVPacket->dts briefly sit in a mismatched time_base:

1. After decoding

static int decode_video(InputStream *ist, AVPacket *pkt, int *got_output)
 {
     decoded_frame = ist->decoded_frame;
     pkt->dts  = av_rescale_q(ist->dts, AV_TIME_BASE_Q, ist->st->time_base); 

     update_benchmark(NULL);
     ret = avcodec_decode_video2(ist->dec_ctx,
                                 decoded_frame, got_output, pkt);

     best_effort_timestamp= av_frame_get_best_effort_timestamp(decoded_frame);   
     if(best_effort_timestamp != AV_NOPTS_VALUE)
         ist->next_pts = ist->pts = av_rescale_q(decoded_frame->pts = best_effort_timestamp, ist->st->time_base, AV_TIME_BASE_Q);  

After decoding, decoded_frame->pts is in AVStream->time_base units; it is later converted to AVCodecContext->time_base units inside AVFilter. //FIXME

2. After encoding

static void do_video_out(AVFormatContext *s,
                          OutputStream *ost,
                          AVFrame *in_picture)
 {
         ret = avcodec_encode_video2(enc, &pkt, in_picture, &got_packet);    
         if (got_packet) {
             if (debug_ts) {
                 av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
                        "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
                        av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &enc->time_base),
                        av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &enc->time_base));
             }

             if (pkt.pts == AV_NOPTS_VALUE && !(enc->codec->capabilities & CODEC_CAP_DELAY))
                 pkt.pts = ost->sync_opts;

             av_packet_rescale_ts(&pkt, enc->time_base, ost->st->time_base); 

             if (debug_ts) {
                 av_log(NULL, AV_LOG_INFO, "encoder -> type:video "
                     "pkt_pts:%s pkt_pts_time:%s pkt_dts:%s pkt_dts_time:%s\n",
                     av_ts2str(pkt.pts), av_ts2timestr(pkt.pts, &ost->st->time_base),
                     av_ts2str(pkt.dts), av_ts2timestr(pkt.dts, &ost->st->time_base));
             }

             frame_size = pkt.size;
             write_frame(s, &pkt, ost);  
             /* if two pass, output log */
             if (ost->logfile && enc->stats_out) {
                 fprintf(ost->logfile, "%s", enc->stats_out);
             }
         }

After encoding, pkt.pts and pkt.dts are in AVCodecContext->time_base units; they are then converted to AVStream->time_base units by calling av_packet_rescale_ts() before the packet is passed to write_frame().
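av_packet_rescale_ts() is essentially a convenience wrapper; roughly (a sketch, not the exact library source), it rescales every timestamp field of the packet:

 /* Roughly what av_packet_rescale_ts(&pkt, enc->time_base, ost->st->time_base) does: */
 if (pkt.pts != AV_NOPTS_VALUE)
     pkt.pts = av_rescale_q(pkt.pts, enc->time_base, ost->st->time_base);
 if (pkt.dts != AV_NOPTS_VALUE)
     pkt.dts = av_rescale_q(pkt.dts, enc->time_base, ost->st->time_base);
 if (pkt.duration > 0)
     pkt.duration = av_rescale_q(pkt.duration, enc->time_base, ost->st->time_base);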

Reposted from: http://www.cnitblog.com/luofuchong/archive/2014/11/28/89869.html
