webrtc-m79-VCMPacket中的时间计算

1. 代码

// Handles one depacketized RTP video payload: builds a VCMPacket, updates
// loss-notification and NACK bookkeeping, performs H264-specific bitstream
// fix-up, and inserts the packet into |packet_buffer_|.
// Always returns 0; a failed PacketBuffer insertion is handled by requesting
// a keyframe rather than by an error return value.
int32_t RtpVideoStreamReceiver::OnReceivedPayloadData(
    const uint8_t* payload_data,
    size_t payload_size,
    const RTPHeader& rtp_header,
    const RTPVideoHeader& video_header,
    const absl::optional<RtpGenericFrameDescriptor>& generic_descriptor,
    bool is_recovered) {
  // VCMPacket.ntp_time_ms_ stores the value returned by
  // ntp_estimator_.Estimate: RemoteNtpTimeEstimator::Estimate derives, from
  // the sender's RTP timestamp, an estimated capture time for this packet
  // expressed in milliseconds on the receiver's NTP clock.
  VCMPacket packet(payload_data, payload_size, rtp_header, video_header,
                   ntp_estimator_.Estimate(rtp_header.timestamp),
                   clock_->TimeInMilliseconds());
  packet.generic_descriptor = generic_descriptor;

  if (loss_notification_controller_) {
    if (is_recovered) {
      // TODO(bugs.webrtc.org/10336): Implement support for reordering.
      RTC_LOG(LS_INFO)
          << "LossNotificationController does not support reordering.";
    } else if (!generic_descriptor) {
      RTC_LOG(LS_WARNING) << "LossNotificationController requires generic "
                             "frame descriptor, but it is missing.";
    } else {
      loss_notification_controller_->OnReceivedPacket(rtp_header.sequenceNumber,
                                                      *generic_descriptor);
    }
  }

  if (nack_module_) {
    const bool is_keyframe =
        video_header.is_first_packet_in_frame &&
        video_header.frame_type == VideoFrameType::kVideoFrameKey;

    packet.timesNacked = nack_module_->OnReceivedPacket(
        rtp_header.sequenceNumber, is_keyframe, is_recovered);
  } else {
    // -1 marks "NACK module disabled" for downstream readers of timesNacked.
    packet.timesNacked = -1;
  }

  // Empty payload: nothing to buffer — record the sequence number and flush
  // any buffered RTCP feedback before returning.
  if (packet.sizeBytes == 0) {
    NotifyReceiverOfEmptyPacket(packet.seqNum);
    rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
    return 0;
  }

  if (packet.codec() == kVideoCodecH264) {
    // Only when we start to receive packets will we know what payload type
    // that will be used. When we know the payload type insert the correct
    // sps/pps into the tracker.
    if (packet.payloadType != last_payload_type_) {
      last_payload_type_ = packet.payloadType;
      InsertSpsPpsIntoTracker(packet.payloadType);
    }

    // Key step: CopyAndFixBitstream reallocates a buffer, stores it in the
    // VCMPacket, and inserts start codes into the bitstream.
    switch (tracker_.CopyAndFixBitstream(&packet)) {
      case video_coding::H264SpsPpsTracker::kRequestKeyframe:
        rtcp_feedback_buffer_.RequestKeyFrame();
        rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
        RTC_FALLTHROUGH();
      case video_coding::H264SpsPpsTracker::kDrop:
        return 0;
      case video_coding::H264SpsPpsTracker::kInsert:
        break;
    }

  } else {
    // Non-H264: copy the payload into a fresh heap buffer so the packet
    // carries its own data. NOTE(review): ownership of |data| presumably
    // transfers downstream with the VCMPacket — confirm the matching
    // delete[] site before modifying this path.
    uint8_t* data = new uint8_t[packet.sizeBytes];
    memcpy(data, packet.dataPtr, packet.sizeBytes);
    packet.dataPtr = data;
  }

  rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
  // packet_buffer_ is a video_coding::PacketBuffer; insertion failure is
  // recovered from by requesting a new keyframe.
  if (!packet_buffer_.InsertPacket(&packet)) {
    RequestKeyFrame();
  }
  return 0;
}


// Estimates, for a given sender RTP timestamp, the corresponding capture
// time expressed on the receiver's NTP clock (ms). Returns -1 when the
// RTP->NTP linear model is not yet available.
int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) {
  // Step 1: map the RTP timestamp onto the *sender's* NTP clock via the
  // linear-regression parameters maintained by |rtp_to_ntp_|.
  int64_t sender_capture_ntp_ms = 0;
  if (!rtp_to_ntp_.Estimate(rtp_timestamp, &sender_capture_ntp_ms))
    return -1;

  // Step 2: translate to the receiver's clocks.
  // |ntp_clocks_offset_estimator_| is a median filter fed (in
  // UpdateRtcpTimestamp) with samples of
  //   receiver_arrival_time_ms - sender_arrival_time_ms,
  // where sender_arrival_time_ms = sender_send_time_ms + rtt / 2, i.e. the
  // RTT-compensated remote-to-local clock offset. Adding its filtered value
  // converts a sender NTP time into the receiver's local clock.
  const int64_t clocks_offset = ntp_clocks_offset_estimator_.GetFilteredValue();
  const int64_t receiver_capture_ms = sender_capture_ntp_ms + clocks_offset;

  // Step 3: the local-NTP-minus-local-clock delta is constant, so adding it
  // re-expresses the local instant on the receiver's NTP clock.
  const int64_t now_ms = clock_->TimeInMilliseconds();
  const int64_t receiver_capture_ntp_ms =
      receiver_capture_ms + (clock_->CurrentNtpInMilliseconds() - now_ms);

  // Rate-limited diagnostic logging of the conversion chain.
  if (now_ms - last_timing_log_ms_ > kTimingLogIntervalMs) {
    RTC_LOG(LS_INFO) << "RTP timestamp: " << rtp_timestamp
                     << " in NTP clock: " << sender_capture_ntp_ms
                     << " estimated time in receiver clock: "
                     << receiver_capture_ms
                     << " converted to NTP clock: " << receiver_capture_ntp_ms;
    last_timing_log_ms_ = now_ms;
  }
  return receiver_capture_ntp_ms;
}


// Converts an RTP timestamp to an estimated sender NTP time in milliseconds
// using the fitted linear model in |params_|. Returns false when no model
// exists yet or the estimate would be negative.
bool RtpToNtpEstimator::Estimate(int64_t rtp_timestamp,
                                 int64_t* ntp_timestamp_ms) const {
  if (!params_)
    return false;

  // Extend the 32-bit RTP timestamp to 64 bits so wrap-around becomes
  // monotonic. Example: a wrapping sequence (2^32 - 3600) -> 0 -> 3600
  // unwraps to (2^32 - 3600) -> 2^32 -> (2^32 + 3600).
  const int64_t unwrapped_timestamp = unwrapper_.Unwrap(rtp_timestamp);

  // params_calculated_ should not be true unless ms params.frequency_khz has
  // been calculated to something non zero. |params_| (slope frequency_khz,
  // intercept offset_ms) is updated in RtpToNtpEstimator::UpdateParameters.
  RTC_DCHECK_NE(params_->frequency_khz, 0.0);

  // Apply the regression line; the +0.5 rounds to the nearest millisecond
  // when the result is truncated below.
  const double estimated_ms =
      static_cast<double>(unwrapped_timestamp) / params_->frequency_khz +
      params_->offset_ms + 0.5f;
  if (estimated_ms < 0)
    return false;

  // Estimated NTP time derived from the RTP timestamp.
  *ntp_timestamp_ms = estimated_ms;
  return true;
}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值