webrtc视频流完整接收流程分析

webrtc 视频流接收流程分析从 socket 接收数据一直到放入 jitterbuffer 内整个处理流程与环节

部分流程依赖于 webrtc 中有关 socket 运行机制以及 stun 收发过程 及 Candidates 生成流程分析

ApplyDescription_n(/*local=*/false, type, description);
    
    ./pc/jsep_transport_controller.cc
    RTCError JsepTransportController::ApplyDescription_n(
        bool local,
        SdpType type,
        const cricket::SessionDescription* description)
        error = MaybeCreateJsepTransport(local, content_info, *description);
    
    ./pc/jsep_transport_controller.cc
    RTCError JsepTransportController::MaybeCreateJsepTransport(
        bool local,
        const cricket::ContentInfo& content_info,
        const cricket::SessionDescription& description)
        //--------------------------------------------------------------------------
        // 看到这个 rtp_dtls_transport 了吗?这个就是我们的 DtlsTransport,在这个地方创建的
        //--------------------------------------------------------------------------
        std::unique_ptr<cricket::DtlsTransportInternal> rtp_dtls_transport =
            CreateDtlsTransport(content_info, ice->internal(), nullptr);
        dtls_srtp_transport = CreateDtlsSrtpTransport(
            content_info.name, rtp_dtls_transport.get(), rtcp_dtls_transport.get());
    
    ./pc/jsep_transport_controller.cc
    // Builds the DtlsSrtpTransport that carries SRTP over the DTLS transport(s).
    // Called from MaybeCreateJsepTransport; must run on the network thread.
    std::unique_ptr<webrtc::DtlsSrtpTransport>
    JsepTransportController::CreateDtlsSrtpTransport(
        const std::string& transport_name,
        cricket::DtlsTransportInternal* rtp_dtls_transport,
        cricket::DtlsTransportInternal* rtcp_dtls_transport) {
        RTC_DCHECK(network_thread_->IsCurrent());
        // A null rtcp_dtls_transport presumably means RTCP is muxed onto the RTP
        // transport — confirm against the DtlsSrtpTransport constructor.
        auto dtls_srtp_transport = std::make_unique<webrtc::DtlsSrtpTransport>(rtcp_dtls_transport == nullptr);
        if (config_.enable_external_auth) {
            dtls_srtp_transport->EnableExternalAuth();
        }

        // Hooks the DTLS transports into the SRTP transport; this is where the
        // read-packet signal chain toward RtpTransport::OnReadPacket starts.
        dtls_srtp_transport->SetDtlsTransports(rtp_dtls_transport, rtcp_dtls_transport);
        dtls_srtp_transport->SetActiveResetSrtpParams(config_.active_reset_srtp_params);
        dtls_srtp_transport->SignalDtlsStateChange.connect(this, &JsepTransportController::UpdateAggregateStates_n);
        return dtls_srtp_transport;
    }
    
    void DtlsSrtpTransport::SetDtlsTransports(
        cricket::DtlsTransportInternal* rtp_dtls_transport,
        cricket::DtlsTransportInternal* rtcp_dtls_transport) {
        // Transport names should be the same.
        if (rtp_dtls_transport && rtcp_dtls_transport) 
            SetRtpPacketTransport(rtp_dtls_transport);
                    
    ./pc/rtp_transport.cc
    void RtpTransport::SetRtpPacketTransport(
        rtc::PacketTransportInternal* new_packet_transport)
        //---------------------------------------------------------------------
        // 这句把 DtlsTransport 与 RtpTransport::OnReadPacket 调用挂接,所以流程 8 会调用到 流程 9
        //---------------------------------------------------------------------
        new_packet_transport->SignalReadPacket.connect(this, &RtpTransport::OnReadPacket);

9.    
./pc/rtp_transport.cc
void RtpTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
    const char* data,
    size_t len,
    const int64_t& packet_time_us,
    int flags)
    OnRtpPacketReceived(std::move(packet), packet_time_us);
                

// Hands a freshly received RTP packet (already separated from RTCP) to the
// demuxing step below.
void RtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
    int64_t packet_time_us) {
    DemuxPacket(packet, packet_time_us);
}
                        
// Parses the raw buffer into an RtpPacketReceived and routes it to the
// registered sink via rtp_demuxer_. Unparseable packets are dropped.
void RtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet,
    int64_t packet_time_us) {
    webrtc::RtpPacketReceived parsed_packet(&header_extension_map_);
    if (!parsed_packet.Parse(std::move(packet))) {
        RTC_LOG(LS_ERROR) << "Failed to parse the incoming RTP packet before demuxing. Drop it.";
        return;
    }

    // -1 means "no arrival timestamp"; otherwise convert microseconds to
    // milliseconds, rounding to the nearest millisecond.
    if (packet_time_us != -1) {
        parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000);
    }
    if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
        RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: " << RtpDemuxer::DescribePacket(parsed_packet);
    }
}

10.
// Resolves the sink registered for this packet and forwards the packet to it.
// Returns false when no sink matches. For video, the registered sink is the
// VideoChannel (see RegisterRtpDemuxerSink below).
bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) {
    RtpPacketSinkInterface* sink = ResolveSink(packet);
    if (sink != nullptr) {
        sink->OnRtpPacket(packet);
        return true;
    }
    return false;
}

下面我们分析一下这个 Sink 到底是哪个对象

10.1
首先从这个的调用流程参考上面的分析,谁调用的这个函数,上面已经分析过了
./pc/peer_connection.cc
RTCError PeerConnection::ApplyRemoteDescription(
    std::unique_ptr<SessionDescriptionInterface> desc)
    
    error = UpdateSessionState(type, cricket::CS_REMOTE, remote_description()->description());

10.2
./pc/peer_connection.cc
RTCError PeerConnection::UpdateSessionState(
    SdpType type,
    cricket::ContentSource source,
    const cricket::SessionDescription* description)
    RTCError error = PushdownMediaDescription(type, source);

10.3
./pc/peer_connection.cc
RTCError PeerConnection::PushdownMediaDescription(
    SdpType type,
    cricket::ContentSource source) {
  const SessionDescriptionInterface* sdesc =
      (source == cricket::CS_LOCAL ? local_description()
                                   : remote_description());
  RTC_DCHECK(sdesc);

  // Push down the new SDP media section for each audio/video transceiver.
  for (const auto& transceiver : transceivers_) {
    const ContentInfo* content_info =
        FindMediaSectionForTransceiver(transceiver, sdesc);
    // transceiver 的分析参见 10.3.1
    // transceiver 的 channel 分析参见 10.3.2
    cricket::ChannelInterface* channel = transceiver->internal()->channel();
    if (!channel || !content_info || content_info->rejected) {
      continue;
    }
    const MediaContentDescription* content_desc =
        content_info->media_description();
    if (!content_desc) {
      continue;
    }
    std::string error;
    bool success = (source == cricket::CS_LOCAL)
                       ? channel->SetLocalContent(content_desc, type, &error)
                       // 参见流程 10.3.3
                       : channel->SetRemoteContent(content_desc, type, &error);
    if (!success) {
      LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error);
    }
  }
  
  10.3.1 transceiver 对象的创建
  上面的 transceivers_ 我们分析一下,在 PeerConnection::Initialize 创建的
  ./pc/peer_connection.cc
  bool PeerConnection::Initialize(const PeerConnectionInterface::RTCConfiguration& configuration,
    PeerConnectionDependencies dependencies)
    if (!IsUnifiedPlan()) {
        transceivers_.push_back(
            RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
                signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_AUDIO)));
        transceivers_.push_back(
            RtpTransceiverProxyWithInternal<RtpTransceiver>::Create(
                signaling_thread(), new RtpTransceiver(cricket::MEDIA_TYPE_VIDEO)));
    }  
  
  我们看到这是一个 RtpTransceiver 对象,它的 channel 就是 VideoChannel 参考下面的分析
  
  10.3.2 transceiver 的 channel 由来
  
  10.3.2.1
  RTCError PeerConnection::ApplyRemoteDescription(
    std::unique_ptr<SessionDescriptionInterface> desc)
    RTCError error = CreateChannels(*remote_description()->description());
  
  10.3.2.2
  RTCError PeerConnection::CreateChannels(const SessionDescription& desc)    
    // 参见流程 10.3.2.2.1
    cricket::VideoChannel* video_channel = CreateVideoChannel(video->name);
    if (!video_channel) {
      LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
                           "Failed to create video channel.");
    }
    // 这个地方挂接了 RtpTransceiver 的 channel 就是 VideoChannel
    GetVideoTransceiver()->internal()->SetChannel(video_channel);
    
    10.3.2.2.1下面这个创建了 channel
    // Creates the cricket::VideoChannel for media section |mid| and wires its
    // failure/sent-packet signals back into this PeerConnection.
    // Returns nullptr on failure.
    cricket::VideoChannel* PeerConnection::CreateVideoChannel(
        const std::string& mid) {
      RtpTransportInternal* rtp_transport = GetRtpTransport(mid);
      MediaTransportConfig media_transport_config =
          transport_controller_->GetMediaTransportConfig(mid);

      // ChannelManager builds both the media-engine channel (WebRtcVideoChannel)
      // and the VideoChannel wrapper; see ChannelManager::CreateVideoChannel.
      cricket::VideoChannel* video_channel = channel_manager()->CreateVideoChannel(
          call_ptr_, configuration_.media_config, rtp_transport,
          media_transport_config, signaling_thread(), mid, SrtpRequired(),
          GetCryptoOptions(), &ssrc_generator_, video_options_,
          video_bitrate_allocator_factory_.get());
      if (!video_channel) {
        return nullptr;
      }
      video_channel->SignalDtlsSrtpSetupFailure.connect(
          this, &PeerConnection::OnDtlsSrtpSetupFailure);
      video_channel->SignalSentPacket.connect(this,
                                              &PeerConnection::OnSentPacket_w);
      video_channel->SetRtpTransport(rtp_transport);

      return video_channel;
    }
    
    //--------------------------- 分析 channel_manager ---------------------------------
    // channel_manager 函数的定义,factory_ 是 PeerConnection 构造函数传递过来的,我们继续分析构造函数
    // Accessor: the ChannelManager actually lives on the owning
    // PeerConnectionFactory (factory_ is set in the constructor).
    cricket::ChannelManager* PeerConnection::channel_manager() const {
        return factory_->channel_manager();
    }
    
    PeerConnection 构造函数,我们看到 factory_ 其实就是传入的 PeerConnectionFactory 指针
    PeerConnection::PeerConnection(PeerConnectionFactory* factory,
        std::unique_ptr<RtcEventLog> event_log,
        std::unique_ptr<Call> call) : factory_(factory),
        
    有关 PeerConnection 的创建,我们看到就是传递的 this(PeerConnectionFactory*) 到 PeerConnection
    rtc::scoped_refptr<PeerConnectionInterface>
    PeerConnectionFactory::CreatePeerConnection(
        const PeerConnectionInterface::RTCConfiguration& configuration,
        PeerConnectionDependencies dependencies)
        
        rtc::scoped_refptr<PeerConnection> pc(new rtc::RefCountedObject<PeerConnection>(this, std::move(event_log),
            std::move(call)));
            
    因此上述的 factory_->channel_manager(),其实就是 PeerConnectionFactory::channel_manager
    // Returns the factory-owned cricket::ChannelManager created in
    // PeerConnectionFactory::Initialize().
    cricket::ChannelManager* PeerConnectionFactory::channel_manager() {
        return channel_manager_.get();
    }
    
    在函数内部创建了  channel_manager_
    bool PeerConnectionFactory::Initialize()
        channel_manager_ = std::make_unique<cricket::ChannelManager>(
            std::move(media_engine_), std::make_unique<cricket::RtpDataEngine>(),
            worker_thread_, network_thread_);
            
    // 到此我们知道 channel_manager_ 就是 cricket::ChannelManager 对象
            
    // 下面分析 media_engine_,因为下面流程用到了
        PeerConnectionFactory 里的 media_engine_ 来自构造函数
        PeerConnectionFactory::PeerConnectionFactory(
            PeerConnectionFactoryDependencies dependencies)
            : wraps_current_thread_(false),
            network_thread_(dependencies.network_thread),
            worker_thread_(dependencies.worker_thread),
            signaling_thread_(dependencies.signaling_thread),
            task_queue_factory_(std::move(dependencies.task_queue_factory)),
            media_engine_(std::move(dependencies.media_engine)),
            
        PeerConnectionFactory 的创建在下面函数
        ./sdk/android/src/jni/pc/peer_connection_factory.cc
        ScopedJavaLocalRef<jobject> CreatePeerConnectionFactoryForJava(
            JNIEnv* jni,
            const JavaParamRef<jobject>& jcontext,
            const JavaParamRef<jobject>& joptions,
            rtc::scoped_refptr<AudioDeviceModule> audio_device_module,
            rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
            rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory,
            const JavaParamRef<jobject>& jencoder_factory,
            const JavaParamRef<jobject>& jdecoder_factory,
            rtc::scoped_refptr<AudioProcessing> audio_processor,
            std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory,
            std::unique_ptr<NetworkControllerFactoryInterface>
                network_controller_factory,
            std::unique_ptr<NetworkStatePredictorFactoryInterface>
                network_state_predictor_factory,
            std::unique_ptr<MediaTransportFactory> media_transport_factory,
            std::unique_ptr<NetEqFactory> neteq_factory)
            
            // 这个就是我们需要分析的 media_engine_
            dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_dependencies));
            // 下面这句在 ./pc/peer_connection_factory.cc 里就是创建 PeerConnectionFactory 对象
            rtc::scoped_refptr<PeerConnectionFactoryInterface> factory =
                CreateModularPeerConnectionFactory(std::move(dependencies));
                
            函数 CreateMediaEngine 在
            ./media/engine/webrtc_media_engine.cc            
            // Assembles the CompositeMediaEngine: a WebRtcVoiceEngine for audio
            // plus, when HAVE_WEBRTC_VIDEO is defined, a WebRtcVideoEngine for
            // video; otherwise video is stubbed out by NullWebRtcVideoEngine.
            std::unique_ptr<MediaEngineInterface> CreateMediaEngine(
                MediaEngineDependencies dependencies) {
              auto audio_engine = std::make_unique<WebRtcVoiceEngine>(
                  dependencies.task_queue_factory, std::move(dependencies.adm),
                  std::move(dependencies.audio_encoder_factory),
                  std::move(dependencies.audio_decoder_factory),
                  std::move(dependencies.audio_mixer),
                  std::move(dependencies.audio_processing));
            #ifdef HAVE_WEBRTC_VIDEO
              auto video_engine = std::make_unique<WebRtcVideoEngine>(
                  std::move(dependencies.video_encoder_factory),
                  std::move(dependencies.video_decoder_factory));
            #else
              auto video_engine = std::make_unique<NullWebRtcVideoEngine>();
            #endif
              return std::make_unique<CompositeMediaEngine>(std::move(audio_engine),
                                                            std::move(video_engine));
            }
            
            从这里我们看到 media_engine_ 就是 CompositeMediaEngine , 里面包含 WebRtcVoiceEngine 和 WebRtcVideoEngine 两个引擎
            所以下面的函数调用其实就是 WebRtcVideoEngine::CreateMediaChannel
            
        
    // ----- GetVideoTransceiver()->internal()->SetChannel(video_channel) ----- 
    // 表明 RtpTransceiver 的 channel 就是 VideoChannel 了
    VideoChannel* ChannelManager::CreateVideoChannel(
        webrtc::Call* call,
        const cricket::MediaConfig& media_config,
        webrtc::RtpTransportInternal* rtp_transport,
        const webrtc::MediaTransportConfig& media_transport_config,
        rtc::Thread* signaling_thread,
        const std::string& content_name,
        bool srtp_required,
        const webrtc::CryptoOptions& crypto_options,
        rtc::UniqueRandomIdGenerator* ssrc_generator,
        const VideoOptions& options,
        webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory)
        
        VideoMediaChannel* media_channel = media_engine_->video().CreateMediaChannel(
            call, media_config, options, crypto_options,
            video_bitrate_allocator_factory);
        if (!media_channel) {
            return nullptr;
        }

        auto video_channel = std::make_unique<VideoChannel>(
            worker_thread_, network_thread_, signaling_thread,
            absl::WrapUnique(media_channel), content_name, srtp_required,
            crypto_options, ssrc_generator);

        video_channel->Init_w(rtp_transport, media_transport_config);

        VideoChannel* video_channel_ptr = video_channel.get();
        video_channels_.push_back(std::move(video_channel));
        return video_channel_ptr;
        
        我们分析 media_engine_->video().CreateMediaChannel 参见下面的函数,media_channel 就是一个 WebRtcVideoChannel 对象
        ./media/engine/webrtc_video_engine.cc
        // Factory method: each video channel gets its own WebRtcVideoChannel,
        // which later receives packets in OnPacketReceived (step 12).
        VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel(
            webrtc::Call* call,
            const MediaConfig& config,
            const VideoOptions& options,
            const webrtc::CryptoOptions& crypto_options,
            webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) {
            RTC_LOG(LS_INFO) << "CreateMediaChannel. Options: " << options.ToString();
            return new WebRtcVideoChannel(call, config, options, crypto_options,
                                        encoder_factory_.get(), decoder_factory_.get(),
                                        video_bitrate_allocator_factory);
        }
        
    //-------------------------------------------------------------------------------
    
    10.3.3
    我们继续分析 SetRemoteContent
    ./pc/channel.cc
    // Marshals the remote-description push onto the worker thread; the real
    // work happens in SetRemoteContent_w (overridden by VideoChannel, below).
    bool BaseChannel::SetRemoteContent(const MediaContentDescription* content,
                                       SdpType type,
                                       std::string* error_desc) {
      TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent");
      return InvokeOnWorker<bool>(
          RTC_FROM_HERE,
          Bind(&BaseChannel::SetRemoteContent_w, this, content, type, error_desc));
    }

    ./pc/channel.cc
    bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content,
        SdpType type,
        std::string* error_desc)
        if (!RegisterRtpDemuxerSink()) {
          RTC_LOG(LS_ERROR) << "Failed to update video demuxing.";
          return false;
        }

    ./pc/channel.cc
    // Registers this channel as the demuxer sink on the network thread, so
    // RtpDemuxer::OnRtpPacket later delivers packets to BaseChannel::OnRtpPacket.
    bool BaseChannel::RegisterRtpDemuxerSink() {
      RTC_DCHECK(rtp_transport_);
      return network_thread_->Invoke<bool>(RTC_FROM_HERE, [this] {
        return rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this);
      });
    }
    
    ./pc/rtp_transport.cc
    // Re-registers |sink| with the demuxer under the new |criteria|; any
    // existing registration is removed first so criteria updates replace
    // rather than accumulate.
    bool RtpTransport::RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
        RtpPacketSinkInterface* sink) {
        rtp_demuxer_.RemoveSink(sink);
        if (!rtp_demuxer_.AddSink(criteria, sink)) {
            RTC_LOG(LS_ERROR) << "Failed to register the sink for RTP demuxer.";
            return false;
        }
        return true;
    }
    
    我们看到 VideoChannel 做为 Sink 加到 RtpTransport 里的 rtp_demuxer_, 所以 RtpDemuxer::OnRtpPacket
    会调用 VideoChannel::OnRtpPacket

11. VideoChannel的基类实现
./pc/channel.cc
void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet)
    invoker_.AsyncInvoke<void>(
        RTC_FROM_HERE, worker_thread_, [this, packet_buffer, packet_time_us] {
            RTC_DCHECK(worker_thread_->IsCurrent());
                // 其实这个就是 WebRtcVideoChannel
                media_channel_->OnPacketReceived(packet_buffer, packet_time_us);
        });
                  
    media_channel_ 在系统初始化(创建 VideoChannel)的过程中,由下面这个工厂函数创建
    // (Quoted again for step 11.) The media_channel_ used by BaseChannel is the
    // WebRtcVideoChannel instance constructed here.
    VideoMediaChannel* WebRtcVideoEngine::CreateMediaChannel(
        webrtc::Call* call,
        const MediaConfig& config,
        const VideoOptions& options,
        const webrtc::CryptoOptions& crypto_options,
        webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) {
        RTC_LOG(LS_INFO) << "CreateMediaChannel. Options: " << options.ToString();
        return new WebRtcVideoChannel(call, config, options, crypto_options,
            encoder_factory_.get(), decoder_factory_.get(),
            video_bitrate_allocator_factory);
    }
                 
12.                 
./media/engine/webrtc_video_engine.cc
void WebRtcVideoChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us)
    if (call_->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO, packet, packet_time_us) !=
        webrtc::PacketReceiver::DELIVERY_OK) {
        RTC_LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery.";
        return;
    }
    
    // 其实 call_->Receiver() 就是 Call 对象自身。
    ./call/call.cc
    // Call implements PacketReceiver itself, so Receiver() simply returns this.
    PacketReceiver* Call::Receiver() {
        RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
        return this;
    }

13.
./call/call.cc
// Entry point from WebRtcVideoChannel::OnPacketReceived: classifies the buffer
// as RTCP vs RTP by inspecting the header bytes and dispatches accordingly.
PacketReceiver::DeliveryStatus Call::DeliverPacket(
    MediaType media_type,
    rtc::CopyOnWriteBuffer packet,
    int64_t packet_time_us) {
    RTC_DCHECK_RUN_ON(&configuration_sequence_checker_);
    if (IsRtcp(packet.cdata(), packet.size()))
        return DeliverRtcp(media_type, packet.cdata(), packet.size());

    return DeliverRtp(media_type, std::move(packet), packet_time_us);
}
                
./call/call.cc
PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type,
    rtc::CopyOnWriteBuffer packet,
    int64_t packet_time_us)
    if (video_receiver_controller_.OnRtpPacket(parsed_packet)) {
            
            
14.
./call/rtp_stream_receiver_controller.cc
// Lock-guarded fan-out to the per-SSRC sinks registered through AddSink below
// (for video, the RtpVideoStreamReceiver).
bool RtpStreamReceiverController::OnRtpPacket(const RtpPacketReceived& packet) {
    rtc::CritScope cs(&lock_);
    return demuxer_.OnRtpPacket(packet);
}

    // 下面挂 RtpVideoStreamReceiver 到 demuxer_, 具体过程如下
    14.1 
    ./video/video_receive_stream.cc        
    VideoReceiveStream::VideoReceiveStream(
        TaskQueueFactory* task_queue_factory,
        RtpStreamReceiverControllerInterface* receiver_controller,
        int num_cpu_cores,
        PacketRouter* packet_router,
        VideoReceiveStream::Config config,
        ProcessThread* process_thread,
        CallStats* call_stats,
        Clock* clock,
        VCMTiming* timing)                    
        media_receiver_ = receiver_controller->CreateReceiver(config_.rtp.remote_ssrc, &rtp_video_stream_receiver_);
                        
        rtp_video_stream_receiver_ 就是这个类,在 VideoReceiveStream.h 
        RtpVideoStreamReceiver rtp_video_stream_receiver_;
      
            
    14.2
    // Wraps |sink| in a Receiver whose constructor registers it for |ssrc|
    // (the matching unregistration is handled elsewhere, not shown here).
    std::unique_ptr<RtpStreamReceiverInterface>
    RtpStreamReceiverController::CreateReceiver(uint32_t ssrc,
        RtpPacketSinkInterface* sink) {
        return std::make_unique<Receiver>(this, ssrc, sink);
    }
                
    14.3
    // Registering in the constructor ties the sink's demuxer membership to the
    // Receiver's lifetime. A failed AddSink is only logged, not propagated.
    RtpStreamReceiverController::Receiver::Receiver(
        RtpStreamReceiverController* controller,
        uint32_t ssrc,
        RtpPacketSinkInterface* sink)
            : controller_(controller), sink_(sink) {
        const bool sink_added = controller_->AddSink(ssrc, sink_);
        if (!sink_added) {
            RTC_LOG(LS_ERROR)
            << "RtpStreamReceiverController::Receiver::Receiver: Sink "
            << "could not be added for SSRC=" << ssrc << ".";
        }
    }
                
    14.4 这个地方把上面的 rtp_video_stream_receiver_ 加到 RtpStreamReceiverController 的 demuxer_ 了,
    也就是说 demuxer_.OnRtpPacket(packet) 就会调用 RtpVideoStreamReceiver::OnRtpPacket
    
    // Maps |ssrc| -> |sink| in the demuxer under the controller's lock. This is
    // how rtp_video_stream_receiver_ ends up receiving OnRtpPacket calls.
    bool RtpStreamReceiverController::AddSink(uint32_t ssrc,
        RtpPacketSinkInterface* sink) {
        rtc::CritScope cs(&lock_);
        return demuxer_.AddSink(ssrc, sink);
    }

15.                
./video/rtp_video_stream_receiver.cc
void RtpVideoStreamReceiver::OnRtpPacket(const RtpPacketReceived& packet)
    ReceivePacket(packet);
                
void RtpVideoStreamReceiver::ReceivePacket(const RtpPacketReceived& packet)
    // NAT 保活包
    if (packet.payload_size() == 0) {
        // Padding or keep-alive packet.
        // TODO(nisse): Could drop empty packets earlier, but need to figure out how
        // they should be counted in stats.
        NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
        return;
    }
            
    OnReceivedPayloadData(
        rtc::MakeArrayView(parsed_payload.payload, parsed_payload.payload_length),
        packet, parsed_payload.video);
                    
void RtpVideoStreamReceiver::OnReceivedPayloadData(
    rtc::ArrayView<const uint8_t> codec_payload,
    const RtpPacketReceived& rtp_packet,
    const RTPVideoHeader& video)
                
    // 丢包处理
    if (loss_notification_controller_) {
        if (rtp_packet.recovered()) {
            // TODO(bugs.webrtc.org/10336): Implement support for reordering.
            RTC_LOG(LS_INFO)
                << "LossNotificationController does not support reordering.";
        } else if (!packet.generic_descriptor) {
            RTC_LOG(LS_WARNING) << "LossNotificationController requires generic "
                "frame descriptor, but it is missing.";
            } else {
                loss_notification_controller_->OnReceivedPacket(
                    rtp_packet.SequenceNumber(), *packet.generic_descriptor);
            }
        }
                
        // NACK 处理
        if (nack_module_) {
            const bool is_keyframe =
                video_header.is_first_packet_in_frame &&
                video_header.frame_type == VideoFrameType::kVideoFrameKey;

            packet.times_nacked = nack_module_->OnReceivedPacket(
            rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered());
        } else {
            packet.times_nacked = -1;
        }
                
        rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
        frame_counter_.Add(packet.timestamp);
        OnInsertedPacket(packet_buffer_.InsertPacket(&packet));
                
// Drains the PacketBuffer insert result: every fully assembled frame goes to
// OnAssembledFrame; if the buffer had to clear itself, a keyframe is requested
// so decoding can resynchronize.
void RtpVideoStreamReceiver::OnInsertedPacket(
    video_coding::PacketBuffer::InsertResult result) {
    for (std::unique_ptr<video_coding::RtpFrameObject>& frame : result.frames) {
        OnAssembledFrame(std::move(frame));
    }
    if (result.buffer_cleared) {
        RequestKeyFrame();
    }
}
            
void RtpVideoStreamReceiver::OnAssembledFrame(
    std::unique_ptr<video_coding::RtpFrameObject> frame)
    
    buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame));
                
    // buffered_frame_decryptor_ 的产生过程
    buffered_frame_decryptor_ =    std::make_unique<BufferedFrameDecryptor>(this, this);
    if (frame_decryptor != nullptr) {
        buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
    }
                
16.
./video/buffered_frame_decryptor.cc
void BufferedFrameDecryptor::ManageEncryptedFrame(
    std::unique_ptr<video_coding::RtpFrameObject> encrypted_frame) {
    // 流程 16.1
    switch (DecryptFrame(encrypted_frame.get())) {
        case FrameDecision::kStash:
            if (stashed_frames_.size() >= kMaxStashedFrames) {
                RTC_LOG(LS_WARNING) << "Encrypted frame stash full poping oldest item.";
                stashed_frames_.pop_front();
            }
            stashed_frames_.push_back(std::move(encrypted_frame));
            break;
        case FrameDecision::kDecrypted:
            RetryStashedFrames();
            // 流程 16.2 
            decrypted_frame_callback_->OnDecryptedFrame(std::move(encrypted_frame));
            break;
        case FrameDecision::kDrop:
            break;
        }
    }

    16.1            
    BufferedFrameDecryptor::FrameDecision BufferedFrameDecryptor::DecryptFrame(
        video_coding::RtpFrameObject* frame)
        // Attempt to decrypt the video frame.
        const FrameDecryptorInterface::Result decrypt_result =
            frame_decryptor_->Decrypt(cricket::MEDIA_TYPE_VIDEO, {},
            additional_data, *frame,
            inline_decrypted_bitstream);
    
    16.2
    void RtpVideoStreamReceiver::OnDecryptedFrame(
        std::unique_ptr<video_coding::RtpFrameObject> frame) {
        rtc::CritScope lock(&reference_finder_lock_);
            reference_finder_->ManageFrame(std::move(frame));
                
            // reference_finder_ 的创建过程,其实就是 RtpFrameReferenceFinder
            reference_finder_ = std::make_unique<video_coding::RtpFrameReferenceFinder>(this);
        }
            
17.
./modules/video_coding/rtp_frame_reference_finder.cc
void RtpFrameReferenceFinder::ManageFrame(std::unique_ptr<RtpFrameObject> frame)
    // 流程 17.1
    FrameDecision decision = ManageFrameInternal(frame.get());
    switch (decision) {
    case kStash:
        if (stashed_frames_.size() > kMaxStashedFrames)
            stashed_frames_.pop_back();
        stashed_frames_.push_front(std::move(frame));
        break;
    case kHandOff:
        // 流程 17.2 
        HandOffFrame(std::move(frame));
        RetryStashedFrames();
        break;
    case kDrop:
        break;
    }
    
    //--------------------------- ManageFrameInternal 流程分析 ---------------------------
    17.1.1
    // Decides how to find references for |frame|:
    //  1. A generic frame descriptor, when present, takes precedence.
    //  2. Otherwise dispatch on codec (VP8/VP9/H264 have dedicated handlers).
    //  3. Fallback: derive a picture ID (low 15 bits of the generic frame_id,
    //     when available) or fall back to RTP sequence numbers.
    RtpFrameReferenceFinder::FrameDecision
    RtpFrameReferenceFinder::ManageFrameInternal(RtpFrameObject* frame) {
        absl::optional<RtpGenericFrameDescriptor> generic_descriptor =
        frame->GetGenericFrameDescriptor();
        if (generic_descriptor) {
            return ManageFrameGeneric(frame, *generic_descriptor);
        }

        switch (frame->codec_type()) {
        case kVideoCodecVP8:
            return ManageFrameVp8(frame);
        case kVideoCodecVP9:
            return ManageFrameVp9(frame);
        case kVideoCodecH264:
            return ManageFrameH264(frame);
        default: {
            // Use 15 first bits of frame ID as picture ID if available.
            const RTPVideoHeader& video_header = frame->GetRtpVideoHeader();
            int picture_id = kNoPictureId;
            if (video_header.generic)
                picture_id = video_header.generic->frame_id & 0x7fff;

                // NOTE(review): despite the indentation, this return is NOT
                // inside the if above (no braces) — it always executes.
                return ManageFramePidOrSeqNum(frame, picture_id);
            }
        }
    }
            
    17.1.2
    RtpFrameReferenceFinder::FrameDecision RtpFrameReferenceFinder::ManageFrameH264(
        RtpFrameObject* frame)
                
        UpdateDataH264(frame, unwrapped_tl0, tid);
                
    17.1.3    
    void RtpFrameReferenceFinder::UpdateDataH264(RtpFrameObject* frame,
        int64_t unwrapped_tl0,
        uint8_t temporal_idx)
                
        UpdateLayerInfoH264(frame, unwrapped_tl0, temporal_idx);
                
            
    //---------------------------------  HandOffFrame 流程分析 -----------------------------------------
    17.2.1
    void RtpFrameReferenceFinder::HandOffFrame(std::unique_ptr<RtpFrameObject> frame)
        frame_callback_->OnCompleteFrame(std::move(frame));
                
        // frame_callback_ 就是 RtpVideoStreamReceiver,参见上面的分析
        reference_finder_ = std::make_unique<video_coding::RtpFrameReferenceFinder>(this);
                
    17.2.2
    // Records per-picture bookkeeping for a completed frame, then forwards it
    // toward the frame buffer (complete_frame_callback_ is VideoReceiveStream,
    // see step 17.2.3).
    void RtpVideoStreamReceiver::OnCompleteFrame(
        std::unique_ptr<video_coding::EncodedFrame> frame) {
        {
            // Remember the last RTP sequence number per picture id, under
            // last_seq_num_cs_.
            rtc::CritScope lock(&last_seq_num_cs_);
            video_coding::RtpFrameObject* rtp_frame =
                static_cast<video_coding::RtpFrameObject*>(frame.get());
            last_seq_num_for_pic_id_[rtp_frame->id.picture_id] = rtp_frame->last_seq_num();
        }
        last_completed_picture_id_ =
            std::max(last_completed_picture_id_, frame->id.picture_id);
            // NOTE(review): separate statement despite the indentation — it
            // always runs after the max() assignment above.
            complete_frame_callback_->OnCompleteFrame(std::move(frame));
        }
            
    17.2.3
    void VideoReceiveStream::OnCompleteFrame(std::unique_ptr<video_coding::EncodedFrame> frame)
        int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
        if (last_continuous_pid != -1)
            rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
                    
        // VideoReceiveStream 构造函数中创建 frame_buffer_
        frame_buffer_.reset(new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_));
                
                
    17.2.4 这个就是 jitterbuffer 了,到此为止;后续的解码、渲染是另外一个流程了,音频接收流程基本类似于这个。
    ./modules/video_coding/frame_buffer2.cc
    int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) 
    
    // 整个视频的接收到此分析完毕!!!!!!!

  • 5
    点赞
  • 13
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值