【owt】WebrtcNode, subscribe-sdp offer flow (2)

Flow diagram

(flow diagram image omitted)

  • Create the MediaStream. On one side, the MediaStream acts as the receiver of media data coming from the VideoFramePacketizer;

  • Create the VideoFramePacketizer. The MediaStream's sink is registered with the VideoFramePacketizer (via bindTransport), so video data from the VideoFramePacketizer (which inherits from MediaSource) flows into the MediaStream. In other words, the VideoFramePacketizer is the video source and the MediaStream is the receiver (a simplified sketch of this wiring follows below). Where does the VideoFramePacketizer itself receive its data from???
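To make the direction concrete, here is a minimal sketch of that wiring. The types below (PacketSink, StreamSink, Packetizer) are simplified, hypothetical stand-ins, not the erizo/owt_base interfaces; they only illustrate that the packetizer plays the source role and pushes packets into whatever sink bindTransport handed it.

#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for the sink side (the MediaStream role).
struct PacketSink {
  virtual ~PacketSink() = default;
  virtual void deliverVideoData(const std::vector<uint8_t>& rtp) = 0;
};

struct StreamSink : PacketSink {
  void deliverVideoData(const std::vector<uint8_t>& rtp) override {
    std::cout << "stream received " << rtp.size() << " bytes\n";
  }
};

// Simplified stand-in for the source side (the VideoFramePacketizer role).
struct Packetizer {
  void bindTransport(PacketSink* sink) { sink_ = sink; }   // like step 4.3
  void onEncodedFrame(const std::vector<uint8_t>& frame) {
    if (sink_) sink_->deliverVideoData(frame);             // source -> sink
  }
  PacketSink* sink_ = nullptr;
};

int main() {
  StreamSink stream;
  Packetizer packetizer;
  packetizer.bindTransport(&stream);
  packetizer.onEncodedFrame({0x90, 0x00});  // media flows packetizer -> stream
  return 0;
}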

4. new WrtcStream

dist/webrtc_agent/webrtc/wrtcConnection.js

/*
 * WrtcStream represents a stream object
 * of WrtcConnection. It has media source
 * functions (addDestination) and media sink
 * functions (receiver) which will be used
 * in connection link-up. Each rtp-stream-id
 * in simulcast refers to one WrtcStream.
 */
class WrtcStream extends EventEmitter {

  /*
   * audio: { format, ssrc, mid, midExtId }
   * video: {
   *   format, ssrcs, mid, midExtId,
   *   transportcc, red, ulpfec, scalabilityMode
   * }
   */
  // wrtc is the Connection object
  //  id = mid
  //  {audio, video, owner, enableBWE} comes from SdpInfo.getMediaSettings
  constructor(id, wrtc, direction, {audio, video, owner, enableBWE}) {
    super();
    this.id = id;
    this.wrtc = wrtc;
    this.direction = direction;
    this.audioFormat = audio ? audio.format : null;
    this.videoFormat = video ? video.format : null;
    this.audio = audio;
    this.video = video;
    this.audioFrameConstructor = null;
    this.audioFramePacketizer = null;
    this.videoFrameConstructor = null;
    this.videoFramePacketizer = null;
    this.closed = false;
    this.owner = owner;
    if (video && video.scalabilityMode) {
      this.scalabilityMode = video.scalabilityMode;
      // string => LayerStream
      this.layerStreams = new Map();
    }

     // Set up as recvonly (subscribe), so the direction here is 'out'
    if (direction === 'in') {
            ...
    } else {
      // 1. wrtc is the Connection; create the MediaStream; the last argument is isPublisher = false
      wrtc.addMediaStream(id, {label: id}, false);

      if (audio) {
        ...
      }
      if (video) {
        // 2.
        //  wrtc is the Connection
        //  wrtc.callBase = new CallBase();
        //  created when the Connection is created, in dist/webrtc_agent/webrtc/wrtcConnection.js
        this.videoFramePacketizer = new VideoFramePacketizer(
          video.red, video.ulpfec, video.transportcc, video.mid,
          video.midExtId, false, wrtc.callBase, enableBWE);
        // 3. wrtc.getMediaStream(id) is the addon.MediaStream created in section 5.1
        this.videoFramePacketizer.bindTransport(wrtc.getMediaStream(id));
      }
    }
  }

...
}

4.1 Connection.addMediaStream

See section 5: creates the MediaStream and adds it to the WebRtcConnection.

4.2 new addon.VideoFramePacketizer

See section 6: creation.

4.3 addon.VideoFramePacketizer.bindTransport

See section 7: assigning the sink (bindTransport).

5. Connection.addMediaStream: create the MediaStream and add it to the WebRtcConnection

dist/webrtc_agent/webrtc/wrtcConnection.js

2023-04-26T21:54:19.799  - INFO: Connection - message: addMediaStream, connectionId: b149e44bb10d4e91bd162a8c6806ae7b, mediaStreamId: 1
  //  id = mid
  addMediaStream(id, options, isPublisher) {
  // this.id is the transportId (i.e. the connection id)
    log.info(`message: addMediaStream, connectionId: ${this.id}, mediaStreamId: ${id}`);
    if (this.mediaStreams.get(id) === undefined) {
      // 1. create media stream
      const mediaStream = this._createMediaStream(id, options, isPublisher);
      // 2. this.wrtc is the addon.WebRtcConnection
      this.wrtc.addMediaStream(mediaStream);
      // 3. the map stores mid -> MediaStream
      this.mediaStreams.set(id, mediaStream);
    }
  }

5.1 Connection._createMediaStream: create the addon.MediaStream

dist/webrtc_agent/webrtc/wrtcConnection.js

2023-04-26T21:54:19.799  - DEBUG: Connection - message: _createMediaStream, connectionId: b149e44bb10d4e91bd162a8c6806ae7b, mediaStreamId: 1, isPublisher: false
   //  id = mid
  _createMediaStream(id, options = {}, isPublisher = true) {
    log.debug(`message: _createMediaStream, connectionId: ${this.id}, ` +
              `mediaStreamId: ${id}, isPublisher: ${isPublisher}`);
   // this.wrtc is the addon.WebRtcConnection
    const mediaStream = new addon.MediaStream(this.threadPool, this.wrtc, id,
      options.label, this._getMediaConfiguration(this.mediaConfiguration), isPublisher);
    mediaStream.id = id;
    // here the label is just the id
    mediaStream.label = options.label;
    if (options.metadata) {
      // mediaStream.metadata = options.metadata;
      // mediaStream.setMetadata(JSON.stringify(options.metadata));
    }
    // 2. ???
    mediaStream.onMediaStreamEvent((type, message) => {
      this._onMediaStreamEvent(type, message, mediaStream.id);
    });
    return mediaStream;
  }
5.1.1 NAN_METHOD(MediaStream::New)

source/agent/webrtc/rtcConn/MediaStream.cc

NAN_METHOD(MediaStream::New) {
  if (info.Length() < 3) {
    Nan::ThrowError("Wrong number of arguments");
  }

  if (info.IsConstructCall()) {
    // Invoked as a constructor with 'new MediaStream()'
    ThreadPool* thread_pool = Nan::ObjectWrap::Unwrap<ThreadPool>(Nan::To<v8::Object>(info[0]).ToLocalChecked());
    // This is the addon class WebRtcConnection defined in source/agent/webrtc/rtcConn/WebRtcConnection.cc
    WebRtcConnection* connection =
     Nan::ObjectWrap::Unwrap<WebRtcConnection>(Nan::To<v8::Object>(info[1]).ToLocalChecked());

    // connection->me is the erizo::WebRtcConnection
    std::shared_ptr<erizo::WebRtcConnection> wrtc = connection->me;
        // wrtc_id=id=mid
    std::string wrtc_id = getString(info[2]);
    std::string stream_label = getString(info[3]);

    bool is_publisher = Nan::To<bool>(info[5]).FromJust();

///
    // source/agent/webrtc/rtcConn/MediaStream.cc
    // For the MediaStream class relationship diagram, see section 9
    MediaStream* obj = new MediaStream();
    // source/agent/webrtc/rtcConn/WebRtcConnection.cc
    // Share same worker with connection
    obj->me = std::make_shared<erizo::MediaStream>(wrtc->getWorker(), wrtc, wrtc_id, stream_label, is_publisher);
    // erizo::MediaSink* msink; erizo::MediaStream inherits from erizo::MediaSink
    obj->msink = obj->me.get();
    // erizo::MediaSource* msource; erizo::MediaStream inherits from erizo::MediaSource
    obj->msource = obj->me.get();
    obj->id_ = wrtc_id;
    obj->label_ = stream_label;
///
    ELOG_DEBUG("%s, message: Created", obj->toLog());
    obj->Wrap(info.This());
    info.GetReturnValue().Set(info.This());
    obj->asyncResource_ = new Nan::AsyncResource("MediaStreamCallback");
  } else {
    // TODO(pedro) Check what happens here
  }
}
Log (MediaStreamWrapper):

2023-04-26 21:54:19,800  - DEBUG: MediaStreamWrapper - id: 1, message: Created

Note the distinction between the addon MediaStream (the NAN wrapper) and erizo::MediaStream.

See section 9.
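For readers unfamiliar with the NAN binding pattern used in MediaStream::New, here is a self-contained sketch of the same idiom with a made-up Counter class (this is not OWT code): arguments are converted with Nan::To, the native object is created, Wrap() ties it to the JS this, and later methods recover it with Nan::ObjectWrap::Unwrap.

#include <nan.h>

// A made-up Counter addon class illustrating the ObjectWrap pattern.
class Counter : public Nan::ObjectWrap {
 public:
  static NAN_MODULE_INIT(Init) {
    v8::Local<v8::FunctionTemplate> tpl = Nan::New<v8::FunctionTemplate>(New);
    tpl->SetClassName(Nan::New("Counter").ToLocalChecked());
    tpl->InstanceTemplate()->SetInternalFieldCount(1);
    Nan::SetPrototypeMethod(tpl, "value", Value);
    Nan::Set(target, Nan::New("Counter").ToLocalChecked(),
             Nan::GetFunction(tpl).ToLocalChecked());
  }

 private:
  explicit Counter(int start) : count_(start) {}

  // new Counter(start) from JS lands here, just like new addon.MediaStream(...).
  static NAN_METHOD(New) {
    if (info.IsConstructCall()) {
      int start = info.Length() > 0 ? Nan::To<int32_t>(info[0]).FromJust() : 0;
      Counter* obj = new Counter(start);
      obj->Wrap(info.This());                  // tie the C++ object to the JS object
      info.GetReturnValue().Set(info.This());
    }
  }

  // counter.value() unwraps the C++ object back out of the JS holder.
  static NAN_METHOD(Value) {
    Counter* obj = Nan::ObjectWrap::Unwrap<Counter>(info.Holder());
    info.GetReturnValue().Set(Nan::New(obj->count_));
  }

  int count_;
};

NODE_MODULE(counter, Counter::Init)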

5.1.2 addon.MediaStream::MediaStream

source/agent/webrtc/rtcConn/MediaStream.cc

MediaStream::MediaStream() : closed_{false}, id_{"undefined"} {
  // async callbacks delivered on the libuv default loop
  async_stats_ = new uv_async_t;
  async_event_ = new uv_async_t;
  uv_async_init(uv_default_loop(), async_stats_, &MediaStream::statsCallback);
  uv_async_init(uv_default_loop(), async_event_, &MediaStream::eventCallback);
}
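The two uv_async_t handles are the standard libuv way to hop from an arbitrary thread back onto the Node event loop: uv_async_send() may be called from any thread, and libuv then invokes the registered callback on the loop thread. A tiny standalone example of that pattern (not OWT code):

#include <cstdio>
#include <thread>
#include <uv.h>

static uv_async_t async_event;

// Runs on the libuv loop thread, like MediaStream::eventCallback.
static void eventCallback(uv_async_t* handle) {
  std::printf("event delivered on the loop thread\n");
  uv_close(reinterpret_cast<uv_handle_t*>(handle), nullptr);  // let the loop exit
}

int main() {
  uv_async_init(uv_default_loop(), &async_event, eventCallback);

  // Any other thread can signal the loop; the callback fires on the loop thread.
  std::thread producer([] { uv_async_send(&async_event); });

  uv_run(uv_default_loop(), UV_RUN_DEFAULT);
  producer.join();
  return 0;
}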
5.1.3 erizo::MediaStream::MediaStream

source/agent/webrtc/rtcConn/erizo/src/erizo/MediaStream.cpp

2023-04-26 21:54:19,799  - INFO: MediaStream - id: 1, role: subscriber, message: constructor, id: 1
MediaStream::MediaStream(std::shared_ptr<Worker> worker,
  std::shared_ptr<WebRtcConnection> connection,
  const std::string& media_stream_id, // media_stream_id = mid
  const std::string& media_stream_label,
  bool is_publisher) :
    audio_enabled_{false}, video_enabled_{false},
    media_stream_event_listener_{nullptr},
    connection_{std::move(connection)},
    stream_id_{media_stream_id},
    mslabel_ {media_stream_label},
    bundle_{false},
    pipeline_{Pipeline::create()},
    worker_{std::move(worker)},
    audio_muted_{false}, video_muted_{false},
    pipeline_initialized_{false},
    is_publisher_{is_publisher},
    simulcast_{false},
    bitrate_from_max_quality_layer_{0},
    video_bitrate_{0} {
  ///   
  setVideoSinkSSRC(kDefaultVideoSinkSSRC);
  setAudioSinkSSRC(kDefaultAudioSinkSSRC);
  ///   
  ELOG_INFO("%s message: constructor, id: %s",
      toLog(), media_stream_id.c_str());

  ///   
  // FeedbackSink
  source_fb_sink_ = this;
  // FeedbackSource
  sink_fb_source_ = this;
  ///   
  stats_ = std::make_shared<Stats>();
  log_stats_ = std::make_shared<Stats>();
  quality_manager_ = std::make_shared<QualityManager>();
  packet_buffer_ = std::make_shared<PacketBufferService>();
  std::srand(std::time(nullptr));

  ///   
  audio_sink_ssrc_ = std::rand();
  video_sink_ssrc_ = std::rand();
  ///   

  rtcp_processor_ = nullptr;

  should_send_feedback_ = true;
  slide_show_mode_ = false;

  mark_ = clock::now();

  rate_control_ = 0;
  sending_ = true;
}

5.2 NAN_METHOD(WebRtcConnection::addMediaStream): add the MediaStream to the WebRtcConnection

source/agent/webrtc/rtcConn/WebRtcConnection.cc

NAN_METHOD(WebRtcConnection::addMediaStream) {
  WebRtcConnection* obj = Nan::ObjectWrap::Unwrap<WebRtcConnection>(info.Holder());
  std::shared_ptr<erizo::WebRtcConnection> me = obj->me;
  if (!me) {
    return;
  }

  MediaStream* param = Nan::ObjectWrap::Unwrap<MediaStream>(
    Nan::To<v8::Object>(info[0]).ToLocalChecked());
   // param->me is the shared_ptr<erizo::MediaStream>
  auto wr = std::shared_ptr<erizo::MediaStream>(param->me);

  // call addMediaStream on the erizo::WebRtcConnection (me)
  me->addMediaStream(wr);
}
About the member me:

source/agent/webrtc/rtcConn/MediaStream.h

class MediaStream : public MediaFilter, public erizo::MediaStreamStatsListener, public erizo::MediaStreamEventListener {
 public:
...
    std::shared_ptr<erizo::MediaStream> me;
...

}
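Note that auto wr = std::shared_ptr<erizo::MediaStream>(param->me) in addMediaStream above simply takes another shared owner of the same erizo::MediaStream, so the native stream stays alive while either the addon wrapper or the connection still holds it. A small sketch of that ownership model (NativeStream is a hypothetical stand-in, not OWT code):

#include <iostream>
#include <memory>
#include <vector>

// Hypothetical stand-in for erizo::MediaStream, only to show lifetime.
struct NativeStream {
  ~NativeStream() { std::cout << "native stream destroyed\n"; }
};

int main() {
  // Held by the addon wrapper, like MediaStream::me.
  auto wrapper_me = std::make_shared<NativeStream>();

  // WebRtcConnection::addMediaStream stores another shared_ptr copy.
  std::vector<std::shared_ptr<NativeStream>> media_streams_;
  media_streams_.push_back(wrapper_me);

  wrapper_me.reset();  // the wrapper lets go (e.g. the stream is closed on the JS side)
  std::cout << "owners left in the connection: "
            << media_streams_.front().use_count() << "\n";  // prints 1

  media_streams_.clear();  // last owner gone -> "native stream destroyed"
  return 0;
}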
5.2.1 WebRtcConnection::addMediaStream

source/agent/webrtc/rtcConn/erizo/src/erizo/WebRtcConnection.cpp

2023-04-26 21:54:19,800  - DEBUG: WebRtcConnection - id: b149e44bb10d4e91bd162a8c6806ae7b,  message: Adding mediaStream, id: 1
void WebRtcConnection::addMediaStream(std::shared_ptr<MediaStream> media_stream) {
  asyncTask([media_stream] (std::shared_ptr<WebRtcConnection> connection) {
    boost::mutex::scoped_lock lock(connection->update_state_mutex_);
    ELOG_DEBUG("%s message: Adding mediaStream, id: %s", connection->toLog(), media_stream->getId().c_str());
    connection->media_streams_.push_back(media_stream);
  });
}
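addMediaStream does not touch media_streams_ directly; it hands the work to asyncTask, which runs the lambda on the connection's worker thread so the stream list is only modified there. A rough sketch of that idiom follows (assumed behaviour with simplified types, not the erizo implementation):

#include <functional>
#include <memory>
#include <mutex>
#include <queue>
#include <vector>

// Simplified stand-in for the asyncTask() idiom: tasks are queued and later
// drained on the worker thread, together with a shared_ptr that keeps the
// connection alive while each task runs.
class Connection : public std::enable_shared_from_this<Connection> {
 public:
  using Task = std::function<void(std::shared_ptr<Connection>)>;

  void asyncTask(Task task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push(std::move(task));           // the worker drains this queue
  }

  // Called on the worker thread.
  void drainTasks() {
    std::queue<Task> pending;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      std::swap(pending, tasks_);
    }
    auto self = shared_from_this();         // pin the connection during execution
    while (!pending.empty()) {
      pending.front()(self);
      pending.pop();
    }
  }

  std::vector<int> media_streams_;          // touched only from the worker thread

 private:
  std::mutex mutex_;
  std::queue<Task> tasks_;
};

int main() {
  auto connection = std::make_shared<Connection>();
  connection->asyncTask([](std::shared_ptr<Connection> c) {
    c->media_streams_.push_back(1);         // like pushing the new media stream
  });
  connection->drainTasks();                 // in erizo this happens on the worker
  return 0;
}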

5.3 mediaStreams

    this.mediaStreams = new Map();

   //mid->MediaStream
   this.mediaStreams.set(id, mediaStream);

The key is the id (= mid) and the value is the mediaStream.

6. new addon.VideoFramePacketizer: creation

??? wrtc.callBase (created as new CallBase() when the Connection is created; see the comment in section 4)

6.1 NAN_METHOD(addon.VideoFramePacketizer::New)

source/agent/webrtc/rtcFrame/VideoFramePacketizerWrapper.cc

void VideoFramePacketizer::New(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = Isolate::GetCurrent();
  HandleScope scope(isolate);

  bool supportRED = args.Length() > 0 ? Nan::To<bool>(args[0]).FromJust() : false;
  bool supportULPFEC = args.Length() > 1 ? Nan::To<bool>(args[1]).FromJust() : false;
  int transportccExt = (args.Length() > 2) ? Nan::To<int32_t>(args[2]).FromJust() : -1;
  std::string mid;
  int midExtId = -1;
  if (args.Length() >= 5) {
    Nan::Utf8String param4(Nan::To<v8::String>(args[3]).ToLocalChecked());
    mid = std::string(*param4);
    midExtId = args[4]->IntegerValue(Nan::GetCurrentContext()).ToChecked();
  }
  bool selfRequestKeyframe = (args.Length() >= 6)
      ? Nan::To<bool>(args[5]).FromJust() : false;
  CallBase* baseWrapper = (args.Length() >= 7)
      ? Nan::ObjectWrap::Unwrap<CallBase>(Nan::To<v8::Object>(args[6]).ToLocalChecked())
      : nullptr;
  bool enableBandwidthEstimation = (args.Length() >= 8)
      ? Nan::To<bool>(args[7]).FromJust() : false;


  // create the addon VideoFramePacketizer
  VideoFramePacketizer* obj = new VideoFramePacketizer();
  owt_base::VideoFramePacketizer::Config config;
  config.enableRed = supportRED;
  config.enableUlpfec = supportULPFEC;
  config.transportccExt = transportccExt;
  config.mid = mid;
  config.midExtId = midExtId;
  config.enableBandwidthEstimation = enableBandwidthEstimation;
  if (baseWrapper) {
    config.rtcAdapter = baseWrapper->rtcAdapter;
  }

  // create the owt_base::VideoFramePacketizer
  if (transportccExt > 0) {
    config.enableTransportcc = true;
    config.selfRequestKeyframe = false;
    obj->me = new owt_base::VideoFramePacketizer(config);
  } else if (selfRequestKeyframe) {
    config.enableTransportcc = false;
    config.selfRequestKeyframe = true;
    obj->me = new owt_base::VideoFramePacketizer(config);
  } else {
    config.enableTransportcc = false;
    obj->me = new owt_base::VideoFramePacketizer(config);
  }
  obj->dest = obj->me;

  obj->Wrap(args.This());
  args.GetReturnValue().Set(args.This());
}

addon.VideoFramePacketizer::VideoFramePacketizer

VideoFramePacketizer::VideoFramePacketizer() {};

6.2 owt_base::VideoFramePacketizer::VideoFramePacketizer

source/core/owt_base/VideoFramePacketizer.cpp

VideoFramePacketizer::VideoFramePacketizer(VideoFramePacketizer::Config& config)
    : m_enabled(true)
    , m_frameFormat(FRAME_FORMAT_UNKNOWN)
    , m_frameWidth(0)
    , m_frameHeight(0)
    , m_ssrc(0)
    , m_sendFrameCount(0)
    , m_rtcAdapter(config.rtcAdapter)
    , m_videoSend(nullptr)
{
    video_sink_ = nullptr;
    if (!m_rtcAdapter) {
        ELOG_DEBUG("Create RtcAdapter");
        m_rtcAdapter.reset(RtcAdapterFactory::CreateRtcAdapter());
    }
    if (config.enableBandwidthEstimation) {
        m_feedbackTimer = SharedJobTimer::GetSharedFrequencyTimer(
            kBitrateEstimationInterval);
        m_feedbackTimer->addListener(this);
    }
    init(config);
}
??? RtcAdapterFactory::CreateRtcAdapter

VideoFramePacketizer::init

source/core/owt_base/VideoFramePacketizer.cpp

bool VideoFramePacketizer::init(VideoFramePacketizer::Config& config)
{
    if (!m_videoSend) {
        // Create Send Video Stream
        rtc_adapter::RtcAdapter::Config sendConfig;
        if (config.enableTransportcc) {
            sendConfig.transport_cc = config.transportccExt;
        }
        if (config.enableRed) {
            sendConfig.red_payload = RED_90000_PT;
        }
        if (config.enableUlpfec) {
            sendConfig.ulpfec_payload = ULP_90000_PT;
        }
        if (!config.mid.empty()) {
            memset(sendConfig.mid, 0, sizeof(sendConfig.mid));
            strncat(sendConfig.mid, config.mid.c_str(), sizeof(sendConfig.mid) - 1);
            sendConfig.mid_ext = config.midExtId;
        }
        if (config.enableBandwidthEstimation) {
            sendConfig.bandwidth_estimation = true;
        }
        sendConfig.feedback_listener = this;
        sendConfig.rtp_listener = this;
        sendConfig.stats_listener = this;
      ///
        m_videoSend = m_rtcAdapter->createVideoSender(sendConfig);
        m_ssrc = m_videoSend->ssrc();
      ///
        return true;
    }

    return false;
}
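The memset + strncat pair above is just a safe way to copy a std::string into the fixed-size sendConfig.mid buffer with guaranteed NUL termination: strncat appends at most sizeof(mid) - 1 characters onto the all-zero buffer. A standalone illustration of the same pattern (the buffer size here is picked arbitrarily):

#include <cstdio>
#include <cstring>
#include <string>

int main() {
  char mid[8];                                   // stands in for sendConfig.mid
  std::string config_mid = "this-mid-is-longer-than-the-buffer";

  std::memset(mid, 0, sizeof(mid));              // all zeros => empty C string
  std::strncat(mid, config_mid.c_str(), sizeof(mid) - 1);  // keeps room for the final '\0'

  std::printf("copied mid: \"%s\" (%zu chars)\n", mid, std::strlen(mid));  // 7 chars
  return 0;
}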

7. NAN_METHOD(VideoFramePacketizer::bindTransport): assigning the erizo::MediaSink

source/agent/webrtc/rtcFrame/VideoFramePacketizerWrapper.cc

The addon.MediaStream argument is the one created in section 5.1.

void VideoFramePacketizer::bindTransport(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = Isolate::GetCurrent();
  HandleScope scope(isolate);

  VideoFramePacketizer* obj = ObjectWrap::Unwrap<VideoFramePacketizer>(args.Holder());
  owt_base::VideoFramePacketizer* me = obj->me;

  // The argument passed in is the addon MediaStream, which inherits from MediaFilter;
  // its msource and msink were assigned in section 5.1.1
  MediaFilter* param = Nan::ObjectWrap::Unwrap<MediaFilter>(Nan::To<v8::Object>(args[0]).ToLocalChecked());
  erizo::MediaSink* transport = param->msink;

  // owt_base::VideoFramePacketizer* me
  me->bindTransport(transport);
}

7.1 addon.MediaFilter, addon.MediaSink, addon.MediaSource

source/agent/webrtc/rtcConn/MediaStream.h

source/core/owt_base/MediaWrapper.h

See section 9.

??? 7.3 owt_base::VideoFramePacketizer::bindTransport: set the erizo::MediaStream as the video sink and register the packetizer with the stream's FeedbackSource

source/core/owt_base/VideoFramePacketizer.cpp

void VideoFramePacketizer::bindTransport(erizo::MediaSink* sink)
{
    boost::unique_lock<boost::shared_mutex> lock(m_transportMutex);
      //  MediaSink* video_sink_;
    video_sink_ = sink;
    video_sink_->setVideoSinkSSRC(m_videoSend->ssrc());
    erizo::FeedbackSource* fbSource = video_sink_->getFeedbackSource();
    if (fbSource)
        fbSource->setFeedbackSink(this);
}
  • setVideoSinkSSRC
  • getFeedbackSource
  • setFeedbackSink
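These three calls set up the two-way wiring: setVideoSinkSSRC tells the sink which SSRC the sender side will use, and getFeedbackSource/setFeedbackSink register the packetizer so that feedback coming back from the MediaStream is delivered to it. A simplified sketch of that wiring with hypothetical types (not the erizo interfaces):

#include <cstdint>
#include <iostream>

struct FeedbackSink {
  virtual ~FeedbackSink() = default;
  virtual void deliverFeedback(int rtcpPacket) = 0;
};

struct FeedbackSource {
  void setFeedbackSink(FeedbackSink* sink) { fb_sink_ = sink; }
  void emitFeedback(int rtcpPacket) {
    if (fb_sink_) fb_sink_->deliverFeedback(rtcpPacket);
  }
  FeedbackSink* fb_sink_ = nullptr;
};

// Plays the erizo::MediaStream role: a media sink that owns a feedback source.
struct Stream {
  void setVideoSinkSSRC(uint32_t ssrc) { video_sink_ssrc_ = ssrc; }
  FeedbackSource* getFeedbackSource() { return &fb_source_; }
  uint32_t video_sink_ssrc_ = 0;
  FeedbackSource fb_source_;
};

// Plays the owt_base::VideoFramePacketizer role.
struct Packetizer : FeedbackSink {
  void bindTransport(Stream* stream) {
    stream->setVideoSinkSSRC(ssrc_);                      // sink expects this SSRC
    stream->getFeedbackSource()->setFeedbackSink(this);   // feedback comes back here
  }
  void deliverFeedback(int rtcpPacket) override {
    std::cout << "packetizer received feedback packet " << rtcpPacket << "\n";
  }
  uint32_t ssrc_ = 12345;
};

int main() {
  Stream stream;
  Packetizer packetizer;
  packetizer.bindTransport(&stream);
  stream.fb_source_.emitFeedback(1);   // e.g. feedback flowing back to the sender side
  return 0;
}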