1 问题
本文主要以windows为例,基于peerconnection_client的例子来讲解本地采集的视频是如何渲染到本地的窗口上的。
该问题主要分为两个环节:
第一个环节:建立从渲染窗口到视频采集器的流水线;
第二个环节:采集的视频沿着流水线送到渲染窗口;
注意:
视频采集器采集到的是像素格式(RGB或者YUV)的VideoFrame,此时还没有编码(比如:VP8/VP9/H264/H265等)成视频帧;
2 VideoTrack到渲染窗口流水线的建立
3 Capturer到VideoTrack流水线的建立
4 相关代码
4.1 VideoTrack到渲染窗口流水线的建立
// Main application window of the peerconnection_client example.
// Owns the Win32 child controls and the local/remote video renderers.
class MainWnd : public MainWindow {
 public:
  static const wchar_t kClassName[];

  enum WindowMessages {
    UI_THREAD_CALLBACK = WM_APP + 1,
  };

  MainWnd(const char* server, int port, bool auto_connect, bool auto_call);
  ~MainWnd();

  bool Create();
  bool Destroy();
  bool PreTranslateMessage(MSG* msg);

  virtual void RegisterObserver(MainWndCallback* callback);
  virtual bool IsWindow();
  virtual void SwitchToConnectUI();
  virtual void SwitchToPeerList(const Peers& peers);
  virtual void SwitchToStreamingUI();
  virtual void MessageBox(const char* caption, const char* text, bool is_error);
  virtual UI current_ui() { return ui_; }

  // Attach/detach a VideoRenderer sink to the local capture track and the
  // remote track, i.e. build or tear down the track -> window pipeline.
  virtual void StartLocalRenderer(webrtc::VideoTrackInterface* local_video);
  virtual void StopLocalRenderer();
  virtual void StartRemoteRenderer(webrtc::VideoTrackInterface* remote_video);
  virtual void StopRemoteRenderer();

  virtual void QueueUIThreadCallback(int msg_id, void* data);

  HWND handle() const { return wnd_; }

  // Video sink: receives frames via OnFrame() and converts them into an
  // ARGB buffer (image_, described by bmi_) that the window can paint.
  class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
   public:
    VideoRenderer(HWND wnd,
                  int width,
                  int height,
                  webrtc::VideoTrackInterface* track_to_render);
    virtual ~VideoRenderer();

    // Guard access to image_/bmi_; frames arrive on a non-UI thread.
    void Lock() { ::EnterCriticalSection(&buffer_lock_); }
    void Unlock() { ::LeaveCriticalSection(&buffer_lock_); }

    // VideoSinkInterface implementation
    void OnFrame(const webrtc::VideoFrame& frame) override;

    const BITMAPINFO& bmi() const { return bmi_; }
    const uint8_t* image() const { return image_.get(); }

   protected:
    void SetSize(int width, int height);

    enum {
      SET_SIZE,
      RENDER_FRAME,
    };

    HWND wnd_;
    BITMAPINFO bmi_;
    std::unique_ptr<uint8_t[]> image_;  // ARGB pixels, guarded by buffer_lock_.
    CRITICAL_SECTION buffer_lock_;
    rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
  };

  // A little helper class to make sure we always do proper locking and
  // unlocking when working with VideoRenderer buffers.
  template <typename T>
  class AutoLock {
   public:
    explicit AutoLock(T* obj) : obj_(obj) { obj_->Lock(); }
    ~AutoLock() { obj_->Unlock(); }

   protected:
    T* obj_;
  };

 protected:
  enum ChildWindowID {
    EDIT_ID = 1,
    BUTTON_ID,
    LABEL1_ID,
    LABEL2_ID,
    LISTBOX_ID,
  };

  void OnPaint();
  void OnDestroyed();
  void OnDefaultAction();
  bool OnMessage(UINT msg, WPARAM wp, LPARAM lp, LRESULT* result);
  static LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wp, LPARAM lp);
  static bool RegisterWindowClass();
  void CreateChildWindow(HWND* wnd,
                         ChildWindowID id,
                         const wchar_t* class_name,
                         DWORD control_style,
                         DWORD ex_style);
  void CreateChildWindows();
  void LayoutConnectUI(bool show);
  void LayoutPeerListUI(bool show);
  void HandleTabbing();

 private:
  // Sinks for the local capture track and the remote track.
  std::unique_ptr<VideoRenderer> local_renderer_;
  std::unique_ptr<VideoRenderer> remote_renderer_;
  UI ui_;
  HWND wnd_;
  DWORD ui_thread_id_;
  HWND edit1_;
  HWND edit2_;
  HWND label1_;
  HWND label2_;
  HWND button_;
  HWND listbox_;
  bool destroyed_;
  void* nested_msg_;
  MainWndCallback* callback_;
  static ATOM wnd_class_;
  std::string server_;
  std::string port_;
  bool auto_connect_;
  bool auto_call_;
};
void Conductor::AddTracks()
===>
rtc::scoped_refptr<CapturerTrackSource> video_device = CapturerTrackSource::Create();
if (video_device) {
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_( // video_track_ 实际指向的是 webrtc::VideoTrack
peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
main_wnd_->StartLocalRenderer(video_track_); 建立了 VideoTrack 到本地渲染的流水线
result_or_error = peer_connection_->AddTrack(video_track_, {kStreamId}); 建立了 VideoTrack 到编码器的流水线
if (!result_or_error.ok()) {
RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
<< result_or_error.error().message();
}
} else {
RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
}
4.2 Capturer到VideoTrack流水线的建立
VideoTrack 到本地渲染流水线的建立
// Builds the VideoTrack -> local-renderer pipeline: constructing the
// VideoRenderer registers it as a sink on |local_video| (see the
// VideoRenderer constructor). The initial 1x1 size is resized on the first
// frame via SetSize().
void MainWnd::StartLocalRenderer(webrtc::VideoTrackInterface* local_video) {
  local_renderer_ =
      std::make_unique<VideoRenderer>(handle(), 1, 1, local_video);
}
class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame>
// Prepares a 32-bit DIB header for GDI rendering and registers |this| as a
// video sink on |track_to_render| so captured frames reach OnFrame().
MainWnd::VideoRenderer::VideoRenderer(
    HWND wnd,
    int width,
    int height,
    webrtc::VideoTrackInterface* track_to_render)
    : wnd_(wnd), rendered_track_(track_to_render) {
  ::InitializeCriticalSection(&buffer_lock_);
  ZeroMemory(&bmi_, sizeof(bmi_));
  bmi_.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
  bmi_.bmiHeader.biPlanes = 1;
  bmi_.bmiHeader.biBitCount = 32;  // 32-bit ARGB, matching libyuv::I420ToARGB.
  bmi_.bmiHeader.biCompression = BI_RGB;
  bmi_.bmiHeader.biWidth = width;
  // Negative height => top-down DIB: the first scanline is the top row.
  bmi_.bmiHeader.biHeight = -height;
  bmi_.bmiHeader.biSizeImage =
      width * height * (bmi_.bmiHeader.biBitCount >> 3);
  // Subscribe this renderer to the track; dispatches to
  // VideoTrack::AddOrUpdateSink().
  rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
}
// Registers |sink| on the track and forwards the registration to the
// underlying source. |video_source_| actually points to a CapturerTrackSource;
// since CapturerTrackSource does not override AddOrUpdateSink, the call
// resolves to the base implementation, VideoTrackSource::AddOrUpdateSink().
void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
                                 const rtc::VideoSinkWants& wants) {
  RTC_DCHECK(worker_thread_->IsCurrent());
  VideoSourceBase::AddOrUpdateSink(sink, wants);
  rtc::VideoSinkWants modified_wants = wants;
  // A disabled track asks the source to feed this sink black frames.
  modified_wants.black_frames = !enabled();
  video_source_->AddOrUpdateSink(sink, modified_wants);
}
// Hands the sink down to the concrete source. source() is pure virtual on
// VideoTrackSource; via polymorphism this calls CapturerTrackSource::source(),
// which returns a raw pointer to |capturer_| (actually a VcmCapturer), so the
// call lands in VcmCapturer's inherited TestVideoCapturer::AddOrUpdateSink().
void VideoTrackSource::AddOrUpdateSink(
    rtc::VideoSinkInterface<VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  RTC_DCHECK(worker_thread_checker_.IsCurrent());
  source()->AddOrUpdateSink(sink, wants);
}
// Final link of the registration chain: stores the sink in the broadcaster
// (VideoBroadcaster::AddOrUpdateSink) and refreshes the video adapter so the
// capture format reflects the merged sink wants.
void TestVideoCapturer::AddOrUpdateSink(
    rtc::VideoSinkInterface<VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  broadcaster_.AddOrUpdateSink(sink, wants);
  UpdateVideoAdapter();
}
// Adds |sink| (or updates its wants) under the sinks lock, then recomputes
// the aggregate wants reported to the source.
void VideoBroadcaster::AddOrUpdateSink(
    VideoSinkInterface<webrtc::VideoFrame>* sink,
    const VideoSinkWants& wants) {
  RTC_DCHECK(sink != nullptr);
  rtc::CritScope cs(&sinks_and_wants_lock_);
  const bool is_new_sink = (FindSinkPair(sink) == nullptr);
  if (is_new_sink) {
    // A brand-new sink never saw the previous frame, so the next frame must
    // carry a full update rect for every sink.
    previous_frame_sent_to_all_sinks_ = false;
  }
  VideoSourceBase::AddOrUpdateSink(sink, wants);
  UpdateWants();
}
// Base-class bookkeeping: records a brand-new (sink, wants) pair, or updates
// the wants of an already-registered sink.
void VideoSourceBase::AddOrUpdateSink(
    VideoSinkInterface<webrtc::VideoFrame>* sink,
    const VideoSinkWants& wants) {
  RTC_DCHECK(sink != nullptr);
  SinkPair* sink_pair = FindSinkPair(sink);
  if (!sink_pair) {
    sinks_.push_back(SinkPair(sink, wants));
  } else {
    sink_pair->wants = wants;
  }
}
4.3 采集的像素格式的VideoFrame到渲染窗口的流程
MainWnd::VideoRenderer 就是一个 SINK(视频帧接收器),它实现了:
class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame>
当在 TestVideoCapturer::OnFrame(frame) 调用时,就会触发 broadcaster_.OnFrame(frame)
// Fans a captured frame out to every registered sink. Per sink, the frame is
// either discarded (rotation still pending), replaced by a black frame
// (disabled track), copied with a full update rect (sink missed the previous
// frame), or forwarded as-is (e.g. to MainWnd::VideoRenderer::OnFrame).
void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
  rtc::CritScope cs(&sinks_and_wants_lock_);
  bool current_frame_was_discarded = false;
  for (auto& sink_pair : sink_pairs()) {
    if (sink_pair.wants.rotation_applied &&
        frame.rotation() != webrtc::kVideoRotation_0) {
      // Calls to OnFrame are not synchronized with changes to the sink wants.
      // When rotation_applied is set to true, one or a few frames may get here
      // with rotation still pending. Protect sinks that don't expect any
      // pending rotation.
      RTC_LOG(LS_VERBOSE) << "Discarding frame with unexpected rotation.";
      sink_pair.sink->OnDiscardedFrame();
      current_frame_was_discarded = true;
      continue;
    }
    if (sink_pair.wants.black_frames) {
      // This sink asked for black frames (e.g. its track is disabled).
      webrtc::VideoFrame black_frame =
          webrtc::VideoFrame::Builder()
              .set_video_frame_buffer(
                  GetBlackFrameBuffer(frame.width(), frame.height()))
              .set_rotation(frame.rotation())
              .set_timestamp_us(frame.timestamp_us())
              .set_id(frame.id())
              .build();
      sink_pair.sink->OnFrame(black_frame);
    } else if (!previous_frame_sent_to_all_sinks_) {
      // Since last frame was not sent to some sinks, full update is needed.
      webrtc::VideoFrame copy = frame;
      copy.set_update_rect(
          webrtc::VideoFrame::UpdateRect{0, 0, frame.width(), frame.height()});
      sink_pair.sink->OnFrame(copy);
    } else {
      // Normal path, e.g. MainWnd::VideoRenderer::OnFrame below.
      sink_pair.sink->OnFrame(frame);
    }
  }
  previous_frame_sent_to_all_sinks_ = !current_frame_was_discarded;
}
// Sink callback: converts the incoming frame to ARGB into |image_| (under the
// buffer lock) and invalidates the window so it gets repainted.
void MainWnd::VideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) {
  {
    AutoLock<VideoRenderer> auto_lock(this);

    // Normalize the frame to an I420 buffer, applying any pending rotation.
    rtc::scoped_refptr<webrtc::I420BufferInterface> i420(
        video_frame.video_frame_buffer()->ToI420());
    if (video_frame.rotation() != webrtc::kVideoRotation_0) {
      i420 = webrtc::I420Buffer::Rotate(*i420, video_frame.rotation());
    }

    // (Re)allocate the ARGB buffer if the frame size changed.
    SetSize(i420->width(), i420->height());
    RTC_DCHECK(image_.get() != NULL);

    const int argb_stride =
        bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8;
    libyuv::I420ToARGB(i420->DataY(), i420->StrideY(), i420->DataU(),
                       i420->StrideU(), i420->DataV(), i420->StrideV(),
                       image_.get(), argb_stride, i420->width(),
                       i420->height());
  }
  // Queue a WM_PAINT for the whole client area.
  InvalidateRect(wnd_, NULL, TRUE);
}