在使用PeerConnection进行WebRTC开发时,其默认生成的SDP中包含较多的H264编码器信息,这会导致SDP内容偏大。如何在不修改WebRTC内部代码的前提下精简H264编码信息?方法很简单:只需定义webrtc::VideoEncoderFactory的子类,在子类的GetSupportedFormats()方法里裁剪编解码器信息即可,源码如下:
user_define_video_codecs_factory.h
#ifndef USER_DEFINE_VIDEO_CODECS_FACTORY_H_
#define USER_DEFINE_VIDEO_CODECS_FACTORY_H_

#include <memory>  // std::unique_ptr

#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"

namespace webrtc {

// Creates an encoder factory whose GetSupportedFormats() advertises only a
// single H264 profile plus VP8, so the SDP generated by PeerConnection stays
// small.
std::unique_ptr<VideoEncoderFactory> CreateUserDefineVideoEncoderFactory();

// Creates the matching decoder factory, advertising the same trimmed list.
std::unique_ptr<VideoDecoderFactory> CreateUserDefineVideoDecoderFactory();

}  // namespace webrtc

#endif  // USER_DEFINE_VIDEO_CODECS_FACTORY_H_
user_define_video_codecs_factory.cc
#include "user_define_video_codecs_factory.h"

#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "api/video_codecs/sdp_video_format.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/logging.h"
// Target H264 profile-level-id: "42e0" = constrained baseline profile,
// "1f" = level 3.1. Only the H264 SDP format carrying exactly this value is
// advertised by the factories below.
// A constexpr char array avoids the dynamic initialization (and potential
// static-initialization-order issues) a global std::string would have.
static constexpr char kDstH264ProfileLevelId[] = "42e01f";
// Reference: media/engine/internalencoderfactory.{h,cc} and
// media/engine/internaldecoderfactory.{h,cc}
namespace webrtc {
// Encoder factory that trims the advertised SDP codec list down to a single
// H264 variant (the one whose profile-level-id equals kDstH264ProfileLevelId)
// plus VP8. CreateVideoEncoder() still handles VP9 defensively in case a
// caller asks for it directly.
class UserDefineVideoEncoderFactory final : public webrtc::VideoEncoderFactory
{
public:
	// Returns at most one H264 format (the kDstH264ProfileLevelId variant, if
	// this build supports it) followed by VP8 — instead of every H264 variant
	// WebRTC would normally list.
	std::vector<SdpVideoFormat> GetSupportedFormats() const override
	{
		std::vector<SdpVideoFormat> supported_codecs;
		std::map<std::string, std::string> params;
		params["level-asymmetry-allowed"] = "1";
		params["packetization-mode"] = "1";
		params["profile-level-id"] = kDstH264ProfileLevelId;
		const webrtc::SdpVideoFormat dst_format("H264", params);
		// SdpVideoFormat::operator== compares name and parameters, so at most
		// one supported H264 variant can match; stop at the first hit.
		for (const webrtc::SdpVideoFormat& format : webrtc::SupportedH264Codecs())
		{
			if (format == dst_format)
			{
				supported_codecs.push_back(format);
				break;
			}
		}
		supported_codecs.push_back(SdpVideoFormat(cricket::kVp8CodecName));
		return supported_codecs;
	}

	// Every codec created here is a software encoder without an internal
	// source, regardless of the requested format (param intentionally unused).
	webrtc::VideoEncoderFactory::CodecInfo QueryVideoEncoder(const webrtc::SdpVideoFormat& /*format*/) const override
	{
		CodecInfo info;
		info.is_hardware_accelerated = false;
		info.has_internal_source = false;
		return info;
	}

	// Creates the software encoder matching |format.name|; returns nullptr
	// (after logging) for unsupported codec names.
	std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(const webrtc::SdpVideoFormat& format) override
	{
		if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
		{
			return H264Encoder::Create(cricket::VideoCodec(format));
		}
		if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
		{
			return VP8Encoder::Create();
		}
		if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
		{
			return VP9Encoder::Create(cricket::VideoCodec(format));
		}
		// Typo fixed in the log message ("created" -> "create").
		RTC_LOG(LS_ERROR) << "Trying to create encoder of unsupported format " << format.name;
		return nullptr;
	}
};
class UserDefineVideoDecoderFactory : public VideoDecoderFactory
{
public:
std::vector<SdpVideoFormat> GetSupportedFormats() const override
{
std::vector<SdpVideoFormat> formats;
std::map<std::string, std::string> params;
params["level-asymmetry-allowed"] = "1";
params["packetization-mode"] = "1";
params["profile-level-id"] = kDstH264ProfileLevelId;
webrtc::SdpVideoFormat dst_format("H264", params);
for (const SdpVideoFormat& format : SupportedH264Codecs())
{
if (format == dst_format)
{
formats.push_back(format);
break;
}
}
formats.push_back(SdpVideoFormat(cricket::kVp8CodecName));
return formats;
}
std::unique_ptr<VideoDecoder> CreateVideoDecoder(const SdpVideoFormat& format) override
{
if (!IsFormatSupported(GetSupportedFormats(), format))
{
RTC_LOG(LS_ERROR) << "Trying to create decoder for unsupported format";
return nullptr;
}
if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
return H264Decoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
return VP8Decoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
return VP9Decoder::Create();
RTC_NOTREACHED();
return nullptr;
}
private:
bool IsFormatSupported(const std::vector<webrtc::SdpVideoFormat>& supported_formats, const webrtc::SdpVideoFormat& format)
{
for (const webrtc::SdpVideoFormat& supported_format : supported_formats)
{
if (cricket::IsSameCodec(format.name, format.parameters, supported_format.name, supported_format.parameters))
{
return true;
}
}
return false;
}
};
std::unique_ptr<VideoEncoderFactory> CreateUserDefineVideoEncoderFactory()
{
return absl::make_unique<UserDefineVideoEncoderFactory>();
}
std::unique_ptr<VideoDecoderFactory> CreateUserDefineVideoDecoderFactory()
{
return absl::make_unique<UserDefineVideoDecoderFactory>();
}
}
如上,在通过CreatePeerConnectionFactory()创建PeerConnectionFactory时,传入由上面两个接口创建的视频编解码器工厂对象即可:
// Usage: create the trimmed-codec factories and transfer their ownership to
// CreatePeerConnectionFactory() via std::move.
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory = webrtc::CreateUserDefineVideoEncoderFactory();
std::unique_ptr<webrtc::VideoDecoderFactory> video_decoder_factory = webrtc::CreateUserDefineVideoDecoderFactory();
// NOTE(review): this example reuses the current thread as the network,
// worker and signaling thread; production code typically creates a dedicated
// rtc::Thread for each — confirm against your application's threading model.
// audio_device / audio_encoder_factory / audio_decoder_factory /
// audio_process are assumed to be created by the caller beforehand.
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> peer_connection_factory = webrtc::CreatePeerConnectionFactory(rtc::Thread::Current() /* network_thread */,
rtc::Thread::Current() /* worker_thread */,
rtc::Thread::Current() /* signaling_thread */,
audio_device /* default_adm */,
audio_encoder_factory,
audio_decoder_factory,
std::move(video_encoder_factory),
std::move(video_decoder_factory),
nullptr /* audio_mixer */,
audio_process /* audio_processing */);