//网上学习资料一大堆,但如果学到的知识不成体系,遇到问题时只是浅尝辄止,不再深入研究,那么很难做到真正的技术提升。
//一个人可以走的很快,但一群人才能走的更远!不论你是正从事IT行业的老鸟或是对IT行业感兴趣的新人,都欢迎加入我们的的圈子(技术交流、学习资源、职场吐槽、大厂内推、面试辅导),让我们一起学习成长!
}
//int y\_size = frame1->width\*frame1->height;
//fwrite(frame1->data[0], 1, y\_size, fp\_yuv); //Y
//fwrite(frame1->data[1], 1, y\_size / 4, fp\_yuv); //U
//fwrite(frame1->data[2], 1, y\_size / 4, fp\_yuv); //V
if ((RemoteVideoheight != frame1->height) && (RemoteVideowidth != frame1->width))
{
IsFistRecive = false;
//2.2、创建窗口
RemoteVideowidth = frame1->width;
RemoteVideoheight = frame1->height;
RemoteVideowin = SDL\_CreateWindowFrom(dlg->GetDlgItem(IDC_STATIC1)->GetSafeHwnd());
if (!RemoteVideowin) {
SDL\_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create window by SDL");
return;
}
//2.3、创建渲染器
RemoteVideorenderer = SDL\_CreateRenderer(RemoteVideowin, -1, 0);
if (!RemoteVideorenderer) {
SDL\_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create Renderer by SDL");
//要释放ffmpeg的相关内存
return;
}
//2.4、创建纹理
Remotetexture = SDL\_CreateTexture(RemoteVideorenderer,
SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING,
RemoteVideowidth,
RemoteVideoheight);
RemoteVideorect.x = 0;
RemoteVideorect.y = 0;
RemoteVideorect.w = RemoteVideowidth;
RemoteVideorect.h = RemoteVideoheight;
}
SDL\_UpdateYUVTexture(Remotetexture, NULL,
frame1->data[0], frame1->linesize[0],
frame1->data[1], frame1->linesize[1],
frame1->data[2], frame1->linesize[2]);
SDL\_RenderClear(RemoteVideorenderer);
SDL\_RenderCopy(RemoteVideorenderer, Remotetexture, NULL, &RemoteVideorect);
SDL\_RenderPresent(RemoteVideorenderer);
}
}
void H264De(const uint8_t *data, size_t data_size)
{
int ret;
while (data_size > 0) {
/\*pkt = av\_packet\_alloc();
frame = av_frame_alloc();*/
ret = av\_parser\_parse2(parser, c, &pkt->data, &pkt->size,
data, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
if (ret < 0) {
fprintf(stderr, "Error while parsing\n");
exit(1);
}
data += ret;
data_size -= ret;
if (pkt->size)
decode(c, frame, pkt);
/\*av\_packet\_free(&pkt);
av_frame_free(&frame);*/
}
}
std::thread *m_pVideoThread = NULL; // decode worker thread (runs VideoReadThread; creation currently commented out in testVideoCap)
int g_videobuffSize; // byte size of the encoded buffer at the front of quVideoBuff
bool bIsStartDecode = true; // loop flag for VideoReadThread; clear to stop the thread
void VideoReadThread(void *param)
{
uint8_t *data;
uint16_t firsttime;
uint16_t lasttime;
while (bIsStartDecode)
{
firsttime = av_gettime();
if (quVideoBuff.empty())
{
Sleep(2);
continue;
}
data = quVideoBuff.front();
H264De(data, g_videobuffSize);
quVideoBuff.pop();
free(data);
lasttime = av_gettime();
if (lasttime - firsttime > 63333)
{
av\_usleep(63333 - lasttime);
}
}
}
/*
视频采集的数据会在VideoCaptureDataCallback::OnIncomingCapturedFrame回调中返回
*/
class CameraCaptureCallback : public VideoCaptureDataCallback {
public:
CameraCaptureCallback()
{
}
~CameraCaptureCallback()
{
}
virtual void OnIncomingCapturedFrame(const int32\_t id,
const VideoFrame& videoFrame)
{
//printf("width:%d height:%d ntp\_time\_ms:%u render\_time\_ms:%u rotation:%d %d \n", videoFrame.width(), videoFrame.height(), videoFrame.ntp\_time\_ms(), videoFrame.render\_time\_ms(), videoFrame.rotation());
VideoFrame incoming_frame = videoFrame;
int64\_t current_time = clock_->TimeInMilliseconds();
incoming_frame.set\_render\_time\_ms(current_time);
int64\_t capture_ntp_time_ms;
if (videoFrame.ntp\_time\_ms() != 0) {
capture_ntp_time_ms = videoFrame.ntp\_time\_ms();
}
else if (videoFrame.render\_time\_ms() != 0) {
capture_ntp_time_ms = videoFrame.render\_time\_ms() + delta_ntp_internal_ms_;
}
else {
capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
}
incoming_frame.set\_ntp\_time\_ms(capture_ntp_time_ms);
incoming_frame.set\_timestamp(
90 \* static\_cast<uint32\_t>(incoming_frame.ntp\_time\_ms()));
rtc::scoped\_refptr<webrtc::VideoFrameBuffer> vfb = videoFrame.video\_frame\_buffer();
//将返回的数据更新到SDL纹理当中
SDL\_UpdateYUVTexture(Localtexture, NULL,
vfb.get()->DataY(), vfb.get()->StrideY(),
vfb.get()->DataU(), vfb.get()->StrideU(),
vfb.get()->DataV(), vfb.get()->StrideV());
//进行SDL刷新显示
SDL\_RenderClear(LocalVideorenderer);
SDL\_RenderCopy(LocalVideorenderer, Localtexture, NULL, &LocalVideorect);
SDL\_RenderPresent(LocalVideorenderer);
// 类将捕获的帧发送到视频发送流。
if (input)
input->IncomingCapturedFrame(incoming_frame);
}
virtual void OnCaptureDelayChanged(const int32\_t id,
const int32\_t delay)
{
}
};
/*
 * Stub UdpTransport subclass. InitializeSendSockets is overridden as a
 * no-op that reports success.
 */
class MyUdpTransport : public UdpTransport {
public:
    MyUdpTransport() {}
    ~MyUdpTransport() {}
    // No-op; returns 0 (success). The original had no return statement
    // in a non-void function, which is undefined behavior.
    virtual int32_t InitializeSendSockets(const char* ipAddr,
        const uint16_t rtpPort,
        const uint16_t rtcpPort = 0)
    {
        return 0;
    }
};
class MyTransport : public Transport {
public:
MyTransport()
{
}
~MyTransport()
{
}
virtual bool SendRtp(const uint8_t\* packet,
size\_t length,
const PacketOptions& options)
{
return true;
}
virtual bool SendRtcp(const uint8_t\* packet, size\_t length)
{
return true;
}
};
/*
 * UdpTransportData sink attached to the *send* socket pair; packets
 * arriving on those sockets are intentionally ignored.
 */
class MySendUdpTransportData : public UdpTransportData {
public:
    virtual ~MySendUdpTransportData() {};
    // Ignored.
    virtual void IncomingRTPPacket(const int8_t* incomingRtpPacket,
        const size_t rtpPacketLength,
        const char* fromIP,
        const uint16_t fromPort)
    {
    }
    // Ignored.
    virtual void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
        const size_t rtcpPacketLength,
        const char* fromIP,
        const uint16_t fromPort)
    {
    }
};
/*
所有接收到的调用的RTP和RTCP数据包都应该插入到这个PacketReceiver中
*/
class MyRecvUdpTransportData : public UdpTransportData {
public:
virtual ~MyRecvUdpTransportData() {};
virtual void IncomingRTPPacket(const int8_t\* incomingRtpPacket,
const size\_t rtpPacketLength,
const char\* fromIP,
const uint16\_t fromPort)
{
if (_call)
{
webrtc::PacketTime pt;
_call->Receiver()->DeliverPacket(MediaType::VIDEO, (const uint8_t \*)incomingRtpPacket, rtpPacketLength, pt);
}
}
virtual void IncomingRTCPPacket(const int8_t\* incomingRtcpPacket,
const size\_t rtcpPacketLength,
const char\* fromIP,
const uint16\_t fromPort)
{
if (_call)
{
webrtc::PacketTime pt;
_call->Receiver()->DeliverPacket(MediaType::VIDEO, (const uint8_t \*)incomingRtcpPacket, rtcpPacketLength, pt);
}
}
};
/*
 * Receives each encoded (H.264) frame from the encoder and feeds it
 * straight into the local FFmpeg decoder for loopback display.
 */
class MyEncodedFrameCallback : public EncodedFrameObserver {
public:
    virtual ~MyEncodedFrameCallback() {}
    virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame)
    {
        // Decode synchronously in the callback.
        H264De(encoded_frame.data_, encoded_frame.length_);
        // NOTE(review): the original memcpy'd encoded data into
        // g_videoBuff, whose malloc was commented out — a write through
        // an unallocated pointer. Removed; re-enable the queueing path
        // only together with the malloc/g_videobuffSize/push lines.
        printf("EncodedFrameCallback length:%zu type:%d \n ",
            (size_t)encoded_frame.length_, (int)encoded_frame.frame_type_);
    }
    virtual void OnEncodeTiming(int64_t capture_ntp_ms, int encode_duration_ms) {}
};
uint8_t* m_uBuffer = NULL; // scratch buffer for decoded YUV; allocation is currently commented out in FrameCallback below
// YUV data after decoding
/*
 * Hook invoked with each decoded I420 frame, allowing the frame
 * content to be modified in place. Currently a no-op: the SDL
 * rendering of the remote frame that used to live here is disabled
 * (an equivalent render path exists in the FFmpeg decode callback).
 *
 * Fix vs. original: a bare, uncommented Chinese text line sat in the
 * middle of the otherwise fully commented-out body, breaking the build.
 */
class MyI420FrameCallback : public I420FrameCallback {
public:
    virtual ~MyI420FrameCallback() {}
    // This function is called with a I420 frame allowing the user to
    // modify the frame content. Intentionally empty for now.
    virtual void FrameCallback(VideoFrame* video_frame)
    {
        // Disabled code previously here: first-frame creation of the
        // remote SDL window/renderer/texture from IDC_STATIC1, followed
        // by per-frame SDL_UpdateYUVTexture / SDL_RenderClear /
        // SDL_RenderCopy / SDL_RenderPresent of the decoded planes.
    }
};
// Global capture/transport objects wired together in testVideoCap().
CameraCaptureCallback *callback = new CameraCaptureCallback();
// Fixed: the template-argument brackets were missing around
// webrtc::VideoCaptureModule, which does not compile.
rtc::scoped_refptr<webrtc::VideoCaptureModule> module = NULL;
webrtc::VideoCodec _videoCodec;
MyTransport myTransport;
MySendUdpTransportData mySendUdpTransportData;
MyRecvUdpTransportData myRecvUdpTransportData;
MyEncodedFrameCallback myEncodeFrameCallback;
MyI420FrameCallback myI420FrameCallback;
/*
 * One-time FFmpeg H.264 decoder setup: registers codecs and allocates
 * the file-global parser, codec context, packet and frame used by
 * H264De(). Exits the process on any failure.
 *
 * Fix vs. original: the fprintf string literals used curly
 * typographic quotes (“…”), which are not valid C++ string syntax.
 */
void initdecode()
{
    av_register_all();
    avcodec_register_all();
    h264codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!h264codec) {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }
    parser = av_parser_init(h264codec->id);
    if (!parser) {
        fprintf(stderr, "parser not found\n");
        exit(1);
    }
    c = avcodec_alloc_context3(h264codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        exit(1);
    }
    if (avcodec_open2(c, h264codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        exit(1);
    }
    pkt = av_packet_alloc();
    frame = av_frame_alloc();
}
void CaptureVideo::testVideoCap()
{
initdecode();
//m\_pVideoThread = new std::thread(VideoReadThread, this);
VideoCaptureModule::DeviceInfo\* deviceInfo = VideoCaptureFactory::CreateDeviceInfo(0);
int nNum = deviceInfo->NumberOfDevices();
char deviceNameUTF8[128] = { 0 };
char deviceUniqueIdUTF8[128] = { 0 };
for (int i = 0; i < 1; i++)
{
if (deviceInfo->GetDeviceName(i, deviceNameUTF8, 128, deviceUniqueIdUTF8, 128, NULL, 0) == 0)
{
printf("camera: id:%d name:%s guid:%s \n", i, deviceNameUTF8, deviceUniqueIdUTF8);
}
}
int32\_t width;
int32\_t height;
int32\_t maxFPS;
int32\_t expectedCaptureDelay;
RawVideoType rawType;
VideoCodecType codecType;
VideoCaptureCapability capability;
int numOfCapabilyty = deviceInfo->NumberOfCapabilities(deviceUniqueIdUTF8);
for (int i = 0; i < 1; i++)
{
deviceInfo->GetCapability(deviceUniqueIdUTF8, i, capability);
printf(" capabilityId:%d width:%d height:%d maxFPS:%d expectedCaptureDelay:%d rawType:%d codecType:%d \n",
i, capability.width, capability.height, capability.maxFPS, capability.expectedCaptureDelay, capability.rawType, capability.codecType);
}
delete deviceInfo;
LocalVideowidth = capability.width;
LocalVideoheight = capability.height;
//2、SDL显示相关初始化
//2.1、初始化init
if (SDL\_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
SDL\_LogError(SDL_LOG_CATEGORY_APPLICATION, "Could not initialize SDL - %s\n", SDL\_GetError());
return;
}
//2.2、创建窗口
LocalVideowin = SDL\_CreateWindowFrom(dlg->GetDlgItem(IDC_STATIC2)->GetSafeHwnd());
if (!LocalVideowin) {
SDL\_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create window by SDL");
return;
}
//2.3、创建渲染器
LocalVideorenderer = SDL\_CreateRenderer(LocalVideowin, -1, 0);
if (!LocalVideorenderer) {
SDL\_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create Renderer by SDL");
//要释放ffmpeg的相关内存
return;
}
//2.4、创建纹理
Localtexture = SDL\_CreateTexture(LocalVideorenderer,
SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING,
LocalVideowidth,
LocalVideoheight);
LocalVideorect.x = 0;
LocalVideorect.y = 0;
LocalVideorect.w = LocalVideowidth;
LocalVideorect.h = LocalVideoheight;
module = VideoCaptureFactory::Create(0, deviceUniqueIdUTF8);
if (module)
{
module->RegisterCaptureDataCallback(\*callback);
module->StartCapture(capability);//开始捕获摄像头
}
//传输模块
Call::Config callConfig;
_call = Call::Create(callConfig);
//初始化发送端口 sendSocket\_transport\_ 是一对一对的 对于 发送有自己这边的端口及远端接口
uint8\_t socket_threads = 1;
int return_value;
UdpTransport\*sendSocket_transport_ = UdpTransport::Create(1, socket_threads);
static const int kNumReceiveSocketBuffers = 500;
return_value = sendSocket_transport_->InitializeReceiveSockets(&mySendUdpTransportData, 5008);//设置接收RTP端口
if (return_value == 0) {
sendSocket_transport_->StartReceiving(kNumReceiveSocketBuffers);
}
sendSocket_transport_->InitializeSendSockets("192.168.36.98", 4008);//设置RTP的ip和端口
//视频发送流下信息配置
VideoSendStream::Config streamConfig(sendSocket_transport_);
streamConfig.encoder_settings.payload_name = "H264";//H264 VP8
streamConfig.encoder_settings.payload_type = 121;
streamConfig.encoder_settings.encoder = webrtc::VideoEncoder::Create(VideoEncoder::kH264);//kH264 kVp8
streamConfig.rtp.ssrcs.push\_back(1);
//streamConfig.rtp.ssrcs.push\_back(2);
//streamConfig.rtp.ssrcs.push\_back(3);
//视频编码配置参数
webrtc::VideoEncoderConfig encodeConfig;
VCMCodecDataBase::Codec(webrtc::kVideoCodecH264, &_videoCodec);//kVideoCodecVP8 kVideoCodecH264
//encodeConfig.encoder\_specific\_settings = new rtc::RefCountedObject<webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings >
encodeConfig.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
VideoStream videoStream;
///\*videoStream.width = 320;
//videoStream.height = 180;
//videoStream.max\_framerate = 30;
//videoStream.min\_bitrate\_bps = 50000;
//videoStream.target\_bitrate\_bps = videoStream.max\_bitrate\_bps = 150000;
//videoStream.max\_qp = 56;
//encodeConfig.streams.push\_back(videoStream);
//videoStream.width = 640;
//videoStream.height = 360;
//videoStream.max\_framerate = 30;
//videoStream.min\_bitrate\_bps = 200000;
//videoStream.target\_bitrate\_bps = videoStream.max\_bitrate\_bps = 450000;
//videoStream.max\_qp = 56;
//encodeConfig.streams.push\_back(videoStream);\*/
videoStream.width = 1280;
videoStream.height = 720;
videoStream.max_framerate = 30;
videoStream.min_bitrate_bps = 700000;
videoStream.target_bitrate_bps = videoStream.max_bitrate_bps = 1500000;
videoStream.max_qp = 56;
encodeConfig.streams.push\_back(videoStream);
//网上学习资料一大堆,但如果学到的知识不成体系,遇到问题时只是浅尝辄止,不再深入研究,那么很难做到真正的技术提升。
//一个人可以走的很快,但一群人才能走的更远!不论你是正从事IT行业的老鸟或是对IT行业感兴趣的新人,都欢迎加入我们的的圈子(技术交流、学习资源、职场吐槽、大厂内推、面试辅导),让我们一起学习成长!
;
videoStream.min_bitrate_bps = 700000;
videoStream.target_bitrate_bps = videoStream.max_bitrate_bps = 1500000;
videoStream.max_qp = 56;
encodeConfig.streams.push_back(videoStream);
//[外链图片转存中…(img-UPs5aqyM-1715844398205)]
//[外链图片转存中…(img-vMyu0kEv-1715844398205)]
//网上学习资料一大堆,但如果学到的知识不成体系,遇到问题时只是浅尝辄止,不再深入研究,那么很难做到真正的技术提升。
//一个人可以走的很快,但一群人才能走的更远!不论你是正从事IT行业的老鸟或是对IT行业感兴趣的新人,都欢迎加入我们的的圈子(技术交流、学习资源、职场吐槽、大厂内推、面试辅导),让我们一起学习成长!