Using the NetEase Live Streaming (Nlss) SDK: Video Callbacks

I. Merged (composited) video callback

1 Video callback

 

The callback registered with Nlss_SetVideoSamplerCB receives each merged (composited) preview frame as an RGB32 buffer.

	void  LsSession::SetVideoSamplerCB(){
		NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, VideoCallback);
	}
	void VideoCallback(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *sampler)
	{
		if (sampler)
		{
			int ret = sampler->iDataSize;
			if (ret > 0)
			{
				
				int nLen = sampler->iWidth*sampler->iHeight;
				// Flag whether the current frame is completely black (IsBlack is sketched below).
				LssManange::IsBlackVideo = IsBlack((DWORD*)sampler->puaData, nLen);

				timeb time_now;
				ftime(&time_now); // seconds
				__int64 cur_timestamp = time_now.time * 1000 + time_now.millitm; // milliseconds
				// Store the ARGB frame; AddVideoFrame converts it to I420 internally.
				video_frame_mng_.AddVideoFrame(true, cur_timestamp, (const char*)sampler->puaData, sampler->iDataSize, \
					sampler->iWidth, sampler->iHeight, "", nim_comp::VideoFrameMng::Ft_ARGB);
			}
		}
	}
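
IsBlack is not listed anywhere in this post; it only flags an all-black frame. A minimal sketch of what such a check might look like, assuming it samples up to nTryTime pixels spread evenly across the buffer and reports black only when every sample has zero RGB components:

	// Hypothetical sketch of IsBlack (the real implementation is not shown).
	// Sample up to nTryTime pixels spread evenly over the frame and treat the
	// frame as black only if every sampled pixel has zero RGB components.
	bool IsBlack(DWORD *pData, long nLen, int nTryTime /*= 100*/)
	{
		if (pData == NULL || nLen <= 0)
			return true;

		long step = nLen / nTryTime;
		if (step <= 0)
			step = 1;

		for (long i = 0; i < nLen; i += step)
		{
			if ((pData[i] & 0x00FFFFFF) != 0)  // ignore the alpha channel
				return false;
		}
		return true;
	}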

The video data is stored in:

	// video data
	nim_comp::VideoFrameMng video_frame_mng_; 
	nim_comp::VideoFrameMng* LssManange::GetVideoFrameMng()
	{
		return &video_frame_mng_;
	}

What exactly is this class?

namespace nim_comp
{
	struct PicRegion // describes one block of pixel data, convenient for passing as a parameter
	{
		PicRegion()
		{
			pdata_ = NULL;
			//subtype_ = nim::kNIMVideoSubTypeARGB;
			size_max_ = 0;
			size_ = 0;
		}

		~PicRegion()
		{
			Clear();
		}
		void Clear()
		{
			if (pdata_)
			{
				delete[] pdata_;
				pdata_ = NULL;
			}
			size_max_ = 0;
			size_ = 0;
		}
		int ResetData(uint64_t time, const char* data, int size, unsigned int width, unsigned int height/*, nim::NIMVideoSubType subtype*/)
		{
			if (size > size_max_)
			{
				if (pdata_)
				{
					delete[] pdata_;
					pdata_ = NULL;
				}
				pdata_ = new char[size];
				size_max_ = size;
			}
			width_ = width;
			height_ = height;
			timestamp_ = time;
			//subtype_ = subtype;
			size_ = size;
			memcpy(pdata_, data, size);
			return size;
		}

		//nim::NIMVideoSubType subtype_;
		char*		pdata_;         // pointer to the pixel data
		int			size_max_;
		int			size_;
		long        width_;         // width in pixels
		long        height_;        // height in pixels
		uint64_t	timestamp_;     // timestamp in milliseconds
	};
	class VideoFrameMng
	{
	public:
		enum FrameType
		{
			Ft_I420 = 0,
			Ft_ARGB,
			Ft_ARGB_r,
		};
		VideoFrameMng();
		~VideoFrameMng();

		void Clear();
		void AddVideoFrame(bool capture, int64_t time, const char* data, int size, int width, int height, const std::string& json, FrameType frame_type = Ft_ARGB_r);
		bool GetVideoFrame(bool local_show, std::string account, int64_t& time, char* out_data, int& width, int& height, bool mirror = false, bool argb_or_yuv = true);
	
	public:
		nbase::NLock  lock_;
		PicRegion capture_video_pic_;
		std::map<std::string, PicRegion*> recv_video_pic_list_;
	};
}
	void VideoFrameMng::AddVideoFrame(bool capture, int64_t time, const char* data, int size, int width, int height, const std::string& json, FrameType frame_type)
	{
		Json::Value valus;
		Json::Reader reader;
		std::string account;
		if (reader.parse(json, valus))
		{
			//ToDo
			//account = valus[nim::kNIMDeviceDataAccount].asString();
		}
		if (!capture && account.empty())
		{
			return;
		}
		nbase::NAutoLock auto_lock(&lock_);
		//nim::NIMVideoSubType subtype = nim::kNIMVideoSubTypeI420;
		timeb time_now;
		ftime(&time_now); // seconds
		int64_t cur_timestamp = time_now.time * 1000 + time_now.millitm; // milliseconds
		const char* src_buffer = data;
		std::string ret_data;
		if (frame_type != Ft_I420)
		{
			int byte_width = width * 4;
			width -= width % 2;
			height -= height % 2;
			int wxh = width * height;
			ret_data.append(wxh * 3 / 2, (char)0);
			uint8_t* des_y = (uint8_t*)ret_data.c_str();
			uint8_t* des_u = des_y + wxh;
			uint8_t* des_v = des_u + wxh / 4;
			const uint8_t* src_argb = (const uint8_t*)data;
			if (frame_type == Ft_ARGB_r)
			{
				src_argb = (const uint8_t*)data + size - byte_width;
				byte_width = -byte_width;
			}
			libyuv::ARGBToI420(src_argb, byte_width,
				des_y, width,
				des_u, width / 2,
				des_v, width / 2,
				width, height);
			src_buffer = ret_data.c_str();
			size = wxh * 3 / 2;
		}
		if (capture)
		{
			capture_video_pic_.ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
		}
		else
		{
			auto it = recv_video_pic_list_.find(account);
			if (it != recv_video_pic_list_.end())
			{
				it->second->ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
			}
			else
			{
				PicRegion* pic_info = new PicRegion;
				pic_info->ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
				recv_video_pic_list_[account] = pic_info;
			}
		}
	}
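
GetVideoFrame is declared above but its implementation is not part of this post. It is the counterpart of AddVideoFrame: under the lock it picks either the capture frame or a received frame, scales the stored I420 data to fit the requested size while keeping the aspect ratio, and converts it back to ARGB for the control. A rough sketch under those assumptions, again using libyuv (mirroring and error handling are omitted):

	// Rough sketch only; the real GetVideoFrame is not shown in this post.
	// On input width/height are the control size; on output they are the
	// (aspect-ratio preserving) size of the frame written to out_data.
	bool VideoFrameMng::GetVideoFrame(bool local_show, std::string account, int64_t& time,
		char* out_data, int& width, int& height, bool mirror, bool argb_or_yuv)
	{
		nbase::NAutoLock auto_lock(&lock_);
		PicRegion* pic = NULL;
		if (local_show)
		{
			pic = &capture_video_pic_;
		}
		else
		{
			auto it = recv_video_pic_list_.find(account);
			if (it != recv_video_pic_list_.end())
				pic = it->second;
		}
		if (!pic || !pic->pdata_ || pic->width_ <= 0 || pic->height_ <= 0)
			return false;
		if (pic->timestamp_ == time)  // no new frame since the last call
			return false;
		time = pic->timestamp_;

		// Fit the video into the requested rectangle, keeping its aspect ratio.
		int src_w = (int)pic->width_, src_h = (int)pic->height_;
		int dst_w = width, dst_h = height;
		if (src_w * height > src_h * width)
			dst_h = src_h * width / src_w;
		else
			dst_w = src_w * height / src_h;
		dst_w -= dst_w % 2;
		dst_h -= dst_h % 2;
		if (dst_w <= 0 || dst_h <= 0)
			return false;

		// Scale the stored I420 frame, then convert it to ARGB for the control.
		std::string scaled(dst_w * dst_h * 3 / 2, (char)0);
		const uint8_t* src_y = (const uint8_t*)pic->pdata_;
		const uint8_t* src_u = src_y + src_w * src_h;
		const uint8_t* src_v = src_u + src_w * src_h / 4;
		uint8_t* dst_y = (uint8_t*)scaled.c_str();
		uint8_t* dst_u = dst_y + dst_w * dst_h;
		uint8_t* dst_v = dst_u + dst_w * dst_h / 4;
		libyuv::I420Scale(src_y, src_w, src_u, src_w / 2, src_v, src_w / 2, src_w, src_h,
			dst_y, dst_w, dst_u, dst_w / 2, dst_v, dst_w / 2, dst_w, dst_h,
			libyuv::kFilterBilinear);
		libyuv::I420ToARGB(dst_y, dst_w, dst_u, dst_w / 2, dst_v, dst_w / 2,
			(uint8_t*)out_data, dst_w * 4, dst_w, dst_h);
		// (mirror could be handled here with libyuv::ARGBMirror into a temporary
		//  buffer; argb_or_yuv is ignored in this sketch.)

		width = dst_w;
		height = dst_h;
		return true;
	}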

2 Video preview

At this point we have the video data; it lives in the VideoFrameMng video_frame_mng_ object.

The next step is to hook this object up to a bitmap control.

Define a custom duilib control and pass the VideoFrameMng object in as a constructor parameter:

ui::Control* NLSLiveForm::CreateControl(const std::wstring& pstrClass)
{
	if (pstrClass == _T("BitmapControl"))
	{
		return new ui::CBitmapControl(nim_nls::LssManange::GetVideoFrameMng());
	}
	return NULL;
}

How is this bitmap control defined?

namespace ui
{
	class CBitmapControl : public ui::Box
	{
	public:
		CBitmapControl(nim_comp::VideoFrameMng* video_frame_mng);
		~CBitmapControl(void);

		void SetAccount(std::string account){ account_ = account; }
		std::string GetAccount() { return account_; }
		void Paint(HDC hDC, const UiRect& rcPaint) override;
		// clear the cached frame data
		void Clear();

		bool Refresh(Window* wnd, bool capture = true, bool mirror = false, bool bCameraIsOpen = false);
		bool RefreshSuccess(){ return is_refresh_success_; };
	    

		void SetAutoSize(bool auto_size){ auto_size_ = auto_size; }

		bool IsRefreshTimeout();

		RECT  GetDrawMapMargin();
		RECT  GetPictureMargin();

		// add by yujian
		bool IsBlack(DWORD *pData, long nLen,int nTryTime=100);

	protected:
		std::string account_;
		bool auto_size_;
		int64_t timestamp_;
		std::string data_;
		int width_;
		int height_;
		int draw_map_x_;
		int draw_map_y_;
		int draw_map_width_;
		int draw_map_height_;
		bool is_refresh_success_;
		RECT DrawMapRect;
		RECT PictueMapRect;

		Window* parent_wnd_;
		nim_comp::VideoFrameMng* video_frame_mng_;

		bool m_bCameraIsOpen;
	};

}


	CBitmapControl::CBitmapControl(nim_comp::VideoFrameMng* video_frame_mng)
	{
		m_bCameraIsOpen = true;
		video_frame_mng_ = video_frame_mng;
		timestamp_ = 0;
		width_ = 0;
		height_ = 0;
		auto_size_ = false;
		is_refresh_success_ = false;
		DrawMapRect.bottom = 0;
		DrawMapRect.left= 0;
		DrawMapRect.right = 0;
		DrawMapRect.top = 0;
	}

Refresh copies the latest frame into the data_ member, scaled to fit the control:

	bool CBitmapControl::Refresh(Window* wnd, bool capture, bool mirror, bool bCameraIsOpen)
	{
		m_bCameraIsOpen = bCameraIsOpen;

		int item_w = m_rcItem.right - m_rcItem.left;
		int item_h = m_rcItem.bottom - m_rcItem.top;
		if (auto_size_)
		{
			item_w = GetMaxWidth();
			item_h = GetMaxHeight();
		}
		bool ret = false;
		if (item_w > 0 && item_h > 0)
		{
			parent_wnd_ = wnd;
			data_.resize(item_w * item_h * 4);

			try{
				// item_w / item_h are adjusted to the video's aspect ratio; on return they hold the video's width and height
				ret = video_frame_mng_->GetVideoFrame(true, (capture ? "" : account_), timestamp_, (char*)data_.c_str(), item_w, item_h, mirror);
				if (ret)
				{
					width_ = item_w;
					height_ = item_h;
					PictueMapRect.left = 0;
					PictueMapRect.top = 0;
					PictueMapRect.right = width_;
					PictueMapRect.bottom = height_;
					if (auto_size_)
					{
						SetFixedWidth(width_);
						SetFixedHeight(height_);
					}
					Invalidate();
				}
			}
			catch (...){
				writelog3(LOG_LEVEL_CRITICAL, "CBitmapControl::Refresh catch exception!");
				throw "CBitmapControl::Refresh";
			}

		}
		is_refresh_success_ = ret;

		return ret;
	}
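
Nothing in the listing actually calls Refresh; something has to drive it at the preview frame rate, typically a UI timer in the form that owns the control. A hypothetical sketch (bitmap_ctrl_, OnPreviewTimer and the timer wiring are illustrative, not part of the original code):

	// Hypothetical: called by a UI timer (e.g. every 40 ms for ~25 fps).
	// bitmap_ctrl_ is assumed to be the CBitmapControl created in CreateControl.
	void NLSLiveForm::OnPreviewTimer()
	{
		if (bitmap_ctrl_ != NULL)
		{
			// Pull the latest merged frame from VideoFrameMng and repaint the control.
			bitmap_ctrl_->Refresh(this, true /*capture*/, false /*mirror*/, true /*camera open*/);
		}
	}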

Override void Paint(HDC hDC, const UiRect& rcPaint) to draw the contents of data_ into the window's background surface:

	void CBitmapControl::Paint(HDC hDC, const UiRect& rcPaint)
	{
		static bool  bDrawing = false;
		if (bDrawing)
			return;
		bDrawing = true;
		try
		{
			if (!::IntersectRect(&m_rcPaint, &rcPaint, &m_rcItem))
			{
				bDrawing = false;
				return;
			}
			Control::Paint(hDC, rcPaint);
			//paint bitmap
			
			if (width_ * height_ > 0)
			{
				int item_w = m_rcItem.right - m_rcItem.left;  // control width
				int item_h = m_rcItem.bottom - m_rcItem.top;  // control height
				int item_x = m_rcItem.left;
				int item_y = m_rcItem.top;

				int source_w = width_;   // video width
				int source_h = height_;  // video height

				if (source_w > 0 && source_h > 0 && parent_wnd_)
				{
					// center the video inside the control
					item_x += (item_w - source_w) / 2;
					item_y += (item_h - source_h) / 2;


					// parent window client size
					UiRect rcClient;
					::GetClientRect(parent_wnd_->GetHWND(), &rcClient);
					int width = rcClient.right - rcClient.left;
					int height = rcClient.bottom - rcClient.top;


					// compute the actual region to draw
					int draw_x = max(rcPaint.left, item_x);
					draw_x = max(m_rcItem.left, draw_x);
					int draw_y = max(rcPaint.top, item_y);
					draw_y = max(m_rcItem.top, draw_y);
					int draw_h = min(rcPaint.bottom - draw_y, min(item_y + source_h, m_rcItem.bottom) - draw_y);
					draw_h = max(draw_h, 0);
					int src_x = draw_x - item_x;
					int src_y = draw_y - item_y;
					int src_w = min(rcPaint.right - draw_x, min(item_x + source_w, m_rcItem.right) - draw_x);
					src_w = max(src_w, 0);

					// row byte widths of destination, source and painted region
					int dest_byte_width = width * 4;
					int src_byte_width = source_w * 4;
					int paint_byte_width = src_w * 4;


					char* dest_data = (char*)parent_wnd_->GetBackgroundBits();

					// start at the bottom row of the destination and walk upwards
					int bottom = height - draw_y - 1;
					dest_data += bottom * dest_byte_width + draw_x * 4;



					char* src_data = (char*)data_.c_str();
					int nLen = source_h*source_w;
					bool isBlackVideo = IsBlack((DWORD*)src_data, nLen);

					src_data += src_y * src_byte_width + src_x * 4;


					for (int i = 0; i < draw_h; ++i)
					{
						memcpy(dest_data, src_data, paint_byte_width);
						dest_data -= dest_byte_width;
						src_data += src_byte_width;
					}
					DrawMapRect.left = draw_x;
					DrawMapRect.top = draw_y;
					DrawMapRect.bottom = draw_y + draw_h;
					DrawMapRect.right = draw_x + src_w;
				}
			}
					

			// paint child controls
			for (auto it = m_items.begin(); it != m_items.end(); it++)
			{
				Control* pControl = *it;
				if (!pControl->IsVisible()) continue;
				UiRect controlPos = pControl->GetPos();
				if (!::IntersectRect(&m_rcPaint, &rcPaint, &controlPos)) continue;
				pControl->AlphaPaint(hDC, rcPaint);
			}
		}
		catch (...)
		{
			bDrawing = false;
			throw "CBitmapControl::Paint";
		}
		bDrawing = false;
	}
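
Paint writes pixels directly into the memory returned by parent_wnd_->GetBackgroundBits() and starts at row height - draw_y - 1, stepping the destination pointer backwards for each source row. That only makes sense if the window's background surface is a 32-bit bottom-up DIB, where the first row in memory is the bottom row of the image. Purely as an illustration of that assumption (this is not duilib or SDK code):

	// Illustration only: a 32-bit bottom-up DIB section of the kind Paint assumes.
	// With a positive biHeight, the pointer returned in ppvBits addresses the
	// bottom image row first, so image row y lives at offset (height - 1 - y) * width * 4.
	void* CreateBackgroundBits(HDC hdc, int width, int height, HBITMAP* out_bitmap)
	{
		BITMAPINFO bmi = { 0 };
		bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
		bmi.bmiHeader.biWidth = width;
		bmi.bmiHeader.biHeight = height;       // positive height -> bottom-up storage
		bmi.bmiHeader.biPlanes = 1;
		bmi.bmiHeader.biBitCount = 32;         // 4 bytes per pixel, matching data_
		bmi.bmiHeader.biCompression = BI_RGB;

		void* bits = NULL;
		*out_bitmap = ::CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &bits, NULL, 0);
		return bits;                           // analogous to GetBackgroundBits()
	}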

 

3 Registering a custom callback

Because two processes need to share the camera, I register my own callback function:

 

	void LsSession::SetVideoSamplerCB(PFN_NLSS_MERGED_VIDEO_SAMPLER_CB cb)
	{
		//NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, VideoCallback);
		NLS_SDK_GET_FUNC(Nlss_SetVideoSamplerCB)(LsClient, cb);
	}

 


/**
*  @brief Callback that delivers a snapshot of the latest frame after the child video streams have been merged
*
*  @param hNLSService  live-streaming instance
*  @param pstSampler   pointer to the structure holding the latest merged video frame
*/
typedef void(*PFN_NLSS_MERGED_VIDEO_SAMPLER_CB)(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *pstSampler);

The callback itself:

void VideoCallback_G(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *sampler)
{
	if (sampler)
	{
		int ret = sampler->iDataSize;
		if (ret > 0)
		{

			int nLen = sampler->iWidth*sampler->iHeight;
			nim_nls::LssManange::IsBlackVideo = IsBlack((DWORD*)sampler->puaData, nLen);

			timeb time_now;
			ftime(&time_now); // seconds
			__int64 cur_timestamp = time_now.time * 1000 + time_now.millitm; // milliseconds
			nim_nls::LssManange::GetVideoFrameMng()->AddVideoFrame(true, cur_timestamp, (const char*)sampler->puaData, sampler->iDataSize, \
				sampler->iWidth, sampler->iHeight, "", nim_comp::VideoFrameMng::Ft_ARGB);

			//capture_video_pic_.ResetData(cur_timestamp, src_buffer, size, width, height/*, subtype*/);
			char* pData=m_memMng.getData();

			INT32 nFps = g_pLiveForm->m_videoFps;
			INT32 nWidth = sampler->iWidth;
			INT32 nHeight = sampler->iHeight;

			INT32 Info[] = { nFps, nWidth, nHeight };
			int len = sizeof(Info) / sizeof(Info[0]);

			if (pData)
			{
				// Wait until the other process has consumed the previous frame,
				// then mark the buffer as being written.
				WaitForSingleObject(m_memMng.m_hReadEvent, INFINITE);
				ResetEvent(m_memMng.m_hWriteEvent);

				// 12-byte header (fps, width, height) followed by the I420 frame
				// that AddVideoFrame stored in capture_video_pic_.
				memcpy((INT32*)pData, Info, sizeof(INT32)*len);
				memcpy(pData + 12, nim_nls::LssManange::GetVideoFrameMng()->capture_video_pic_.pdata_, nim_nls::LssManange::GetVideoFrameMng()->capture_video_pic_.size_);

				// Signal that a complete frame is ready for the reader.
				SetEvent(m_memMng.m_hWriteEvent);
			}

		}
	}
}

Register it with the live-streaming session:

	m_LiveStreaming.SetVideoSamplerCB(VideoCallback_G);
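
On the other side, the second process reads the same layout back: a 12-byte header of three INT32 values (fps, width, height) followed by the I420 frame that AddVideoFrame stored (width * height * 3 / 2 bytes). A hypothetical reader, assuming m_memMng wraps a named file mapping plus the two events used above; all names here are illustrative, since the post does not show how m_memMng is implemented:

	#include <windows.h>
	#include <vector>

	// Hypothetical reader in the second process; mapping and event names are illustrative.
	bool ReadSharedFrame(std::vector<char>& frame, INT32& fps, INT32& width, INT32& height)
	{
		HANDLE hMap   = ::OpenFileMappingA(FILE_MAP_READ, FALSE, "Local\\NlssPreviewFrame");
		HANDLE hWrite = ::OpenEventA(SYNCHRONIZE, FALSE, "Local\\NlssFrameWritten");
		HANDLE hRead  = ::OpenEventA(EVENT_MODIFY_STATE, FALSE, "Local\\NlssFrameRead");
		if (!hMap || !hWrite || !hRead)
			return false;

		bool ok = false;
		const char* pData = (const char*)::MapViewOfFile(hMap, FILE_MAP_READ, 0, 0, 0);
		if (pData)
		{
			::WaitForSingleObject(hWrite, INFINITE);   // wait for a complete frame
			::ResetEvent(hRead);                       // keep the writer out while copying

			const INT32* info = (const INT32*)pData;   // header: fps, width, height
			fps = info[0]; width = info[1]; height = info[2];
			// The body is the I420 frame produced by AddVideoFrame: width*height*3/2 bytes.
			frame.assign(pData + 12, pData + 12 + width * height * 3 / 2);
			ok = true;

			::SetEvent(hRead);                         // allow the writer to publish the next frame
			::UnmapViewOfFile(pData);
		}
		::CloseHandle(hMap); ::CloseHandle(hWrite); ::CloseHandle(hRead);
		return ok;
	}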

 

4 Custom bitmap control

To preview the video in another window, create the same custom bitmap control there and hand it the shared VideoFrameMng:

	if (pstrClass == _T("BitmapControl")) // Win32 control
	{
		return new ui::CBitmapControl(nim_nls::LssManange::GetVideoFrameMng());
	}

 

II. Single-video (solo) callback

The SDK exposes two per-child functions for solo preview:

	void    Nlss_ChildVideoSetSoloPreviewCB(_HNLSSCHILDSERVICE hNLSSChild, PFN_NLSS_VIDEOSAMPLER_CB pFunVideoSamplerCB);
	void    Nlss_ChildVideoSwitchSoloPreview(_HNLSSCHILDSERVICE hNLSSChild, bool bOn);

The LsSession wrappers around them:

	void LsSession::OnChildVideoSetSoloPreviewCB(const std::string& accid, PFN_NLSS_CHILD_VIDEO_SAMPLER_CB pFunVideoSamplerCB)
	{
		std::map<std::string, _HNLSSCHILDSERVICE>::const_iterator iter = nlss_child_services_.find(accid);
		if (iter != nlss_child_services_.end())
			NLS_SDK_GET_FUNC(Nlss_ChildVideoSetSoloPreviewCB)(iter->second, pFunVideoSamplerCB);
	}

	void  LsSession::OnChildVideoSwitchSoloPreview(const std::string& accid, bool bOn)
	{
		std::map<std::string, _HNLSSCHILDSERVICE>::const_iterator iter = nlss_child_services_.find(accid);
		if (iter != nlss_child_services_.end())
			NLS_SDK_GET_FUNC(Nlss_ChildVideoSwitchSoloPreview)(iter->second, bOn);
	}

 

Note: when a child video is set to hidden, no data is delivered through its callback.
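
Putting the two wrappers together, a hypothetical usage for previewing a single child stream (the exact PFN_NLSS_CHILD_VIDEO_SAMPLER_CB signature is not listed here, so the callback below assumes the same shape as the merged callback; session and accid are illustrative):

	// Assumed to have the same shape as the merged callback above.
	void ChildVideoCallback(_HNLSSERVICE hNLSService, ST_NLSS_VIDEO_SAMPLER *sampler)
	{
		if (sampler && sampler->iDataSize > 0)
		{
			// Per-child RGB32 frame; hand it to whatever preview path the
			// application uses (for example a VideoFrameMng keyed by account).
		}
	}

	// Register the callback for one child stream and switch its solo preview on.
	session.OnChildVideoSetSoloPreviewCB(accid, ChildVideoCallback);
	session.OnChildVideoSwitchSoloPreview(accid, true);  // a hidden child delivers no frames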
