最近玩了一下海康的IPcamera,与普通的WEBcamera输出RGB图像不同,其解码后输出的图像格式为YV12,这使得我们不能用熟悉的两行代码
VideoCapture cap(0);
cap >> frame;
就获得可以用opencv直接处理的源图像了,这一度造成了不少麻烦。
所以贴出部分代码,希望对正在使用或者也想玩下海康摄像头的朋友一点参考。
能力有限,若有误,勿喷!
转YV12到YCrCb的函数实现有很多,代码不知谁写的,我是从雪水的这篇博客(超链接)看到的,亲测可以实现。
/**
 * Convert one YV12 frame into packed 3-bytes-per-pixel Y/U/V triplets.
 *
 * YV12 layout: a full-resolution Y plane (width*height bytes), then a
 * quarter-resolution V plane (width*height/4 bytes), then the U plane.
 * Each chroma sample covers a 2x2 block of luma samples.
 *
 * @param outYuv    destination buffer, at least height*widthStep bytes;
 *                  pixel (row,col) is written at row*widthStep + col*3
 * @param inYv12    source YV12 buffer, width*height*3/2 bytes
 * @param width     frame width in pixels (assumed even)
 * @param height    frame height in pixels (assumed even)
 * @param widthStep destination row stride in bytes (>= width*3)
 */
void yv12toYUV(char *outYuv, char *inYv12, int width, int height,int widthStep)
{
    // Work through unsigned pointers so bytes 128..255 are never sign-extended.
    const unsigned char *src = (const unsigned char *)inYv12;
    unsigned char *dst = (unsigned char *)outYuv;
    const int lumaSize = width * height;      // size of the Y plane
    const int chromaSize = lumaSize / 4;      // size of each chroma plane
    const unsigned char *vPlane = src + lumaSize;               // V plane comes first in YV12
    const unsigned char *uPlane = src + lumaSize + chromaSize;  // U plane follows V

    for (int row = 0; row < height; row++)
    {
        const unsigned char *yRow = src + row * width;
        const int chromaRowBase = (row / 2) * (width / 2);  // chroma is 2x2-subsampled
        unsigned char *out = dst + row * widthStep;
        for (int col = 0; col < width; col++)
        {
            // The original also carried a dead bounds check against a
            // hard-coded `1200 * widthStep` with an empty body; removed.
            const int chromaIdx = chromaRowBase + col / 2;
            *out++ = yRow[col];          // Y
            *out++ = uPlane[chromaIdx];  // U
            *out++ = vPlane[chromaIdx];  // V
        }
    }
}
本文推荐用cvtColor()函数,opencv2.4.8以上版本自定义了宏CV_YUV2BGR_YV12,可以实现这一转换;
以下是主函数:
/*----------------------------
 Exception callback: invoked asynchronously by the HCNetSDK when an
 error occurs, e.g. the preview connection drops and is re-established.
----------------------------*/
void CALLBACK g_ExceptionCallBack(DWORD dwType, LONG lUserID, LONG lHandle, void *pUser)
{
    // (The original declared an unused 256-byte `tempbuf`; removed.)
    switch(dwType)
    {
    case EXCEPTION_RECONNECT: // reconnect during live preview
        // BUG FIX: time_t is wider than int on 64-bit builds, so passing it
        // to "%d" is undefined behavior; cast to long and print with "%ld".
        printf("----------reconnect--------%ld\n", (long)time(NULL));
        break;
    default:
        break;
    }
}
void main() {
//---------------------------------------
// 初始化
NET_DVR_Init();
//设置连接时间与重连时间
NET_DVR_SetConnectTime(2000, 1);
NET_DVR_SetReconnect(10000, true);
//---------------------------------------
// 获取控制台窗口句柄
//HMODULE hKernel32 = GetModuleHandle((LPCWSTR)"kernel32");
//GetConsoleWindow = (PROCGETCONSOLEWINDOW)GetProcAddress(hKernel32,"GetConsoleWindow");
//---------------------------------------
// 注册设备
LONG lUserID;
NET_DVR_DEVICEINFO_V30 struDeviceInfo;
lUserID = NET_DVR_Login_V30("10.102.7.88", 8000, "admin", "12345", &struDeviceInfo);
if (lUserID < 0)
{
printf("Login error, %d\n", NET_DVR_GetLastError());
NET_DVR_Cleanup();
return;
}
//---------------------------------------
//设置异常消息回调函数
NET_DVR_SetExceptionCallBack_V30(0, NULL,g_ExceptionCallBack, NULL);
/*NET_DVR_RealPlay_V30参数设置
NET_DVR_CLIENTINFO ClientInfo;
ClientInfo.hPlayWnd = NULL;//改为“= GetDlgItem(IDC_STATIC_PLAY)->m_hWnd”
ClientInfo.lChannel = 1;
ClientInfo.lLinkMode = 0;
ClientInfo.sMultiCastIP = NULL;
TRACE("Channel number:%d\n",ClientInfo.lChannel);*/
//NET_DVR_RealPlay_V40参数设置
NET_DVR_PREVIEWINFO struPlayInfo = {0};
struPlayInfo.hPlayWnd = NULL; //需要SDK解码时句柄设为有效值,仅取流不解码时可设为空
struPlayInfo.lChannel = 1; //预览通道号
struPlayInfo.dwStreamType = 0; //0-主码流,1-子码流,2-码流3,3-码流4,以此类推
struPlayInfo.dwLinkMode = 0; //0- TCP方式,1- UDP方式,2- 多播方式,3- RTP方式,4-RTP/RTSP,5-RSTP/HTTP
LONG lRealPlayHandle;
//lRealPlayHandle = NET_DVR_RealPlay_V30(lLoginID,&ClientInfo,fRealDataCallBack,NULL,false);
lRealPlayHandle = NET_DVR_RealPlay_V40(lLoginID,&struPlayInfo,fRealDataCallBack,NULL);
if (lRealPlayHandle<0)
{
printf("NET_DVR_RealPlay_V30 failed! Error number: %d\n",NET_DVR_GetLastError());
return;
}
//cvWaitKey(0);
Sleep(-1);
//fclose(fp);
//---------------------------------------
//关闭预览
if(!NET_DVR_StopRealPlay(lRealPlayHandle))
{
printf("NET_DVR_StopRealPlay error! Error number: %d\n",NET_DVR_GetLastError());
return;
}
//注销用户
NET_DVR_Logout(lUserID);
NET_DVR_Cleanup();
return;
}
以下是实时流回调函数fRealDataCallBack:
/********************************************
 Real-time stream callback (registered with NET_DVR_RealPlay_V40).
 Receives raw encoded stream packets, not pixels: a NET_DVR_SYSHEAD
 packet carries the stream header used to initialize the PlayM4
 decoder, and NET_DVR_STREAMDATA packets are fed into it; decoded
 frames are delivered to DecCBFun.
 NOTE(review): relies on the global `nPort` PlayM4 channel — presumably
 NET_DVR_SYSHEAD arrives exactly once, before any stream data; confirm
 against the HCNetSDK documentation.
**********************************************/
void CALLBACK fRealDataCallBack(LONG lRealHandle,DWORD dwDataType,BYTE *pBuffer,DWORD dwBufSize,void *pUser)
{
//TRACE("fRealDataCallBack 函数被调用\n");
//TRACE("dwDataType: %ld\n",dwDataType);
DWORD dRet; // last PlayM4 error code; kept only for debugger inspection
switch (dwDataType)
{
case NET_DVR_SYSHEAD: //system header: set up the decoder
//TRACE("系统头被执行\n");
if (!PlayM4_GetPort(&nPort)) //grab an unused PlayM4 channel number
{
break;
}
if(dwBufSize > 0)
{
if (!PlayM4_SetStreamOpenMode(nPort, STREAME_REALTIME)) //real-time stream playback mode
{
break;
}
if (!PlayM4_OpenStream(nPort,pBuffer,dwBufSize,10*1024*1024)) //10 MB stream buffer
{
dRet=PlayM4_GetLastError(nPort);
break;
}
PlayM4_SetDecCBStream(nPort,1);//decode the video stream only, skip audio
//install the decode callback: decode without rendering
if (!PlayM4_SetDecCallBack(nPort,DecCBFun))
{
dRet=PlayM4_GetLastError(nPort);
break;
}
//start video decoding (NULL window handle: no on-screen display)
if (!PlayM4_Play(nPort,NULL))
{
dRet=PlayM4_GetLastError(nPort);
break;
}
//audio decoding would require a composite (audio+video) stream:
//if (!PlayM4_PlaySound(nPort))
//{
// dRet=PlayM4_GetLastError(nPort);
// break;
//}
}
break;
case NET_DVR_STREAMDATA: //encoded stream data: feed the decoder
//TRACE("码流数据被执行\n");
if (dwBufSize > 0 && nPort != -1)
{
BOOL inData = PlayM4_InputData(nPort,pBuffer,dwBufSize);
while (!inData) //decoder buffer full: back off briefly and retry
{
Sleep(10);
inData = PlayM4_InputData(nPort,pBuffer,dwBufSize);
}
}
break;
}
}
以下是解码回调函数,在解码回调之前需声明一个List容器,用以存放帧数据,该List最好定义为全局变量,便于在线程里opencv处理。
/*---------------------------------
解码回调函数,帧数据格式转化,存储
---------------------------------*/
/*---------------------------------
 Decode callback: invoked by PlayM4 once per decoded frame.
 Converts YV12 -> BGR (displayed) -> YCrCb and, on every 10th frame,
 appends the YCrCb image to the global frameQueue consumed by the
 tracking thread (dealFun).
---------------------------------*/
void CALLBACK DecCBFun(long nPort,char * pBuf,long nSize,FRAME_INFO * pFrameInfo, long nReserved1,long nReserved2)
{
    long lFrameType = pFrameInfo->nType;
    if (lFrameType == T_YV12)
    {
        // A YV12 frame occupies height*3/2 rows of width bytes; wrap pBuf
        // as a single-channel Mat without copying.
        Mat pImg(pFrameInfo->nHeight, pFrameInfo->nWidth, CV_8UC3);
        Mat pImg_YUV(pFrameInfo->nHeight + pFrameInfo->nHeight / 2, pFrameInfo->nWidth, CV_8UC1, pBuf);
        Mat pImg_YCrCb(pFrameInfo->nHeight, pFrameInfo->nWidth, CV_8UC3);
        cvtColor(pImg_YUV, pImg, CV_YUV2BGR_YV12);
        cvtColor(pImg, pImg_YCrCb, CV_BGR2YCrCb);
        imshow("IPCamera", pImg);

        // BUG FIX: the original called CreateEvent on every frame while
        // !IsTracking, leaking one event handle per decoded frame. Create
        // the event exactly once. Assumes hEvent is a zero-initialized
        // global HANDLE — TODO confirm at its definition.
        if (hEvent == NULL) {
            hEvent = CreateEvent(NULL, false, true, NULL); // auto-reset, initially signaled
        }

        // Queue only every 10th frame: the tracking thread runs a costly
        // algorithm and cannot keep up with the full frame rate.
        realframe_count++;
        if (0 == realframe_count % 10)
        {
            WaitForSingleObject(hEvent, INFINITE);  // enter critical section
            frameQueue.push_back(pImg_YCrCb);
            if (!IsTracking) {
                frameQueue.clear();                 // drop frames until tracking starts
            }
            SetEvent(hEvent);                       // leave critical section
        }
    }
}
此处利用解码回调函数做视频格式转换操作,并将RGB图像存入frameQueue。利用多线程编程,在另一个线程里用opencv处理图像或其他操作,我这里是进行跟踪的。其中,实时帧数realframe_count是用来控制取帧间隔时间的,因为opencv图像处理线程会运行相应复杂算法或者别的操作,时间消耗较大,不可能逐一处理每一帧。线程通过一个按钮开启,响应函数如下,由于贴的是部分代码,请自动忽略无关内容!
/*---------------------------------
 "Start tracking" button handler: creates the display windows, flags
 tracking as active, and spawns the image-processing thread (dealFun).
---------------------------------*/
void CRealPlayDlg::OnBnClickedButtonTracking()
{
    // Windows for the raw frame and the tracking result.
    cvNamedWindow("frame", 0);
    cvResizeWindow("frame", 400, 300);
    cvMoveWindow("frame", 420, 0);
    cvNamedWindow("result", 0);
    cvResizeWindow("result", 400, 300);
    cvMoveWindow("result", 840, 0);

    IsTracking = true;
    setMouseCallback( "frame", onMouse, 0 );// mouse handler: user selects the target region

    // (The original wrapped the code below in `if(IsTracking)`, which is
    // always true immediately after the assignment above; removed.)
    // Spawn the worker thread; the handle is closed immediately because
    // it is never joined — the thread runs for the life of the process.
    hThread = CreateThread(NULL, 0, dealFun, NULL, 0, NULL);
    CloseHandle(hThread);
} // BUG FIX: the original snippet was missing this closing brace.
以下为取帧线程函数:
/*----------------------------------
 Image-processing / target-tracking thread.
 Pops YCrCb frames queued by DecCBFun, converts them back to BGR, and
 runs the tracking algorithm (user code) on them.
----------------------------------*/
DWORD WINAPI dealFun(LPVOID lpParamter)
{
    while (1)
    {
        // BUG FIX: the original tested frameQueue.empty() outside the
        // hEvent guard (racing the producer in DecCBFun) and busy-spun at
        // 100% CPU while the queue was empty.
        bool gotFrame = false;
        WaitForSingleObject(hEvent, INFINITE);  // enter critical section
        if (!frameQueue.empty())
        {
            src_YCrCb = (Mat)(*(frameQueue.begin()));
            frameQueue.pop_front();
            gotFrame = true;
        }
        SetEvent(hEvent);                       // leave critical section

        if (gotFrame)
        {
            // Convert outside the guarded section to keep it short.
            cvtColor(src_YCrCb, src, CV_YCrCb2BGR);
            //your code............
        }
        else
        {
            Sleep(1);  // yield instead of spinning on an empty queue
        }
    }
    return 0;
}
到这一步就可以了,另外,在函数中是不需要waitkey来控制帧率的,海康的SDK好像是自动回调的!
转载地址:https://blog.csdn.net/m0_37901643/article/details/72817862