I have been using ffmpeg for audio/video encoding and decoding, but ffmpeg also provides many other features, including cross-platform audio/video capture via its avdevice module. Its usage was introduced quite a while ago, together with very practical sample code, here:
http://blog.csdn.net/leixiaohua1020/article/details/39702113
However, that code hard-codes the video device name, so you must adapt it to your own setup or it will not work. On Windows, ffmpeg does not expose DirectShow capture-device enumeration through its API (as of ffmpeg 2.6; I have not checked later versions), which makes it impractical in many scenarios, especially in consumer-facing internet projects (in traditional projects the hardware is fixed, so capturing with ffmpeg is an option there). End users may have several physical and virtual cameras, so the ffmpeg approach is inflexible; in that case it is far more flexible to enumerate and capture with DirectShow yourself.
On Linux, audio/video capture mostly appears in traditional projects, and you can implement V4L2 capture yourself; see my other article, with source code: https://my.oschina.net/superfather/blog/798985.
There is plenty of code online for capturing from cameras or capture cards with DirectShow on Windows; here I am contributing the class I use for video capture on Windows (a minimal usage sketch follows the header listing):
dshow_video.h
/*!
* \file dshow_video.h
*
* \author Administrator
* Enumerates video capture devices on Windows via DirectShow and captures in real time; multiple instances can be created.
* Physical cameras usually support both MEDIASUBTYPE_YUY2 and MEDIASUBTYPE_RGB24,
* but YUY2 generally captures at a higher frame rate than RGB24, so this class tries MEDIASUBTYPE_YUY2 first
* and falls back to MEDIASUBTYPE_RGB24 if the input device (e.g. a virtual camera) does not support it; these two formats cover most cases.
* If you need to capture from all kinds of physical and virtual cameras, simply use MEDIASUBTYPE_RGB24.
*/
#pragma once
#ifdef WIN32
#include <string>
#include <tchar.h>
#include <dshow.h>
#include <atlbase.h>
#include <vector>
#include <qedit.h>
#pragma warning(disable:4251)
using namespace std;
class CDShowVideoDevice : public ISampleGrabberCB
{
public:
CDShowVideoDevice(void);
~CDShowVideoDevice(void);
/*Static helper for enumerating the video capture devices globally*/
static int EnumVideoCaptureDevice(char device_list_[][256], const int &max_size_);
/*Initialize device capture
*@param width_ capture image width
*@param height_ capture image height
*@param frame_rate_ capture frame rate
*@param device_name_ input device name; if empty, the default device is used
*@return true:success; false:failed.
**/
virtual bool InitCapture(const unsigned& width_, const unsigned& height_, const unsigned& frame_rate_,
const string &device_name_);
/*Initialize device capture
*@param width_ capture image width
*@param height_ capture image height
*@param frame_rate_ capture frame rate
*@param dev_id_ input device index; -1 or 0 selects the default device
*@return true:success; false:failed.
**/
virtual bool InitCapture(const unsigned& width_, const unsigned& height_, const unsigned& frame_rate_,
const int &dev_id_);
/*
*Stop capture and release the related resources
**/
virtual void UninitCapture();
/*Start capture*/
virtual bool StartCapture();
/*Stop capture*/
virtual bool StopCapture();
/*
*External interface for reading captured frame data
*@param out_data_ pointer to the output buffer
*@param max_size_ size of the output buffer in bytes
*@param img_width_ output image width
*@param img_height_ output image height
*@param media_sub_type_ format of the returned image (the media subtype in use, e.g. RGB24 or YUY2)
*@return >0 success, <=0 failure
**/
virtual int ReadVideoFrame(const unsigned char *out_data_, const int& max_size_,
int &img_width_, int &img_height_, GUID& media_sub_type_);
/*Pop up the camera settings dialog*/
virtual bool ShowVideoDialogBox(HWND hWndParent = NULL);
/*Check whether a settings dialog is available*/
virtual bool HasSettingDialog(int nType);
/*Get the name of the capture device currently in use*/
virtual LPCSTR GetCurrentCapVideoDeviceName();
virtual string& GetCurDevName() {return m_curDeviceName;}
virtual int GetCurDevID(){return m_curDeviceId;};
/*
*Try to fetch capture-device events; currently only the device-removal event is handled.
*Can be called after ReadVideoFrame fails, to check whether the device has been lost.
*@param dev_name_ if a device has been removed, dev_name_ receives its name
*@return 0: not removed; 1: removed
**/
virtual int HandleEvent(__out string& dev_name_);
protected:
/*Enumerate the available video capture devices*/
virtual HRESULT EnumDevice(vector<string > &devices_vec);
/*Enumerate the sizes, frame rates and capture formats supported by the video device and pick the one that fits best*/
virtual HRESULT InitVideoFormat(int cx,int cy, int frame_rate_, GUID sub_type_);
/*Safely delete an AM_MEDIA_TYPE*/
virtual void DeleteMediaType(AM_MEDIA_TYPE *pmt);
//Get a filter by its friendly name
virtual IBaseFilter* GetFilterByName(string filter_name_);
virtual IPin* GetInPin( IBaseFilter * pFilter, int nPin );
virtual IPin* GetOutPin( IBaseFilter * pFilter, int nPin );
virtual HRESULT GetPin( IBaseFilter * pFilter, PIN_DIRECTION dirrequired, int iNum, IPin **ppPin);
/*Data-capture callback interface inherited from ISampleGrabberCB*/
virtual HRESULT STDMETHODCALLTYPE SampleCB( double SampleTime,IMediaSample *pSample);
virtual HRESULT STDMETHODCALLTYPE BufferCB( double SampleTime,BYTE *pBuffer,long BufferLen);
/*Inherited from ISampleGrabberCB; not used here, so empty stubs are sufficient*/
virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void __RPC_FAR *__RPC_FAR *ppvObject)
{
return E_FAIL;
}
virtual ULONG STDMETHODCALLTYPE AddRef(void)
{
return 0;
}
virtual ULONG STDMETHODCALLTYPE Release(void)
{
return 0;
}
/*Create the engine and connect the DirectShow components*/
virtual bool CreateEngine(int cx, int cy);
/*Tear everything down*/
virtual bool DestroyEngine();
protected:
/*Maximum number of raw frames buffered by default*/
#define MAX_BUFFER_FRMAE 3
IGraphBuilder *m_pGB;
ICaptureGraphBuilder2 *m_pCaptureGB2;
IBaseFilter *m_pCaptureFilter;
IAMVfwCaptureDialogs *m_pVfwDialogs;
IMediaControl *m_pMediaControl;
IMediaEventEx *m_pEvent;
CComPtr<ISampleGrabber> m_pSampleGrabber;
vector<unsigned char* > m_FrameList; //buffered captured video frames
CRITICAL_SECTION m_Lock; //protects the frame buffer for thread safety
double m_dFrameInterval;
double m_dbLastFrameTime;
int m_nWidth;
int m_nHeigth;
int m_nFrameRate;
bool m_bIsPlaying;
int m_nNewFrameNum; //number of captured frames not yet read
BITMAPINFOHEADER m_bmpInfoHeader;
int m_nCurrentSampleBufLen;
string m_curDeviceName;
int m_curDeviceId;
vector<GUID > m_SubType_Vec; //raw capture formats, in order of preference
GUID m_cur_subtype; //capture format currently in use
static vector<string> m_VideoDeviceList; //list of video capture devices
};
#endif //WIN32
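Before the implementation, here is a minimal usage sketch based purely on the interface declared above. The console entry point, the 640x480 @ 25 fps parameters, the buffer size (sized for RGB24) and the polling interval are my own illustrative assumptions, not part of the class:
#include "dshow_video.h"
#include <windows.h>
#include <cstdio>

int main()
{
    // list the available capture devices (names come back UTF-8 encoded)
    char devices[8][256] = {0};
    int count = CDShowVideoDevice::EnumVideoCaptureDevice(devices, 8);
    for (int i = 0; i < count; ++i)
        printf("device %d: %s\n", i, devices[i]);

    // open the default device at 640x480 / 25 fps (empty name -> default device)
    CDShowVideoDevice cap;
    if (!cap.InitCapture(640, 480, 25, string("")) || !cap.StartCapture())
        return -1;

    const int buf_size = 640 * 480 * 3;            // large enough for an RGB24 frame
    unsigned char *frame = new unsigned char[buf_size];
    int w = 0, h = 0;
    GUID subtype = {0};
    for (int i = 0; i < 100; ++i)                  // poll roughly 100 frames
    {
        int len = cap.ReadVideoFrame(frame, buf_size, w, h, subtype);
        if (len > 0)
            printf("frame: %dx%d, %d bytes\n", w, h, len);
        ::Sleep(40);                               // ~25 fps polling interval
    }

    cap.StopCapture();
    cap.UninitCapture();
    delete [] frame;
    return 0;
}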
dshow_video.cpp
#include "stdafx.h"
#ifdef WIN32
#include "dshow_video.h"
#include <assert.h>
#include "boost_log.h"
#include "charactor_convert.h"
#pragma comment(lib,"quartz.lib")
#pragma comment(lib,"winmm.lib")
#pragma comment(lib,"msacm32.lib")
#pragma comment(lib,"olepro32.lib")
#pragma comment(lib,"strmiids.lib")
#ifndef SAFE_RELEASE
#define SAFE_RELEASE(p) if(p){p->Release();p=NULL;}
#endif
vector<string> CDShowVideoDevice::m_VideoDeviceList;
//Returns true if two floats are equal within a tolerance of 0.001
bool is_float_equal(float ft1, float ft2)
{
if (ft1 <= (ft2 + 0.001f) && ft1 >= (ft2 - 0.001f))
{
return true;
}
return false;
}
CDShowVideoDevice::CDShowVideoDevice(void)
{
::CoInitialize(NULL);
m_pGB = 0;
m_pCaptureGB2 = 0;
m_pCaptureFilter = 0;
m_pMediaControl = NULL;
m_pEvent = NULL;
m_nWidth = 0;
m_nHeigth = 0;
m_dbLastFrameTime = 0.0;
m_nCurrentSampleBufLen = 0;
m_pVfwDialogs = 0;
m_bIsPlaying = false;
m_nNewFrameNum = 0;
m_dFrameInterval = 0.0f;
m_curDeviceId = -1;
::InitializeCriticalSection(&m_Lock);
//Register the capture formats; add or comment out entries as needed
// m_SubType_Vec.push_back(MEDIASUBTYPE_YUY2);
m_SubType_Vec.push_back(MEDIASUBTYPE_RGB24);
}
CDShowVideoDevice::~CDShowVideoDevice(void)
{
m_SubType_Vec.clear();
DestroyEngine();
::DeleteCriticalSection(&m_Lock);
::CoUninitialize();
}
bool CDShowVideoDevice::InitCapture(const unsigned& width_, const unsigned& height_, const unsigned& frame_rate_,
const string &device_name_)
{
if (0 == m_VideoDeviceList.size())
{
if (FAILED(EnumDevice(m_VideoDeviceList)) || 0 == m_VideoDeviceList.size())
{
BOOST_WARNING << "enum video device failed, or no video capture device.";
return false;
}
}
m_nWidth=width_;
m_nHeigth=height_;
//Many cameras capture at a fairly low frame rate and will not follow the requested rate exactly;
//capture at the device's own rate and, if it is higher than the requested rate, throttle via timestamps to hit the target rate
m_nFrameRate = frame_rate_;
m_dFrameInterval=1.0/m_nFrameRate;
//if device_name_ is empty, use the default capture device
if (device_name_.empty())
{
m_curDeviceName = m_VideoDeviceList.front();
}else
{
bool bExistFlag = false;
for (size_t i(0); i < m_VideoDeviceList.size(); i++)
{
if (0 == device_name_.compare(m_VideoDeviceList.at(i)))
{
bExistFlag = true;
m_curDeviceId = i;
break;
}
}
//the specified device does not exist; fall back to the default device
if (!bExistFlag)
{
m_curDeviceName = m_VideoDeviceList.front();
}else
{
m_curDeviceName = device_name_;
}
}
return CreateEngine(width_, height_);
}
bool CDShowVideoDevice::InitCapture(const unsigned& width_, const unsigned& height_, const unsigned& frame_rate_,
const int &dev_id_)
{
if (0 == m_VideoDeviceList.size())
{
if (FAILED(EnumDevice(m_VideoDeviceList)))
{
return false;
}
}
if (0 == m_VideoDeviceList.size())
{
    m_curDeviceName = "";
}else if (dev_id_ < 0 || dev_id_ >= (int)m_VideoDeviceList.size())
{
    //invalid index (-1 selects the default device): use the first device
    m_curDeviceName = m_VideoDeviceList.front();
}else
{
    m_curDeviceName = m_VideoDeviceList.at(dev_id_);
}
return InitCapture(width_, height_, frame_rate_, m_curDeviceName);
}
void CDShowVideoDevice::UninitCapture()
{
DestroyEngine();
}
bool CDShowVideoDevice::CreateEngine(int cx,int cy)
{
//Initialize the DirectShow interfaces
//Create the Capture Graph Builder
HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (void**)&m_pCaptureGB2);
if (FAILED(hr))
{
return false;
}
//Create the Filter Graph Manager
if (SUCCEEDED(hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,
IID_IGraphBuilder, (void**)&m_pGB)))
{
hr = m_pGB->QueryInterface(IID_IMediaControl, (void**)&m_pMediaControl);
if (FAILED(hr))
{
return false;
}
//Initialize the Capture Graph Builder
m_pCaptureGB2->SetFiltergraph(m_pGB);
} else
{
SAFE_RELEASE(m_pCaptureGB2);
return false; //Failed
}
//Get the source capture filter
m_pCaptureFilter = GetFilterByName(m_curDeviceName);
if (NULL == m_pCaptureFilter)
{
return false;
}
if (FAILED(hr = m_pGB->AddFilter(m_pCaptureFilter, L"Capture Filter")))
{
return false;
}
if (FAILED(hr = m_pGB->QueryInterface(IID_IMediaEventEx, (void **)&m_pEvent)))
{
return false;
}
//Create the ISampleGrabber filter
if (FAILED(m_pSampleGrabber.CoCreateInstance(CLSID_SampleGrabber)) || !m_pSampleGrabber)
{
return false;
}
m_nWidth = cx;
m_nHeigth = cy;
//Choose the video capture image format
vector<GUID >::iterator itor = m_SubType_Vec.begin();
while (itor != m_SubType_Vec.end())
{
m_cur_subtype = *itor;
do
{
//Configure the video capture format parameters
if (FAILED(InitVideoFormat(cx, cy, m_nFrameRate, m_cur_subtype)))
{
break;
}
CComQIPtr<IBaseFilter, &IID_IBaseFilter> pGrabBase(m_pSampleGrabber);
//set video format
AM_MEDIA_TYPE mt;
memset(&mt, 0, sizeof(mt));
mt.majortype = MEDIATYPE_Video;
mt.subtype = m_cur_subtype;
if (FAILED(hr = m_pSampleGrabber->SetMediaType(&mt)))
{
break;
}
if (FAILED(hr = m_pGB->AddFilter(pGrabBase, L"Grabber")))
{
break;
}
//Connect the graph
CComPtr< IPin > pSourcePin = GetOutPin(m_pCaptureFilter, 0);
CComPtr< IPin > pGrabPin = GetInPin(pGrabBase, 0);
if (FAILED(hr = m_pGB->Connect(pSourcePin, pGrabPin)))
{
break;
}
//Query the media type that was actually negotiated
if (FAILED(hr = m_pSampleGrabber->GetConnectedMediaType(&mt)))
{
break;
}
VIDEOINFOHEADER *vih = (VIDEOINFOHEADER*)mt.pbFormat;
m_bmpInfoHeader = vih->bmiHeader;
hr = m_pSampleGrabber->SetBufferSamples(false);
hr = m_pSampleGrabber->SetOneShot(false);
hr = m_pSampleGrabber->SetCallback(static_cast<ISampleGrabberCB*>(this), 1);
if (FAILED(hr))
{
break;
}
//Get the camera configuration dialog interface
m_pVfwDialogs = NULL;
m_pCaptureGB2->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, m_pCaptureFilter,
IID_IAMVfwCaptureDialogs, (void **)&m_pVfwDialogs);
//Allocate the frame buffers
for (int i(0); i < MAX_BUFFER_FRMAE; ++i)
{
m_FrameList.push_back(new unsigned char[m_nCurrentSampleBufLen + 1]);
}
return true;
} while (false);
itor++;
}
return false;
}
bool CDShowVideoDevice::DestroyEngine()
{
if(m_pMediaControl)
{
m_pMediaControl->/*StopWhenReady*/Stop();
}
SAFE_RELEASE(m_pEvent);
SAFE_RELEASE(m_pCaptureFilter);
m_pSampleGrabber = NULL;
SAFE_RELEASE(m_pMediaControl);
if(m_pCaptureGB2)
{
m_pCaptureGB2->SetFiltergraph(NULL);
}
SAFE_RELEASE(m_pCaptureGB2);
SAFE_RELEASE(m_pGB);
SAFE_RELEASE(m_pVfwDialogs);
m_dbLastFrameTime=0.0;
m_nCurrentSampleBufLen=0;
m_pVfwDialogs=0;
m_bIsPlaying=false;
int i(0);
while(i < m_FrameList.size())
{
delete []m_FrameList[i];
++i;
}
m_FrameList.clear();
return true;
}
void CDShowVideoDevice::DeleteMediaType(AM_MEDIA_TYPE *pmt)
{
if (pmt != NULL)
{
if (pmt->cbFormat != 0)
{
CoTaskMemFree((PVOID)pmt->pbFormat);
pmt->cbFormat = 0;
pmt->pbFormat = NULL;
}
if (pmt->pUnk != NULL)
{
// Unnecessary because pUnk should not be used, but safest.
pmt->pUnk->Release();
pmt->pUnk = NULL;
}
CoTaskMemFree(pmt);
}
}
HRESULT CDShowVideoDevice::GetPin( IBaseFilter * pFilter, PIN_DIRECTION dirrequired, int iNum, IPin **ppPin)
{
CComPtr< IEnumPins > pEnum;
*ppPin = NULL;
HRESULT hr = pFilter->EnumPins(&pEnum);
if(FAILED(hr))
return hr;
ULONG ulFound;
IPin *pPin;
hr = E_FAIL;
while(S_OK == pEnum->Next(1, &pPin, &ulFound))
{
PIN_DIRECTION pindir = (PIN_DIRECTION)3;
pPin->QueryDirection(&pindir);
if(pindir == dirrequired)
{
if(iNum == 0)
{
*ppPin = pPin; // Return the pin's interface
hr = S_OK; // Found requested pin, so clear error
break;
}
iNum--;
}
pPin->Release();
}
return hr;
}
IPin* CDShowVideoDevice::GetInPin( IBaseFilter * pFilter, int nPin )
{
CComPtr<IPin> pComPin=0;
GetPin(pFilter, PINDIR_INPUT, nPin, &pComPin);
return pComPin;
}
IPin* CDShowVideoDevice::GetOutPin( IBaseFilter * pFilter, int nPin )
{
CComPtr<IPin> pComPin=0;
GetPin(pFilter, PINDIR_OUTPUT, nPin, &pComPin);
return pComPin;
}
// Enumerate the available video capture devices
HRESULT CDShowVideoDevice::EnumDevice(vector<string > &devices_vec)
{
devices_vec.clear();
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
//Create the system device enumerator
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, reinterpret_cast<void**>(&pDevEnum));
if(SUCCEEDED(hr) && pDevEnum)
{
//Create an enumerator for video input devices
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
}else
{
return E_FAIL;
}
if(FAILED(hr) || NULL == pEnum)
{
    SAFE_RELEASE(pDevEnum);
    return E_FAIL;
}
IMoniker *pMoniker = NULL;
IPropertyBag *pPropBag;
VARIANT varName;
while(S_OK == pEnum->Next(1, &pMoniker, NULL))
{
if(FAILED(hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag))))
{
pMoniker->Release();
continue;
}
if(SUCCEEDED(hr))
{
//a valid, usable device
IBaseFilter *filter;
if(SUCCEEDED(hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&filter)))
{
//hr = m_pGB->AddFilter(m_pCaptureFilter, L"Capture Filter");
if(SUCCEEDED(hr))
{
varName.vt = VT_BSTR;
if(FAILED(hr = pPropBag->Read(L"Description", &varName, 0)))
{
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
}
USES_CONVERSION;
if (SUCCEEDED(hr))
{
devices_vec.push_back(unicode2utf(OLE2T(varName.bstrVal)));
}
VariantClear(&varName);
}
filter->Release();
}
}
SAFE_RELEASE(pPropBag);
SAFE_RELEASE(pMoniker);
}
pEnum->Release();
pDevEnum->Release();
return S_OK;
}
IBaseFilter *CDShowVideoDevice::GetFilterByName(string filter_name_)
{
if (filter_name_.empty())
{
return NULL;
}
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
//Create the system device enumerator
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, reinterpret_cast<void**>(&pDevEnum));
if(SUCCEEDED(hr) && pDevEnum)
{
//Create an enumerator for video input devices
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
}else
{
return NULL;
}
if(FAILED(hr) || NULL == pEnum)
{
    SAFE_RELEASE(pDevEnum);
    return NULL;
}
IMoniker *pMoniker = NULL;
IPropertyBag *pPropBag;
VARIANT varName;
IBaseFilter *filter = NULL;
while(S_OK == pEnum->Next(1, &pMoniker, NULL))
{
if(FAILED(hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag))))
{
pMoniker->Release();
continue;
}
varName.vt = VT_BSTR;
if(SUCCEEDED(hr = pPropBag->Read(L"FriendlyName", &varName, 0)))
{
USES_CONVERSION;
char tmp_str[256] = {0};
WideCharToMultiByte( CP_ACP, 0, varName.bstrVal, -1,
tmp_str, 256, NULL, NULL );
if (0 == _stricmp(tmp_str, filter_name_.c_str()))
{
//a valid, usable device
if(FAILED(hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&filter)))
{
filter = NULL;
break;
}
}
VariantClear(&varName);
}
SAFE_RELEASE(pPropBag);
SAFE_RELEASE(pMoniker);
}
SAFE_RELEASE(pEnum);
SAFE_RELEASE(pDevEnum);
return filter;
}
//Enumerate all video capture devices
int CDShowVideoDevice::EnumVideoCaptureDevice(char device_list_[][256], const int &max_size_)
{
m_VideoDeviceList.clear();
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
//Create the system device enumerator
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, reinterpret_cast<void**>(&pDevEnum));
if(FAILED(hr))
{
return 0;
}
//Create an enumerator for video input devices
if(FAILED(hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
&pEnum, 0)) || NULL == pEnum)
{
SAFE_RELEASE(pDevEnum);
return 0;
}
IMoniker *pMoniker = NULL;
VARIANT varName;
int dev_count(0);
while( S_OK == pEnum->Next(1, &pMoniker, NULL) )
{
IPropertyBag *pPropBag;
if(FAILED(hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)(&pPropBag))))
{
pMoniker->Release();
continue; //Skip this one, maybe the next one will work
}
VariantInit(&varName);
varName.vt = VT_BSTR;
if(FAILED(hr = pPropBag->Read(L"Description", &varName, 0)))
{
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
}
if(SUCCEEDED(hr))
{
IBaseFilter *filter = NULL;
if(SUCCEEDED(hr = pMoniker->BindToObject(NULL, 0, IID_IBaseFilter, (void**)&filter)))
{
SAFE_RELEASE(filter);
USES_CONVERSION;
//the device is usable; add it to the device list
//devices_vec.push_back(OLE2T(varName.bstrVal));
string dev_name = unicode2utf(OLE2T(varName.bstrVal));
strncpy_s(device_list_[dev_count], dev_name.c_str(), dev_name.length());
m_VideoDeviceList.push_back(dev_name);
dev_count++;
if (dev_count >= max_size_)
{
    //the caller's array is full: release before leaving the loop
    VariantClear(&varName);
    pPropBag->Release();
    pMoniker->Release();
    break;
}
}
}
VariantClear(&varName);
pPropBag->Release();
pMoniker->Release();
}
SAFE_RELEASE(pEnum);
SAFE_RELEASE(pDevEnum);
return dev_count;
}
HRESULT CDShowVideoDevice::InitVideoFormat(int cx_, int cy_, int frame_rate_, GUID sub_type_)
{
if(m_pCaptureGB2==NULL)
{
return E_FAIL;
}
int sample_size(0);
if (MEDIASUBTYPE_YV12 == sub_type_)
{
sample_size = cx_ * cy_ * 3 / 2;
}else if (MEDIASUBTYPE_RGB24 == sub_type_)
{
sample_size = cx_ * cy_ * 3;
} else if (MEDIASUBTYPE_YUY2 == sub_type_)
{
sample_size = cx_ * cy_ * 2;
} else
{
return E_FAIL;
}
m_nCurrentSampleBufLen = sample_size;
IAMStreamConfig *pConfig=NULL;
AM_MEDIA_TYPE *pMediaType = NULL;
HRESULT hr=m_pCaptureGB2->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, m_pCaptureFilter,
IID_IAMStreamConfig, (void**)&pConfig);
if(FAILED(hr) || NULL == pConfig)
{
hr = m_pCaptureGB2->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved,
m_pCaptureFilter, IID_IAMStreamConfig, (void **)&pConfig);
if(FAILED(hr) || NULL == pConfig)
{
return hr;
}
}
do
{
if(FAILED(hr = pConfig->GetFormat(&pMediaType)) || !pMediaType)
{
break;
}
if(pMediaType->formattype == FORMAT_VideoInfo)
{
RECT rc = {0, 0, cx_, cy_};
((VIDEOINFOHEADER* )pMediaType->pbFormat)->rcSource = rc;
((VIDEOINFOHEADER*)pMediaType->pbFormat )->rcTarget = rc;
HEADER(pMediaType->pbFormat)->biWidth = cx_;
HEADER(pMediaType->pbFormat)->biHeight = cy_;
HEADER(pMediaType->pbFormat)->biSizeImage = sample_size;
((VIDEOINFOHEADER*)pMediaType->pbFormat)->AvgTimePerFrame = 10000000 / (frame_rate_ * 2); //frame interval in 100 ns units; request twice the target rate and throttle via timestamps in BufferCB
//Apply the capture format; if that fails, search for the closest supported configuration
if(FAILED(hr = pConfig->SetFormat(pMediaType)))
{
VIDEO_STREAM_CONFIG_CAPS caps;
AM_MEDIA_TYPE *pmtPV = NULL;
int nCount(0), nSize(0);
if (FAILED(hr= pConfig->GetNumberOfCapabilities(&nCount, &nSize)))
{
break;
}
float fPercent = float(cx_)/(float)cy_; //aspect ratio
//first walk through all the format capabilities the camera reports
for(int i = 0; i < nCount; i++)
{
if(FAILED(hr = pConfig->GetStreamCaps(i,&pmtPV,(BYTE*)&caps)))
continue;
if (HEADER(pmtPV->pbFormat)->biWidth >= cx_
&& HEADER(pmtPV->pbFormat)->biHeight >= cy_
&& is_float_equal(fPercent, float(HEADER(pmtPV->pbFormat)->biWidth)/float(HEADER(pmtPV->pbFormat)->biHeight) )
&& pmtPV->lSampleSize == sample_size)
{
pmtPV->lSampleSize = HEADER(pmtPV->pbFormat)->biSizeImage = sample_size;
if(SUCCEEDED(hr = pConfig->SetFormat(pmtPV)))
{
m_nWidth = HEADER(pmtPV->pbFormat)->biWidth;
m_nHeigth = HEADER(pmtPV->pbFormat)->biHeight;
DeleteMediaType(pmtPV);
break;
}
}
DeleteMediaType(pmtPV);
}
}
}
DeleteMediaType(pMediaType);
SAFE_RELEASE(pConfig);
return S_OK;
}while(false);
//failed
DeleteMediaType(pMediaType);
SAFE_RELEASE(pConfig);
return hr;
}
bool CDShowVideoDevice::StartCapture()
{
if(m_pMediaControl)
{
if(SUCCEEDED(m_pMediaControl->Run()))
{
m_dbLastFrameTime=0.0;
m_bIsPlaying=true;
return true;
}
}
return false;
}
bool CDShowVideoDevice::StopCapture()
{
if(m_pMediaControl)
{
if(SUCCEEDED(m_pMediaControl->Stop()))
{
m_bIsPlaying=false;
return true;
}
}
return false;
}
HRESULT STDMETHODCALLTYPE CDShowVideoDevice::SampleCB( double SampleTime, IMediaSample *pSample)
{
return S_OK;
}
HRESULT STDMETHODCALLTYPE CDShowVideoDevice::BufferCB(double SampleTime, BYTE *pBuffer, long BufferLen)
{
if(0 != SampleTime && 0 != m_dbLastFrameTime)
{
if(SampleTime-m_dbLastFrameTime < m_dFrameInterval)
{
return S_OK; //throttle the capture frame rate
}
}
if(m_nCurrentSampleBufLen != BufferLen)
{
//the captured frame does not match the parameters we initialized with; treat as an error
return E_FAIL;
}
m_dbLastFrameTime += m_dFrameInterval;
::EnterCriticalSection(&m_Lock);
//buffer at most MAX_BUFFER_FRMAE frames
if (m_nNewFrameNum >= MAX_BUFFER_FRMAE)
{
//overwrite the oldest frame
unsigned char* buf = m_FrameList.front();
memcpy(buf, pBuffer, BufferLen);
m_FrameList.erase(m_FrameList.begin());
m_FrameList.push_back(buf);
} else
{
memcpy(m_FrameList[m_nNewFrameNum], pBuffer, BufferLen);
++m_nNewFrameNum;
}
::LeaveCriticalSection(&m_Lock);
return S_OK;
}
//Try to fetch capture-device events; currently only the device-removal event is handled.
//@ dev_name_ : if a device was removed, receives its name
//@ return 0: not removed; 1: removed
int CDShowVideoDevice::HandleEvent(__out string& dev_name_)
{
dev_name_.clear();
if (m_pEvent)
{
long eventCode = 0; LONG_PTR eventParam1 = 0, eventParam2 = 0;
while (SUCCEEDED(m_pEvent->GetEvent(&eventCode, &eventParam1, &eventParam2, 0)))
{
m_pEvent->FreeEventParams(eventCode, eventParam1, eventParam2);
switch (eventCode)
{
case EC_COMPLETE:
case EC_USERABORT:
case EC_ERRORABORT:
break;
case EC_DEVICE_LOST:
{
dev_name_ = m_curDeviceName;
return 1;
}
break;
default:
break;
}
}
}
return 0;
}
int CDShowVideoDevice::ReadVideoFrame(const unsigned char *out_data_, const int& max_size_,
int &img_width_, int &img_height_, GUID& media_sub_type_)
{
int ret = -1;
if (NULL == out_data_ || max_size_ <= 0 || 0 == m_FrameList.size() || m_nNewFrameNum <= 0)
{
return -1;
}
::EnterCriticalSection(&m_Lock);
if (m_nCurrentSampleBufLen > max_size_)
{
ret = -2;
}else
{
unsigned char* buf = m_FrameList.front();
memcpy((void *)out_data_, (void *)buf, m_nCurrentSampleBufLen);
--m_nNewFrameNum;
m_FrameList.erase(m_FrameList.begin());
m_FrameList.push_back(buf);
img_width_ = m_nWidth;
img_height_ = m_nHeigth;
ret = m_nCurrentSampleBufLen;
media_sub_type_ = m_cur_subtype;
}
::LeaveCriticalSection(&m_Lock);
return ret;
}
LPCSTR CDShowVideoDevice::GetCurrentCapVideoDeviceName()
{
return m_curDeviceName.c_str();
}
bool CDShowVideoDevice::HasSettingDialog(int nType)
{
if(m_pVfwDialogs)
return SUCCEEDED(m_pVfwDialogs->HasDialog(nType));
else
{
//no VFW dialog available; try the driver's property pages instead
ISpecifyPropertyPages *pSpec=0;
CAUUID cauuid;
bool bHas=false;
HRESULT hr = m_pCaptureFilter->QueryInterface(IID_ISpecifyPropertyPages,
(void **)&pSpec);
if(hr == S_OK)
{
hr = pSpec->GetPages(&cauuid);
if(cauuid.cElems>0)
{
//the filter exposes property pages
bHas=true;
}
CoTaskMemFree(cauuid.pElems);
}
if(pSpec)
pSpec->Release();
return bHas;
}
}
bool CDShowVideoDevice::ShowVideoDialogBox(HWND hWndParent)
{
if (NULL == hWndParent)
{
hWndParent=::GetDesktopWindow();
}
HRESULT hr=E_FAIL;
if(m_pVfwDialogs)
{
// Check if the device supports this dialog box.
//capture must be stopped first, otherwise the VFW dialog does not work properly
bool bWasPlaying = m_bIsPlaying;
if(bWasPlaying)
{
    StopCapture();
}
if (SUCCEEDED(m_pVfwDialogs->HasDialog(0)))
{
    // Show the dialog box.
    hr = m_pVfwDialogs->ShowDialog(0, hWndParent);
}
//StopCapture() clears m_bIsPlaying, so restore from the saved flag
if(bWasPlaying)
{
    StartCapture();
}
return SUCCEEDED(hr);
}
else if(m_pCaptureFilter)
{
//no VFW dialog available; try the driver's property pages instead
ISpecifyPropertyPages *pSpec=0;
CAUUID cauuid;
hr = m_pCaptureFilter->QueryInterface(IID_ISpecifyPropertyPages,
(void **)&pSpec);
if(hr == S_OK)
{
hr = pSpec->GetPages(&cauuid);
if(cauuid.cElems>0)
{
hr = OleCreatePropertyFrame(hWndParent, 0, 0, L"Video Settings", 1,
(IUnknown **)&m_pCaptureFilter, cauuid.cElems,
(GUID *)cauuid.pElems, 0, 0, NULL);
}
CoTaskMemFree(cauuid.pElems);
}
if(pSpec)
pSpec->Release();
return SUCCEEDED(hr);
}
return false;
}
#endif //WIN32
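One more note on hot-unplug: as documented on HandleEvent, the intended pattern is to call it when ReadVideoFrame stops returning frames, so that EC_DEVICE_LOST can be detected and the capture engine rebuilt. A rough sketch of that pattern follows; the function name poll_one_frame and the recovery policy are illustrative assumptions only:
#include "dshow_video.h"
#include <cstdio>

// 'cap' is an already initialized and started CDShowVideoDevice;
// 'frame'/'buf_size' are the caller's output buffer, as in the earlier sketch.
bool poll_one_frame(CDShowVideoDevice &cap, unsigned char *frame, int buf_size)
{
    int w = 0, h = 0;
    GUID subtype = {0};
    int len = cap.ReadVideoFrame(frame, buf_size, w, h, subtype);
    if (len > 0)
        return true;                     // got a frame

    // no frame: check whether the device has been unplugged (EC_DEVICE_LOST)
    string lost_dev;
    if (1 == cap.HandleEvent(lost_dev))
    {
        printf("capture device removed: %s\n", lost_dev.c_str());
        cap.UninitCapture();             // tear down the dead graph
        // ...wait for the device to reappear, then call InitCapture/StartCapture again
    }
    return false;
}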