屏幕录像过滤器使用视频
近期编写了屏幕录像过滤器,现介绍给大家。使用该过滤器编写屏幕录像应用程序,将会十分简单。下面是使用该过滤器录制的一段桌面视频。
屏幕录像过滤器
屏幕录像过滤器信息
过滤器名称:屏幕录像
过滤器GUID:{3E4FF11D-128C-4D71-8BCC-461FCBAE6320}
DLL注册函数名:DllRegisterServer
删除注册函数名:DllUnregisterServer
过滤器有2个输出引脚。
视频引脚标识:Video
视频引脚媒体类型:
主要类型:MEDIATYPE_Video
子类型:MEDIASUBTYPE_RGB32
格式类型:FORMAT_VideoInfo
样本为固定大小。
不使用时间压缩。
音频引脚标识:Audio
音频引脚媒体类型:
主要类型:MEDIATYPE_Audio
子类型:MEDIASUBTYPE_PCM
格式类型:FORMAT_WaveFormatEx
样本是固定大小。
样本不使用时间压缩。
样本为16位。
采样率48000。
屏幕录像过滤器的使用
1.过滤器的注册。
过滤器下载文件中包含一个注册软件,可以使用该软件注册。在应用程序中使用时,可用下面的方法注册:
HMODULE h=LoadLibrary(L"屏幕录像.dll");
FARPROC Proc1 = GetProcAddress(h, "DllRegisterServer");//获取注册函数的地址
FARPROC Proc2 = GetProcAddress(h, "DllUnregisterServer");//获取删除注册函数的地址
if (Proc1 != NULL)//如果地址不为空
Proc1();//注册过滤器
//在应用程序退出时,删除注册
if (Proc2 != NULL)//如果地址不为空
Proc2();//删除注册
2.过滤器的设置。
在过滤器图编辑器中使用时,使用默认帧率,每秒30帧。在应用程序中使用时,可以更改帧率,更改录制区域,获取丢帧数量,获取当前录制时间,方法如下:
interface IMy001 : public IUnknown//声明IMy001自定义接口
{
public:
virtual HRESULT SetFramesPerSec(DWORD nFrames) = 0;//设置每秒帧数
virtual HRESULT SetRect(RECT rect) = 0;//设置录制矩形
virtual HRESULT GetLostNum(DWORD* Nu) = 0;//获取丢帧数量
virtual HRESULT GetPos(DWORD* cur) = 0;//获取当前时间,单位毫秒
};
GUID IID_IMy001 = { 0x9932100d, 0x67f0, 0x4265, 0x8c, 0x12, 0xde, 0x23, 0x54, 0xb1, 0x97, 0x6d };//指定IMy001自定义接口类标识
IBaseFilter* pFilter = NULL;
GUID guid = { 0x3e4ff11d, 0x128c, 0x4d71, 0x8b, 0xcc, 0x46, 0x1f, 0xcb, 0xae, 0x63, 0x20 };
hr = CoCreateInstance(guid, NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pFilter));//创建屏幕录像过滤器
IMy001* pIMy001 = NULL;
hr = pFilter->QueryInterface(IID_IMy001, (void**)&pIMy001);//查询IMy001接口
hr = pIMy001->SetFramesPerSec(20);//更改帧率
RECT rect; rect.left = 200; rect.top = 100; rect.right = 832+200; rect.bottom = 552+100;
hr = pIMy001->SetRect(rect);//更改录制区域
DWORD Lost;
hr = pIMy001->GetLostNum(&Lost);//获取丢帧数量
对过滤器的设置需在过滤器创建后,连接引脚之前。因为设置将更改引脚媒体类型的一些参数。如果引脚已经连接,要更改过滤器设置,需断开引脚连接,设置完成后重新连接。建议最高帧率30帧。没有添加设置录制区域的安全代码,这就要求,提供的录制区域矩形为正常矩形,right大于left,bottom大于top,且要求这4个参数在屏幕矩形之内。否则可能导致崩溃。
3.对下游过滤器的要求。
现在大部分屏幕使用1920*1080分辨率,在全屏录制,高帧率情况下,视频输出引脚每秒数据量非常大,其它过滤器处理不了如此数据量/每秒。必须立刻连接视频压缩过滤器,推荐使用编者编写的“写视频文件”过滤器,或使用其它高质量视频压缩过滤器。有些过滤器要求视频宽度和高度是4的倍数,本人编写的“写视频文件”过滤器正是如此,需要在设置录制区域时,进行4倍取整。不然无法输出视频文件。
4.适用系统。
win8及以上系统。在win10,win11系统下已做过测试,可以正常使用。win8系统下没有测试。注意,不能在win7及以下系统中使用。
屏幕录像过滤器开发信息
屏幕图像的获取,是获取IDXGIOutputDuplication接口,使用该接口的AcquireNextFrame方法,获取屏幕图像数据。过滤器可以达到的最大帧率,与电脑的运算速度有关。并不是将帧率设置的越大越好,如果已接近最大运算能力,将会产生丢帧。如果超过最大运算能力,过滤器将无法输出合乎要求的视频。使用中,可能只录制屏幕的选定区域,必须添加图像剪裁代码,且需要上下翻转图像,左右不需要翻转。由于从CSource派生的源过滤器,对停止命令的响应时间要求苛刻,此次更改,从CBaseFilter创建源过滤器,经测试,可以可靠停止。以指定的时间间隔发送样本。使用递增索引的方法获得时间间隔,避免了时间的误差积累。音频引脚获取系统播放的声音,以16位PCM音频格式输出。
streams.h和strmbase10.lib是本人根据Windows SDK 7.1中DirectShow过滤器基础类文件,编写的过滤器基础类定义和实现文件。有此文件后,可以不必依赖win7系统和Windows SDK 7.1编写过滤器。在对该文件验证和完善后,提供给大家。
不能录制鼠标光标。
下面是屏幕录像过滤器DLL的全部代码
DLL.h
#ifndef DLL_FILE
#define DLL_FILE
#include "strmbase10.h"//过滤器基础类定义文件
#if _DEBUG
#pragma comment(lib, "strmbasd10.lib")//过滤器基础类实现文件调试版本
#else
#pragma comment(lib, "strmbase10.lib")//过滤器基础类实现文件发布版本
#endif
// {3E4FF11D-128C-4D71-8BCC-461FCBAE6320}
DEFINE_GUID(CLSID_ScreenRecorder,//CLSID of the screen-recorder filter
0x3e4ff11d, 0x128c, 0x4d71, 0x8b, 0xcc, 0x46, 0x1f, 0xcb, 0xae, 0x63, 0x20);
// {9932100D-67F0-4265-8C12-DE2354B1976D}
DEFINE_GUID(IID_IMy001,//IID of the custom IMy001 configuration interface
0x9932100d, 0x67f0, 0x4265, 0x8c, 0x12, 0xde, 0x23, 0x54, 0xb1, 0x97, 0x6d);
// {9AC4AE55-0B0E-49FF-BF54-86931DD22294}
DEFINE_GUID(CLSID_PropertyPage,//CLSID of the filter's property page
0x9ac4ae55, 0xb0e, 0x49ff, 0xbf, 0x54, 0x86, 0x93, 0x1d, 0xd2, 0x22, 0x94);
interface IMy001 : public IUnknown//custom interface for configuring and querying the screen-recorder filter
{
public:
virtual HRESULT SetFramesPerSec(DWORD nFrames) = 0;//set frames per second
virtual HRESULT SetRect(RECT rect) = 0;//set the recording rectangle (screen coordinates)
virtual HRESULT GetLostNum(DWORD* Nu) = 0;//get the dropped-frame count since the last query
virtual HRESULT GetPos(DWORD* cur) = 0;//get the current position, in milliseconds
};
#include "D3D11.h"
#pragma comment(lib, "D3D11.lib")
#include "DXGI1_2.h"
#pragma comment(lib, "DXGI.lib")
class CPin2;
class CFilter;
class CPropertyPage;
class CPin1 : public CBaseOutputPin//video output pin: captures the desktop via DXGI duplication and delivers RGB32 samples
{
friend class CPin2;
friend class CFilter;
public:
CPin1(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~CPin1();
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES * pProperties);
HRESULT Active(void);
static DWORD WINAPI VideoThread(LPVOID pParam);//video pin worker thread
CFilter *pCFilter = NULL;//owning filter (non-ref-counted back pointer)
REFERENCE_TIME Star, End;//sample buffer timestamps
ID3D11Device* p3D11Device = NULL;
ID3D11DeviceContext* p3D11DeviceContext = NULL;
IDXGIOutputDuplication *pDuplication = NULL;
HRESULT InitDXGI();//initialize DXGI desktop duplication
HRESULT GetDesktop();//grab one desktop frame into pBuffer1
RECT ScreenRect;//full-screen rectangle
RECT SelRect;//selected recording rectangle
UINT VIDEO_WIDTH;//video image width
UINT VIDEO_HEIGHT;//video image height
UINT VIDEO_FPS = 30;//frames per second
LONGLONG nDur; //duration of one frame, in 100-ns units
LONGLONG Lost = 0;//dropped-frame count (thread-local tally)
ULONGLONG LOST;//dropped-frame count published for the filter/property page
BYTE* pBuffer1;//shared buffer 1 (full-screen image)
BYTE* pBuffer2;//shared buffer 2 (cropped, flipped image)
LONGLONG index2 = 0;//sample timestamp index
LONGLONG index = 0;//desktop-capture timing index
};
class CPin2 : public CBaseOutputPin//audio output pin: captures system playback (WASAPI loopback) as 16-bit PCM
{
friend class CPin1;
friend class CFilter;
public:
CPin2(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~CPin2();
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES * pProperties);
HRESULT Active(void);
static DWORD WINAPI AudioThread(LPVOID pParam);//audio pin worker thread
CFilter *pCFilter = NULL;//owning filter (non-ref-counted back pointer)
REFERENCE_TIME Star, End;//sample buffer timestamps, 100-ns units
REFERENCE_TIME nDur;//default interval between audio packets, 100-ns units
ULONGLONG CUR;//current position published for the filter/property page
LONG BufferSize;//pin sample buffer size, in bytes
LONGLONG index = 0;//timing interval index
BYTE* pBuffer;//shared buffer
int count;//sample (packet) count
#define REFTIMES_PER_SEC (double)10000000
REFERENCE_TIME hnsRequestedDuration = (REFERENCE_TIME)REFTIMES_PER_SEC;//requested endpoint buffer duration: 1 second
void *pEnumeratorV = NULL;//IMMDeviceEnumerator*, stored type-erased
void *pDeviceV = NULL;//IMMDevice*, stored type-erased
void *pAudioClientV = NULL;//IAudioClient*, stored type-erased
void *pCaptureClientV = NULL;//IAudioCaptureClient*, stored type-erased
WAVEFORMATEX *pwfx;//mix format of the audio endpoint stream
DWORD GetAudioEndpoint(void* pV);//acquire the audio endpoint; returns 1 on success, 0 on failure
};
class CFilter : public CCritSec, public CBaseFilter, public IMy001, public ISpecifyPropertyPages//screen-recorder source filter with two output pins
{
friend class CPin1;
friend class CPin2;
friend class CPropertyPage;
public:
CFilter(TCHAR* pName, LPUNKNOWN pUnk, HRESULT* hr);
~CFilter();
CBasePin* GetPin(int n);
int GetPinCount();
static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);
//ISpecifyPropertyPages: report the single property-page CLSID.
STDMETHODIMP GetPages(CAUUID *pPages)
{
if (pPages == NULL) return E_POINTER;
pPages->cElems = 1;
pPages->pElems = (GUID*)CoTaskMemAlloc(sizeof(GUID));
if (pPages->pElems == NULL)
{
return E_OUTOFMEMORY;
}
pPages->pElems[0] = CLSID_PropertyPage;
return S_OK;
}
DECLARE_IUNKNOWN
STDMETHODIMP NonDelegatingQueryInterface(REFIID iid, void ** ppv);
STDMETHODIMP Pause();
STDMETHODIMP Stop();
HRESULT SetFramesPerSec(DWORD nFrames);//set frames per second
HRESULT SetRect(RECT rect);//set the recording rectangle
HRESULT GetLostNum(DWORD* Nu);//get the dropped-frame count
HRESULT GetPos(DWORD* cur);//get the current position, in milliseconds
CPin1* pCPin1 = NULL;//video pin
CPin2* pCPin2 = NULL;//audio pin
HANDLE hRun;//"running" event handle (manual-reset); worker threads poll it
};
class CPropertyPage : public CBasePropertyPage//property page showing dropped frames and elapsed recording time
{
friend class CFilter;
friend class CPin1;
public:
CPropertyPage(IUnknown *pUnk);
~CPropertyPage();
static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *pHr);
virtual HRESULT OnConnect(IUnknown *pUnknown);
virtual HRESULT OnDeactivate();
virtual HRESULT OnActivate();//initializes the property-page dialog
virtual INT_PTR OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam);//receives the dialog's messages
CFilter *pCFilter = NULL;//owning filter (non-ref-counted, set in OnConnect)
HWND hEdit1, hEdit2, hText1, hText2;//dialog controls
HFONT hFont1;//font used for the controls
HANDLE hThread = NULL;//handle of the once-a-second UI update thread
};
template <class T> void SafeRelease(T** ppT)
{
// Release a COM-style interface pointer and null it out; no-op when the
// pointer is already null.
if (*ppT == nullptr)
{
return;
}
(*ppT)->Release();
*ppT = nullptr;
}
#endif //DLL_FILE
DLL.cpp
#include "DLL.h"
const REGPINTYPES sudPinTypes1 =//video pin media type for registration
{
&MEDIATYPE_Video, //major type
&MEDIASUBTYPE_RGB32 //subtype
};
const REGPINTYPES sudPinTypes2 =//audio pin media type for registration
{
&MEDIATYPE_Audio, //major type
&MEDIASUBTYPE_PCM //subtype
};
const AMOVIESETUP_PIN sudPins[] =//registration info for the two output pins
{
{
L"Video", //pin name
FALSE, //rendered pin
TRUE, //output pin
FALSE, //zero instances allowed
FALSE, //more than one instance allowed
&CLSID_NULL, //class of the filter this pin connects to
NULL, //name of the pin this pin connects to
1, //number of supported media types
&sudPinTypes1 //pin media types
},
{
L"Audio", //pin name
FALSE, //rendered pin
TRUE, //output pin
FALSE, //zero instances allowed
FALSE, //more than one instance allowed
&CLSID_NULL, //class of the filter this pin connects to
NULL, //name of the pin this pin connects to
1, //number of supported media types
&sudPinTypes2 //pin media types
}
};
const AMOVIESETUP_FILTER sudScreenRecorder =//filter registration info
{
&CLSID_ScreenRecorder, //filter CLSID
L"屏幕录像", //filter name
MERIT_DO_NOT_USE, //filter merit (never auto-inserted by the graph builder)
2, //pin count
sudPins //pin info
};
int g_cTemplates = 2;//number of entries in g_Templates
CFactoryTemplate g_Templates[] = {//class-factory templates: the filter and its property page
{
L"屏幕录像"
, &CLSID_ScreenRecorder
, CFilter::CreateInstance
, NULL
, &sudScreenRecorder
},
{
L"属性页",
&CLSID_PropertyPage,
CPropertyPage::CreateInstance,
NULL,
NULL
}
};
STDAPI DllRegisterServer()//register the filter DLL with COM/DirectShow
{
const HRESULT hr = AMovieDllRegisterServer2(TRUE);
return hr;
}
STDAPI DllUnregisterServer()//remove the DLL's COM/DirectShow registration
{
const HRESULT hr = AMovieDllRegisterServer2(FALSE);
return hr;
}
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);//provided by the DirectShow base-class library
//Forward DLL attach/detach notifications to the base-class entry point.
BOOL APIENTRY DllMain(HANDLE hModule, DWORD dwReason, LPVOID lpReserved)
{
return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved);
}
CFilter.cpp
#include "DLL.h"
//Construct the filter: create the two output pins and the manual-reset
//"running" event (initially unsignaled) that the worker threads poll.
CFilter::CFilter(TCHAR *pName, LPUNKNOWN pUnk, HRESULT *phr) : CBaseFilter(NAME("屏幕录像"), pUnk, this, CLSID_ScreenRecorder)
{
pCPin1 = new CPin1(this, phr, L"Video");//create the video pin
pCPin2 = new CPin2(this, phr, L"Audio");//create the audio pin
hRun = CreateEvent(NULL, TRUE, FALSE, NULL);//"running" event: manual-reset, initially unsignaled
}
CFilter::~CFilter()
{
//Release the resources created in the constructor; previously the pins and
//the event handle were leaked.
delete pCPin1;
delete pCPin2;
if (hRun) CloseHandle(hRun);
}
CBasePin *CFilter::GetPin(int n)
{
//Return the pin at index n: 0 = video, 1 = audio, anything else = none.
switch (n)
{
case 0: return pCPin1;
case 1: return pCPin2;
default: return NULL;
}
}
int CFilter::GetPinCount()
{
//The filter always exposes exactly two pins (video + audio).
const int nPins = 2;
return nPins;
}
//Class-factory entry point. Reports E_OUTOFMEMORY on allocation failure,
//matching CPropertyPage::CreateInstance.
CUnknown * WINAPI CFilter::CreateInstance(LPUNKNOWN pUnk, HRESULT *phr)
{
CFilter* pFilter = new CFilter(NAME("屏幕录像"), pUnk, phr);
if (pFilter == NULL && phr != NULL)
{
*phr = E_OUTOFMEMORY;
}
return pFilter;
}
//Expose ISpecifyPropertyPages and the custom IMy001 interface; delegate
//everything else to the base filter.
STDMETHODIMP CFilter::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
if (riid == IID_ISpecifyPropertyPages)
return GetInterface((ISpecifyPropertyPages*) this, ppv);
else if (riid == IID_IMy001)
return GetInterface((IMy001*)this, ppv);
else
return CBaseFilter::NonDelegatingQueryInterface(riid, ppv);
}
//Pause is also the transition the graph uses when starting to run. On the
//first call (no "running" signal yet) the event is raised and the state is
//forced to State_Running so the worker threads, which poll hRun and check
//GetState, begin delivering immediately.
STDMETHODIMP CFilter::Pause()
{
HRESULT hr;
DWORD dw = WaitForSingleObject(hRun, 0);//probe the "running" signal without waiting
if (dw == WAIT_OBJECT_0)return CBaseFilter::Pause();//already signaled: just forward to the base class
SetEvent(hRun); //not signaled yet: raise the "running" signal
hr= CBaseFilter::Pause();
m_State = State_Running;//the state must be overridden here so the threads see "running"
return hr;
}
STDMETHODIMP CFilter::Stop()
{
//Drop the "running" signal so the worker threads stop delivering, then let
//the base class perform the state change.
ResetEvent(hRun);
HRESULT hr = CBaseFilter::Stop();
return hr;
}
//Set the frame rate. Rejects 0 (would divide by zero below) and refuses the
//change once the video pin is connected, because the frame duration is baked
//into the negotiated media type.
HRESULT CFilter::SetFramesPerSec(DWORD nFrames)
{
if (nFrames == 0)//guard the division below
return E_INVALIDARG;
if (pCPin1->IsConnected())//cannot change the rate while the pin is connected
return S_FALSE;
pCPin1->VIDEO_FPS = nFrames;
pCPin1->nDur = 10000000 / pCPin1->VIDEO_FPS;//duration of one frame, in 100-ns units
return S_OK;
}
//Set the recording rectangle. While the filter is running/paused, or while
//the video pin is connected, only the rectangle's position may change (its
//size is baked into the media type). When stopped and unconnected, any
//change is allowed and the video dimensions are recalculated.
HRESULT CFilter::SetRect(RECT rect)
{
//Validate the rectangle: it must be non-empty and lie within the screen.
//The capture loop has no bounds checks and would read out of bounds on a
//malformed rectangle.
if (rect.right <= rect.left || rect.bottom <= rect.top ||
rect.left < pCPin1->ScreenRect.left || rect.top < pCPin1->ScreenRect.top ||
rect.right > pCPin1->ScreenRect.right || rect.bottom > pCPin1->ScreenRect.bottom)
{
return E_INVALIDARG;
}
if (m_State != State_Stopped || pCPin1->IsConnected())//size is locked: position-only change
{
if (pCPin1->VIDEO_WIDTH != (UINT)(rect.right - rect.left) || pCPin1->VIDEO_HEIGHT != (UINT)(rect.bottom - rect.top))
{
return S_FALSE;//size change is not allowed in this state
}
pCPin1->SelRect = rect;//same size, new position
return S_OK;
}
//Stopped and unconnected: accept any valid rectangle.
pCPin1->SelRect = rect;
pCPin1->VIDEO_WIDTH = pCPin1->SelRect.right - pCPin1->SelRect.left;//recompute video width
pCPin1->VIDEO_HEIGHT = pCPin1->SelRect.bottom - pCPin1->SelRect.top;//recompute video height
return S_OK;
}
//Get and reset the dropped-frame count published by the video thread.
HRESULT CFilter::GetLostNum(DWORD* Nu)
{
if (Nu == NULL) return E_POINTER;//guard the output pointer
ULONGLONG Lost=InterlockedExchange(&pCPin1->LOST, (ULONGLONG)0);//fetch-and-clear
*Nu = (DWORD)Lost;
return S_OK;
}
//Get the current position published by the audio thread, in milliseconds.
HRESULT CFilter::GetPos(DWORD* cur)
{
if (cur == NULL) return E_POINTER;//guard the output pointer
ULONGLONG CUR = InterlockedExchange(&pCPin2->CUR, (ULONGLONG)0);//fetch-and-clear (100-ns units)
*cur = (DWORD)(CUR / 10000);//convert 100-ns units to milliseconds
return S_OK;
}
CPin1.cpp
#include "DLL.h"
#include "resource.h"
//Construct the video pin: the default recording area is the whole primary
//monitor at 30 fps; DXGI duplication is initialized up front and a
//full-screen RGB32 staging buffer is allocated.
CPin1::CPin1(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName) : CBaseOutputPin(NAME("Out"), pFilter, pFilter, phr, pPinName)
{
pCFilter = pFilter;
VIDEO_WIDTH = GetSystemMetrics(SM_CXSCREEN);//primary monitor width, in pixels
VIDEO_HEIGHT = GetSystemMetrics(SM_CYSCREEN);//primary monitor height, in pixels
ScreenRect.left = 0; ScreenRect.top = 0; ScreenRect.right = VIDEO_WIDTH; ScreenRect.bottom = VIDEO_HEIGHT;
SelRect.left = 0; SelRect.top = 0; SelRect.right = VIDEO_WIDTH; SelRect.bottom = VIDEO_HEIGHT;
nDur = 10000000 / VIDEO_FPS;//duration of one frame, in 100-ns units
InitDXGI();//initialize DXGI desktop duplication
pBuffer1 = new BYTE[VIDEO_WIDTH * VIDEO_HEIGHT * 4];//full-screen RGB32 capture buffer
}
CPin1::~CPin1()
{
//Release the D3D/DXGI interfaces and the full-screen capture buffer.
SafeRelease(&p3D11Device); SafeRelease(&p3D11DeviceContext); SafeRelease(&pDuplication);
delete[] pBuffer1;
}
HRESULT CPin1::CheckMediaType(const CMediaType *pmt)
{
//Accept only fixed-size, non-temporally-compressed RGB32 video with a
//VIDEOINFOHEADER format block.
const bool acceptable =
pmt->majortype == MEDIATYPE_Video
&& pmt->subtype == MEDIASUBTYPE_RGB32
&& pmt->formattype == FORMAT_VideoInfo
&& pmt->bFixedSizeSamples
&& !pmt->bTemporalCompression;
return acceptable ? S_OK : S_FALSE;
}
//Offer the single supported media type: fixed-size RGB32 video at the
//currently configured width, height and frame rate.
HRESULT CPin1::GetMediaType(int iPosition, CMediaType *pmt)
{
if (iPosition == 0)
{
pmt->SetType(&MEDIATYPE_Video);//major type
pmt->SetSubtype(&MEDIASUBTYPE_RGB32);//subtype
pmt->SetFormatType(&FORMAT_VideoInfo);//format type
pmt->SetTemporalCompression(FALSE);//no temporal compression
pmt->SetSampleSize(VIDEO_WIDTH*VIDEO_HEIGHT * 4);//sample size, in bytes
VIDEOINFOHEADER *p = (VIDEOINFOHEADER*)pmt->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
if (NULL == p)return(E_OUTOFMEMORY);
ZeroMemory(p, sizeof(VIDEOINFOHEADER));
SetRectEmpty(&p->rcSource);
SetRectEmpty(&p->rcTarget);
p->dwBitRate = VIDEO_WIDTH * VIDEO_HEIGHT * 4 * VIDEO_FPS;//NOTE(review): this is bytes/sec, but dwBitRate is documented as bits/sec -- confirm
p->dwBitErrorRate = 0;
p->AvgTimePerFrame = nDur;//duration of one frame, in 100-ns units
p->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);//size of the image info structure
p->bmiHeader.biWidth = VIDEO_WIDTH;//image width
p->bmiHeader.biHeight = VIDEO_HEIGHT;//image height (positive: bottom-up DIB, matching the vertical flip in VideoThread)
p->bmiHeader.biPlanes = 1;//number of planes
p->bmiHeader.biBitCount = 32;//bits per pixel
p->bmiHeader.biCompression = BI_RGB;//uncompressed RGB
p->bmiHeader.biSizeImage = VIDEO_WIDTH * VIDEO_HEIGHT * 4;//image size, in bytes
p->bmiHeader.biXPelsPerMeter = 0;//horizontal resolution (unspecified)
p->bmiHeader.biYPelsPerMeter = 0;//vertical resolution (unspecified)
p->bmiHeader.biClrUsed = 0;//colors in the color table (none for 32-bit RGB)
p->bmiHeader.biClrImportant = 0;//important colors (all)
return S_OK;
}
return S_FALSE;
}
HRESULT CPin1::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES * pProperties)//determine the output pin's sample buffer size
{
//Request a single allocator buffer big enough for one full RGB32 frame.
HRESULT hr = NOERROR;
pProperties->cBuffers = 1;//one buffer
pProperties->cbBuffer = VIDEO_WIDTH * VIDEO_HEIGHT * 4;//buffer size, in bytes
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProperties, &Actual);
if (FAILED(hr))return hr;
if (Actual.cbBuffer < pProperties->cbBuffer)//is this allocator unsuitable?
{
return E_FAIL;
}
return NOERROR;
}
//Called when the pin goes active: start the capture worker thread. The
//thread handle is closed immediately so it is not leaked (the thread itself
//keeps running; previously the handle was leaked on every activation).
HRESULT CPin1::Active(void)
{
HANDLE hThread = CreateThread(NULL, 0, VideoThread, this, 0, NULL);//create the video worker thread
if (hThread == NULL) return E_FAIL;//thread creation failed: the pin cannot deliver
CloseHandle(hThread);
return CBaseOutputPin::Active();
}
//Video worker thread. Paces itself with timeGetTime against an incrementing
//frame index (so timing errors do not accumulate), grabs the desktop, crops
//and vertically flips the image, and delivers one sample per frame interval
//while the filter's "running" event stays signaled.
DWORD WINAPI CPin1::VideoThread(LPVOID pParam)
{
CPin1* pCPin1 = (CPin1*)pParam;
pCPin1->index = 0; pCPin1->index2 = 0; pCPin1->Lost = 0;
int SelWidth = pCPin1->SelRect.right - pCPin1->SelRect.left;//width of the recording rectangle
int SelHeight = pCPin1->SelRect.bottom - pCPin1->SelRect.top;//height of the recording rectangle
int ScreenWidth = pCPin1->ScreenRect.right;//screen width
int ScreenHeight = pCPin1->ScreenRect.bottom;//screen height (not used below)
pCPin1->pBuffer2 = new BYTE[SelWidth * SelHeight * 4];//create shared buffer 2 (cropped image)
DWORD star = timeGetTime();//start time, in milliseconds
Agan:
DWORD Cur = timeGetTime() - star;//elapsed time, in milliseconds
if (Cur < pCPin1->index * pCPin1->nDur / 10000)goto Agan;//busy-wait until this frame's presentation time
if (Cur >= (pCPin1->index + 1) * pCPin1->nDur / 10000)//already past the next frame's time: count a dropped frame and re-test
{
pCPin1->index++; pCPin1->index2++; pCPin1->Lost++;
InterlockedExchange(&pCPin1->LOST, (ULONGLONG)pCPin1->Lost);//publish the dropped-frame count
goto Agan;
}
pCPin1->index++;
HRESULT hr;
FILTER_STATE fs;
pCPin1->pCFilter->GetState(0, &fs);
if (fs == State_Running)//samples are only sent while the filter is running
{
pCPin1->GetDesktop();//copy the desktop image into pBuffer1
//Crop and flip the image from pBuffer1 into pBuffer2.
BYTE* P = pCPin1->pBuffer1;
BYTE* pD = pCPin1->pBuffer2;
P += pCPin1->SelRect.top * ScreenWidth * 4;//skip rows above the recording rectangle
for (int i = SelHeight - 1; i > -1; i--)//crop and flip vertically (no horizontal flip)
{
P += pCPin1->SelRect.left * 4;//skip pixels left of the rectangle
CopyMemory(pD + i * SelWidth * 4, P, SelWidth * 4);//copy one row, writing bottom-up
P += (ScreenWidth - pCPin1->SelRect.left) * 4;//advance to the start of the next source row
}
//Deliver the sample downstream.
if (pCPin1->IsConnected())
{
IMediaSample *pSample;
ReCreate:
hr = pCPin1->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
if (FAILED(hr))
{
Sleep(1); goto ReCreate;//no free buffer yet: retry
}
BYTE* pB = NULL;
hr = pSample->GetPointer(&pB);
CopyMemory(pB, pCPin1->pBuffer2, pCPin1->VIDEO_WIDTH * pCPin1->VIDEO_HEIGHT * 4);//VIDEO_WIDTH/HEIGHT track SelRect, so this equals SelWidth*SelHeight*4
hr = pSample->SetActualDataLength(pCPin1->VIDEO_WIDTH * pCPin1->VIDEO_HEIGHT * 4);
REFERENCE_TIME star = pCPin1->index2 * pCPin1->nDur, end = (pCPin1->index2 + 1) * pCPin1->nDur;
hr = pSample->SetTime(&star, &end);//timestamps from the incrementing index: no drift accumulation
hr = pSample->SetSyncPoint(TRUE);//every uncompressed frame is a sync point
pCPin1->index2++;
hr = pCPin1->Deliver(pSample);
pSample->Release();
if (hr != S_OK)goto End;//downstream refused the sample: end the stream
}
}
DWORD dw = WaitForSingleObject(pCPin1->pCFilter->hRun, 0);//poll the "running" signal
if (dw == WAIT_OBJECT_0)goto Agan;//still signaled: produce the next frame
End:
pCPin1->DeliverEndOfStream();
delete[] pCPin1->pBuffer2;//free shared buffer 2
return 1;
}
//Create a D3D11 device and an IDXGIOutputDuplication for the primary output
//(device -> DXGI device -> adapter -> output 0 -> IDXGIOutput1 ->
//DuplicateOutput). Shows a message box and returns the error on failure.
HRESULT CPin1::InitDXGI()
{
HRESULT hr = S_OK;
D3D_DRIVER_TYPE driver_types[] =//candidate driver types (not iterated below; only HARDWARE is used)
{
D3D_DRIVER_TYPE_HARDWARE,
D3D_DRIVER_TYPE_WARP,
D3D_DRIVER_TYPE_REFERENCE,
};
UINT n_driver_types = ARRAYSIZE(driver_types);
D3D_FEATURE_LEVEL feature_levels[] =//feature levels to try, newest first
{
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_1
};
UINT n_feature_levels = ARRAYSIZE(feature_levels);
D3D_FEATURE_LEVEL feature_level;
hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, feature_levels, n_feature_levels, D3D11_SDK_VERSION, &p3D11Device, &feature_level, &p3D11DeviceContext);
IDXGIDevice* pIDXGIDevice = NULL;
if (hr == S_OK)
{
hr = p3D11Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&pIDXGIDevice));//get the DXGI device interface
}
IDXGIAdapter* pDXGIAdapter = NULL;
if (hr == S_OK)
{
hr = pIDXGIDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&pDXGIAdapter));//get the DXGI adapter
}
SafeRelease(&pIDXGIDevice);
IDXGIOutput* pDXGIOutput = NULL;
if (hr == S_OK)
{
hr = pDXGIAdapter->EnumOutputs(0, &pDXGIOutput); //get the first (primary) output
}
SafeRelease(&pDXGIAdapter);
DXGI_OUTPUT_DESC _output_des;
if (hr == S_OK)
{
hr = pDXGIOutput->GetDesc(&_output_des);//get the output description (not used below)
}
IDXGIOutput1* pDXGIOutput1 = NULL;
if (hr == S_OK)
{
hr = pDXGIOutput->QueryInterface(__uuidof(pDXGIOutput1), reinterpret_cast<void**>(&pDXGIOutput1));//IDXGIOutput1 is needed for DuplicateOutput
}
SafeRelease(&pDXGIOutput);
if (hr == S_OK)
{
hr = pDXGIOutput1->DuplicateOutput(p3D11Device, &pDuplication);//create the desktop-duplication interface for this output
}
SafeRelease(&pDXGIOutput1);
if (hr != S_OK)MessageBox(0, L"DXGI初始化失败", L"屏幕录像", MB_OK);
return hr;
}
//Grab the next desktop frame through DXGI duplication and copy its pixels
//into pBuffer1. On timeout or failure all copy steps are skipped, so
//pBuffer1 keeps the previous frame. The duplicated frame is released before
//returning.
HRESULT CPin1::GetDesktop()
{
HRESULT hr;
IDXGIResource* dxgi_res = NULL;
DXGI_OUTDUPL_FRAME_INFO frame_info;
hr = pDuplication->AcquireNextFrame(3, &frame_info, &dxgi_res);//wait up to 3 ms for a new frame
ID3D11Texture2D *_image = NULL;
if (hr == S_OK)
{
hr = dxgi_res->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void **>(&_image));//get the frame texture
}
SafeRelease(&dxgi_res);
D3D11_TEXTURE2D_DESC frame_desc;
UINT len = 0;
if (hr == S_OK && _image != NULL)
{
_image->GetDesc(&frame_desc);
len = frame_desc.Width*frame_desc.Height * 4;
}
ID3D11Texture2D *new_image = NULL;
frame_desc.Usage = D3D11_USAGE_STAGING;//CPU-readable staging copy
frame_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
frame_desc.BindFlags = 0;
frame_desc.MiscFlags = 0;
frame_desc.MipLevels = 1;
frame_desc.ArraySize = 1;
frame_desc.SampleDesc.Count = 1;
if (hr == S_OK)
{
hr = p3D11Device->CreateTexture2D(&frame_desc, NULL, &new_image);//create the staging texture
}
if (hr == S_OK)
{
p3D11DeviceContext->CopyResource(new_image, _image);//GPU-side copy into the staging texture
}
SafeRelease(&_image);//FIX: the frame texture was never released, leaking one COM reference per captured frame
IDXGISurface *dxgi_surface = NULL;
if (hr == S_OK)
{
hr = new_image->QueryInterface(__uuidof(IDXGISurface), (void **)(&dxgi_surface));
}
SafeRelease(&new_image);
DXGI_MAPPED_RECT mapped_rect;
if (hr == S_OK)
{
hr = dxgi_surface->Map(&mapped_rect, DXGI_MAP_READ);//map the staging image into CPU memory
}
if (hr == S_OK)
{
CopyMemory(pBuffer1, mapped_rect.pBits, len);
}
if (hr == S_OK)
{
dxgi_surface->Unmap();
}
SafeRelease(&dxgi_surface);
return pDuplication->ReleaseFrame();//NOTE(review): also called when AcquireNextFrame failed -- confirm the extra error return is harmless here
}
CPin2.cpp
#include "DLL.h"
#include "Mmdeviceapi.h"
#include "Audioclient.h"
//Construct the audio pin and set up WASAPI loopback capture. BufferSize and
//pwfx are pre-initialized so that an early failure inside GetAudioEndpoint
//no longer leaves them uninitialized (previously the allocation below used a
//garbage size and the destructor freed a garbage pwfx pointer).
CPin2::CPin2(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName) : CBaseOutputPin(NAME("Out"), pFilter, pFilter, phr, pPinName)
{
pCFilter = pFilter;
pwfx = NULL;//defined state even if endpoint setup fails before GetMixFormat
BufferSize = 0;//overwritten by GetAudioEndpoint on success
GetAudioEndpoint(this);//acquire the audio endpoint
pBuffer = new BYTE[BufferSize > 0 ? BufferSize * 10 : 1];//shared buffer: room for 10 audio packets
}
CPin2::~CPin2()
{
//Recover the typed interface pointers from their type-erased slots and
//release everything acquired by GetAudioEndpoint.
IMMDeviceEnumerator *pEnumerator = (IMMDeviceEnumerator*)pEnumeratorV;
IMMDevice *pDevice = (IMMDevice*)pDeviceV;
IAudioClient *pAudioClient = (IAudioClient*)pAudioClientV;
IAudioCaptureClient *pCaptureClient = (IAudioCaptureClient*)pCaptureClientV;
CoTaskMemFree(pwfx);//free the mix format returned by GetMixFormat
SafeRelease(&pEnumerator); SafeRelease(&pDevice); SafeRelease(&pAudioClient); SafeRelease(&pCaptureClient);
delete[] pBuffer;
}
//Accept only fixed-size, non-temporally-compressed 16-bit PCM audio with a
//WAVEFORMATEX format block.
HRESULT CPin2::CheckMediaType(const CMediaType *pmt)
{
if (pmt->majortype == MEDIATYPE_Audio && pmt->subtype == MEDIASUBTYPE_PCM && pmt->formattype == FORMAT_WaveFormatEx && pmt->bFixedSizeSamples
&& !pmt->bTemporalCompression)
{
//Guard against a missing or truncated format block before dereferencing it.
if (pmt->pbFormat == NULL || pmt->cbFormat < sizeof(WAVEFORMATEX))
return S_FALSE;
WAVEFORMATEX* p = (WAVEFORMATEX*)pmt->pbFormat;
if (p->wBitsPerSample != 16)return S_FALSE;//this pin only produces 16-bit samples
return S_OK;
}
return S_FALSE;
}
//Offer the single supported media type: 16-bit PCM at the endpoint mix
//format's sample rate and channel count.
HRESULT CPin2::GetMediaType(int iPosition, CMediaType *pmt)
{
if (iPosition == 0)
{
if (pwfx == NULL) return E_UNEXPECTED;//audio endpoint initialization failed: no format available
WAVEFORMATEX *pwf = (WAVEFORMATEX *)pmt->AllocFormatBuffer(sizeof(WAVEFORMATEX));
if (NULL == pwf)return(E_OUTOFMEMORY);
ZeroMemory(pwf, sizeof(WAVEFORMATEX));
pwf->wFormatTag = WAVE_FORMAT_PCM;
pwf->nChannels = pwfx->nChannels;
pwf->nSamplesPerSec = pwfx->nSamplesPerSec;
pwf->nAvgBytesPerSec = pwfx->nAvgBytesPerSec / 2;//half the mix format's byte rate (32-bit float -> 16-bit PCM) -- TODO confirm mix format is always float
pwf->nBlockAlign = 4;//assumes 2 channels x 16 bits -- TODO confirm for non-stereo endpoints
pwf->wBitsPerSample = 16;
pwf->cbSize = 0;
pmt->SetType(&MEDIATYPE_Audio);//major type
pmt->SetSubtype(&MEDIASUBTYPE_PCM);//subtype
pmt->SetFormatType(&FORMAT_WaveFormatEx);//format type
pmt->SetTemporalCompression(FALSE);//no temporal compression
pmt->SetSampleSize(BufferSize);//sample size, in bytes
return S_OK;
}
return S_FALSE;
}
HRESULT CPin2::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES * pProperties)//determine the output pin's sample buffer size
{
//Request a single allocator buffer sized for one audio packet.
HRESULT hr = NOERROR;
pProperties->cBuffers = 1;//one buffer
pProperties->cbBuffer = BufferSize;//one audio packet, in bytes
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProperties, &Actual);
if (FAILED(hr))return hr;
if (Actual.cbBuffer < pProperties->cbBuffer)//is this allocator unsuitable?
{
return E_FAIL;
}
return NOERROR;
}
//Called when the pin goes active: start the audio worker thread. The thread
//handle is closed immediately so it is not leaked (the thread itself keeps
//running; previously the handle was leaked on every activation).
HRESULT CPin2::Active(void)
{
HANDLE hThread = CreateThread(NULL, 0, AudioThread, this, 0, NULL);//create the audio worker thread
if (hThread == NULL) return E_FAIL;//thread creation failed: the pin cannot deliver
CloseHandle(hThread);
return CBaseOutputPin::Active();
}
DWORD WINAPI CPin2::AudioThread(LPVOID pParam)//audio pin worker thread
{
//Paces itself in nDur steps (the WASAPI device period, ~10 ms), converts the
//float loopback mix to 16-bit PCM, and delivers one pin sample per packet
//while the filter's "running" event stays signaled.
CPin2* pCPin2 = (CPin2*)pParam;
IAudioClient *pAudioClient = (IAudioClient*)pCPin2->pAudioClientV;
IAudioCaptureClient *pCaptureClient = (IAudioCaptureClient*)pCPin2->pCaptureClientV;
pCPin2->Star = -pCPin2->nDur; pCPin2->End = 0; pCPin2->index = 0;//first increment brings Star to 0
HRESULT hr = pAudioClient->Start(); //start the audio stream
DWORD star = timeGetTime();//start time, in milliseconds
Agan:
pCPin2->count = 1;//reset the packet count to one
if (timeGetTime() - star < pCPin2->index * (pCPin2->nDur / 10000))goto Agan;//busy-wait until the next packet time
DWORD flags; UINT32 numFramesAvailable = 0; BYTE *pData = NULL;//packet buffer pointer
UINT32 packetLength = 0;//packet size, in audio frames
hr = pCaptureClient->GetNextPacketSize(&packetLength);//query the next packet size, in audio frames
if (packetLength != 0)//a packet is available: convert it to short and copy into the shared buffer
{
hr = pCaptureClient->GetBuffer(&pData, &numFramesAvailable, &flags, NULL, NULL);//get the current packet's pointer
if (hr == S_OK && numFramesAvailable != 0 && pData != NULL)
{
pCPin2->count = numFramesAvailable * 4 / pCPin2->BufferSize;//pin-sized packets spanned by this data; NOTE(review): no clamp against the 10-packet buffer -- confirm it cannot overflow
for (int i = 0; i < (int)numFramesAvailable; i++)//convert float samples to short
{
float f1, f2;
CopyMemory(&f1, pData + i * 8, 4); CopyMemory(&f2, pData + i * 8 + 4, 4);//assumes a 2-channel 32-bit float mix format -- TODO confirm
short L = (short)(f1 * 32767); short R = (short)(f2 * 32767);
CopyMemory(pCPin2->pBuffer + i * 4, &L, 2); CopyMemory(pCPin2->pBuffer + i * 4 + 2, &R, 2);//store as interleaved 16-bit PCM
}
hr = pCaptureClient->ReleaseBuffer(numFramesAvailable);//release the packet
}
}
else//nothing is playing: fill the whole buffer with silence
{
memset(pCPin2->pBuffer, 0, pCPin2->BufferSize * 10);
}
pCPin2->index++;
FILTER_STATE fs;
pCPin2->pCFilter->GetState(0, &fs);
if (fs == State_Running && pCPin2->IsConnected())//deliver only while running and connected
{
BYTE* P = pCPin2->pBuffer;//start of the shared buffer
for (int i = 0; i < pCPin2->count; i++)
{
if (i > 0)P += pCPin2->BufferSize;//advance to the next packet
IMediaSample *pSample = NULL;
ReCreate:
hr = pCPin2->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
if (FAILED(hr))
{
Sleep(1); goto ReCreate;//no free buffer yet: retry
}
BYTE* pB = NULL;
hr = pSample->GetPointer(&pB);
CopyMemory(pB, P, pCPin2->BufferSize);
hr = pSample->SetActualDataLength(pCPin2->BufferSize);
pCPin2->Star += pCPin2->nDur; pCPin2->End += pCPin2->nDur;//advance timestamps by one device period
hr = pSample->SetTime(&pCPin2->Star, &pCPin2->End);
hr = pSample->SetSyncPoint(TRUE);
hr = pCPin2->Deliver(pSample);
pSample->Release();
if (hr != S_OK)
goto End;//downstream refused the sample: end the stream
InterlockedExchange(&pCPin2->CUR, (ULONGLONG)pCPin2->Star);//publish the current position
}
}
DWORD dw = WaitForSingleObject(pCPin2->pCFilter->hRun, 0);//poll the "running" signal
if (dw == WAIT_OBJECT_0)goto Agan;//still signaled: process the next packet
End:
pCPin2->DeliverEndOfStream();
hr = pAudioClient->Stop(); //stop the audio stream
return 1;
}
//Set up WASAPI loopback capture of the default render endpoint: enumerator,
//device, audio client (1-second endpoint buffer), packet duration (nDur),
//per-packet byte size (BufferSize) and the capture client. Returns 1 on
//success, 0 on failure (a message box names the failing step; pV is unused).
//NOTE(review): on partial failure the interfaces acquired so far are not
//released here -- they are only stored (and later freed) if every step
//succeeds.
DWORD CPin2::GetAudioEndpoint(void* pV)
{
IMMDeviceEnumerator *pEnumerator = NULL;
IMMDevice *pDevice = NULL;
IAudioClient *pAudioClient = NULL;
IAudioCaptureClient *pCaptureClient = NULL;
HRESULT hr;
hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL, __uuidof(IMMDeviceEnumerator), (void**)&pEnumerator);//create the device enumerator
if (hr != S_OK)
{
MessageBox(NULL, L"创建设备枚举器失败!", L"提示", MB_OK); return 0;
}
hr = pEnumerator->GetDefaultAudioEndpoint(eRender, eConsole, &pDevice);//default render (playback) endpoint, for loopback capture
if (hr != S_OK)
{
MessageBox(NULL, L"获取默认音频端点设备失败!", L"提示", MB_OK); return 0;
}
hr = pDevice->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL, (void**)&pAudioClient);//activate the endpoint
if (hr != S_OK)
{
MessageBox(NULL, L"激活默认音频端点设备失败!", L"提示", MB_OK); return 0;
}
hr = pAudioClient->GetMixFormat(&pwfx);//get the shared-mode mix format
if (hr != S_OK)
{
MessageBox(NULL, L"获取音频格式失败!", L"提示", MB_OK); return 0;
}
hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_LOOPBACK, hnsRequestedDuration, 0, pwfx, NULL);//loopback capture with a 1-second endpoint buffer
if (hr != S_OK)
{
MessageBox(NULL, L"音频客户端初始化失败!", L"提示", MB_OK); return 0;
}
REFERENCE_TIME r2;
hr = pAudioClient->GetDevicePeriod(&nDur, &r2);//device period = duration of one packet, in 100-ns units
if (hr != S_OK)
{
MessageBox(NULL, L"获取缓冲区失败!", L"提示", MB_OK); return 0;
}
UINT32 Size;
hr = pAudioClient->GetBufferSize(&Size);//total endpoint buffer size, in audio frames
if (hr != S_OK)
{
MessageBox(NULL, L"获取缓冲区大小失败!", L"提示", MB_OK); return 0;
}
BufferSize = (LONG)(Size * 4 * nDur / hnsRequestedDuration);//bytes per packet (4 bytes per 16-bit stereo frame)
hr = pAudioClient->GetService(__uuidof(IAudioCaptureClient), (void**)&pCaptureClient);//get the capture service
if (hr != S_OK)
{
MessageBox(NULL, L"系统音频服务没有打开!", L"提示", MB_OK); return 0;
}
pEnumeratorV = (void*)pEnumerator;//store the interfaces type-erased for the header-only class
pDeviceV = (void*)pDevice;
pAudioClientV = (void*)pAudioClient;
pCaptureClientV = (void*)pCaptureClient;
return 1;
}
CPropertyPage.cpp
#include "DLL.h"
#include "stdio.h"
#include "resource.h"
//Create the property page backed by dialog resource IDD_DIALOG1 and build
//the font used for its controls.
CPropertyPage::CPropertyPage(IUnknown *pUnk) : CBasePropertyPage(NAME("属性页"), pUnk, IDD_DIALOG1, IDS_STRING104)
{
hFont1 = CreateFont(20, 0, 0, 0, FW_NORMAL, 0, 0, 0, CHINESEBIG5_CHARSET, OUT_DEFAULT_PRECIS, CLIP_DEFAULT_PRECIS, DEFAULT_QUALITY, DEFAULT_PITCH | FF_SWISS, L"微软雅黑");//20px UI font for the dialog controls
}
CPropertyPage::~CPropertyPage()
{
//Free the GDI font created in the constructor (previously leaked).
if (hFont1) DeleteObject(hFont1);
}
//Class-factory entry point for the property page; reports E_OUTOFMEMORY on
//allocation failure.
CUnknown * WINAPI CPropertyPage::CreateInstance(LPUNKNOWN pUnk, HRESULT *pHr)
{
CPropertyPage *pPage = new CPropertyPage(pUnk);
if (pPage == NULL)
*pHr = E_OUTOFMEMORY;
return pPage;
}
//Called when the page is attached to the filter: query IMy001 to verify the
//object is our filter, then keep a non-ref-counted CFilter pointer (the
//interface reference itself is released immediately).
HRESULT CPropertyPage::OnConnect(IUnknown *pUnknown)
{
if (pUnknown == NULL)
{
return E_POINTER;
}
IMy001* pIMy001 = NULL;
HRESULT hr = pUnknown->QueryInterface(IID_IMy001, reinterpret_cast<void**>(&pIMy001));
if (hr == S_OK)
{
pCFilter = (CFilter*)pIMy001;//cast back to the implementing class; raw pointer only, no reference held
SafeRelease(&pIMy001);
}
return hr;
}
//Called when the page dialog is destroyed: stop the UI update thread and
//close its handle (previously leaked).
//NOTE(review): TerminateThread kills the thread without any cleanup; a
//cooperative exit flag checked in OnTimer's loop would be safer.
HRESULT CPropertyPage::OnDeactivate()
{
if (hThread != NULL)
{
TerminateThread(hThread, 1);
CloseHandle(hThread);
hThread = NULL;
}
return S_OK;
}
//Background thread: once a second, read the dropped-frame count and current
//position published by the pins and display them while the filter is running.
DWORD WINAPI OnTimer(LPVOID lpParameter)
{
CPropertyPage* pPage = (CPropertyPage*)lpParameter;
while (TRUE)
{
ULONGLONG L = InterlockedExchange(&pPage->pCFilter->pCPin1->LOST, 0);//fetch-and-clear the dropped-frame count
char ch1[50];
sprintf_s(ch1, 50, "%I64u", L);
ULONGLONG T = InterlockedExchange(&pPage->pCFilter->pCPin2->CUR, (ULONGLONG)0);//fetch-and-clear the current position (100-ns units)
int cur = (int)T / 10000000;//convert to whole seconds
int shi = cur / 60 / 60;//hours
int fen = (cur / 60) % 60;//minutes
int miao = cur % 60;//seconds
char ch2[500];
sprintf_s(ch2, 500, "%d时 %d分 %d秒", shi, fen, miao);
FILTER_STATE fs;
pPage->pCFilter->GetState(0, &fs);
if (fs == State_Running)//update the display only while recording
{
SetWindowTextA(pPage->hEdit1, ch1); SetWindowTextA(pPage->hEdit2, ch2);
}
Sleep(1000);
}
return 1;
}
//Initialize the dialog: fetch and style the controls FIRST, then start the
//once-a-second update thread. (Previously the thread was started before
//hEdit1/hEdit2 were assigned, racing against uninitialized window handles.)
HRESULT CPropertyPage::OnActivate(void)
{
hText1 = GetDlgItem(m_Dlg, IDC_STA1); hText2 = GetDlgItem(m_Dlg, IDC_STA2);
hEdit1 = GetDlgItem(m_Dlg, IDC_EDIT1); hEdit2 = GetDlgItem(m_Dlg, IDC_EDIT2);
MoveWindow(hText1, 10, 10, 80, 20, 1); SendMessage(hText1, WM_SETFONT, (WPARAM)hFont1, (LPARAM)1);
MoveWindow(hEdit1, 100, 10, 160, 20, 1); SendMessage(hEdit1, WM_SETFONT, (WPARAM)hFont1, (LPARAM)1);
MoveWindow(hText2, 10, 40, 80, 20, 1); SendMessage(hText2, WM_SETFONT, (WPARAM)hFont1, (LPARAM)1);
MoveWindow(hEdit2, 100, 40, 160, 20, 1); SendMessage(hEdit2, WM_SETFONT, (WPARAM)hFont1, (LPARAM)1);
hThread = CreateThread(NULL, 0, OnTimer, this, 0, NULL);//start the UI update thread last
return 0;
}
INT_PTR CPropertyPage::OnReceiveMessage(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
//All dialog messages are handled by the base property-page class.
INT_PTR result = CBasePropertyPage::OnReceiveMessage(hwnd, uMsg, wParam, lParam);
return result;
}