下载本过滤器DLL
本过滤器将NV12视频流解压缩为RGB32视频流。
NV12解压缩过滤器信息
过滤器名称:NV12解压
过滤器GUID:{1A3A49C9-6E99-41E4-87F8-308BEF77592F}
DLL注册函数名:DllRegisterServer
删除注册函数名:DllUnregisterServer
过滤器有1个输入引脚和1个输出引脚。
输入引脚标识:In
输入引脚媒体类型:
主要类型:MEDIATYPE_Video
子类型:MEDIASUBTYPE_NV12
格式类型:FORMAT_VideoInfo或FORMAT_VideoInfo2
样本为12位。
输出引脚标识:Out
输出引脚媒体类型:
主要类型:MEDIATYPE_Video
子类型:MEDIASUBTYPE_RGB32
格式类型:FORMAT_VideoInfo
样本为固定大小。
不使用时间压缩。
样本为32位。
NV12解压缩过滤器开发信息
NV12使用YUV表示像素的颜色。Y为亮度值,U,V为两个色度值,它们的值范围为0-255。视频引脚样本通常是1帧图像的颜色信息。要将YUV颜色值转换为RGB颜色值,就需要知道NV12引脚样本YUV值的存储方式。NV12样本先存储1帧图像的Y值,1个Y值占1字节。首先是第1行的Y值,第1个Y值是左侧第1个像素的Y值;然后存储第2行的Y值;直至1帧图像所有像素的Y值。接下来是UV值,U和V是交替存储,各占1字节。如下图。该图假设图像行宽100像素。
NV12图像是4个像素使用一组相同的UV值,第1行的第1个,第2个像素与第2行的第1个,第2个像素,使用第1组UV;第1行的第3个,第4个像素与第2行的第3个,第4个像素,使用第2组UV。图像每个像素使用的Y,U,V值如下图,该图假设图像行宽100像素。
知道了NV12图像每个像素YUV值的存储位置,我们就可以获取YUV颜色值,将其转换为RGB颜色值,按RGB32样本的颜色值存储方式,创建RGB32视频引脚样本。
NV12解压缩过滤器DLL的全部代码
DLL.h
#ifndef DLL_FILE
#define DLL_FILE
#include "strmbase10.h"//过滤器基础类定义文件
#if _DEBUG
#pragma comment(lib, "strmbasd10.lib")//过滤器基础类实现文件调试版本
#else
#pragma comment(lib, "strmbase10.lib")//过滤器基础类实现文件发布版本
#endif
// {1A3A49C9-6E99-41E4-87F8-308BEF77592F}
DEFINE_GUID(CLSID_NV12Filter,//过滤器GUID
0x1a3a49c9, 0x6e99, 0x41e4, 0x87, 0xf8, 0x30, 0x8b, 0xef, 0x77, 0x59, 0x2f);
class COutPin;
class CFilter;
// Input pin: accepts NV12 video and converts each incoming sample to RGB32
// inside Receive(), pushing the converted frame out through the filter's
// output pin.
class CInPin : public CBaseInputPin
{
friend class COutPin;
friend class CFilter;
public:
CInPin(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~CInPin();
HRESULT CheckMediaType(const CMediaType *pmt);//accept only NV12 video (FORMAT_VideoInfo or FORMAT_VideoInfo2)
HRESULT SetMediaType(const CMediaType *pmt);//cache width/height and frame timing on the owning filter
STDMETHODIMP Receive(IMediaSample *pSample);//NV12 -> RGB32 conversion and downstream delivery
STDMETHODIMP EndOfStream();
STDMETHODIMP BeginFlush();
STDMETHODIMP EndFlush();
STDMETHODIMP NewSegment(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate);
CFilter *pCFilter;//owning filter (back pointer, not reference-counted)
};
// Output pin: offers a single RGB32 media type derived from the connected
// input, and delivers converted samples downstream through a COutputQueue.
class COutPin : public CBaseOutputPin
{
friend class CInPin;
friend class CFilter;
IUnknown *m_pPosition = NULL;//seeking pass-through object, created lazily in NonDelegatingQueryInterface
COutputQueue *m_pOutputQueue = NULL;//sample queue used to deliver samples downstream
public:
COutPin(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~COutPin();
STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void **ppvoid);//exposes IMediaPosition/IMediaSeeking via pass-through
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);//proposes the RGB32 output format
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest);
HRESULT Active();//creates the output queue when streaming starts
HRESULT Inactive();//destroys the output queue when streaming stops
HRESULT Deliver(IMediaSample *pMediaSample);
HRESULT DeliverEndOfStream();
HRESULT DeliverBeginFlush();
HRESULT DeliverEndFlush();
HRESULT DeliverNewSegment(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate);
STDMETHODIMP Notify(IBaseFilter *pSender, Quality q);//passes quality-control messages upstream
CFilter *pCFilter = NULL;//owning filter (back pointer, not reference-counted)
};
// The NV12 decompressor filter: owns one input pin (NV12) and one output pin
// (RGB32). Also serves as the filter-wide lock (CCritSec) passed to both pins.
class CFilter : public CCritSec, public CBaseFilter
{
friend class CInPin;
friend class COutPin;
public:
CFilter(TCHAR* pName, LPUNKNOWN pUnk, HRESULT* hr);
~CFilter();
CBasePin* GetPin(int n);
int GetPinCount();
static CUnknown* WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT* phr);//class-factory entry point
CInPin* pCInPin = NULL;
COutPin* pCOutPin = NULL;
int width;//image width, in pixels
int height;//image height, in pixels
int y_size;//size of the Y (luma) plane, in bytes
int uv_size;//size of the interleaved UV (chroma) plane, in bytes
int nFrame;//frames per second
LONGLONG AvgTimePerFrame;//frame duration, in 100-nanosecond units
};
// Release a COM-style object through a pointer-to-pointer and null the
// caller's pointer so it cannot be used or released twice.
template <class T> void SafeRelease(T** ppT)
{
    T* const held = *ppT;
    if (held != NULL)
    {
        held->Release();
        *ppT = NULL;
    }
}
#endif //DLL_FILE
DLL.cpp
#include "DLL.h"
const AMOVIESETUP_MEDIATYPE InPinType[] = // input pin media types
{
{
&MEDIATYPE_Video, //major type
&MEDIASUBTYPE_NV12 //subtype
}
};
const AMOVIESETUP_MEDIATYPE OutPinType[] = // output pin media types
{
{
&MEDIATYPE_Video, //major type
&MEDIASUBTYPE_RGB32 //subtype
}
};
const AMOVIESETUP_PIN sudPins[] = // pin registration info
{
{
L"In", //pin name
FALSE, //bRendered: not a rendered pin
FALSE, //bOutput: this is an input pin
FALSE, //bZero: cannot have zero instances
FALSE, //bMany: cannot have more than one instance
&CLSID_NULL, //class of the filter this pin connects to (none specific)
NULL, //name of the pin it connects to
1, //number of supported media types
InPinType //media type table
},
{
L"Out", //pin name
FALSE, //bRendered: not a rendered pin
TRUE, //bOutput: this is an output pin
FALSE, //bZero: cannot have zero instances
FALSE, //bMany: cannot have more than one instance
&CLSID_NULL, //class of the filter this pin connects to (none specific)
NULL, //name of the pin it connects to
1, //number of supported media types
OutPinType //media type table
}
};
const AMOVIESETUP_FILTER NV12_FILTER = //filter registration info
{
&CLSID_NV12Filter, //filter class ID
L"NV12解压", //filter name (user-visible; keep as registered)
MERIT_DO_NOT_USE, //merit: never auto-inserted by Intelligent Connect
2, //number of pins
sudPins //pin info table
};
CFactoryTemplate g_Templates[] = //class-factory template array used by the strmbase DLL scaffolding
{
{
L"NV12解压", //filter name
&CLSID_NV12Filter, //pointer to the filter CLSID
CFilter::CreateInstance, //instance-creation function
NULL, //init function called from the DLL entry point (none)
&NV12_FILTER //pointer to the AMOVIESETUP_FILTER registration data
}
};
int g_cTemplates = 1;//number of entries in g_Templates
// COM registration entry point: registers the filter using the
// g_Templates/AMOVIESETUP data above.
STDAPI DllRegisterServer()
{
return AMovieDllRegisterServer2(TRUE);
}
// COM unregistration entry point: removes the registry entries created by
// DllRegisterServer.
STDAPI DllUnregisterServer()
{
return AMovieDllRegisterServer2(FALSE);
}
// Forward DLL attach/detach notifications to the strmbase-provided entry
// point, which initializes the base-class runtime.
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);
BOOL APIENTRY DllMain(HANDLE hModule, DWORD dwReason, LPVOID lpReserved)
{
return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved);
}
CFilter.cpp
#include "DLL.h"
// Construct the filter and its two pins. The pin constructors report
// failures through *phr; additionally report allocation failure explicitly
// (the original silently ignored a NULL result from new).
CFilter::CFilter(TCHAR *pName, LPUNKNOWN pUnk, HRESULT *phr) : CBaseFilter(NAME("NV12解压"), pUnk, this, CLSID_NV12Filter)
{
    pCInPin = new CInPin(this, phr, L"In");    // create the input pin
    pCOutPin = new COutPin(this, phr, L"Out"); // create the output pin
    if ((pCInPin == NULL || pCOutPin == NULL) && phr != NULL && SUCCEEDED(*phr))
    {
        *phr = E_OUTOFMEMORY;
    }
}
// Destroy the filter.
// Fix: the pins are allocated with new in the constructor but the original
// destructor never freed them, leaking both pin objects (and the seeking
// pass-through held by the output pin) every time the filter was destroyed.
CFilter::~CFilter()
{
    delete pCInPin;
    pCInPin = NULL;
    delete pCOutPin;
    pCOutPin = NULL;
}
// Return the n-th pin: index 0 is the input pin, index 1 is the output pin,
// any other index yields no pin.
CBasePin *CFilter::GetPin(int n)
{
    switch (n)
    {
    case 0:
        return pCInPin;
    case 1:
        return pCOutPin;
    default:
        return NULL;
    }
}
// This filter always exposes exactly two pins: one input and one output.
int CFilter::GetPinCount()
{
    const int pinCount = 2;
    return pinCount;
}
// Class-factory creation function referenced by g_Templates.
// Fix: report allocation failure through *phr instead of silently returning
// NULL with *phr untouched.
CUnknown * WINAPI CFilter::CreateInstance(LPUNKNOWN pUnk, HRESULT *phr)
{
    CFilter *pFilter = new CFilter(NAME("NV12解压"), pUnk, phr);
    if (pFilter == NULL && phr != NULL)
    {
        *phr = E_OUTOFMEMORY;
    }
    return pFilter;
}
CInPin.cpp
#include "DLL.h"
#include "dvdmedia.h"
// Construct the input pin; the owning filter doubles as the streaming lock
// (it derives from CCritSec), hence pFilter is passed twice to the base.
CInPin::CInPin(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName) : CBaseInputPin(NAME("In"), pFilter, pFilter, phr, pPinName)
{
pCFilter = pFilter;//keep a back pointer for format/size bookkeeping
}
// Nothing to release: the pin owns no resources beyond what the base class
// manages.
CInPin::~CInPin()
{
}
// Accept only NV12 video carrying a VIDEOINFOHEADER or VIDEOINFOHEADER2
// format block. Returns S_OK to accept the type, S_FALSE to reject it.
// 842094158 == 0x3231564E == MAKEFOURCC('N','V','1','2'), the NV12 FOURCC.
HRESULT CInPin::CheckMediaType(const CMediaType *pmt)
{
    if (pmt->majortype != MEDIATYPE_Video || pmt->subtype != MEDIASUBTYPE_NV12) return S_FALSE;
    if (pmt->formattype == FORMAT_VideoInfo)
    {
        // Fix: verify the format block exists and is large enough before
        // dereferencing it — upstream filters may propose pbFormat == NULL.
        if (pmt->pbFormat == NULL || pmt->cbFormat < sizeof(VIDEOINFOHEADER)) return S_FALSE;
        VIDEOINFOHEADER* p = (VIDEOINFOHEADER*)pmt->pbFormat;
        // Require 12 bits per pixel, NV12 compression, and a non-zero frame
        // duration (SetMediaType divides by AvgTimePerFrame).
        if (p->bmiHeader.biBitCount != 12 || p->bmiHeader.biCompression != 842094158 || p->AvgTimePerFrame == 0) return S_FALSE;
        return S_OK;
    }
    if (pmt->formattype == FORMAT_VideoInfo2)
    {
        if (pmt->pbFormat == NULL || pmt->cbFormat < sizeof(VIDEOINFOHEADER2)) return S_FALSE;
        VIDEOINFOHEADER2* p = (VIDEOINFOHEADER2*)pmt->pbFormat;
        if (p->bmiHeader.biBitCount != 12 || p->bmiHeader.biCompression != 842094158 || p->AvgTimePerFrame == 0) return S_FALSE;
        return S_OK;
    }
    return S_FALSE;
}
// Cache image geometry and timing on the owning filter; the output pin reads
// these when it proposes the RGB32 media type and sizes its buffers.
HRESULT CInPin::SetMediaType(const CMediaType *pmt)
{
    if (pmt->formattype == FORMAT_VideoInfo)
    {
        VIDEOINFOHEADER* p = (VIDEOINFOHEADER*)pmt->pbFormat;
        pCFilter->width = p->bmiHeader.biWidth;
        pCFilter->height = p->bmiHeader.biHeight;
        // Fix: guard the division — CheckMediaType rejects AvgTimePerFrame == 0,
        // but stay safe in case this is reached through another path.
        pCFilter->nFrame = p->AvgTimePerFrame != 0 ? (int)(10000000 / p->AvgTimePerFrame) : 0;
        pCFilter->AvgTimePerFrame = p->AvgTimePerFrame; // frame duration, 100-ns units
    }
    if (pmt->formattype == FORMAT_VideoInfo2)
    {
        VIDEOINFOHEADER2* p = (VIDEOINFOHEADER2*)pmt->pbFormat;
        pCFilter->width = p->bmiHeader.biWidth;
        pCFilter->height = p->bmiHeader.biHeight;
        pCFilter->nFrame = p->AvgTimePerFrame != 0 ? (int)(10000000 / p->AvgTimePerFrame) : 0;
        pCFilter->AvgTimePerFrame = p->AvgTimePerFrame;
    }
    pCFilter->y_size = pCFilter->width * pCFilter->height;        // Y plane: 1 byte per pixel
    pCFilter->uv_size = (pCFilter->width / 2) * pCFilter->height; // interleaved UV plane: w*h/2 bytes
    return CBasePin::SetMediaType(pmt);
}
// Convert one NV12 frame to RGB32 and deliver it downstream.
// NOTE(review): this does not call CBaseInputPin::Receive first, so the base
// class's flushing/stopped checks are skipped — confirm that is intentional.
STDMETHODIMP CInPin::Receive(IMediaSample *pSample)
{
    BYTE* nv12_data = NULL;
    HRESULT hr = pSample->GetPointer(&nv12_data); // input sample buffer
    if (FAILED(hr) || nv12_data == NULL) return hr;
    REFERENCE_TIME star, end;
    // May legitimately fail (VFW_E_SAMPLE_TIME_NOT_SET) when the sample
    // carries no timestamps.
    HRESULT hrTime = pSample->GetTime(&star, &end);
    IMediaSample *pOutSample = NULL;
    hr = pCFilter->pCOutPin->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0); // empty output sample
    if (hr == S_OK)
    {
        BYTE* pOutBuffer = NULL;
        hr = pOutSample->GetPointer(&pOutBuffer);
        const int width = pCFilter->width;
        const int height = pCFilter->height;
        const int stride = width * 4; // RGB32: 4 bytes per pixel
        for (int i = 0; i < height; i++)
        {
            // Fix: RGB32 with a positive biHeight (as advertised by
            // COutPin::GetMediaType) is a bottom-up DIB, while NV12 is stored
            // top-down. Write source row i into destination row height-1-i;
            // the original wrote top-down and produced a vertically flipped
            // image.
            BYTE* pRow = pOutBuffer + (height - 1 - i) * stride;
            const BYTE* pY = nv12_data + i * width;                      // luma row
            const BYTE* pUV = nv12_data + pCFilter->y_size + (i / 2) * width; // shared UV row (one per 2 luma rows)
            for (int j = 0; j < width; j++)
            {
                int Y = pY[j];
                int U = pUV[(j / 2) * 2] - 128;     // U,V interleaved; one pair per 2x2 block
                int V = pUV[(j / 2) * 2 + 1] - 128;
                // BT.601 YUV -> RGB conversion, clamped to [0, 255].
                int R = (int)(Y + 1.402 * V);
                int G = (int)(Y - 0.344136 * U - 0.714136 * V);
                int B = (int)(Y + 1.772 * U);
                R = R < 0 ? 0 : (R > 255 ? 255 : R);
                G = G < 0 ? 0 : (G > 255 ? 255 : G);
                B = B < 0 ? 0 : (B > 255 ? 255 : B);
                // RGB32 byte order: blue, green, red, unused.
                pRow[j * 4] = (BYTE)B;
                pRow[j * 4 + 1] = (BYTE)G;
                pRow[j * 4 + 2] = (BYTE)R;
                pRow[j * 4 + 3] = 0;
            }
        }
        // Fix: only propagate timestamps the input actually carried — the
        // original forwarded uninitialized values when GetTime failed.
        if (SUCCEEDED(hrTime)) pOutSample->SetTime(&star, &end);
        pOutSample->SetActualDataLength(width * height * 4);
        hr = pCFilter->pCOutPin->Deliver(pOutSample); // send downstream
        pOutSample->Release();                        // release our reference
    }
    return S_OK;
}
// Forward end-of-stream downstream through the output pin's queue so it is
// delivered after all queued samples.
STDMETHODIMP CInPin::EndOfStream()
{
return pCFilter->pCOutPin->DeliverEndOfStream();
}
// Propagate the flush downstream first, then put this input pin into the
// flushing state via the base class.
STDMETHODIMP CInPin::BeginFlush()
{
    const HRESULT hrDownstream = pCFilter->pCOutPin->DeliverBeginFlush();
    return FAILED(hrDownstream) ? hrDownstream : CBaseInputPin::BeginFlush();
}
// End the flush downstream first, then clear this pin's flushing state via
// the base class.
STDMETHODIMP CInPin::EndFlush()
{
    const HRESULT hrDownstream = pCFilter->pCOutPin->DeliverEndFlush();
    return FAILED(hrDownstream) ? hrDownstream : CBaseInputPin::EndFlush();
}
// Announce the new segment downstream, then record it on this pin via the
// base class.
STDMETHODIMP CInPin::NewSegment(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate)
{
    const HRESULT hrDownstream = pCFilter->pCOutPin->DeliverNewSegment(tStart, tStop, dRate);
    return FAILED(hrDownstream) ? hrDownstream : CBaseInputPin::NewSegment(tStart, tStop, dRate);
}
COutPin.cpp
#include "DLL.h"
// Construct the output pin; the owning filter serves as the shared lock
// (it derives from CCritSec), hence pFilter is passed twice to the base.
COutPin::COutPin(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName) : CBaseOutputPin(NAME("Out"), pFilter, pFilter, phr, pPinName)
{
pCFilter = pFilter;//keep a back pointer to read cached format data
}
// Release the lazily-created seeking pass-through object (created in
// NonDelegatingQueryInterface); the output queue is torn down in Inactive().
COutPin::~COutPin()
{
SafeRelease(&m_pPosition);
}
// Expose IMediaPosition/IMediaSeeking by delegating to a pass-through object
// that forwards seeking calls upstream via the input pin's connection.
// All other interfaces fall through to the base output pin.
STDMETHODIMP COutPin::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
CheckPointer(ppv, E_POINTER);
ASSERT(ppv);
*ppv = NULL;
HRESULT hr = NOERROR;
if (riid == IID_IMediaPosition || riid == IID_IMediaSeeking)
{
if (m_pPosition == NULL)
{
// Created lazily on first request; released in the destructor.
hr = CreatePosPassThru(GetOwner(), FALSE, (IPin *)pCFilter->pCInPin, &m_pPosition);
if (FAILED(hr)) return hr;
}
return m_pPosition->QueryInterface(riid, ppv);
}
return CBaseOutputPin::NonDelegatingQueryInterface(riid, ppv);
}
// Offer exactly one output format: RGB32 with the input's width/height and
// frame rate (cached on the filter when the input pin connected).
HRESULT COutPin::GetMediaType(int iPosition, CMediaType *pMediaType)
{
    if (iPosition == 0)
    {
        pMediaType->SetType(&MEDIATYPE_Video);          // major type
        pMediaType->SetSubtype(&MEDIASUBTYPE_RGB32);    // subtype
        pMediaType->SetFormatType(&FORMAT_VideoInfo);   // format block type
        pMediaType->SetTemporalCompression(FALSE);      // no temporal compression
        pMediaType->SetSampleSize(pCFilter->width * pCFilter->height * 4); // fixed sample size
        VIDEOINFOHEADER* p = (VIDEOINFOHEADER*)pMediaType->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
        if (NULL == p) return(E_OUTOFMEMORY);
        ZeroMemory(p, sizeof(VIDEOINFOHEADER)); // also clears rcSource/rcTarget, dwBitErrorRate, etc.
        // Fix: dwBitRate is specified in BITS per second; the original
        // computed bytes per second (missing the *8). Use DWORD arithmetic to
        // avoid signed-int overflow for large frames.
        p->dwBitRate = (DWORD)pCFilter->width * (DWORD)pCFilter->height * 32 * (DWORD)pCFilter->nFrame;
        p->AvgTimePerFrame = pCFilter->AvgTimePerFrame; // frame duration, 100-ns units
        p->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
        p->bmiHeader.biWidth = pCFilter->width;
        p->bmiHeader.biHeight = pCFilter->height;       // positive => bottom-up RGB DIB
        p->bmiHeader.biPlanes = 1;
        p->bmiHeader.biBitCount = 32;                   // bits per pixel
        p->bmiHeader.biCompression = 0;                 // BI_RGB, uncompressed
        p->bmiHeader.biSizeImage = pCFilter->width * pCFilter->height * 4; // image size in bytes
        p->bmiHeader.biXPelsPerMeter = 0;
        p->bmiHeader.biYPelsPerMeter = 0;
        p->bmiHeader.biClrUsed = 0;
        p->bmiHeader.biClrImportant = 0;
        return S_OK;
    }
    return S_FALSE; // only one type offered
}
// Accept only RGB32/VideoInfo, and only after the input pin is connected
// (the output dimensions come from the input format).
// Fix: the original popped a modal MessageBox here. CheckMediaType is invoked
// repeatedly during pin negotiation, potentially on non-UI threads, so it
// must never block on UI — simply reject the type instead.
HRESULT COutPin::CheckMediaType(const CMediaType *pmt)
{
    if (!pCFilter->pCInPin->IsConnected()) return S_FALSE;
    if (pmt->majortype == MEDIATYPE_Video && pmt->subtype == MEDIASUBTYPE_RGB32 && pmt->formattype == FORMAT_VideoInfo) return S_OK;
    return S_FALSE;
}
// Negotiate the output allocator: buffers large enough for one full RGB32
// frame each.
HRESULT COutPin::DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest)
{
    CheckPointer(pMemAllocator, E_POINTER);
    CheckPointer(ppropInputRequest, E_POINTER);
    // Fix: honor a downstream request for more buffers instead of always
    // forcing 1; only supply the default when nothing was requested. Also
    // drop the ASSERT on Actual.cBuffers — the allocator may legally grant
    // more buffers than requested.
    if (ppropInputRequest->cBuffers == 0) ppropInputRequest->cBuffers = 1;
    ppropInputRequest->cbBuffer = pCFilter->width * pCFilter->height * 4; // one RGB32 frame
    ALLOCATOR_PROPERTIES Actual;
    HRESULT hr = pMemAllocator->SetProperties(ppropInputRequest, &Actual);
    if (FAILED(hr)) return hr;
    // All we require is a buffer big enough for one frame.
    if (Actual.cbBuffer < ppropInputRequest->cbBuffer)
    {
        return E_FAIL;
    }
    return S_OK;
}
// Called when streaming starts: create the output queue that delivers
// samples downstream (on its own thread — COutputQueue(..., TRUE, FALSE)).
HRESULT COutPin::Active()
{
    HRESULT hr = NOERROR;
    if (m_Connected == NULL)
        return NOERROR; // nothing to do on an unconnected pin
    if (m_pOutputQueue == NULL)
    {
        m_pOutputQueue = new COutputQueue(m_Connected, &hr, TRUE, FALSE);
        if (m_pOutputQueue == NULL)
            return E_OUTOFMEMORY;
        if (FAILED(hr))
        {
            delete m_pOutputQueue;
            m_pOutputQueue = NULL;
            return hr;
        }
    }
    // Fix: propagate the base-class result — the original discarded it and
    // always reported success.
    return CBaseOutputPin::Active();
}
// Called when streaming stops: tear down the output queue (its destructor
// drains/terminates the delivery thread).
HRESULT COutPin::Inactive()
{
    if (m_pOutputQueue)
    {
        delete m_pOutputQueue;
        m_pOutputQueue = NULL;
    }
    // Fix: propagate the base-class result — the original discarded it and
    // always reported success.
    return CBaseOutputPin::Inactive();
}
// Hand a sample to the output queue. The queue releases the sample once it
// has been delivered, so take an extra reference on its behalf before
// queueing (standard COutputQueue usage pattern).
HRESULT COutPin::Deliver(IMediaSample *pMediaSample)
{
if (m_pOutputQueue == NULL)
return NOERROR;
pMediaSample->AddRef();
return m_pOutputQueue->Receive(pMediaSample);
}
// Queue end-of-stream so it is delivered after all pending samples.
HRESULT COutPin::DeliverEndOfStream()
{
    if (m_pOutputQueue != NULL)
    {
        m_pOutputQueue->EOS();
    }
    return NOERROR;
}
// Tell the queue a flush is starting; queued samples will be discarded.
HRESULT COutPin::DeliverBeginFlush()
{
    if (m_pOutputQueue != NULL)
    {
        m_pOutputQueue->BeginFlush();
    }
    return NOERROR;
}
// Tell the queue the flush is over; normal delivery resumes.
HRESULT COutPin::DeliverEndFlush()
{
    if (m_pOutputQueue != NULL)
    {
        m_pOutputQueue->EndFlush();
    }
    return NOERROR;
}
// Queue a new-segment notification so it stays ordered with the samples.
HRESULT COutPin::DeliverNewSegment(REFERENCE_TIME tStart, REFERENCE_TIME tStop, double dRate)
{
    if (m_pOutputQueue != NULL)
    {
        m_pOutputQueue->NewSegment(tStart, tStop, dRate);
    }
    return NOERROR;
}
// Pass a quality-control message upstream: prefer an explicitly-set quality
// sink on the input pin; otherwise ask the upstream connection for
// IQualityControl.
STDMETHODIMP COutPin::Notify(IBaseFilter *pSender, Quality q)
{
    if (pCFilter->pCInPin->m_pQSink != NULL)
    {
        return pCFilter->pCInPin->m_pQSink->Notify(pCFilter, q);
    }
    HRESULT hr = VFW_E_NOT_FOUND;
    if (pCFilter->pCInPin->m_Connected)
    {
        // Fix: initialize pIQC and check the QueryInterface result — the
        // original read pIQC without initialization, relying on a failed QI
        // having nulled it, and ignored the QI HRESULT entirely. Also removed
        // the unreachable trailing "return NOERROR;".
        IQualityControl *pIQC = NULL;
        HRESULT hrQI = pCFilter->pCInPin->m_Connected->QueryInterface(IID_IQualityControl, (void**)&pIQC);
        if (SUCCEEDED(hrQI) && pIQC != NULL)
        {
            hr = pIQC->Notify(pCFilter, q);
            pIQC->Release();
        }
    }
    return hr;
}