The post at https://blog.csdn.net/robothn/article/details/78781321 uses shaders to display the YUV420P output of FFMPEG hardware decoding. This article instead drives the D3D11 VideoProcessor to perform the color-space conversion and scaling, with good results.
In practice, rendering FFMPEG-decoded data via UpdateSubresource requires copying the decoded data out of the decoder first. With VideoProcessorBlt and suitable input parameters, the decoded subresource can be color-space converted and scaled directly, removing that extra copy.
For AV_PIX_FMT_D3D11 frames, frame->data[1] holds the subresource index (the texture array slice).
It is handled as follows:
int index = (intptr_t)frame->data[1];   // array slice of the decoded frame inside the texture array
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC pInDesc;
ZeroMemory(&pInDesc, sizeof(pInDesc));
pInDesc.FourCC = 0;
pInDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
pInDesc.Texture2D.MipSlice = 0;
pInDesc.Texture2D.ArraySlice = index;
See the code in the rendering section below.
1. Initialize the D3D11 device, video device and related structures
FFMPEG performs the D3D11 hardware decoding; the corresponding device, device context, video device and video context are taken from its hardware device context.
FFMPEG selects the d3d11va hardware decoder type with av_hwdevice_find_type_by_name:
av_hwdevice_find_type_by_name("d3d11va");
Detailed code:
bool D3D11VARender::prepareDecoderContext(AVCodecContext *context, AVDictionary **options)
{
    int err = 0;
    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "D3D11VARender::initialize");
    m_ctx = context;
    enum AVHWDeviceType type = av_hwdevice_find_type_by_name("d3d11va");
    enum AVPixelFormat hwPixFmt = find_fmt_by_hw_type(type);
    if (hwPixFmt == AV_PIX_FMT_NONE)
    {
        return false;
    }
    g_hw_pix_fmt = hwPixFmt;
    err = av_hwdevice_ctx_create(&hwDeviceCtx, type, NULL, NULL, 0);
    if (err < 0)
    {
        avcodec_close(context);
        av_free(context);
        if (hwDeviceCtx)
        {
            av_buffer_unref(&hwDeviceCtx);
            hwDeviceCtx = NULL;
        }
        return false;
    }
    // Set hw_device_ctx on the AVCodecContext so decoding runs through D3D11VA
    context->hw_device_ctx = av_buffer_ref(hwDeviceCtx);
    AVHWDeviceContext* hw_device_ctx = (AVHWDeviceContext*)hwDeviceCtx->data;
    AVD3D11VADeviceContext* hw_d3d11_dev_ctx = (AVD3D11VADeviceContext*)hw_device_ctx->hwctx;
    // Reuse FFMPEG's D3D11 device/context for rendering
    m_pD3D11Device = hw_d3d11_dev_ctx->device;
    m_pD3D11DeviceContext = hw_d3d11_dev_ctx->device_context;
    m_pD3D11VideoDevice = hw_d3d11_dev_ctx->video_device;
    m_pD3D11VideoContext = hw_d3d11_dev_ctx->video_context;
    SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                "D3D11VARender::prepareDecoderContext: m_VideoWidth is: %d m_VideoHeight is: %d", m_VideoWidth, m_VideoHeight);
    InitDisplay();
    return true;
}
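The helper find_fmt_by_hw_type() referenced above is not included in the post. A minimal sketch, assuming it walks the decoder's hardware configurations with avcodec_get_hw_config() and returns the pixel format for the requested device type (the post's version takes only the type, so the decoder is presumably held elsewhere; it is passed explicitly here to keep the sketch self-contained):

// Sketch only (not the author's code); requires libavcodec/avcodec.h.
static enum AVPixelFormat find_fmt_by_hw_type(enum AVHWDeviceType type, const AVCodec *decoder)
{
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
        if (!config)
            return AV_PIX_FMT_NONE;        // decoder has no config for this device type
        if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) &&
            config->device_type == type)
            return config->pix_fmt;        // AV_PIX_FMT_D3D11 for d3d11va
    }
}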
2. Display initialization
Initialize swapChainDesc and the swap chain for the target window.
HRESULT D3D11VARender::InitDisplay()
{
    HRESULT hr = S_OK;
    try {
        IF_FAILED_THROW(m_pD3D11Device->QueryInterface(__uuidof(IDXGIDevice3), (void**)&m_pDXGIdevice));
        IF_FAILED_THROW(m_pDXGIdevice->GetParent(__uuidof(IDXGIAdapter), (void**)&m_pDXGIAdapter));
        IF_FAILED_THROW(m_pDXGIAdapter->GetParent(__uuidof(IDXGIFactory3), (void**)&m_pIDXGIFactory3));
        IF_FAILED_THROW(m_pDXGIAdapter->EnumOutputs(0, &m_pDXGIOutput));

        DXGI_SWAP_CHAIN_DESC1 swapChainDesc;
        swapChainDesc.Width = m_DisplayWidth;
        swapChainDesc.Height = m_DisplayHeight;
        swapChainDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
        swapChainDesc.Stereo = FALSE;
        swapChainDesc.SampleDesc.Count = 1;
        swapChainDesc.SampleDesc.Quality = 0;
        swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
        swapChainDesc.BufferCount = 2;
        swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD;
        swapChainDesc.AlphaMode = DXGI_ALPHA_MODE::DXGI_ALPHA_MODE_IGNORE;
        swapChainDesc.Scaling = DXGI_SCALING_STRETCH;
        swapChainDesc.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING;

        IF_FAILED_THROW(m_pIDXGIFactory3->CreateSwapChainForHwnd(m_pD3D11Device, m_HandleWindow, &swapChainDesc, NULL, NULL, &m_swapChain1));
        // IF_FAILED_THROW(m_pIDXGIFactory3->CreateSwapChainForComposition(m_pD3D11Device, &swapChainDesc, nullptr, &m_swapChain1));
        m_swapchain2 = (IDXGISwapChain2*)m_swapChain1;

        DXGI_SWAP_CHAIN_DESC1 swapChainDesc2;
        m_swapChain1->GetDesc1(&swapChainDesc2);
        SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION,
                    "D3D11VAm_pSwapChain :%d :%d :%d", swapChainDesc.Width, swapChainDesc.Height, swapChainDesc.BufferCount);
        m_swapchain2->SetMaximumFrameLatency(1);
        if (iswindowed == false)
            IF_FAILED_THROW(m_swapchain2->SetFullscreenState(TRUE, NULL)); // full screen
        ResizeSwapChain();
        SDL_LogInfo(SDL_LOG_CATEGORY_APPLICATION, "InitDisplay::success!");
    } catch (HRESULT e) {
        hr = e; // propagate the failing HRESULT instead of silently returning S_OK
    }
    return hr;
}
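The IF_FAILED_THROW and SAFE_RELEASE helpers used above and below are not defined in the post. Typical definitions that match the try/catch(HRESULT) pattern would be (an assumption, not the author's exact macros):

// Assumed helper macros; the author's actual definitions are not shown in the post.
#ifndef IF_FAILED_THROW
#define IF_FAILED_THROW(expr) do { HRESULT _hr_ = (expr); if (FAILED(_hr_)) throw _hr_; } while (0)
#endif
#ifndef SAFE_RELEASE
#define SAFE_RELEASE(p) do { if (p) { (p)->Release(); (p) = NULL; } } while (0)
#endif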
3. Initialize the render target view and viewport
HRESULT D3D11VARender::MakeRTV()
{
    // Get backbuffer
    HRESULT hr = S_OK;
    ID3D11Texture2D* BackBuffer = nullptr;
    hr = m_swapchain2->GetBuffer(0, __uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&BackBuffer));
    if (FAILED(hr))
    {
        //return ProcessFailure(m_Device, L"Failed to get backbuffer for making render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
        return hr;
    }

    // Create a render target view
    hr = m_pD3D11Device->CreateRenderTargetView(BackBuffer, nullptr, &_rtv);
    BackBuffer->Release();
    if (FAILED(hr))
    {
        //return ProcessFailure(m_Device, L"Failed to create render target view in OUTPUTMANAGER", L"Error", hr, SystemTransitionsExpectedErrors);
        return hr;
    }

    // Set new render target
    m_pD3D11DeviceContext->OMSetRenderTargets(1, &_rtv, nullptr);
    return S_OK;
}

void D3D11VARender::SetViewPort(UINT Width, UINT Height)
{
    D3D11_VIEWPORT VP;
    VP.Width = static_cast<FLOAT>(Width);
    VP.Height = static_cast<FLOAT>(Height);
    VP.MinDepth = 0.0f;
    VP.MaxDepth = 1.0f;
    VP.TopLeftX = 0;
    VP.TopLeftY = 0;
    m_pD3D11DeviceContext->RSSetViewports(1, &VP);
}
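ResizeSwapChain(), called from InitDisplay() in step 2, is also not shown in the post. A minimal sketch, assuming it only resizes the swap-chain buffers to the current display size and then rebuilds the render target view and viewport with the two functions above:

// Sketch only (an assumption); the ALLOW_TEARING flag must match the creation flags.
HRESULT D3D11VARender::ResizeSwapChain()
{
    HRESULT hr = S_OK;
    if (_rtv)
    {
        _rtv->Release();
        _rtv = nullptr;
    }
    // 0 keeps the existing buffer count; DXGI_FORMAT_UNKNOWN keeps the existing format
    hr = m_swapchain2->ResizeBuffers(0, m_DisplayWidth, m_DisplayHeight,
                                     DXGI_FORMAT_UNKNOWN, DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING);
    if (FAILED(hr))
        return hr;
    hr = MakeRTV();
    if (FAILED(hr))
        return hr;
    SetViewPort(m_DisplayWidth, m_DisplayHeight);
    return hr;
}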
4. Rendering
Before calling the VideoProcessor, create an ID3D11VideoProcessorInputView and an ID3D11VideoProcessorOutputView and bind the corresponding ID3D11Texture2D to each.
The ID3D11VideoProcessorInputView wraps the texture from the AVFrame produced by ffmpeg hardware decoding: ID3D11Texture2D* hwTexture = (ID3D11Texture2D*)frame->data[0];
The ID3D11VideoProcessorOutputView wraps the ID3D11Texture2D obtained from the DXGI swap-chain back buffer.
VideoProcessorBlt then performs the color-space conversion and scaling; DXGI_FORMAT_B8G8R8A8_UNORM is the display output format.
Parameters for the size conversion:
// NV12 surface to RGB backbuffer
RECT srcrc = {0, 0, (LONG)texture_desc.Width, (LONG)texture_desc.Height};
RECT destcrc = {0, 0, (LONG)bktexture_desc.Width, (LONG)bktexture_desc.Height};
Present:
hr = m_swapchain2->Present1(0, DXGI_PRESENT_ALLOW_TEARING, &parameters);
Detailed code:
void D3D11VARender::renderFrame(AVFrame *frame)
{
    HRESULT hr = S_OK;
    D3D11_TEXTURE2D_DESC texture_desc;
    D3D11_TEXTURE2D_DESC bktexture_desc;
    ID3D11VideoProcessorInputView* pD3D11VideoProcessorInputViewIn = NULL;
    ID3D11VideoProcessorOutputView* pD3D11VideoProcessorOutputView = NULL;

    if (frame == NULL)
        return;

    int index = (intptr_t)frame->data[1];   // subresource (array slice) of the decoded frame
    if (frame->format == AV_PIX_FMT_D3D11)
    {
        ID3D11Texture2D* hwTexture = (ID3D11Texture2D*)frame->data[0];
        hwTexture->GetDesc(&texture_desc);
        IF_FAILED_THROW(m_swapchain2->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&m_pDXGIBackBuffer));
        m_pDXGIBackBuffer->GetDesc(&bktexture_desc);

        if (!m_pD3D11VideoProcessorEnumerator || !m_pD3D11VideoProcessor)
        {
            SAFE_RELEASE(m_pD3D11VideoProcessorEnumerator);
            SAFE_RELEASE(m_pD3D11VideoProcessor);

            D3D11_VIDEO_PROCESSOR_CONTENT_DESC ContentDesc;
            ZeroMemory(&ContentDesc, sizeof(ContentDesc));
            ContentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
            ContentDesc.InputWidth = texture_desc.Width;
            ContentDesc.InputHeight = texture_desc.Height;
            ContentDesc.OutputWidth = bktexture_desc.Width;
            ContentDesc.OutputHeight = bktexture_desc.Height;
            ContentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
            IF_FAILED_THROW(m_pD3D11VideoDevice->CreateVideoProcessorEnumerator(&ContentDesc, &m_pD3D11VideoProcessorEnumerator));

            UINT uiFlags;
            DXGI_FORMAT VP_Output_Format = DXGI_FORMAT_B8G8R8A8_UNORM;
            IF_FAILED_THROW(m_pD3D11VideoProcessorEnumerator->CheckVideoProcessorFormat(VP_Output_Format, &uiFlags));
            DXGI_FORMAT VP_input_Format = texture_desc.Format;
            IF_FAILED_THROW(m_pD3D11VideoProcessorEnumerator->CheckVideoProcessorFormat(VP_input_Format, &uiFlags));

            // NV12 surface to RGB backbuffer
            RECT srcrc = {0, 0, (LONG)texture_desc.Width, (LONG)texture_desc.Height};
            RECT destcrc = {0, 0, (LONG)bktexture_desc.Width, (LONG)bktexture_desc.Height};
            IF_FAILED_THROW(m_pD3D11VideoDevice->CreateVideoProcessor(m_pD3D11VideoProcessorEnumerator, 0, &m_pD3D11VideoProcessor));
            m_pD3D11VideoContext->VideoProcessorSetStreamFrameFormat(m_pD3D11VideoProcessor, 0, D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE);
            m_pD3D11VideoContext->VideoProcessorSetStreamOutputRate(m_pD3D11VideoProcessor, 0, D3D11_VIDEO_PROCESSOR_OUTPUT_RATE_NORMAL, TRUE, NULL);
            m_pD3D11VideoContext->VideoProcessorSetStreamSourceRect(m_pD3D11VideoProcessor, 0, TRUE, &srcrc);
            m_pD3D11VideoContext->VideoProcessorSetStreamDestRect(m_pD3D11VideoProcessor, 0, TRUE, &destcrc);
            m_pD3D11VideoContext->VideoProcessorSetOutputTargetRect(m_pD3D11VideoProcessor, TRUE, &destcrc);

            D3D11_VIDEO_COLOR color;
            color.YCbCr = { 0.0625f, 0.5f, 0.5f, 0.5f }; // black color
            m_pD3D11VideoContext->VideoProcessorSetOutputBackgroundColor(m_pD3D11VideoProcessor, TRUE, &color);
        }

        // IF_FAILED_THROW(m_pD3D11Device->CreateRenderTargetView(m_pDXGIBackBuffer, nullptr, &_rtv));
        // D3D11_VIEWPORT VP;
        // ZeroMemory(&VP, sizeof(VP));
        // VP.Width = bktexture_desc.Width;
        // VP.Height = bktexture_desc.Height;
        // VP.MinDepth = 0.0f;
        // VP.MaxDepth = 1.0f;
        // VP.TopLeftX = 0;
        // VP.TopLeftY = 0;
        // m_pD3D11DeviceContext->RSSetViewports(1, &VP);

        // Input view over the decoded texture's array slice
        D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC pInDesc;
        ZeroMemory(&pInDesc, sizeof(pInDesc));
        pInDesc.FourCC = 0;
        pInDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
        pInDesc.Texture2D.MipSlice = 0;
        pInDesc.Texture2D.ArraySlice = index;
        IF_FAILED_THROW(m_pD3D11VideoDevice->CreateVideoProcessorInputView(hwTexture, m_pD3D11VideoProcessorEnumerator, &pInDesc, &pD3D11VideoProcessorInputViewIn));

        // Output view over the swap-chain back buffer
        D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC pOutDesc;
        ZeroMemory(&pOutDesc, sizeof(pOutDesc));
        pOutDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
        pOutDesc.Texture2D.MipSlice = 0;
        IF_FAILED_THROW(m_pD3D11VideoDevice->CreateVideoProcessorOutputView(m_pDXGIBackBuffer, m_pD3D11VideoProcessorEnumerator, &pOutDesc, &pD3D11VideoProcessorOutputView));

        D3D11_VIDEO_PROCESSOR_STREAM StreamData;
        ZeroMemory(&StreamData, sizeof(StreamData));
        StreamData.Enable = TRUE;
        StreamData.OutputIndex = 0;
        StreamData.InputFrameOrField = 0;
        StreamData.PastFrames = 0;
        StreamData.FutureFrames = 0;
        StreamData.ppPastSurfaces = NULL;
        StreamData.ppFutureSurfaces = NULL;
        StreamData.pInputSurface = pD3D11VideoProcessorInputViewIn;
        StreamData.ppPastSurfacesRight = NULL;
        StreamData.ppFutureSurfacesRight = NULL;
        // Color-space conversion and scaling happen in this single call
        hr = m_pD3D11VideoContext->VideoProcessorBlt(m_pD3D11VideoProcessor, pD3D11VideoProcessorOutputView, 0, 1, &StreamData);

        DXGI_PRESENT_PARAMETERS parameters;
        ZeroMemory(&parameters, sizeof(parameters));
        if (m_swapchain2 != NULL)
        {
            hr = m_swapchain2->Present1(0, DXGI_PRESENT_ALLOW_TEARING, &parameters);
        }

        // hwTexture is owned by the AVFrame (FFmpeg's frames context); do not Release() it here.
        SAFE_RELEASE(m_pDXGIBackBuffer);
        SAFE_RELEASE(_rtv);
        SAFE_RELEASE(pD3D11VideoProcessorOutputView);
        SAFE_RELEASE(pD3D11VideoProcessorInputViewIn);
    }
}
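For context, a typical driving loop looks like the sketch below, under the assumption that the decoder context was set up with prepareDecoderContext() and that its get_format callback selects g_hw_pix_fmt: send packets to the decoder, receive AV_PIX_FMT_D3D11 frames, and hand each one to renderFrame().

// Hypothetical decode/render loop; the function name and render pointer are illustrative only.
static void decode_and_render(AVCodecContext *ctx, AVPacket *pkt, D3D11VARender *render)
{
    if (avcodec_send_packet(ctx, pkt) < 0)
        return;
    AVFrame *frame = av_frame_alloc();
    while (avcodec_receive_frame(ctx, frame) == 0) {
        // frame->data[0] is the ID3D11Texture2D*, frame->data[1] the array slice
        // used as the input-view ArraySlice in renderFrame() above.
        render->renderFrame(frame);
        av_frame_unref(frame);
    }
    av_frame_free(&frame);
}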