Oculus 0.7.0 SDK DX11 Rendering Pipeline

The pipeline breaks down into four major phases: initialization (initialize), loading (load), rendering (render), and teardown (release).


I. Initialization

Initialize the DX11 device (DIRECTX.InitDevice). This proceeds in the following steps:

1. Create the DXGI (DirectX Graphics Infrastructure) factory and enumerate the graphics adapters in the system. DXGI is the component that talks to the hardware directly: it is the lowest-level user-mode graphics interface in Windows, and Direct3D sits on top of it. (DXGI is an API independent of Direct3D that handles graphics-related plumbing such as swap chains (frame buffers), graphics-hardware enumeration, and switching between windowed and full-screen mode. The point of separating DXGI from Direct3D is that other graphics APIs, for example Direct2D, also need swap chains, hardware enumeration and mode switching; with this design, multiple graphics APIs can all build on the DXGI API.)

// Create the DXGI factory, then look for the adapter matching the HMD's LUID
IDXGIFactory* DXGIFactory = nullptr;
HRESULT hr = CreateDXGIFactory1(__uuidof(IDXGIFactory), (void**)(&DXGIFactory));
VALIDATE((hr == ERROR_SUCCESS), "CreateDXGIFactory1 failed");

IDXGIAdapter* Adapter = nullptr;
for (UINT iAdapter = 0; DXGIFactory->EnumAdapters(iAdapter, &Adapter) != DXGI_ERROR_NOT_FOUND; ++iAdapter)
{
    DXGI_ADAPTER_DESC adapterDesc;
    Adapter->GetDesc(&adapterDesc);
    // Stop at the adapter whose LUID matches the one the HMD is attached to
    // (or take the first adapter if no LUID was supplied)
    if ((pLuid == nullptr) || memcmp(&adapterDesc.AdapterLuid, pLuid, sizeof(LUID)) == 0)
        break;
    Release(Adapter);
}


2. Create the D3D11 device and its context. Using the adapter found in step 1, the corresponding driver type, and the D3D SDK version, create a D3D device together with its immediate context.

auto DriverType = Adapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE;
hr = D3D11CreateDevice(Adapter, DriverType, 0, 0, 0, 0, D3D11_SDK_VERSION, &Device, 0, &Context);


3. Create the DXGI swap chain (the chain of back buffers; comparable to the default framebuffer in OpenGL). The description sets the number of back buffers (BufferCount), their width, height and format, the refresh rate (actual vsync behaviour is ultimately controlled by the Present call), the render-target-output usage so the scene is rendered into the back buffer, and the output window handle.

// Create swap chain
DXGI_SWAP_CHAIN_DESC scDesc;
memset(&scDesc, 0, sizeof(scDesc));
scDesc.BufferCount = 2;
scDesc.BufferDesc.Width = WinSizeW;
scDesc.BufferDesc.Height = WinSizeH;
scDesc.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
scDesc.BufferDesc.RefreshRate.Denominator = 1;
scDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
scDesc.OutputWindow = Window;
scDesc.SampleDesc.Count = 1;
scDesc.Windowed = windowed;
scDesc.SwapEffect = DXGI_SWAP_EFFECT_SEQUENTIAL;
hr = DXGIFactory->CreateSwapChain(Device, &scDesc, &SwapChain);
Release(DXGIFactory);
VALIDATE((hr == ERROR_SUCCESS), "CreateSwapChain failed");


4. Create the render target view. GetBuffer fetches a pointer to one of the swap chain's back buffers: the first parameter is the index of the back buffer to fetch (0 here), the second is the interface type of the buffer, and the third returns the pointer to the back buffer. CreateRenderTargetView then creates a render target view: the first parameter is the resource that will act as the render target, the second is NULL, meaning the view uses the resource's format and its first mipmap level, and the third returns the created render target view.

// Create backbuffer
SwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&BackBuffer);
hr = Device->CreateRenderTargetView(BackBuffer, NULL, &BackBufferRT);
VALIDATE((hr == ERROR_SUCCESS), "CreateRenderTargetView failed");


5. Create the depth/stencil buffer and its view, and bind the views to the output-merger stage.

MainDepthBuffer = new DepthBuffer(Device, WinSizeW, WinSizeH);
Context->OMSetRenderTargets(1, &BackBufferRT, MainDepthBuffer->TexDsv);
//------------------
DepthBuffer(ID3D11Device * Device, int sizeW, int sizeH, int sampleCount = 1)
{
        DXGI_FORMAT format = DXGI_FORMAT_D32_FLOAT;
        D3D11_TEXTURE2D_DESC dsDesc;
        dsDesc.Width = sizeW;
        dsDesc.Height = sizeH;
        dsDesc.MipLevels = 1;
        dsDesc.ArraySize = 1;
        dsDesc.Format = format;
        dsDesc.SampleDesc.Count = sampleCount;
        dsDesc.SampleDesc.Quality = 0;
        dsDesc.Usage = D3D11_USAGE_DEFAULT;
        dsDesc.CPUAccessFlags = 0;
        dsDesc.MiscFlags = 0;
        dsDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
        ID3D11Texture2D * Tex;
        Device->CreateTexture2D(&dsDesc, NULL, &Tex);
        Device->CreateDepthStencilView(Tex, NULL, &TexDsv);
 }


6. Create the shader constant buffer, which holds the uniform data passed to the shaders (not vertex data), and bind it to the vertex shader stage.

// Buffer for shader constants
UniformBufferGen = new DataBuffer(Device, D3D11_BIND_CONSTANT_BUFFER, NULL, UNIFORM_DATA_SIZE);
Context->VSSetConstantBuffers(0, 1, &UniformBufferGen->D3DBuffer);
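
The DataBuffer helper itself is not shown in this walkthrough; a minimal sketch of what its constructor presumably does, following the same pattern as the DepthBuffer class above and the CreateBuffer call quoted in the summary below (member names and the usage/access flags are assumptions):

struct DataBuffer
{
    ID3D11Buffer* D3DBuffer;

    DataBuffer(ID3D11Device* Device, D3D11_BIND_FLAG use, const void* buffer, size_t size)
    {
        D3D11_BUFFER_DESC desc;
        memset(&desc, 0, sizeof(desc));
        desc.Usage = D3D11_USAGE_DYNAMIC;                 // updated by the CPU every frame
        desc.ByteWidth = (UINT)(((size + 15) / 16) * 16); // constant-buffer sizes must be 16-byte multiples
        desc.BindFlags = use;                             // D3D11_BIND_CONSTANT_BUFFER in this case
        desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
        D3D11_SUBRESOURCE_DATA sr;
        sr.pSysMem = buffer;
        sr.SysMemPitch = sr.SysMemSlicePitch = 0;
        Device->CreateBuffer(&desc, buffer ? &sr : NULL, &D3DBuffer);
    }
};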


7. Set the maximum frame latency to 1, i.e., the maximum number of frames that may be queued ahead of the GPU.

// Set max frame latency to 1
IDXGIDevice1* DXGIDevice1 = nullptr;
hr = Device->QueryInterface(__uuidof(IDXGIDevice1), (void**)&DXGIDevice1);
VALIDATE((hr == ERROR_SUCCESS), "QueryInterface failed"); // check before using the interface
DXGIDevice1->SetMaximumFrameLatency(1);
Release(DXGIDevice1);



To summarize the initialization: ovr_Create creates the HMD session and returns the LUID of the graphics adapter the headset is attached to; ovr_GetHmdDesc returns the device's resolution and the default field of view for each eye; DIRECTX.InitDevice then selects the matching adapter and creates the D3D11 device (D3D11CreateDevice), the swap chain for the window (DXGIFactory->CreateSwapChain), the back-buffer render target (Device->CreateRenderTargetView), the main depth buffer (Device->CreateTexture2D(&dsDesc, NULL, &Tex); Device->CreateDepthStencilView(Tex, NULL, &TexDsv);), the shader constant buffer (Device->CreateBuffer(&desc, buffer ? &sr : NULL, &D3DBuffer);), and finally sets the maximum frame latency (DXGIDevice1->SetMaximumFrameLatency(1);). A sketch of that call sequence is shown below.
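
Tied together, the start of the application looks roughly like this (a sketch; the exact InitDevice signature and the mirror-window sizing are assumptions based on the snippets above):

// At application start-up (once per process)
ovrResult result = ovr_Initialize(nullptr);
if (!OVR_SUCCESS(result))
    return false;

// Create the HMD session and get the LUID of the adapter it is attached to
ovrHmd          HMD;
ovrGraphicsLuid luid;
result = ovr_Create(&HMD, &luid);
if (!OVR_SUCCESS(result))
    return false;

// Query resolution and the default per-eye field of view
ovrHmdDesc hmdDesc = ovr_GetHmdDesc(HMD);

// Create the D3D11 device and window on that adapter (a half-resolution mirror window is assumed here)
if (!DIRECTX.InitDevice(hmdDesc.Resolution.w / 2, hmdDesc.Resolution.h / 2, reinterpret_cast<LUID*>(&luid)))
    return false;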

II. Loading
Load the scene resources.
1. Query the ideal texture size for each eye's field of view, create a swap texture set per eye (with a render target view for each texture in the set), create a matching depth buffer for each eye, and use the same size for the eye render viewport.

for (int eye = 0; eye < 2; ++eye)
{
    ovrSizei idealSize = ovr_GetFovTextureSize(HMD, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye], 1.0f);
    pEyeRenderTexture[eye] = new OculusTexture();
    if (!pEyeRenderTexture[eye]->Init(HMD, idealSize.w, idealSize.h))
    {
        if (retryCreate) goto Done;
        VALIDATE(OVR_SUCCESS(result), "Failed to create eye texture.");
    }
    pEyeDepthBuffer[eye] = new DepthBuffer(DIRECTX.Device, idealSize.w, idealSize.h);
    eyeRenderViewport[eye].Pos.x = 0;
    eyeRenderViewport[eye].Pos.y = 0;
    eyeRenderViewport[eye].Size = idealSize;
    if (!pEyeRenderTexture[eye]->TextureSet)
    {
        if (retryCreate) goto Done;
        VALIDATE(false, "Failed to create texture.");
    }
}



Inside the OculusTexture class, Init creates the swap texture set:

bool Init(ovrHmd _hmd, int sizeW, int sizeH)
{
        hmd = _hmd;

		D3D11_TEXTURE2D_DESC dsDesc;
		dsDesc.Width = sizeW;
		dsDesc.Height = sizeH;
		dsDesc.MipLevels = 1;
		dsDesc.ArraySize = 1;
        dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
		dsDesc.SampleDesc.Count = 1;   // No multi-sampling allowed
		dsDesc.SampleDesc.Quality = 0;
		dsDesc.Usage = D3D11_USAGE_DEFAULT;
		dsDesc.CPUAccessFlags = 0;
		dsDesc.MiscFlags = 0;
		dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;

		ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, DIRECTX.Device, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet);
        if (!OVR_SUCCESS(result))
            return false;

        VALIDATE(TextureSet->TextureCount == TextureCount, "TextureCount mismatch.");

		for (int i = 0; i < TextureCount; ++i)
		{
			ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i];
			D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
			rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
			rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
			DIRECTX.Device->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, &TexRtv[i]);
		}

        return true;
    }
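
The same class also owns the AdvanceToNextTexture() call used later in the render loop (section III.3). Given the CurrentIndex and TextureCount fields used elsewhere in this walkthrough, it most likely just steps the set's index, along the lines of:

void AdvanceToNextTexture()
{
    // Move to the next texture in the swap texture set, just before rendering into it
    TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount;
}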


2. Create a mirror texture so the scene can also be shown in the window on the monitor.

// Create a mirror to see on the monitor.
D3D11_TEXTURE2D_DESC td = {};
td.ArraySize = 1;
td.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
td.Width = DIRECTX.WinSizeW;
td.Height = DIRECTX.WinSizeH;
td.Usage = D3D11_USAGE_DEFAULT;
td.SampleDesc.Count = 1;
td.MipLevels = 1;
result = ovr_CreateMirrorTextureD3D11(HMD, DIRECTX.Device, &td, 0, &mirrorTexture);


3. Create the scene models.

// Create the room model
roomScene = new Scene(false);
//------------------
void Init(bool includeIntensiveGPUobject)
{
		TriangleSet cube;
		cube.AddSolidColorBox(0.5f, -0.5f, 0.5f, -0.5f, 0.5f, -0.5f, 0xff404040);
		Add(
            new Model(&cube, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_CEILING)
                )
            )
        );

		TriangleSet spareCube;
		spareCube.AddSolidColorBox(0.1f, -0.1f, 0.1f, -0.1f, +0.1f, -0.1f, 0xffff0000);
		Add(
            new Model(&spareCube, XMFLOAT3(0, -10, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_CEILING)
                )
            )
        );

		TriangleSet walls;
		walls.AddSolidColorBox(10.1f, 0.0f, 20.0f, 10.0f, 4.0f, -20.0f, 0xff808080);  // Left Wall
		walls.AddSolidColorBox(10.0f, -0.1f, 20.1f, -10.0f, 4.0f, 20.0f, 0xff808080); // Back Wall
		walls.AddSolidColorBox(-10.0f, -0.1f, 20.0f, -10.1f, 4.0f, -20.0f, 0xff808080);   // Right Wall
		Add(
            new Model(&walls, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_WALL)
                )
            )
        );

		if (includeIntensiveGPUobject)
		{
			TriangleSet partitions;
			for (float depth = 0.0f; depth > -3.0f; depth -= 0.1f)
				partitions.AddSolidColorBox(9.0f, 0.5f, -depth, -9.0f, 3.5f, -depth, 0x10ff80ff); // Partition
			Add(
                new Model(&partitions, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                    new Material(
                        new Texture(false, 256, 256, Texture::AUTO_FLOOR)
                    )
                )
            ); // Floors
		}

		TriangleSet floors;
		floors.AddSolidColorBox(10.0f, -0.1f, 20.0f, -10.0f, 0.0f, -20.1f, 0xff808080); // Main floor
		floors.AddSolidColorBox(15.0f, -6.1f, -18.0f, -15.0f, -6.0f, -30.0f, 0xff808080); // Bottom floor
		Add(
            new Model(&floors, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_FLOOR)
                )
            )
        ); // Floors

		TriangleSet ceiling;
		ceiling.AddSolidColorBox(10.0f, 4.0f, 20.0f, -10.0f, 4.1f, -20.1f, 0xff808080);
		Add(
            new Model(&ceiling, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_CEILING)
                )
            )
        ); // Ceiling

		TriangleSet furniture;
		furniture.AddSolidColorBox(-9.5f, 0.75f, -3.0f, -10.1f, 2.5f, -3.1f, 0xff383838);    // Right side shelf// Verticals
		furniture.AddSolidColorBox(-9.5f, 0.95f, -3.7f, -10.1f, 2.75f, -3.8f, 0xff383838);   // Right side shelf
		furniture.AddSolidColorBox(-9.55f, 1.20f, -2.5f, -10.1f, 1.30f, -3.75f, 0xff383838); // Right side shelf// Horizontals
		furniture.AddSolidColorBox(-9.55f, 2.00f, -3.05f, -10.1f, 2.10f, -4.2f, 0xff383838); // Right side shelf
		furniture.AddSolidColorBox(-5.0f, 1.1f, -20.0f, -10.0f, 1.2f, -20.1f, 0xff383838);   // Right railing   
		furniture.AddSolidColorBox(10.0f, 1.1f, -20.0f, 5.0f, 1.2f, -20.1f, 0xff383838);   // Left railing  
		for (float f = 5; f <= 9; f += 1)
            furniture.AddSolidColorBox(-f, 0.0f, -20.0f, -f - 0.1f, 1.1f, -20.1f, 0xff505050); // Left Bars
		for (float f = 5; f <= 9; f += 1)
            furniture.AddSolidColorBox(f, 1.1f, -20.0f, f + 0.1f, 0.0f, -20.1f, 0xff505050); // Right Bars
		furniture.AddSolidColorBox(1.8f, 0.8f, -1.0f, 0.0f, 0.7f, 0.0f, 0xff505000);  // Table
		furniture.AddSolidColorBox(1.8f, 0.0f, 0.0f, 1.7f, 0.7f, -0.1f, 0xff505000); // Table Leg 
		furniture.AddSolidColorBox(1.8f, 0.7f, -1.0f, 1.7f, 0.0f, -0.9f, 0xff505000); // Table Leg 
		furniture.AddSolidColorBox(0.0f, 0.0f, -1.0f, 0.1f, 0.7f, -0.9f, 0xff505000);  // Table Leg 
		furniture.AddSolidColorBox(0.0f, 0.7f, 0.0f, 0.1f, 0.0f, -0.1f, 0xff505000);  // Table Leg 
		furniture.AddSolidColorBox(1.4f, 0.5f, 1.1f, 0.8f, 0.55f, 0.5f, 0xff202050);  // Chair Set
		furniture.AddSolidColorBox(1.401f, 0.0f, 1.101f, 1.339f, 1.0f, 1.039f, 0xff202050); // Chair Leg 1
		furniture.AddSolidColorBox(1.401f, 0.5f, 0.499f, 1.339f, 0.0f, 0.561f, 0xff202050); // Chair Leg 2
		furniture.AddSolidColorBox(0.799f, 0.0f, 0.499f, 0.861f, 0.5f, 0.561f, 0xff202050); // Chair Leg 2
		furniture.AddSolidColorBox(0.799f, 1.0f, 1.101f, 0.861f, 0.0f, 1.039f, 0xff202050); // Chair Leg 2
		furniture.AddSolidColorBox(1.4f, 0.97f, 1.05f, 0.8f, 0.92f, 1.10f, 0xff202050); // Chair Back high bar
		for (float f = 3.0f; f <= 6.6f; f += 0.4f)
            furniture.AddSolidColorBox(3, 0.0f, -f, 2.9f, 1.3f, -f - 0.1f, 0xff404040); // Posts
		Add(
            new Model(&furniture, XMFLOAT3(0, 0, 0), XMFLOAT4(0, 0, 0, 1),
                new Material(
                    new Texture(false, 256, 256, Texture::AUTO_WHITE)
                )
            )
        ); // Fixtures & furniture
	}

    Scene() : numModels(0) {}
    Scene(bool includeIntensiveGPUobject) : numModels(0)
    {
        Init(includeIntensiveGPUobject);
    }


4. Create the camera.

// Create camera
__pragma(warning(push))
__pragma(warning(disable:4316)) // Win32: object allocated on the heap may not be aligned 16
mainCam = new Camera(&XMVectorSet(0.0f, 1.6f, 5.0f, 0), &XMQuaternionIdentity());
__pragma(warning(pop))
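
Camera is part of the sample framework rather than the SDK. Its view-matrix computation is used in the render loop below; a sketch of what it presumably does (member names Pos/Rot match the usage below, the rest is assumed):

XMMATRIX Camera::GetViewMatrix()
{
    // Look along the camera's local -Z axis, with the rotated +Y axis as "up"
    XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -1, 0), Rot);
    return XMMatrixLookAtRH(Pos, XMVectorAdd(Pos, forward), XMVector3Rotate(XMVectorSet(0, 1, 0, 0), Rot));
}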


5. Set up the VR components by filling out the per-eye render descriptions.

// Setup VR components, filling out description
ovrEyeRenderDesc eyeRenderDesc[2];
eyeRenderDesc[0] = ovr_GetRenderDesc(HMD, ovrEye_Left, hmdDesc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(HMD, ovrEye_Right, hmdDesc.DefaultEyeFov[1]);


III. Rendering
1. Inside the main loop, move and rotate the camera with the keyboard:

XMVECTOR forward = XMVector3Rotate(XMVectorSet(0, 0, -0.05f, 0), mainCam->Rot);
XMVECTOR right   = XMVector3Rotate(XMVectorSet(0.05f, 0, 0, 0),  mainCam->Rot);
if (DIRECTX.Key['W'] || DIRECTX.Key[VK_UP])	  mainCam->Pos = XMVectorAdd(mainCam->Pos, forward);
if (DIRECTX.Key['S'] || DIRECTX.Key[VK_DOWN]) mainCam->Pos = XMVectorSubtract(mainCam->Pos, forward);
if (DIRECTX.Key['D'])                         mainCam->Pos = XMVectorAdd(mainCam->Pos, right);
if (DIRECTX.Key['A'])                         mainCam->Pos = XMVectorSubtract(mainCam->Pos, right);
static float Yaw = 0;
if (DIRECTX.Key[VK_LEFT])  mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw += 0.02f, 0);
if (DIRECTX.Key[VK_RIGHT]) mainCam->Rot = XMQuaternionRotationRollPitchYaw(0, Yaw -= 0.02f, 0);


2. Get the eye poses.

ovrPosef         EyeRenderPose[2];
ovrVector3f      HmdToEyeViewOffset[2] = { eyeRenderDesc[0].HmdToEyeViewOffset,
                                      eyeRenderDesc[1].HmdToEyeViewOffset };
ovrFrameTiming   ftiming = ovr_GetFrameTiming(HMD, 0);
ovrTrackingState hmdState = ovr_GetTrackingState(HMD, ftiming.DisplayMidpointSeconds);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, HmdToEyeViewOffset, EyeRenderPose);

3. Render the scene into the eye buffers; the block below runs once per eye.

for (int eye = 0; eye < 2; ++eye)
{
    // Increment to use next texture, just before writing
    pEyeRenderTexture[eye]->AdvanceToNextTexture();

    // Clear and set up rendertarget
    int texIndex = pEyeRenderTexture[eye]->TextureSet->CurrentIndex;
    DIRECTX.SetAndClearRenderTarget(pEyeRenderTexture[eye]->TexRtv[texIndex], pEyeDepthBuffer[eye]);
    DIRECTX.SetViewport((float)eyeRenderViewport[eye].Pos.x, (float)eyeRenderViewport[eye].Pos.y,
                        (float)eyeRenderViewport[eye].Size.w, (float)eyeRenderViewport[eye].Size.h);

    // Get the pose information in XM format
    XMVECTOR eyeQuat = XMVectorSet(EyeRenderPose[eye].Orientation.x, EyeRenderPose[eye].Orientation.y,
                                   EyeRenderPose[eye].Orientation.z, EyeRenderPose[eye].Orientation.w);
    XMVECTOR eyePos = XMVectorSet(EyeRenderPose[eye].Position.x, EyeRenderPose[eye].Position.y,
                                  EyeRenderPose[eye].Position.z, 0);

    // Get view and projection matrices for the Rift camera
    XMVECTOR CombinedPos = XMVectorAdd(mainCam->Pos, XMVector3Rotate(eyePos, mainCam->Rot));
    Camera finalCam(&CombinedPos, &(XMQuaternionMultiply(eyeQuat, mainCam->Rot)));
    XMMATRIX view = finalCam.GetViewMatrix();
    ovrMatrix4f p = ovrMatrix4f_Projection(eyeRenderDesc[eye].Fov, 0.2f, 1000.0f, ovrProjection_RightHanded);
    // Transpose the OVR projection matrix into DirectXMath's row-vector convention
    XMMATRIX proj = XMMatrixSet(p.M[0][0], p.M[1][0], p.M[2][0], p.M[3][0],
                                p.M[0][1], p.M[1][1], p.M[2][1], p.M[3][1],
                                p.M[0][2], p.M[1][2], p.M[2][2], p.M[3][2],
                                p.M[0][3], p.M[1][3], p.M[2][3], p.M[3][3]);
    XMMATRIX prod = XMMatrixMultiply(view, proj);
    roomScene->Render(&prod, 1, 1, 1, 1, true);
}
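
SetAndClearRenderTarget and SetViewport are small helpers on the DIRECTX wrapper; they presumably boil down to the usual D3D11 calls, roughly as follows (a sketch; the clear colour and exact member layout are assumptions):

void SetAndClearRenderTarget(ID3D11RenderTargetView* rendertarget, DepthBuffer* depthbuffer)
{
    float black[] = { 0.0f, 0.0f, 0.0f, 1.0f };   // assumed clear colour
    Context->OMSetRenderTargets(1, &rendertarget, depthbuffer->TexDsv);
    Context->ClearRenderTargetView(rendertarget, black);
    Context->ClearDepthStencilView(depthbuffer->TexDsv, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1, 0);
}

void SetViewport(float vpX, float vpY, float vpW, float vpH)
{
    D3D11_VIEWPORT D3Dvp;
    D3Dvp.Width    = vpW;  D3Dvp.Height   = vpH;
    D3Dvp.MinDepth = 0;    D3Dvp.MaxDepth = 1;
    D3Dvp.TopLeftX = vpX;  D3Dvp.TopLeftY = vpY;
    Context->RSSetViewports(1, &D3Dvp);
}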


4. Initialize a single full-screen FOV layer and submit the frame.

// Initialize our single full screen Fov layer.
ovrLayerEyeFov ld;
ld.Header.Type = ovrLayerType_EyeFov;
ld.Header.Flags = 0;

for (int eye = 0; eye < 2; ++eye)
{
	ld.ColorTexture[eye] = pEyeRenderTexture[eye]->TextureSet;
	ld.Viewport[eye] = eyeRenderViewport[eye];
	ld.Fov[eye] = hmdDesc.DefaultEyeFov[eye];
	ld.RenderPose[eye] = EyeRenderPose[eye];
}

ovrLayerHeader* layers = &ld.Header;
result = ovr_SubmitFrame(HMD, 0, nullptr, &layers, 1);


5. Render the mirror view: copy the mirror texture into the window's back buffer and present it.

ovrD3D11Texture* tex = (ovrD3D11Texture*)mirrorTexture;
DIRECTX.Context->CopyResource(DIRECTX.BackBuffer, tex->D3D11.pTexture);
DIRECTX.SwapChain->Present(0, 0);


IV. Teardown

delete mainCam;
delete roomScene;
if (mirrorTexture) ovr_DestroyMirrorTexture(HMD, mirrorTexture);
for (int eye = 0; eye < 2; ++eye)
{
    delete pEyeRenderTexture[eye];
    delete pEyeDepthBuffer[eye];
}
DIRECTX.ReleaseDevice();
ovr_Destroy(HMD);
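
Once the application itself exits, the LibOVR runtime is released as well, pairing the ovr_Initialize() call made at startup (not shown in this walkthrough):

ovr_Shutdown();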

