凹凸贴图(bump mapping)实现的技术有几种,normal mapping属于其中的一种,这里WALL的实现在物体坐标系空间中,其他都在物体的切线空间中实现,国际惯例,上图先:
由于时间关系我就不一一列举了,感兴趣的童鞋下载我的文件吧!
好了讲下原理
可以根据高度图生成法线量图,生成方法:在高度图的某一点用它上面的一点和右面的一点的高度减去这一点的高度,得到两个向量,再叉乘就是这点的法向量了,然后再单位化
然后用这个单位法向量去算光照
我们这里的光照的计算在物体空间计算的
但是注意一个问题,算出来的单位法向量里面的某些分量可能为负,它的范围为(-1,-1,-1)到(1,1,1),但是我们把单位法向量存储到纹理里面,里面的值只允许为正,所以我们需要把整个范围压缩到(0,0,0)到(1,1,1),公式为:x --> 0.5 + 0.5 * x,在用的时候我们始终解压,解压公式为: x --> ( x - 0.5) * 2
另外,在片断着色器里面,我们需要每个片断的指向灯光的单位向量和半角单位向量,我们可以在顶点着色器里面算出指向灯光的向量以及半角向量,单位化后传过去,
但是我们采用一个有单位化功能的立方体纹理,它的速度比直接单位化会快一些,它的输入为一个向量,输出为这个向量的单位向量.当然由于纹理存储的是压缩后的单位向量,取出向量后还需要解压
好了,原理大概如此,下面给出源码:
/*------------------------------------------------------------
SphereEnvMapping.cpp -- achieve sphere environment mapping
(c) Seamanj.2013/8/1
------------------------------------------------------------*/
#include "DXUT.h"
#include "resource.h"
//phase1 : add wall quadrilateral
//phase2 : add camera
//phase3 : add normal mapping shader
//phase4 : add light sphere
//phase5 : add animation
//phase6 : add multi model & add combo box
//phase7 : add multi textures
#define phase1 1
#define phase2 1
#define phase3 1
#define phase4 1
#define phase5 1
#define phase6 1
#define phase7 1
#if phase1
// Vertex Buffer
LPDIRECT3DVERTEXBUFFER9 g_pVB = NULL;
// Index Buffer
LPDIRECT3DINDEXBUFFER9 g_pIB = NULL;
#endif
#if phase6
#include <vector>
// Vertex Buffer
LPDIRECT3DVERTEXBUFFER9 g_pCubeVB = NULL;
// Index Buffer
LPDIRECT3DINDEXBUFFER9 g_pCubeIB = NULL;
IDirect3DVertexDeclaration9* g_pVertexDecl;
LPDIRECT3DVERTEXBUFFER9 g_pTorusVB = NULL;
LPDIRECT3DINDEXBUFFER9 g_pTorusIB = NULL;
//--------------------------------------------------------------------------------------
// Id numbers for different mode
//--------------------------------------------------------------------------------------
enum MODEL_IDS
{
PLANE,
CUBE,
CUBEBLACKWHITE,
TORUS
};
const int sides = 20, rings = 40;
int g_nCurrentModelID = PLANE;
#define IDC_COMBO_MODEL 1
#include "DXUTgui.h"
CDXUTDialogResourceManager g_dlg_resource_manager;
CDXUTDialog g_control_dlg;
#endif
#if phase7
enum TEXTURE_IDS
{
BRICK,
STONE,
ROCK,
WALL,
FOUR,
CONCRETE,
SAINT
};
WCHAR* g_strBaseTextureNames[] =
{
L"stones.bmp",
L"rocks.jpg",
L"wall.jpg",
L"wood.jpg",
L"concrete.bmp",
L"wood.jpg"
};
WCHAR* g_strNMHTextureNames[] =
{
L"stones_NM_height.tga",
L"rocks_NM_height.tga",
L"wall_NM_height.tga",
L"four_NM_height.tga",
L"bump_NM_height.tga",
L"saint_NM_height.tga",
};
const int s_iNUM_TEXTURES = 6; // Number of loaded texturs in the program
IDirect3DTexture9** g_pBaseTextures = NULL; // Array of base map texture surfaces
IDirect3DTexture9** g_pNMHTextures = NULL; // Array of normal / height map texture surfaces
int g_nCurrentTextureID = BRICK;
#define IDC_COMBO_TEXTURE 2
#endif
#if phase2
#include "DXUTcamera.h"
CModelViewerCamera g_Camera;
#endif
#if phase3
#include "SDKmisc.h"//加载文件时会用到
ID3DXEffect* g_pEffect = NULL; // D3DX effect interface
static const unsigned char
myBrickNormalMapImage[3*(128*128+64*64+32*32+16*16+8*8+4*4+2*2+1*1)] = {
/* RGB8 image data for a mipmapped 128x128 normal map for a brick pattern */
#include "brick_image.h"
};
static const unsigned char
myNormalizeVectorCubeMapImage[6*3*32*32] = {
/* RGB8 image data for a normalization vector cube map with 32x32 faces */
#include "normcm_image.h"
};
static PDIRECT3DTEXTURE9 g_pMyBrickNormalMapTex = NULL;
static PDIRECT3DCUBETEXTURE9 g_pMyNormalizeVectorCubeMapTex = NULL;
D3DXHANDLE g_hTech = 0;
#if phase6
D3DXHANDLE g_hCubeTech = 0;
D3DXHANDLE g_hCubeBlackWhiteTech = 0;
D3DXHANDLE g_hTorusTech = 0;
#endif
#if phase7
D3DXHANDLE g_hBaseTech = 0;
D3DXHANDLE g_hBaseCubeTech = 0;
D3DXHANDLE g_hBaseCubeBlackWhiteTech = 0;
D3DXHANDLE g_hBaseTorusTech = 0;
#endif
D3DXHANDLE g_hWorldViewProj = NULL; // Handle for world+view+proj matrix in effect
D3DXHANDLE g_hWorldInv = NULL;
D3DXHANDLE g_hAmbient = NULL;
D3DXHANDLE g_hLightMaterialDiffuse = NULL;
D3DXHANDLE g_hLightMaterialSpecular = NULL;
D3DXHANDLE g_hLightPosition = NULL;
D3DXHANDLE g_hEyePosition = NULL;
D3DXHANDLE g_hBrickNormal2DTex = NULL;
D3DXHANDLE g_hNormalizeVectorCubeTex = NULL;
#if phase7
D3DXHANDLE g_hBase2DTex = NULL;
#endif
D3DXHANDLE g_hOuterRadius = NULL;
D3DXHANDLE g_hInnerRadius = NULL;
#endif
#if phase4
ID3DXMesh* g_pLightSphereMesh = 0;
#endif
#if phase5
static float g_fLightAngle = 4.0f;
static bool g_bAnimation = false;
#endif
//--------------------------------------------------------------------------------------
// Rejects any D3D9 devices that aren't acceptable to the app by returning false
//--------------------------------------------------------------------------------------
bool CALLBACK IsD3D9DeviceAcceptable( D3DCAPS9* pCaps, D3DFORMAT AdapterFormat, D3DFORMAT BackBufferFormat,
bool bWindowed, void* pUserContext )
{
// Typically want to skip back buffer formats that don't support alpha blending
IDirect3D9* pD3D = DXUTGetD3D9Object();
if( FAILED( pD3D->CheckDeviceFormat( pCaps->AdapterOrdinal, pCaps->DeviceType,
AdapterFormat, D3DUSAGE_QUERY_POSTPIXELSHADER_BLENDING,
D3DRTYPE_TEXTURE, BackBufferFormat ) ) )
return false;
return true;
}
//--------------------------------------------------------------------------------------
// Before a device is created, modify the device settings as needed
//--------------------------------------------------------------------------------------
bool CALLBACK ModifyDeviceSettings( DXUTDeviceSettings* pDeviceSettings, void* pUserContext )
{
#if phase2
pDeviceSettings->d3d9.pp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
#endif
return true;
}
#if phase3
static HRESULT initTextures( IDirect3DDevice9* pd3dDevice )
{
HRESULT hr;
unsigned int size, level;
int face;
const unsigned char *image;
D3DLOCKED_RECT lockedRect;
//创建法向量纹理
if( FAILED( pd3dDevice->CreateTexture( 128, 128, 0, 0, D3DFMT_X8R8G8B8,
D3DPOOL_MANAGED, &g_pMyBrickNormalMapTex, NULL ) ) )
{
return E_FAIL;
}
for ( size = 128, level = 0, image = myBrickNormalMapImage;
size > 0;
image += 3 * size * size, size /= 2, level++)
{
if( FAILED(g_pMyBrickNormalMapTex->LockRect( level, &lockedRect, 0, 0 ) ) )
{
return E_FAIL;
}
DWORD *texel = (DWORD*) lockedRect.pBits;
const int bytes = size * size * 3;
for( int i = 0; i < bytes; i+= 3 )
{
*texel++ = image[i + 0] << 16 | image[i + 1] << 8 | image[i + 2];
}
g_pMyBrickNormalMapTex->UnlockRect(level);
}
//创建向量单位化的立方体纹理
if( FAILED( pd3dDevice->CreateCubeTexture(32, 1, 0, D3DFMT_X8R8G8B8,
D3DPOOL_MANAGED, &g_pMyNormalizeVectorCubeMapTex, NULL) ) )
return E_FAIL;
const int bytesPerFace = 32 * 32 * 3;
for( face = D3DCUBEMAP_FACE_POSITIVE_X, image = myNormalizeVectorCubeMapImage;
face <= D3DCUBEMAP_FACE_NEGATIVE_Z;
face += 1, image += bytesPerFace)
{
if( FAILED( g_pMyNormalizeVectorCubeMapTex->LockRect
((D3DCUBEMAP_FACES)face, 0, &lockedRect, 0/*lock entire surface*/, 0)))
return E_FAIL;
DWORD *texel = (DWORD*) lockedRect.pBits;
for (int i=0; i<bytesPerFace; i+=3)
{
*texel++ = image[i+0] << 16 |
image[i+1] << 8 |
image[i+2];
}
}
g_pMyNormalizeVectorCubeMapTex->UnlockRect((D3DCUBEMAP_FACES)face, 0);
#if phase7
WCHAR str[MAX_PATH];
g_pBaseTextures = ( IDirect3DTexture9** )malloc( sizeof( IDirect3DTexture9* ) * s_iNUM_TEXTURES );
if( g_pBaseTextures == NULL )
{
// ERROR allocating the array for base texture pointers storage!
::MessageBoxA(NULL,"ERROR allocating the array for base texture pointers storage!\n",NULL,0);
return E_FAIL;
}
g_pNMHTextures = ( IDirect3DTexture9** )malloc( sizeof( IDirect3DTexture9* ) * s_iNUM_TEXTURES );
if( g_pNMHTextures == NULL )
{
// ERROR allocating the array for normal map / height map texture pointers storage!
::MessageBoxA(NULL, "ERROR allocating the array for normal map / height map texture pointers storage!\n",NULL ,0 );
return E_FAIL;
}
for( int iTextureIndex = BRICK; iTextureIndex < s_iNUM_TEXTURES; iTextureIndex++)
{
V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, g_strBaseTextureNames[iTextureIndex] ) );
V_RETURN( D3DXCreateTextureFromFileEx( pd3dDevice, str, D3DX_DEFAULT, D3DX_DEFAULT,
D3DX_DEFAULT, 0, D3DFMT_UNKNOWN, D3DPOOL_MANAGED,
D3DX_DEFAULT, D3DX_DEFAULT, 0,
NULL, NULL, &g_pBaseTextures[iTextureIndex] ) );
// Load the normal map / height map texture
V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, g_strNMHTextureNames[iTextureIndex] ) );
V_RETURN( D3DXCreateTextureFromFileEx( pd3dDevice, str, D3DX_DEFAULT, D3DX_DEFAULT,
D3DX_DEFAULT, 0, D3DFMT_UNKNOWN, D3DPOOL_MANAGED,
D3DX_DEFAULT, D3DX_DEFAULT, 0,
NULL, NULL, &g_pNMHTextures[iTextureIndex] ) );
}
#endif
return S_OK;
}
#endif
//--------------------------------------------------------------------------------------
// Create any D3D9 resources that will live through a device reset (D3DPOOL_MANAGED)
// and aren't tied to the back buffer size
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D9CreateDevice( IDirect3DDevice9* pd3dDevice, const D3DSURFACE_DESC* pBackBufferSurfaceDesc,
void* pUserContext )
{
#if phase3
HRESULT hr;
#endif
#if phase6
V_RETURN(g_dlg_resource_manager.OnD3D9CreateDevice(pd3dDevice));
#endif
#if phase4
D3DXCreateSphere(pd3dDevice, 0.4, 12, 12, &g_pLightSphereMesh, 0);
#endif
#if phase2
// Setup the camera's view parameters
D3DXVECTOR3 vecEye( 0.0f, 0.0f, -20.0f );
D3DXVECTOR3 vecAt ( 0.0f, 0.0f, 0.0f );
g_Camera.SetViewParams( &vecEye, &vecAt );
//FLOAT fObjectRadius=1;
//摄像机缩放的3个参数
//g_Camera.SetRadius( fObjectRadius * 3.0f, fObjectRadius * 0.5f, fObjectRadius * 10.0f );
g_Camera.SetEnablePositionMovement( false );
#endif
#if phase3
if( FAILED( initTextures( pd3dDevice ) ) )
return E_FAIL;
// Create the effect
WCHAR str[MAX_PATH];
// Read the D3DX effect file
V_RETURN( DXUTFindDXSDKMediaFileCch( str, MAX_PATH, L"NormalMapping.fx" ) );
// Create the effect
LPD3DXBUFFER pErrorBuff;
V_RETURN( D3DXCreateEffectFromFile(
pd3dDevice, // associated device
str, // effect filename
NULL, // no preprocessor definitions
NULL, // no ID3DXInclude interface
D3DXSHADER_DEBUG, // compile flags
NULL, // don't share parameters
&g_pEffect, // return effect
&pErrorBuff // return error messages
) );
// Get handle
g_hTech = g_pEffect->GetTechniqueByName("myTechnique");
#if phase6
g_hCubeTech = g_pEffect->GetTechniqueByName("cubeTechnique");
g_hCubeBlackWhiteTech = g_pEffect->GetTechniqueByName( "cubeWhiteBlackTechnique" );
g_hTorusTech = g_pEffect->GetTechniqueByName( "torusTechnique" );
g_hOuterRadius = g_pEffect->GetParameterByName(0, "g_outerRadius");
g_hInnerRadius = g_pEffect->GetParameterByName(0, "g_innerRadius");
#endif
#if phase7
g_hBaseTech = g_pEffect->GetTechniqueByName( "baseTechnique" );
g_hBaseCubeTech = g_pEffect->GetTechniqueByName( "baseCubeTechnique" );
g_hBaseCubeBlackWhiteTech = g_pEffect->GetTechniqueByName( "baseCubeWhiteBlackTechnique" );
g_hBaseTorusTech = g_pEffect->GetTechniqueByName( "baseTorusTechnique" );
#endif
g_hWorldViewProj = g_pEffect->GetParameterByName(0, "g_mWorldViewProj");
g_hWorldInv = g_pEffect->GetParameterByName(0, "g_mWorldInv");
g_hAmbient = g_pEffect->GetParameterByName( 0, "g_Ambient");
g_hLightMaterialDiffuse = g_pEffect->GetParameterByName( 0, "g_LMd");
g_hLightMaterialSpecular = g_pEffect->GetParameterByName( 0, "g_LMs");
g_hLightPosition = g_pEffect->GetParameterByName( 0, "g_lightPosition" );
g_hEyePosition = g_pEffect->GetParameterByName( 0, "g_eyePosition" );
g_hBrickNormal2DTex = g_pEffect->GetParameterByName(0, "g_txBrickNormal2D");
#if phase7
g_hBase2DTex = g_pEffect->GetParameterByName(0, "g_baseTexture");
#endif
g_hNormalizeVectorCubeTex = g_pEffect->GetParameterByName(0, "g_txNormalizeVectorCube");
#endif
return S_OK;
}
#if phase6
struct TorusVertexFormat
{
FLOAT x, y, z;
};
// draw flat patch to vertex buffer & index buffer
HRESULT drawFlatPatchToVBIB(IDirect3DDevice9* pd3dDevice, float rows, float columns,LPDIRECT3DVERTEXBUFFER9& pVB,LPDIRECT3DINDEXBUFFER9& pIB)
{
const float m = 1.0f / columns;
const float n = 1.0f / rows;
WORD i, j;
std::vector<TorusVertexFormat> vecVertexData;
std::vector<WORD> vecIndexData;
for (i = 0; i <= columns; i++)
for (j = 0; j <= rows; j++)
{
TorusVertexFormat TVF={i * m, j * n, 0};
vecVertexData.push_back( TVF );
}
if (FAILED(pd3dDevice->CreateVertexBuffer(vecVertexData.size() * sizeof(TorusVertexFormat),
0, D3DFVF_XYZ,
D3DPOOL_DEFAULT,
&g_pTorusVB, NULL)))
{
return E_FAIL;
}
void* pTorusVertices;
if (FAILED(g_pTorusVB->Lock(0, 0, &pTorusVertices, 0)))
{
return E_FAIL;
}
copy(vecVertexData.begin(), vecVertexData.end(), (TorusVertexFormat*)pTorusVertices);
g_pTorusVB->Unlock();
for (i = 0; i < columns; i++)
{
for (j = 0; j < rows; j++)
{
vecIndexData.push_back(i * (rows + 1) + j);
vecIndexData.push_back(i * (rows + 1) + j + 1);
vecIndexData.push_back((i + 1) * (rows + 1) + j + 1);
vecIndexData.push_back(i * (rows + 1) + j);
vecIndexData.push_back((i + 1) * (rows + 1) + j + 1);
vecIndexData.push_back((i + 1) * (rows + 1) + j );
}
vecIndexData.push_back(i * (rows + 1) + rows);
vecIndexData.push_back(i * (rows + 1) );
vecIndexData.push_back((i + 1) * (rows + 1) );
vecIndexData.push_back(i * (rows + 1) + rows);
vecIndexData.push_back((i + 1) * (rows + 1));
vecIndexData.push_back((i + 1) * (rows + 1) + rows );
}
if (FAILED(pd3dDevice->CreateIndexBuffer(vecIndexData.size() * sizeof(WORD),
D3DUSAGE_WRITEONLY,
D3DFMT_INDEX16,
D3DPOOL_DEFAULT,
&g_pTorusIB, NULL)))
{
return E_FAIL;
}
void* pTorusIndices;
if (FAILED(g_pTorusIB->Lock(0, 0, &pTorusIndices, 0)))
{
return E_FAIL;
}
copy(vecIndexData.begin(), vecIndexData.end(), (WORD*)pTorusIndices);
g_pTorusIB->Unlock();
return S_OK;
}
//create Vertex Declaration
HRESULT createVertexDeclaration( IDirect3DDevice9* pd3dDevice )
{
D3DVERTEXELEMENT9 decl[] =
{
// offsets in bytes
{0, 0, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_POSITION, 0},
{0, 12, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_NORMAL, 0},
{0, 24, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TANGENT, 0},
{0, 36, D3DDECLTYPE_FLOAT3, D3DDECLMETHOD_DEFAULT, D3DDECLUSAGE_TEXCOORD, 0},
D3DDECL_END()
};
return pd3dDevice->CreateVertexDeclaration(decl, &g_pVertexDecl);
}
struct CubeVertexFormat
{
FLOAT x, y, z;
FLOAT nx, ny, nz;
FLOAT tx, ty, tz;
FLOAT u, v;
};
#endif
#if phase1
struct MyVertexFormat
{
FLOAT x, y, z;
FLOAT u, v;
};
#define FVF_VERTEX (D3DFVF_XYZ | D3DFVF_TEX1)
static HRESULT initVertexIndexBuffer(IDirect3DDevice9* pd3dDevice)
{
// Create and initialize vertex buffer
static const MyVertexFormat Vertices[] =
{
{ -7.0f, -7.0f, 7.0f, 0.0f, 0.0f },
{ -7.0f, 7.0f, 7.0f, 0.0f, 1.0f },
{ 7.0f, 7.0f, 7.0f, 1.0f, 1.0f },
{ 7.0f, -7.0f, 7.0f, 1.0f, 0.0f }
};
if (FAILED(pd3dDevice->CreateVertexBuffer(sizeof(Vertices),
0, FVF_VERTEX,
D3DPOOL_DEFAULT,
&g_pVB, NULL))) {
return E_FAIL;
}
void* pVertices;
if (FAILED(g_pVB->Lock(0, 0, /* map entire buffer */
&pVertices, 0))) {
return E_FAIL;
}
memcpy(pVertices, Vertices, sizeof(Vertices));
g_pVB->Unlock();
// Create and initialize index buffer
static const WORD Indices[] =
{
0, 1, 2,
0, 2, 3
};
if (FAILED(pd3dDevice->CreateIndexBuffer(sizeof(Indices),
D3DUSAGE_WRITEONLY,
D3DFMT_INDEX16,
D3DPOOL_DEFAULT,
&g_pIB, NULL))) {
return E_FAIL;
}
void* pIndices;
if (FAILED(g_pIB->Lock(0, 0, /* map entire buffer */
&pIndices, 0))) {
return E_FAIL;
}
memcpy(pIndices, Indices, sizeof(Indices));
g_pIB->Unlock();
#if phase6
// Create and initialize cube vertex buffer
static const CubeVertexFormat CubeVertices[] =
{
//后
{ -7.0f, -7.0f, 7.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f },
{ -7.0f, 7.0f, 7.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },
{ 7.0f, 7.0f, 7.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f },
{ 7.0f, -7.0f, 7.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f },
//下
{ -7.0f, -7.0f, -7.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f },
{ -7.0f, -7.0f, 7.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },
{ 7.0f, -7.0f, 7.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f },
{ 7.0f, -7.0f, -7.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f },
//左
{ -7.0f, -7.0f, -7.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f },
{ -7.0f, 7.0f, -7.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f },
{ -7.0f, 7.0f, 7.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f },
{ -7.0f, -7.0f, 7.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f },
//右
{ 7.0f, -7.0f, 7.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f },
{ 7.0f, 7.0f, 7.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f },
{ 7.0f, 7.0f, -7.0f,-1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 1.0f, 1.0f },
{ 7.0f, -7.0f, -7.0f,-1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f },
//上
{ 7.0f, 7.0f, -7.0f, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f },
{ 7.0f, 7.0f, 7.0f, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f },
{ -7.0f, 7.0f, 7.0f, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f },
{ -7.0f, 7.0f,-7.0f, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f },
};
if (FAILED(pd3dDevice->CreateVertexBuffer(sizeof(CubeVertices),
0,
0,// using vertex declaration
D3DPOOL_DEFAULT,
&g_pCubeVB, NULL))) {
return E_FAIL;
}
void* pCubeVertices;
if (FAILED(g_pCubeVB->Lock(0, 0, /* map entire buffer */
&pCubeVertices, 0))) {
return E_FAIL;
}
memcpy(pCubeVertices, CubeVertices, sizeof(CubeVertices));
g_pCubeVB->Unlock();
// Create and initialize index buffer
static const WORD CubeIndices[] =
{
0, 1, 2,
0, 2, 3,
4, 5, 6,
4, 6, 7,
8, 9, 10,
8,10, 11,
12,13,14,
12,14,15,
16,17,18,
16,18,19
};
if (FAILED(pd3dDevice->CreateIndexBuffer(sizeof(CubeIndices),
D3DUSAGE_WRITEONLY,
D3DFMT_INDEX16,
D3DPOOL_DEFAULT,
&g_pCubeIB, NULL))) {
return E_FAIL;
}
void* pCubeIndices;
if (FAILED(g_pCubeIB->Lock(0, 0, /* map entire buffer */
&pCubeIndices, 0))) {
return E_FAIL;
}
memcpy(pCubeIndices, CubeIndices, sizeof(CubeIndices));
g_pCubeIB->Unlock();
// Create vertex declaration
createVertexDeclaration(pd3dDevice);
// Create and initialize torus vertex buffer
if( FAILED(drawFlatPatchToVBIB(pd3dDevice, sides, rings, g_pTorusVB, g_pTorusIB) ))
return E_FAIL;
#endif
return S_OK;
}
#endif
//--------------------------------------------------------------------------------------
// Create any D3D9 resources that won't live through a device reset (D3DPOOL_DEFAULT)
// or that are tied to the back buffer size
//--------------------------------------------------------------------------------------
HRESULT CALLBACK OnD3D9ResetDevice( IDirect3DDevice9* pd3dDevice, const D3DSURFACE_DESC* pBackBufferSurfaceDesc,
void* pUserContext )
{
#if phase3
HRESULT hr;
if( g_pEffect )
V_RETURN( g_pEffect->OnResetDevice() );
#endif
#if phase6
V_RETURN(g_dlg_resource_manager.OnD3D9ResetDevice());
// set dialog position and size
g_control_dlg.SetLocation(pBackBufferSurfaceDesc->Width - 170, pBackBufferSurfaceDesc->Height - 350);
g_control_dlg.SetSize(170, 300);
#endif
#if phase2
pd3dDevice->SetRenderState( D3DRS_CULLMODE, D3DCULL_NONE );
//Setup the camera's projection parameters
float fAspectRatio = pBackBufferSurfaceDesc->Width / ( FLOAT )pBackBufferSurfaceDesc->Height;
g_Camera.SetProjParams( D3DX_PI / 2, fAspectRatio, 0.1f, 5000.0f );
g_Camera.SetWindow( pBackBufferSurfaceDesc->Width, pBackBufferSurfaceDesc->Height );
g_Camera.SetButtonMasks( MOUSE_LEFT_BUTTON, MOUSE_WHEEL, MOUSE_RIGHT_BUTTON );
#endif
#if phase1
return initVertexIndexBuffer(pd3dDevice);
#endif
}
static const double my2Pi = 2.0 * 3.14159265358979323846;
//--------------------------------------------------------------------------------------
// Handle updates to the scene. This is called regardless of which D3D API is used
//--------------------------------------------------------------------------------------
void CALLBACK OnFrameMove( double fTime, float fElapsedTime, void* pUserContext )
{
#if phase2
g_Camera.FrameMove( fElapsedTime );
#endif
#if phase5
if( g_bAnimation )
{
g_fLightAngle += 0.0002;
if( g_fLightAngle > my2Pi)
{
g_fLightAngle -= my2Pi;
}
}
#endif
}
//--------------------------------------------------------------------------------------
// Render the scene using the D3D9 device
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9FrameRender( IDirect3DDevice9* pd3dDevice, double fTime, float fElapsedTime, void* pUserContext )
{
HRESULT hr;
#if phase3
D3DXVECTOR3 lightPosition(5.f * sin(g_fLightAngle), 5.f * cos(g_fLightAngle), 0);
#endif
// Clear the render target and the zbuffer
V( pd3dDevice->Clear( 0, NULL, D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER, D3DCOLOR_ARGB( 0, 45, 50, 170 ), 1.0f, 0 ) );
// Render the scene
if( SUCCEEDED( pd3dDevice->BeginScene() ) )
{
#if phase6
V(g_control_dlg.OnRender(fElapsedTime));
#endif
#if phase3
UINT iPass, cPasses;
D3DXMATRIXA16 mWorldViewProjection, mWorld, mWorldInv;
#if phase3 & !phase6
V( g_pEffect->SetTechnique( g_hTech ) );
#endif
#if phase6 & !phase7
switch( g_nCurrentModelID )
{
case PLANE:
V( g_pEffect->SetTechnique( g_hTech ) );
break;
case CUBE:
V( g_pEffect->SetTechnique( g_hCubeTech ) );
break;
case CUBEBLACKWHITE:
V( g_pEffect->SetTechnique( g_hCubeBlackWhiteTech ) );
break;
case TORUS:
lightPosition = D3DXVECTOR3(7.f * sin(g_fLightAngle) + 7.0f, 0 , 7.f * cos(g_fLightAngle));
V( g_pEffect->SetTechnique( g_hTorusTech ) );
break;
}
#endif
#if phase6 & phase7
if( !g_nCurrentTextureID)
{
switch( g_nCurrentModelID )
{
case PLANE:
V( g_pEffect->SetTechnique( g_hTech ) );
break;
case CUBE:
V( g_pEffect->SetTechnique( g_hCubeTech ) );
break;
case CUBEBLACKWHITE:
V( g_pEffect->SetTechnique( g_hCubeBlackWhiteTech ) );
break;
case TORUS:
lightPosition = D3DXVECTOR3(7.f * sin(g_fLightAngle) + 7.0f, 0 , 7.f * cos(g_fLightAngle));
V( g_pEffect->SetTechnique( g_hTorusTech ) );
break;
}
}
else
{
switch( g_nCurrentModelID )
{
case PLANE:
V( g_pEffect->SetTechnique( g_hBaseTech ) );
break;
case CUBE:
V( g_pEffect->SetTechnique( g_hBaseCubeTech ) );
break;
case CUBEBLACKWHITE:
V( g_pEffect->SetTechnique( g_hBaseCubeBlackWhiteTech ) );
break;
case TORUS:
lightPosition = D3DXVECTOR3(7.f * sin(g_fLightAngle) + 7.0f, 0 , 7.f * cos(g_fLightAngle));
V( g_pEffect->SetTechnique( g_hBaseTorusTech ) );
break;
}
}
#endif
V( g_pEffect->Begin( &cPasses, 0 ) );
for( iPass = 0; iPass < cPasses ; iPass++ )
{
V( g_pEffect->BeginPass( iPass ) );
// set WorldInv matrix
mWorld = *g_Camera.GetWorldMatrix();
mWorldInv = *D3DXMatrixInverse(&mWorldInv, 0, &mWorld);
V( g_pEffect->SetMatrix( g_hWorldInv, &mWorldInv ) );
//set WorldViewProject matrix
mWorldViewProjection = *g_Camera.GetWorldMatrix() * *g_Camera.GetViewMatrix() *
*g_Camera.GetProjMatrix();
V( g_pEffect->SetMatrix( g_hWorldViewProj, &mWorldViewProjection) );
//set texture
#if phase6 & !phase7
V( g_pEffect->SetTexture( g_hBrickNormal2DTex, g_pMyBrickNormalMapTex ) );
#endif
#if phase7
switch(g_nCurrentTextureID)
{
case BRICK:
V( g_pEffect->SetTexture( g_hBrickNormal2DTex, g_pMyBrickNormalMapTex ) );
break;
case STONE:
case ROCK:
case WALL:
case FOUR:
case CONCRETE:
case SAINT:
V(g_pEffect->SetTexture( g_hBase2DTex, g_pBaseTextures[g_nCurrentTextureID- 1]));
V( g_pEffect->SetTexture( g_hBrickNormal2DTex, g_pNMHTextures[g_nCurrentTextureID- 1] ));
break;
}
#endif
V( g_pEffect->SetTexture( g_hNormalizeVectorCubeTex, g_pMyNormalizeVectorCubeMapTex ) );
// set g_Ambient
g_pEffect->SetFloat( g_hAmbient, 0.2f );
// set light position
g_pEffect->SetFloatArray( g_hLightPosition, lightPosition, 3 );
// set eye position
//const float* eyePositionInObject = (const FLOAT *)g_Camera.GetEyePt();
g_pEffect->SetFloatArray( g_hEyePosition, (const FLOAT *)g_Camera.GetEyePt(), 3 );
float LMd[3] = {0.8f, 0.7f, 0.2f};
g_pEffect->SetFloatArray( g_hLightMaterialDiffuse, LMd, 3 );
float LMs[3] = {0.5f, 0.5f, 0.8f};
g_pEffect->SetFloatArray( g_hLightMaterialSpecular, LMs, 3 );
#if phase6
const float outerRadius = 6, innerRadius = 2;
g_pEffect->SetFloat( g_hOuterRadius, outerRadius );
g_pEffect->SetFloat( g_hInnerRadius, innerRadius );
#endif
#if phase2 && !phase3
// Set world matrix
D3DXMATRIX world = *g_Camera.GetWorldMatrix() ; //注意茶壶总在摄像机前面,相对于摄像机静止
pd3dDevice->SetTransform(D3DTS_WORLD, &world) ;
D3DXMATRIX view = *g_Camera.GetViewMatrix() ;
pd3dDevice->SetTransform(D3DTS_VIEW, &view) ;
// Set projection matrix
D3DXMATRIX proj = *g_Camera.GetProjMatrix() ;
pd3dDevice->SetTransform(D3DTS_PROJECTION, &proj) ;
#endif
#if phase1 & !phase6
pd3dDevice->SetStreamSource(0, g_pVB, 0, sizeof(MyVertexFormat));
pd3dDevice->SetIndices(g_pIB);//sets the current index buffer.
pd3dDevice->SetFVF(FVF_VERTEX);//Sets the current vertex stream declaration.
pd3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, 4, 0, 2);
#endif
#if phase6
switch( g_nCurrentModelID )
{
case PLANE:
pd3dDevice->SetStreamSource(0, g_pVB, 0, sizeof(MyVertexFormat));
pd3dDevice->SetIndices(g_pIB);//sets the current index buffer.
pd3dDevice->SetFVF(FVF_VERTEX);//Sets the current vertex stream declaration.
pd3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, 4, 0, 2);
break;
case CUBE:
case CUBEBLACKWHITE:
pd3dDevice->SetVertexDeclaration(g_pVertexDecl);
pd3dDevice->SetStreamSource(0, g_pCubeVB, 0, sizeof(CubeVertexFormat));
pd3dDevice->SetIndices(g_pCubeIB);
pd3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0, 20, 0, 10);
break;
case TORUS:
pd3dDevice->SetStreamSource(0, g_pTorusVB, 0, sizeof(TorusVertexFormat) );
pd3dDevice->SetIndices( g_pTorusIB );
pd3dDevice->SetFVF( D3DFVF_XYZ );
//pd3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0,
// (sides + 1) * (rings + 1), 0, (sides + 1) * 10 * 2 + 6);
pd3dDevice->DrawIndexedPrimitive(D3DPT_TRIANGLELIST, 0, 0,
(sides + 1) * (rings + 1), 0, (sides + 1) * rings * 2);
break;
}
#endif
V( g_pEffect->EndPass() );
}
V( g_pEffect->End() );
#endif
#if phase4
pd3dDevice->SetRenderState(D3DRS_LIGHTING, false);
// Set world matrix
D3DXMATRIX M;
D3DXMatrixIdentity( &M ); // M = identity matrix
D3DXMatrixTranslation(&M, lightPosition.x, lightPosition.y, lightPosition.z);
pd3dDevice->SetTransform(D3DTS_WORLD, &M) ;
//这里三角形更像是世界坐标中静止的物体(比如墙)因为按W它会相对与摄像机会动,不像茶壶总在摄像机前面,相对于摄像机静止
// Set view matrix
D3DXMATRIX view = *g_Camera.GetViewMatrix() ;
pd3dDevice->SetTransform(D3DTS_VIEW, &view) ;
// Set projection matrix
D3DXMATRIX proj = *g_Camera.GetProjMatrix() ;
pd3dDevice->SetTransform(D3DTS_PROJECTION, &proj) ;
g_pLightSphereMesh->DrawSubset(0);
#endif
V( pd3dDevice->EndScene() );
}
}
//--------------------------------------------------------------------------------------
// Handle messages to the application
//--------------------------------------------------------------------------------------
LRESULT CALLBACK MsgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam,
bool* pbNoFurtherProcessing, void* pUserContext )
{
#if phase2
g_Camera.HandleMessages( hWnd, uMsg, wParam, lParam );
#endif
#if phase6
*pbNoFurtherProcessing = g_dlg_resource_manager.MsgProc(hWnd, uMsg, wParam, lParam);
if(*pbNoFurtherProcessing)
return 0;
*pbNoFurtherProcessing = g_control_dlg.MsgProc(hWnd, uMsg, wParam, lParam);
if(*pbNoFurtherProcessing)
return 0;
#endif
return 0;
}
//--------------------------------------------------------------------------------------
// Release D3D9 resources created in the OnD3D9ResetDevice callback
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9LostDevice( void* pUserContext )
{
#if phase1
SAFE_RELEASE(g_pVB);
SAFE_RELEASE(g_pIB);
#endif
#if phase6
SAFE_RELEASE(g_pCubeVB);
SAFE_RELEASE(g_pCubeIB);
SAFE_RELEASE(g_pVertexDecl);
g_dlg_resource_manager.OnD3D9LostDevice();
SAFE_RELEASE(g_pTorusVB);
SAFE_RELEASE(g_pTorusIB);
#endif
#if phase3
if( g_pEffect )
g_pEffect->OnLostDevice();
#endif
}
//--------------------------------------------------------------------------------------
// Release D3D9 resources created in the OnD3D9CreateDevice callback
//--------------------------------------------------------------------------------------
void CALLBACK OnD3D9DestroyDevice( void* pUserContext )
{
#if phase3
SAFE_RELEASE(g_pEffect);
SAFE_RELEASE(g_pMyBrickNormalMapTex);
SAFE_RELEASE(g_pMyNormalizeVectorCubeMapTex);
#endif
#if phase4
SAFE_RELEASE(g_pLightSphereMesh);
#endif
#if phase6
g_dlg_resource_manager.OnD3D9DestroyDevice();
#endif
#if phase7
for( int iTextureIndex = BRICK; iTextureIndex < s_iNUM_TEXTURES; iTextureIndex++ )
{
SAFE_RELEASE( g_pBaseTextures[iTextureIndex] );
SAFE_RELEASE( g_pNMHTextures[iTextureIndex] );
}
free( g_pBaseTextures );
free( g_pNMHTextures );
#endif
}
#if phase5
void CALLBACK OnKeyboardProc(UINT character, bool is_key_down, bool is_alt_down, void* user_context)
{
if(is_key_down)
{
switch(character)
{
case VK_SPACE:
g_bAnimation = !g_bAnimation;
break;
}
}
}
#endif
#if phase6
//--------------------------------------------------------------------------------------
// Handle events for controls
//--------------------------------------------------------------------------------------
void CALLBACK OnGUIEvent(UINT event, int control_id, CDXUTControl* control, void* user_context)
{
switch(control_id)
{
case IDC_COMBO_MODEL:
{
DXUTComboBoxItem* pSelectedItem = ( ( CDXUTComboBox* )control )->GetSelectedItem();
if( pSelectedItem )
g_nCurrentModelID = ( int )( INT_PTR )pSelectedItem->pData;
break;
}
case IDC_COMBO_TEXTURE:
{
DXUTComboBoxItem* pSelectedItem = ( ( CDXUTComboBox* )control )->GetSelectedItem();
if( pSelectedItem )
g_nCurrentTextureID = ( int )( INT_PTR )pSelectedItem->pData;
break;
}
}
}
//--------------------------------------------------------------------------------------
// Initialize dialogs
//--------------------------------------------------------------------------------------
void InitDialogs()
{
int x = 35, y = 10, width = 125, height = 22;
g_control_dlg.Init(&g_dlg_resource_manager);
g_control_dlg.SetCallback(OnGUIEvent);
y = 10;
g_control_dlg.AddComboBox(IDC_COMBO_MODEL, x, y += 24, width, height);
g_control_dlg.GetComboBox(IDC_COMBO_MODEL)->AddItem(L"Wall", ( LPVOID )PLANE);
g_control_dlg.GetComboBox(IDC_COMBO_MODEL)->AddItem(L"Cube", ( LPVOID )CUBE);
g_control_dlg.GetComboBox(IDC_COMBO_MODEL)->AddItem(L"CubeBlackWhite", ( LPVOID )CUBEBLACKWHITE);
g_control_dlg.GetComboBox(IDC_COMBO_MODEL)->AddItem(L"Torus", ( LPVOID )TORUS);
#if phase7
g_control_dlg.AddComboBox(IDC_COMBO_TEXTURE, x, y += 24, width , height);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Brick", ( LPVOID )BRICK);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Stone", ( LPVOID )STONE);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Rock", ( LPVOID )ROCK);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Wall", ( LPVOID )WALL);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Four", ( LPVOID )FOUR);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Concrete", ( LPVOID )CONCRETE);
g_control_dlg.GetComboBox(IDC_COMBO_TEXTURE)->AddItem(L"Saint", ( LPVOID )SAINT);
#endif
}
#endif
//--------------------------------------------------------------------------------------
// Initialize everything and go into a render loop
//--------------------------------------------------------------------------------------
INT WINAPI wWinMain( HINSTANCE, HINSTANCE, LPWSTR, int )
{
// Enable run-time memory check for debug builds.
#if defined(DEBUG) | defined(_DEBUG)
_CrtSetDbgFlag( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF );
#endif
// Set the callback functions
DXUTSetCallbackD3D9DeviceAcceptable( IsD3D9DeviceAcceptable );
DXUTSetCallbackD3D9DeviceCreated( OnD3D9CreateDevice );
DXUTSetCallbackD3D9DeviceReset( OnD3D9ResetDevice );
DXUTSetCallbackD3D9FrameRender( OnD3D9FrameRender );
DXUTSetCallbackD3D9DeviceLost( OnD3D9LostDevice );
DXUTSetCallbackD3D9DeviceDestroyed( OnD3D9DestroyDevice );
DXUTSetCallbackDeviceChanging( ModifyDeviceSettings );
DXUTSetCallbackMsgProc( MsgProc );
DXUTSetCallbackFrameMove( OnFrameMove );
#if phase5
DXUTSetCallbackKeyboard( OnKeyboardProc );
#endif
#if phase6
InitDialogs();
#endif
// TODO: Perform any application-level initialization here
// Initialize DXUT and create the desired Win32 window and Direct3D device for the application
DXUTInit( true, true ); // Parse the command line and show msgboxes
DXUTSetHotkeyHandling( true, true, true ); // handle the default hotkeys
DXUTSetCursorSettings( true, true ); // Show the cursor and clip it when in full screen
DXUTCreateWindow( L"3D_Shader_NormalMapping" );
DXUTCreateDevice( true, 800, 600 );
// Start the render loop
DXUTMainLoop();
// TODO: Perform any application-level cleanup here
return DXUTGetExitCode();
}
/*--------------------------------------------------------------------------
NormalMapping.fx -- normal mapping shader
(c) Seamanj.2013/8/1
--------------------------------------------------------------------------*/
//--------------------------------------------------------------------------------------
// Global variables
//--------------------------------------------------------------------------------------
float3 g_lightPosition; // Object - space
float3 g_eyePosition; // Object - space
float4x4 g_mWorldViewProj;
float4x4 g_mWorldInv;
//--------------------------------------------------------------------------------------
// Vertex shader output structure
//--------------------------------------------------------------------------------------
struct VS_Output {
float4 oPosition : POSITION;
float2 oTexCoord : TEXCOORD0;
float3 lightDirection : TEXCOORD1;
float3 halfAngle : TEXCOORD2;
};
//--------------------------------------------------------------------------------------
// Vertex shader
//--------------------------------------------------------------------------------------
VS_Output myVertexEntry(float4 position : POSITION,float2 texCoord : TEXCOORD0)
{
VS_Output OUT;
OUT.oPosition = mul ( position, g_mWorldViewProj);
OUT.oTexCoord = texCoord;
//将灯光位置置于物体坐标系中
float3 tempLightPosition = mul( float4(g_lightPosition, 1), g_mWorldInv).xyz;
OUT.lightDirection = tempLightPosition - position.xyz;
float3 tempEyePosition = mul( float4(g_eyePosition, 1), g_mWorldInv).xyz;
float3 eyeDirection = tempEyePosition - position.xyz;
//这里不单位法也可以
OUT.halfAngle = normalize( normalize(OUT.lightDirection) + normalize(eyeDirection) );
return OUT;
}
//--------------------------------------------------------------------------------------
// Vertex shader
//--------------------------------------------------------------------------------------
VS_Output cubeVertexEntry( float4 position : POSITION,
float3 normal : NORMAL,
float3 tangent : TANGENT,
float2 texCoord : TEXCOORD0)
{
VS_Output OUT;
OUT.oPosition = mul ( position, g_mWorldViewProj);
OUT.oTexCoord = texCoord;
//将灯光位置置于物体坐标系中
float3 tempLightPosition = mul( float4(g_lightPosition, 1), g_mWorldInv).xyz;
OUT.lightDirection = tempLightPosition - position.xyz;
float3 tempEyePosition = mul( float4(g_eyePosition, 1), g_mWorldInv).xyz;
float3 eyeDirection = tempEyePosition - position.xyz;
//构造物体坐标系到纹理坐标系的3x3转换矩阵
//float3 binormal = cross( normal, tangent );
float3 binormal = cross( tangent, normal );
//注意cross是右手坐标系建立的,我们要的是左手坐标系,所以交换下位置
float3x3 rotation = float3x3( tangent,
binormal,
normal);
OUT.lightDirection = mul( rotation, OUT.lightDirection );
eyeDirection = mul( rotation, eyeDirection );
//这里不单位化也可以
OUT.halfAngle = normalize( normalize(OUT.lightDirection) + normalize(eyeDirection) );
return OUT;
}
//--------------------------------------------------------------------------------------
// Global variables
//--------------------------------------------------------------------------------------
float g_outerRadius;
float g_innerRadius;
//--------------------------------------------------------------------------------------
// Vertex shader
//--------------------------------------------------------------------------------------
VS_Output torusVertexEntry( float3 position : POSITION )
{
VS_Output OUT;
const float pi2 = 6.28318530; // 2 times Pi
OUT.oTexCoord = position.xy * float2(-6, 2);
float cosS, sinS;
sincos(pi2 * position.x, sinS, cosS);
float cosT, sinT;
sincos(pi2 * position.y, sinT, cosT);
float3 torusPosition = float3( (g_outerRadius + g_innerRadius * cosT ) * cosS,
(g_outerRadius + g_innerRadius * cosT ) * sinS,
-g_innerRadius * sinT);//左手坐标系中z值相反
OUT.oPosition = mul ( float4(torusPosition, 1), g_mWorldViewProj);
//将灯光位置置于物体坐标系中
float3 tempLightPosition = mul( float4(g_lightPosition, 1), g_mWorldInv).xyz;
OUT.lightDirection = tempLightPosition - torusPosition;
float3 tempEyePosition = mul( float4(g_eyePosition, 1), g_mWorldInv).xyz;
float3 eyeDirection = tempEyePosition - torusPosition;
float3 tangent = float3( -sinS * ( g_outerRadius + g_innerRadius * cosT ),
cosS * ( g_outerRadius + g_innerRadius * cosT ),
0);
tangent = normalize( tangent );
float3 normal = float3( cosS * cosT, sinS * cosT, -sinT );//左手坐标系中的法向量
float3 binormal = cross ( tangent, normal);
//构造物体坐标系到纹理坐标系的3x3转换矩阵
float3x3 rotation = float3x3( tangent,
binormal,
normal);
// Rotate object-space vectors to texture space
OUT.lightDirection = mul( rotation, OUT.lightDirection );
eyeDirection = mul( rotation, eyeDirection );
//这里不单位化也可以
OUT.halfAngle = normalize( normalize(OUT.lightDirection) + normalize(eyeDirection) );
return OUT;
}
float3 expand( float3 v )
{
return ( v - 0.5 ) * 2;
}
//--------------------------------------------------------------------------------------
// Global variables used by Pixel shader
//--------------------------------------------------------------------------------------
float g_Ambient;
float4 g_LMd;
float4 g_LMs;
texture g_txBrickNormal2D;
texture g_txNormalizeVectorCube;
texture g_baseTexture; // Base color texture
//-----------------------------------------------------------------------------
// Sampler
//-----------------------------------------------------------------------------
sampler2D g_samBrickNormal2D =
sampler_state
{
Texture = <g_txBrickNormal2D>;
MinFilter = Linear;
MagFilter = Linear;
MipFilter = Linear;
AddressU = Wrap;
AddressV = Wrap;
};
samplerCUBE g_samNormalizeVectorCube1 =
sampler_state
{
Texture = <g_txNormalizeVectorCube>;
MinFilter = Linear;
MagFilter = Linear;
MipFilter = Linear;
AddressU = Clamp;
AddressV = Clamp;
};
samplerCUBE g_samNormalizeVectorCube2 =
sampler_state
{
Texture = <g_txNormalizeVectorCube>;
MinFilter = Linear;
MagFilter = Linear;
MipFilter = Linear;
AddressU = Clamp;
AddressV = Clamp;
};
sampler2D g_samBase =
sampler_state
{
Texture = <g_baseTexture>;
MinFilter = Linear;
MagFilter = Linear;
MipFilter = Linear;
AddressU = Wrap;
AddressV = Wrap;
};
//--------------------------------------------------------------------------------------
// Pixel shader
//--------------------------------------------------------------------------------------
float4 myPixelEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
normal.z = -normal.z;//这里的法向量是以OPENGL的坐标系为基础提供的,转成DX的Z轴相反
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return g_LMd * (g_Ambient + diffuse ) + g_LMs * pow( specular, 8 );
}
//--------------------------------------------------------------------------------------
// Pixel shader
//--------------------------------------------------------------------------------------
float4 cubePixelEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return g_LMd * (g_Ambient + diffuse ) + g_LMs * pow( specular, 8 );
}
float4 cubePixelBlackWhiteEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return diffuse;
}
float4 basePixelEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
normal.z = -normal.z;//这里的法向量是以OPENGL的坐标系为基础提供的,转成DX的Z轴相反
normal.y = -normal.y;
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float4 cBaseColor = tex2D( g_samBase, normalMapTexCoord );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return cBaseColor * (g_Ambient + diffuse ) + g_LMs * pow( specular, 8 );
}
float4 baseCubePixelEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
normal.y = -normal.y;
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float4 cBaseColor = tex2D( g_samBase, normalMapTexCoord );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return cBaseColor * (g_Ambient + diffuse ) + g_LMs * pow( specular, 8 );
}
float4 baseCubePixelBlackWhiteEntry(float2 normalMapTexCoord : TEXCOORD0,
float3 lightDirection : TEXCOORD1,
float3 halfAngle : TEXCOORD2
) : COLOR
{
float3 normalTex = tex2D(g_samBrickNormal2D, normalMapTexCoord).xyz;
float3 normal = expand( normalTex );
normal.y = -normal.y;
float3 normLightDirTex = texCUBE( g_samNormalizeVectorCube1, lightDirection ).xyz;
float3 normLightDir = expand( normLightDirTex );
float3 normHalfAngleTex = texCUBE ( g_samNormalizeVectorCube2, halfAngle ).xyz;
float3 normHalfAngle = expand( normHalfAngleTex );
float4 cBaseColor = tex2D( g_samBase, normalMapTexCoord );
float diffuse = saturate( dot( normal, normLightDir ) );
float specular = saturate( dot( normal, normHalfAngle ) );
return diffuse;
}
//--------------------------------------------------------------------------------------
// Renders scene to render target
//--------------------------------------------------------------------------------------
technique myTechnique
{
pass P0
{
VertexShader = compile vs_2_0 myVertexEntry();
PixelShader = compile ps_2_0 myPixelEntry();
}
}
technique cubeTechnique
{
pass P0
{
VertexShader = compile vs_2_0 cubeVertexEntry();
PixelShader = compile ps_2_0 cubePixelEntry();
}
}
technique cubeWhiteBlackTechnique
{
pass P0
{
VertexShader = compile vs_2_0 cubeVertexEntry();
PixelShader = compile ps_2_0 cubePixelBlackWhiteEntry();
}
}
technique torusTechnique
{
pass P0
{
VertexShader = compile vs_2_0 torusVertexEntry();
PixelShader = compile ps_2_0 cubePixelEntry();
}
}
technique baseTechnique
{
pass P0
{
VertexShader = compile vs_2_0 myVertexEntry();
PixelShader = compile ps_2_0 basePixelEntry();
}
}
technique baseCubeTechnique
{
pass P0
{
VertexShader = compile vs_2_0 cubeVertexEntry();
PixelShader = compile ps_2_0 baseCubePixelEntry();
}
}
technique baseCubeWhiteBlackTechnique
{
pass P0
{
VertexShader = compile vs_2_0 cubeVertexEntry();
PixelShader = compile ps_2_0 baseCubePixelBlackWhiteEntry();
}
}
technique baseTorusTechnique
{
pass P0
{
VertexShader = compile vs_2_0 torusVertexEntry();
PixelShader = compile ps_2_0 baseCubePixelEntry();
}
}