背景
PBR的直接光照利用BRDF直接累加即可,前文结合THREE代码有阐述;但间接光不能直接累加,需要对半球积分求解,而积分计算量太大,WebGL端可以使用一种近似的间接光方式,涉及蒙特卡洛方法。本文结合THREE分析间接光这块的实现。
主要参考下文
https://zhuanlan.zhihu.com/p/58641686
预处理部分 第一步 漫反射的辐照图
涉及蒙特卡洛、重要性采样
正常只需要一张
预处理(按照cosine分布采样,直接累加颜色值,重新生成Diffuse的环境光图片)和使用方式详见下面博客的环境光部分
https://zhuanlan.zhihu.com/p/58641686
直接对原始的环境光图片按照cosine分布采样,直接累加颜色值,重新生成Diffuse的环境光图片。使用的时候,只需要用点的normal去采样这个环境光图片,就是diffuse的光照结果。下面代码用来生成该图片
// Monte-Carlo pre-integration of the diffuse (Lambertian) environment term.
// Cosine-importance-samples the hemisphere around normal N and averages the
// environment color; because the cosine pdf (NoL/PI) exactly cancels the
// Lambert term (NoL/PI), each accepted sample contributes the raw texel color.
// Random : per-invocation random offset fed into the Hammersley sequence
// N      : world-space surface normal the hemisphere is centered on
// img    : source environment map (direction mapped to texels via pos2uv)
// hdr    : when true, the texel alpha stores a shared exponent biased by 128
//          (RGBE-style) — TODO confirm against the asset pipeline
// returns the pre-integrated diffuse irradiance (RGBA accumulator / samples)
Vector4 DiffuseIBL(Vector2 Random, Vector3 N, BitmapData &img, bool hdr){
    Vector4 DiffuseLighting;
    // FIX: zero the accumulator explicitly — if Vector4 is a plain struct,
    // default construction leaves its members uninitialized.
    DiffuseLighting.x = 0;
    DiffuseLighting.y = 0;
    DiffuseLighting.z = 0;
    DiffuseLighting.w = 0;
    const unsigned NumSamples = 4096 * 2;
    for (unsigned int i = 0; i < NumSamples; i++){
        // Low-discrepancy 2D point in [0,1)^2, decorrelated by Random.
        Vector2 E = Hammersley(i, NumSamples, Random);
        // Cosine-weighted direction in tangent space, rotated into world space.
        Vector3 C = CosineSampleHemisphere(E);
        Vector3 L = TangentToWorld(C, N);
        float NoL = saturate(N.dot(L)); // FIX: removed stray second semicolon
        if (NoL > 0){
            Vector2 uv2 = pos2uv(L);
            Vector4 result = img.getPixel32Int(uv2.x * img.width, uv2.y * img.height);
            // lambert = DiffuseColor * NoL / PI
            // pdf     = NoL / PI
            // => the terms cancel, so the per-sample weight is simply 1.
            float e = 1.0;
            if (hdr)
            {
                // Decode shared-exponent HDR: alpha holds exponent + 128.
                e = result.w - 128.0;
                e = pow(2, e);
            }
            DiffuseLighting.x += result.x * e;
            DiffuseLighting.y += result.y * e;
            DiffuseLighting.z += result.z * e;
            DiffuseLighting.w += 255;
        }
    }
    // Monte-Carlo estimator: average over ALL samples, including rejected ones.
    float Weight = 1.0 / NumSamples;
    DiffuseLighting = DiffuseLighting * (Weight);
    return DiffuseLighting;
}
预处理部分 第二步 镜面反射的辐照图
涉及蒙特卡洛、重要性采样
实际就是Pre-Filtered Environment Map 预处理环境纹理
// Pre-filters the environment map for one roughness level (first half of the
// split-sum approximation). Importance-samples GGX half-vectors around R and
// accumulates NoL-weighted environment radiance.
// Roughness : GGX roughness for this mip/output level
// R         : reflection direction; the split-sum approximation assumes
//             N == V == R (isotropic lobe centered on R)
float3 PrefilterEnvMap( float Roughness , float3 R )
{
	float3 N = R;
	float3 V = R;
	float3 PrefilteredColor = 0;
	// FIX: TotalWeight was used and divided by without ever being declared
	// or initialized in the original snippet.
	float TotalWeight = 0;
	const uint NumSamples = 1024;
	for( uint i = 0; i < NumSamples; i++ )
	{
		float2 Xi = Hammersley( i, NumSamples );
		float3 H = ImportanceSampleGGX( Xi, Roughness , N );
		// Reflect V about the sampled half-vector to get the light direction.
		float3 L = 2 * dot( V, H ) * H - V;
		float NoL = saturate( dot( N, L ) );
		if( NoL > 0 )
		{
			// NoL weighting is not physically derived but empirically
			// improves results (Karis, "Real Shading in UE4").
			PrefilteredColor += EnvMap.SampleLevel( EnvMapSampler , L, 0 ).rgb * NoL;
			TotalWeight += NoL;
		}
	}
	// Guard the degenerate case where every sample was rejected.
	return PrefilteredColor / max( TotalWeight, 0.001 );
}
// Importance-samples the GGX (Trowbridge-Reitz) distribution: maps a uniform
// point Xi in [0,1)^2 to a world-space half-vector H whose density is
// proportional to D_GGX * cos(theta), via the analytic inverse CDF.
float3 ImportanceSampleGGX( float2 Xi, float Roughness , float3 N )
{
	float alpha = Roughness * Roughness;

	// Spherical coordinates of H in tangent space (N = +Z).
	float phi      = 2 * PI * Xi.x;
	float cosTheta = sqrt( (1 - Xi.y) / ( 1 + (alpha * alpha - 1) * Xi.y ) );
	float sinTheta = sqrt( 1 - cosTheta * cosTheta );

	float3 h = float3( sinTheta * cos( phi ), sinTheta * sin( phi ), cosTheta );

	// Build an orthonormal basis around N, choosing a helper axis that is
	// guaranteed not to be parallel to N.
	float3 up       = abs(N.z) < 0.999 ? float3(0,0,1) : float3(1,0,0);
	float3 tangentX = normalize( cross( up , N ) );
	float3 tangentY = cross( N, tangentX );

	// Rotate the tangent-space sample into world space.
	return tangentX * h.x + tangentY * h.y + N * h.z;
}
预处理部分 第三步 镜面反射的LUT
涉及蒙特卡洛、重要性采样
LUT 生成的实现代码如下
// UE4's approximation of the height-correlated Smith joint visibility term
// for GGX: V = 0.5 / (lambda_V + lambda_L), with each lambda linearized in a.
// Roughness : perceptual roughness (squared internally to get alpha)
// NoV, NoL  : clamped cosines of the view and light angles
float Vis_SmithJointApprox(float Roughness, float NoV, float NoL) {
    const float a = Roughness * Roughness;
    const float lambdaV = NoL * (NoV * (1.0 - a) + a);
    const float lambdaL = NoV * (NoL * (1.0 - a) + a);
    return 0.5 / (lambdaV + lambdaL);
}
// Pre-integrates the environment BRDF (second half of the split-sum
// approximation). For a given roughness and view angle it returns the
// (scale, bias) pair applied to F0 in the specular IBL lookup:
//   specular = F0 * result.x + result.y
// Random : random offset for the Hammersley sequence
// NoV    : cosine of the view angle (N = +Z in tangent space)
Vector2 IntegrateBRDF(Vector2 Random, float Roughness, float NoV){
    // Reconstruct the view vector in the XZ plane of tangent space.
    Vector3 V;
    V.x = sqrt(1.0 - NoV * NoV); // sin(theta_v)
    V.y = 0;
    V.z = NoV;                   // cos(theta_v)

    float scale = 0; // accumulates (1 - Fc) * NoL * Vis / pdf
    float bias  = 0; // accumulates      Fc  * NoL * Vis / pdf
    const int NumSamples = 128;

    for (int i = 0; i < NumSamples; i++){
        Vector2 Xi = Hammersley(i, NumSamples, Random);
        Vector3 H  = ImportanceSampleGGX(Xi, Roughness);
        // Reflect V about the sampled half-vector.
        Vector3 L  = H * (2 * V.dot(H)) - V;

        float NoL = saturate(L.z);
        float NoH = saturate(H.z);
        float VoH = saturate(V.dot(H));

        if (NoL > 0){
            float Vis = Vis_SmithJointApprox(Roughness, NoV, NoL);
            // Incident light = NoL; pdf = D * NoH / (4 * VoH).
            // The D term cancels against the BRDF, leaving NoL * Vis * 4*VoH/NoH.
            float weighted = NoL * Vis * (4 * VoH / NoH);
            // Schlick Fresnel interpolation factor.
            float Fc = pow(1 - VoH, 5);
            scale += (1 - Fc) * weighted;
            bias  += Fc * weighted;
        }
    }
    return Vector2(scale / NumSamples, bias / NumSamples);
}
THREE中对于环境间接光的实现主要使用IBL技术,但是BRDF运算量太大。对于高端平台来说,它是使用Monte Carlo蒙特卡洛方法,把积分问题转换成求和问题,进行预计算并将结果存储于二维LUT中。LUT(https://www.jianshu.com/p/fdec2a5e889f?utm_campaign=hugo&utm_content=note&utm_medium=seo_notes&utm_source=recommendation) 是 LookUpTable 的简称,也称作颜色查找表。
THREE针对的web平台需要一个更加高效的方式,不使用LUT采样图片的方式,LUT的曲线本身是平滑的,所以可以用数学的方式来拟合曲线,参考UE针对移动端设备计算方式,THREE中拟合代码如下:
// bsdfs.glsl.js
// Analytical approximation of the DFG LUT, one half of the
// split-sum approximation used in indirect specular lighting.
// via 'environmentBRDF' from "Physically Based Shading on Mobile"
// https://www.unrealengine.com/blog/physically-based-shading-on-mobile - environmentBRDF for GGX on mobile
// Returns vec2(scale, bias) applied to F0: specular ≈ F0 * result.x + result.y.
vec2 integrateSpecularBRDF( const in float dotNV, const in float roughness ) {
// Fitted polynomial coefficients of the analytic LUT approximation
// (two linear-in-roughness vec4s from the UE4 mobile curve fit).
const vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );
const vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );
vec4 r = roughness * c0 + c1;
// The exp2 term models the sharp falloff of the LUT at grazing angles.
float a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;
return vec2( -1.04, 1.04 ) * a004 + r.zw;
}
three中读取预烘培好的漫反射预处理辐照度图和镜面反射辐照度图代码实现
getLightProbeIndirectIrradiance 漫反射辐照图读取,利用法向量
getLightProbeIndirectRadiance 镜面辐照图读取,利用 视线方向的反射方向
envmap_physical_pars_fragment.glsl.js
// Quoted from three.js: ShaderChunk/envmap_physical_pars_fragment.glsl.js.
// Exports the GLSL helpers for indirect environment lighting:
//  - getLightProbeIndirectIrradiance: diffuse irradiance sampled along the
//    world-space normal (most-blurred LOD / irradiance map).
//  - getSpecularMIPLevel: maps roughness to a mip level of the env map.
//  - getLightProbeIndirectRadiance: specular radiance sampled along the
//    reflection (or refraction) vector at that mip level, for the CUBE,
//    CUBE_UV, EQUIREC and SPHERE env-map layouts.
// NOTE: the template literal below is runtime shader source and is kept verbatim.
export default /* glsl */`
#if defined( USE_ENVMAP )
#ifdef ENVMAP_MODE_REFRACTION
uniform float refractionRatio;
#endif
// 漫反射 利用法线读取
vec3 getLightProbeIndirectIrradiance( /*const in SpecularLightProbe specularLightProbe,*/ const in GeometricContext geometry, const in int maxMIPLevel ) {
vec3 worldNormal = inverseTransformDirection( geometry.normal, viewMatrix );
#ifdef ENVMAP_TYPE_CUBE
vec3 queryVec = vec3( flipEnvMap * worldNormal.x, worldNormal.yz );
// TODO: replace with properly filtered cubemaps and access the irradiance LOD level, be it the last LOD level
// of a specular cubemap, or just the default level of a specially created irradiance cubemap.
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = textureCubeLodEXT( envMap, queryVec, float( maxMIPLevel ) );
#else
// force the bias high to get the last LOD level as it is the most blurred.
vec4 envMapColor = textureCube( envMap, queryVec, float( maxMIPLevel ) );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_CUBE_UV )
vec4 envMapColor = textureCubeUV( envMap, worldNormal, 1.0 );
#else
vec4 envMapColor = vec4( 0.0 );
#endif
return PI * envMapColor.rgb * envMapIntensity;
}
// Trowbridge-Reitz distribution to Mip level, following the logic of http://casual-effects.blogspot.ca/2011/08/plausible-environment-lighting-in-two.html
float getSpecularMIPLevel( const in float roughness, const in int maxMIPLevel ) {
float maxMIPLevelScalar = float( maxMIPLevel );
float sigma = PI * roughness * roughness / ( 1.0 + roughness );
float desiredMIPLevel = maxMIPLevelScalar + log2( sigma );
// clamp to allowable LOD ranges.
return clamp( desiredMIPLevel, 0.0, maxMIPLevelScalar );
}
// 不同于漫反射,镜面反射是用反射方向来采样
vec3 getLightProbeIndirectRadiance( /*const in SpecularLightProbe specularLightProbe,*/ const in vec3 viewDir, const in vec3 normal, const in float roughness, const in int maxMIPLevel ) {
#ifdef ENVMAP_MODE_REFLECTION
vec3 reflectVec = reflect( -viewDir, normal );
// Mixing the reflection with the normal is more accurate and keeps rough objects from gathering light from behind their tangent plane.
reflectVec = normalize( mix( reflectVec, normal, roughness * roughness) );
#else
vec3 reflectVec = refract( -viewDir, normal, refractionRatio );
#endif
reflectVec = inverseTransformDirection( reflectVec, viewMatrix );
float specularMIPLevel = getSpecularMIPLevel( roughness, maxMIPLevel );
#ifdef ENVMAP_TYPE_CUBE
vec3 queryReflectVec = vec3( flipEnvMap * reflectVec.x, reflectVec.yz );
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = textureCubeLodEXT( envMap, queryReflectVec, specularMIPLevel );
#else
vec4 envMapColor = textureCube( envMap, queryReflectVec, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_CUBE_UV )
vec4 envMapColor = textureCubeUV( envMap, reflectVec, roughness );
#elif defined( ENVMAP_TYPE_EQUIREC )
vec2 sampleUV;
sampleUV.y = asin( clamp( reflectVec.y, - 1.0, 1.0 ) ) * RECIPROCAL_PI + 0.5;
sampleUV.x = atan( reflectVec.z, reflectVec.x ) * RECIPROCAL_PI2 + 0.5;
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = texture2DLodEXT( envMap, sampleUV, specularMIPLevel );
#else
vec4 envMapColor = texture2D( envMap, sampleUV, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#elif defined( ENVMAP_TYPE_SPHERE )
vec3 reflectView = normalize( ( viewMatrix * vec4( reflectVec, 0.0 ) ).xyz + vec3( 0.0,0.0,1.0 ) );
#ifdef TEXTURE_LOD_EXT
vec4 envMapColor = texture2DLodEXT( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
#else
vec4 envMapColor = texture2D( envMap, reflectView.xy * 0.5 + 0.5, specularMIPLevel );
#endif
envMapColor.rgb = envMapTexelToLinear( envMapColor ).rgb;
#endif
return envMapColor.rgb * envMapIntensity;
}
#endif
`;
参考资料
https://zhuanlan.zhihu.com/p/280893027
https://zhuanlan.zhihu.com/p/288112896
https://zhuanlan.zhihu.com/p/290634397
还有three里面用的多重scatter
https://zhuanlan.zhihu.com/p/91238829
完整详细实现参考:
https://learnopengl-cn.github.io/07%20PBR/03%20IBL/01%20Diffuse%20irradiance/
https://learnopengl-cn.github.io/07%20PBR/03%20IBL/02%20Specular%20IBL/
BRDF_Specular_Multiscattering_Environment( geometry, material.specularColor, material.specularRoughness, singleScattering, multiScattering );
// Fdez-Agüera's "Multiple-Scattering Microfacet Model for Real-Time Image Based Lighting"
// Approximates multiscattering in order to preserve energy.
// http://www.jcgt.org/published/0008/01/03/
// Outputs (accumulated into the inout params):
//   singleScatter — single-scattering specular response FssEss
//   multiScatter  — energy-compensation term Fms * Ems
void BRDF_Specular_Multiscattering_Environment( const in GeometricContext geometry, const in vec3 specularColor, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {
float dotNV = saturate( dot( geometry.normal, geometry.viewDir ) );
// Roughness-dependent Schlick Fresnel at the view angle.
vec3 F = F_Schlick_RoughnessDependent( specularColor, dotNV, roughness );
// Analytic split-sum environment BRDF: (scale, bias) applied to F0.
vec2 brdf = integrateSpecularBRDF( dotNV, roughness );
// Single-scattering directional albedo with Fresnel applied.
vec3 FssEss = F * brdf.x + brdf.y;
// Total single-scattering energy; Ems is the energy it misses.
float Ess = brdf.x + brdf.y;
float Ems = 1.0 - Ess;
// Cosine-weighted average Fresnel; 0.047619 == 1/21 (constant from the paper).
vec3 Favg = specularColor + ( 1.0 - specularColor ) * 0.047619; // 1/21
// Multiple-scattering Fresnel term (geometric series over inter-bounces).
vec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );
singleScatter += FssEss;
multiScatter += Fms * Ems;
}