来源:https://zhuanlan.zhihu.com/p/92315967
感觉写的很好,记录下来,如果侵权的话请留言,必删
总结:
2种计算深度值对应世界坐标的方法以及正交相机下的处理
------------------------------------------------------------------------------------------
可视化深度github:https://link.zhihu.com/?target=https%3A//github.com/keijiro/DepthInverseProjection
代码
- NDC方法
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Interpolators passed from the vertex to the fragment stage (NDC method).
struct v2f
{
float4 vertex : SV_POSITION;
// Homogeneous screen position from ComputeScreenPos (0..w range; divide by w,
// or sample with tex2Dproj, before use).
float4 screenPos : TEXCOORD0;
// View-space ray ending on the far plane; scaled by linear 0-1 depth
// in the fragment shader to get the view-space position.
float3 viewVec : TEXCOORD1;
};
// Vertex stage for the NDC method: besides the usual clip position and screen
// coordinate, emit a view-space ray that touches the far plane at this pixel.
v2f vert(appdata_base v)
{
    v2f o;

    o.vertex = UnityObjectToClipPos(v.vertex);
    o.screenPos = ComputeScreenPos(o.vertex);

    // Recover NDC xy in [-1, 1] from the homogeneous screen position.
    float4 ndc = (o.screenPos / o.screenPos.w) * 2 - 1;

    // A clip-space point on the far plane has z == w == far, so build it
    // directly and un-project to get the view-space far-plane ray.
    float far = _ProjectionParams.z;
    float4 farPlaneClip = float4(ndc.xy * far, far, far);
    o.viewVec = mul(unity_CameraInvProjection, farPlaneClip).xyz;

    return o;
}
sampler2D _CameraDepthTexture;

// Fragment stage for the NDC method: reconstruct the pixel's world-space
// position from the depth buffer and output it as color.
half4 frag(v2f i) : SV_Target
{
    // Raw depth -> linear [0, 1] depth (0 at the camera, 1 at the far plane).
    float linear01 = Linear01Depth(
        UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos)));

    // Scaling the far-plane ray by linear depth yields the view-space position.
    float3 positionVS = i.viewVec * linear01;

    // View -> world.
    float3 positionWS = mul(UNITY_MATRIX_I_V, float4(positionVS, 1)).xyz;
    return float4(positionWS, 1.0);
}
- 世界空间方法
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Interpolators for the world-space reconstruction method.
struct v2f
{
float4 vertex : SV_POSITION;
// Homogeneous screen position from ComputeScreenPos (sample with tex2Dproj).
float4 screenPos : TEXCOORD0;
// World-space vector from the vertex toward the camera (WorldSpaceViewDir).
float3 worldSpaceDir : TEXCOORD1;
// View-space z component of that vector; used to rescale it by eye depth.
float viewSpaceZ : TEXCOORD2;
};
// Vertex stage for the world-space method: emit the camera-to-vertex direction
// in world space together with its view-space z, so the fragment shader can
// rescale the ray by the sampled eye depth.
v2f vert(appdata_base v)
{
    v2f o;

    o.vertex = UnityObjectToClipPos(v.vertex);
    o.screenPos = ComputeScreenPos(o.vertex);

    // World-space direction from the vertex toward the camera...
    float3 toCamera = WorldSpaceViewDir(v.vertex);
    o.worldSpaceDir = toCamera;

    // ...and its z expressed in view space (w = 0: a direction, not a point).
    o.viewSpaceZ = mul(UNITY_MATRIX_V, float4(toCamera, 0.0)).z;

    return o;
}
sampler2D _CameraDepthTexture;

// Fragment stage for the world-space method: march from the camera along the
// interpolated ray by the sampled eye depth to recover the world position.
half4 frag(v2f i) : SV_Target
{
    // Raw depth -> linear eye (view-space) depth.
    float eyeDepth = LinearEyeDepth(
        UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos)));

    // Rescale the interpolated direction so its view-space z equals -eyeDepth;
    // the negative sign flips it from "toward camera" to "away from camera".
    float3 offset = i.worldSpaceDir * (-eyeDepth / i.viewSpaceZ);

    float3 positionWS = _WorldSpaceCameraPos + offset;
    return float4(positionWS, 1.0);
}
- 正交摄像机的情况
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
// Interpolators for the orthographic-camera variant.
struct v2f
{
float4 vertex : SV_POSITION;
// Homogeneous screen position from ComputeScreenPos.
float4 screenPos : TEXCOORD0;
// View-space xy position (z is left at 0 here and reconstructed from the
// depth buffer in the fragment shader).
float3 viewVec : TEXCOORD1;
};
// Vertex stage for the orthographic variant: emit the view-space xy of the
// pixel; z is filled in per-pixel from the depth buffer.
v2f vert(appdata_base v)
{
    v2f o;

    o.vertex = UnityObjectToClipPos(v.vertex);
    o.screenPos = ComputeScreenPos(o.vertex);

    // NDC xy in [-1, 1].
    float4 ndc = (o.screenPos / o.screenPos.w) * 2 - 1;

    // With an orthographic projection the view-space xy is just the NDC xy
    // scaled by the ortho half-width/half-height (unity_OrthoParams.xy).
    o.viewVec = float3(unity_OrthoParams.xy * ndc.xy, 0);

    return o;
}
sampler2D _CameraDepthTexture;

// Fragment stage for the orthographic variant: reconstruct the world-space
// position and output it as color. unity_OrthoParams.w selects between the
// perspective (0) and orthographic (1) depth linearizations.
half4 frag(v2f i) : SV_Target
{
    // Camera clip-plane distances.
    float near = _ProjectionParams.y;
    float far = _ProjectionParams.z;

    // Sample the raw depth. Use tex2Dproj so the homogeneous screenPos gets
    // its perspective divide, matching the other variants; the original plain
    // tex2D truncated the float4 to .xy with no divide by w, which only
    // accidentally works when w == 1 (pure orthographic rendering).
    float rawDepth = UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos));

    // Orthographic depth is already linear in the depth buffer; remap it to
    // eye units. NOTE(review): the (1 - rawDepth) flip assumes a reversed-Z
    // platform (UNITY_REVERSED_Z); on OpenGL-style targets the flip should be
    // skipped — confirm for the target platform.
    float ortho = (far - near) * (1 - rawDepth) + near;

    // Pick the appropriate linearization, normalized to [0, 1] by far.
    float depth = lerp(LinearEyeDepth(rawDepth), ortho, unity_OrthoParams.w) / far;

    // Map the normalized depth back to view-space z (negative: camera forward
    // is -z in view space).
    float z = -lerp(near, far, depth);

    // View-space position: interpolated xy plus the reconstructed z.
    float3 viewPos = float3(i.viewVec.xy, z);

    // View -> world.
    float3 worldPos = mul(UNITY_MATRIX_I_V, float4(viewPos, 1)).xyz;
    return float4(worldPos, 1.0);
}