unity通过深度缓冲计算该深度对应的世界坐标

来源:https://zhuanlan.zhihu.com/p/92315967

感觉写的很好,记录下来,如果侵权的话请留言,必删

总结:

2种计算深度值对应世界坐标的方法以及正交相机下的处理

------------------------------------------------------------------------------------------


 

可视化深度github:https://link.zhihu.com/?target=https%3A//github.com/keijiro/DepthInverseProjection

 

 

代码

  • NDC方法
#pragma vertex vert
#pragma fragment frag

#include "UnityCG.cginc"

// Interpolators passed from the vertex stage to the fragment stage.
struct v2f
{
    float4 vertex : SV_POSITION;   // clip-space position
    float4 screenPos : TEXCOORD0;  // homogeneous screen coords for tex2Dproj
    float3 viewVec : TEXCOORD1;    // view-space ray reaching the far plane
};

v2f vert(appdata_base v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos(v.vertex);
    o.screenPos = ComputeScreenPos(o.vertex);

    // Remap the [0,1] screen position into NDC in [-1,1].
    float4 ndc = o.screenPos / o.screenPos.w * 2.0 - 1.0;

    // Build a clip-space point sitting on the far plane and unproject it,
    // giving the view-space vector from the camera to the far plane.
    float farPlane = _ProjectionParams.z;
    float3 farClip = float3(ndc.xy, 1.0) * farPlane;
    o.viewVec = mul(unity_CameraInvProjection, farClip.xyzz).xyz;

    return o;
}

sampler2D _CameraDepthTexture;

half4 frag(v2f i) : SV_Target
{
    // Linear [0,1] depth sampled from the camera depth texture.
    float d = Linear01Depth(
        UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos)));

    // Scaling the far-plane ray by the linear depth recovers the
    // view-space position; the inverse view matrix lifts it to world space.
    float3 posVS = i.viewVec * d;
    float3 posWS = mul(UNITY_MATRIX_I_V, float4(posVS, 1.0)).xyz;

    return float4(posWS, 1.0);
}
  • 世界空间方法
#pragma vertex vert
#pragma fragment frag

#include "UnityCG.cginc"

// Interpolators carried from the vertex stage to the fragment stage.
struct v2f
{
    float4 vertex : SV_POSITION;
    float4 screenPos : TEXCOORD0;      // homogeneous coords for tex2Dproj
    float3 worldSpaceDir : TEXCOORD1;  // world-space vector toward the camera
    float viewSpaceZ : TEXCOORD2;      // the same vector's z in view space
};

v2f vert(appdata_base v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos(v.vertex);
    o.screenPos = ComputeScreenPos(o.vertex);

    // Vector from the vertex position to the camera, in world space.
    o.worldSpaceDir = WorldSpaceViewDir(v.vertex);

    // Its z component in view space, used later to rescale by eye depth.
    o.viewSpaceZ = mul(UNITY_MATRIX_V, float4(o.worldSpaceDir, 0.0)).z;

    return o;
}

sampler2D _CameraDepthTexture;

half4 frag(v2f i) : SV_Target
{
    // Linear eye-space depth from the camera depth texture.
    float eyeZ = LinearEyeDepth(
        UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos)));

    // Rescale the camera-to-vertex direction so its view-space z matches
    // the sampled depth (negated because the vector points at the camera
    // while visible view-space z is on the opposite side).
    float3 offset = i.worldSpaceDir * (-eyeZ / i.viewSpaceZ);

    // Offsetting from the camera position yields the pixel's world position.
    return float4(_WorldSpaceCameraPos + offset, 1.0);
}
  • 正交摄像机的情况
#pragma vertex vert
#pragma fragment frag

#include "UnityCG.cginc"

// Interpolators passed from the vertex stage to the fragment stage.
struct v2f
{
    float4 vertex : SV_POSITION;
    float4 screenPos : TEXCOORD0;  // homogeneous screen coords for tex2Dproj
    float3 viewVec : TEXCOORD1;    // view-space xy on the ortho view volume
};

v2f vert(appdata_base v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos(v.vertex);

    // Compute texture coordinate
    o.screenPos = ComputeScreenPos(o.vertex);

    // NDC position in [-1, 1]
    float4 ndcPos = (o.screenPos / o.screenPos.w) * 2 - 1;

    // unity_OrthoParams.xy is the orthographic camera's half-width /
    // half-height, so scaling NDC by it spans the view volume in view space.
    o.viewVec = float3(unity_OrthoParams.xy * ndcPos.xy, 0);

    return o;
}

sampler2D _CameraDepthTexture;

half4 frag(v2f i) : SV_Target
{
    // Camera near/far clip distances.
    float near = _ProjectionParams.y;
    float far = _ProjectionParams.z;

    // BUG FIX: screenPos is a homogeneous coordinate, so it must be sampled
    // with tex2Dproj (perspective divide by w) as in the other variants —
    // the original tex2D call truncated the float4 to xy without dividing.
    float rawDepth = UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, i.screenPos));

    // Orthographic depth is already linear; remap the raw value to an eye
    // distance. NOTE(review): the (1 - rawDepth) term assumes a reversed-Z
    // depth buffer (D3D-like targets) — confirm behavior on OpenGL.
    float ortho = (far - near) * (1 - rawDepth) + near;

    // unity_OrthoParams.w is 1 for orthographic cameras and 0 for
    // perspective, selecting the matching linearization; divide by far to
    // normalize into [0,1].
    float depth = lerp(LinearEyeDepth(rawDepth), ortho, unity_OrthoParams.w) / far;

    // Linear interpolate between near plane and far plane by depth value.
    // NOTE(review): lerp(near, far, depth) re-adds `near` to a distance
    // already measured from the camera (bias of near * (1 - depth));
    // verify against the reference implementation.
    float z = -lerp(near, far, depth);

    // View space position (view-space z is negative in front of the camera).
    float3 viewPos = float3(i.viewVec.xy, z);

    // Pixel world position via the inverse view matrix.
    float3 worldPos = mul(UNITY_MATRIX_I_V, float4(viewPos, 1)).xyz;

    return float4(worldPos, 1.0);
}
  • 1
    点赞
  • 5
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值