Reflection Effects in Unity (URP)

1. Cubemap

Simulate a reflection by sampling a cubemap.

            v2f vert (appdata v)
            {
                v2f o = (v2f) 0;
                o.vertex = TransformObjectToHClip(v.vertex.xyz);
                o.worldNormal = TransformObjectToWorldNormal(v.normal.xyz);
                return o;
            }

            half4 frag (v2f i) : SV_Target
            {
                // Reflect the main light direction around the surface normal
                Light light = GetMainLight();
                float3 worldLightDir = light.direction;
                float3 reflectDir = normalize(reflect(-worldLightDir, i.worldNormal));

                // Sample the cubemap along the reflection direction and decode its HDR data
                half4 envCol = SAMPLE_TEXTURECUBE(_CubeMap, sampler_CubeMap, reflectDir);
                half3 envHDRCol = DecodeHDREnvironment(envCol, unity_SpecCube0_HDR);
                return half4(envHDRCol, 1);
            }
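
The fragment shader above samples _CubeMap with URP's SAMPLE_TEXTURECUBE macro, which assumes the texture and a sampler are declared; a minimal declaration sketch (the sampler name is assumed for illustration):

            TEXTURECUBE(_CubeMap);
            SAMPLER(sampler_CubeMap);

DecodeHDREnvironment is fed unity_SpecCube0_HDR here; for a custom cubemap property, the texture's own decode vector (e.g. a _CubeMap_HDR float4, as the built-in skybox shader uses) could be used instead.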

CubeMapReflection (default skybox)

2. Reflection Probe

A Reflection Probe captures its surroundings into a cubemap, which shaders can then access through built-in variables. Reflection Probes have three modes: Baked, Custom, and Realtime. Baked probes are baked ahead of time; Custom probes can either be baked or use a user-supplied cubemap; Realtime probes update their cubemap at runtime.
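
As a side note on the three modes, a probe can also be configured and refreshed from script. Below is a minimal C# sketch using Unity's ReflectionProbe API (the component reference and refresh policy are just example choices); the shader-side sampling follows after it:

using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch: switch a probe to Realtime and refresh it on demand.
public class ProbeRefresher : MonoBehaviour
{
    public ReflectionProbe probe; // assign the probe in the Inspector

    void Start()
    {
        probe.mode = ReflectionProbeMode.Realtime;                   // Baked / Custom / Realtime
        probe.refreshMode = ReflectionProbeRefreshMode.ViaScripting; // update only when requested
        probe.RenderProbe();                                         // capture the surroundings once
    }
}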

            v2f vert (appdata v)
            {
                v2f o = (v2f) 0;
                o.vertex = TransformObjectToHClip(v.vertex.xyz);
                o.worldNormal = TransformObjectToWorldNormal(v.normal.xyz);
                return o;
            }

            half4 frag (v2f i) : SV_Target
            {
                // Reflect the main light direction around the surface normal
                Light light = GetMainLight();
                float3 worldLightDir = light.direction;
                float3 reflectDir = normalize(reflect(-worldLightDir, i.worldNormal));

                // unity_SpecCube0 holds the cubemap of the reflection probe affecting this object
                half4 envCol = SAMPLE_TEXTURECUBE(unity_SpecCube0, samplerunity_SpecCube0, reflectDir);
                half3 envHDRCol = DecodeHDREnvironment(envCol, unity_SpecCube0_HDR);
                return half4(envHDRCol, 1);
            }
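
The sample above always reads mip 0 of the probe, which gives a perfectly sharp mirror. Rougher, blurrier reflections can be approximated by sampling a higher mip based on roughness; a small sketch, assuming a _Roughness material property and that Lighting.hlsl is included:

                // Sketch: pick a blurrier probe mip from a perceptual roughness value.
                half mip = PerceptualRoughnessToMipmapLevel(_Roughness);
                half4 envColRough = SAMPLE_TEXTURECUBE_LOD(unity_SpecCube0, samplerunity_SpecCube0, reflectDir, mip);
                half3 envHDRColRough = DecodeHDREnvironment(envColRough, unity_SpecCube0_HDR);

URP's GlossyEnvironmentReflection helper (used in the SSPR shader in section 5) essentially wraps this pattern, including the HDR decode.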

3. Planar Reflection

Suited to reflections on flat surfaces such as mirrors and water planes. The objects to be reflected are rendered a second time by a camera that is mirrored through the plane with a reflection matrix.
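
For reference, CalculateReflectionMatrix in the script below builds the standard homogeneous reflection matrix about the plane n·x + d = 0 (with unit normal n = (n_x, n_y, n_z)):

$$
M_{\text{reflect}} =
\begin{pmatrix}
1-2n_x^2 & -2n_xn_y & -2n_xn_z & -2dn_x \\
-2n_xn_y & 1-2n_y^2 & -2n_yn_z & -2dn_y \\
-2n_xn_z & -2n_yn_z & 1-2n_z^2 & -2dn_z \\
0 & 0 & 0 & 1
\end{pmatrix}
$$

This matrix is composed with the real camera's worldToCameraMatrix so the reflection camera sees the mirrored scene, and CalculateObliqueMatrix is then used to clip everything below the reflection plane.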

C# part

using System;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Serialization;
using Unity.Mathematics;

namespace UnityEngine.Rendering.Universal
{
    [ExecuteAlways]
    public class PlanarReflections : MonoBehaviour
    {
        [Serializable]
        public enum ResolutionMulltiplier
        {
            Full,
            Half,
            Third,
            Quarter
        }

        [Serializable]
        public class PlanarReflectionSettings
        {
            public ResolutionMulltiplier m_ResolutionMultiplier = ResolutionMulltiplier.Third;
            public float m_ClipPlaneOffset = 0.07f;
            public LayerMask m_ReflectLayers = -1;
            public bool m_Shadows;
        }

        [SerializeField]
        public PlanarReflectionSettings m_settings = new PlanarReflectionSettings();

        public GameObject target;
        [FormerlySerializedAs("camOffset")] public float m_planeOffset;

        private static Camera _reflectionCamera;
        private RenderTexture _reflectionTexture;
        private readonly int _planarReflectionTextureId = Shader.PropertyToID("_PlanarReflectionTexture");

        private int2 _oldReflectionTextureSize;

        public static event Action<ScriptableRenderContext, Camera> BeginPlanarReflections;

        private void OnEnable()
        {
            RenderPipelineManager.beginCameraRendering += ExecutePlanarReflections;
        }
        
        private void OnDisable()
        {
            Cleanup();
        }

        private void OnDestroy()
        {
            Cleanup();
        }

        private void Cleanup()
        {
            RenderPipelineManager.beginCameraRendering -= ExecutePlanarReflections;

            if(_reflectionCamera)
            {
                _reflectionCamera.targetTexture = null;
                SafeDestroy(_reflectionCamera.gameObject);
            }
            if (_reflectionTexture)
            {
                RenderTexture.ReleaseTemporary(_reflectionTexture);
            }
        }

        private static void SafeDestroy(Object obj)
        {
            if (Application.isEditor)
            {
                DestroyImmediate(obj);
            }
            else
            {
                Destroy(obj);
            }
        }

        private void UpdateCamera(Camera src, Camera dest)
        {
            if (dest == null) return;

            dest.CopyFrom(src);
            dest.useOcclusionCulling = false;
            if (dest.gameObject.TryGetComponent(out UniversalAdditionalCameraData camData))
            {
                camData.renderShadows = m_settings.m_Shadows;
            }
        }

        private void UpdateReflectionCamera(Camera realCamera)
        {
            if (_reflectionCamera == null)
                _reflectionCamera = CreateMirrorObjects();
            
            Vector3 pos = Vector3.zero;
            Vector3 normal = Vector3.up;
            if (target != null)
            {
                pos = target.transform.position + Vector3.up * m_planeOffset;
                normal = target.transform.up;
            }

            UpdateCamera(realCamera, _reflectionCamera);
            
            var d = -Vector3.Dot(normal, pos) - m_settings.m_ClipPlaneOffset;
            var reflectionPlane = new Vector4(normal.x, normal.y, normal.z, d);

            var reflection = Matrix4x4.identity;
            reflection *= Matrix4x4.Scale(new Vector3(1, -1, 1));

            CalculateReflectionMatrix(ref reflection, reflectionPlane);
            var oldPosition = realCamera.transform.position - new Vector3(0, pos.y * 2, 0);
            var newPosition = ReflectPosition(oldPosition);
            _reflectionCamera.transform.forward = Vector3.Scale(realCamera.transform.forward, new Vector3(1, -1, 1));
            _reflectionCamera.worldToCameraMatrix = realCamera.worldToCameraMatrix * reflection;
            
            var clipPlane = CameraSpacePlane(_reflectionCamera, pos - Vector3.up * 0.1f, normal, 1.0f);
            var projection = realCamera.CalculateObliqueMatrix(clipPlane);
            _reflectionCamera.projectionMatrix = projection;
            _reflectionCamera.cullingMask = m_settings.m_ReflectLayers;
            _reflectionCamera.transform.position = newPosition;
        }

        private static void CalculateReflectionMatrix(ref Matrix4x4 reflectionMat, Vector4 plane)
        {
            reflectionMat.m00 = (1F - 2F * plane[0] * plane[0]);
            reflectionMat.m01 = (-2F * plane[0] * plane[1]);
            reflectionMat.m02 = (-2F * plane[0] * plane[2]);
            reflectionMat.m03 = (-2F * plane[3] * plane[0]);

            reflectionMat.m10 = (-2F * plane[1] * plane[0]);
            reflectionMat.m11 = (1F - 2F * plane[1] * plane[1]);
            reflectionMat.m12 = (-2F * plane[1] * plane[2]);
            reflectionMat.m13 = (-2F * plane[3] * plane[1]);

            reflectionMat.m20 = (-2F * plane[2] * plane[0]);
            reflectionMat.m21 = (-2F * plane[2] * plane[1]);
            reflectionMat.m22 = (1F - 2F * plane[2] * plane[2]);
            reflectionMat.m23 = (-2F * plane[3] * plane[2]);

            reflectionMat.m30 = 0F;
            reflectionMat.m31 = 0F;
            reflectionMat.m32 = 0F;
            reflectionMat.m33 = 1F;
        }

        private static Vector3 ReflectPosition(Vector3 pos)
        {
            var newPos = new Vector3(pos.x, -pos.y, pos.z);
            return newPos;
        }

        private float GetScaleValue()
        {
            switch(m_settings.m_ResolutionMultiplier)
            {
                case ResolutionMulltiplier.Full:
                    return 1f;
                case ResolutionMulltiplier.Half:
                    return 0.5f;
                case ResolutionMulltiplier.Third:
                    return 0.33f;
                case ResolutionMulltiplier.Quarter:
                    return 0.25f;
                default:
                    return 0.5f;
            }
        }

        private Vector4 CameraSpacePlane(Camera cam, Vector3 pos, Vector3 normal, float sideSign)
        {
            var offsetPos = pos + normal * m_settings.m_ClipPlaneOffset;
            var m = cam.worldToCameraMatrix;
            var cameraPosition = m.MultiplyPoint(offsetPos);
            var cameraNormal = m.MultiplyVector(normal).normalized * sideSign;
            return new Vector4(cameraNormal.x, cameraNormal.y, cameraNormal.z, -Vector3.Dot(cameraPosition, cameraNormal));
        }

        private Camera CreateMirrorObjects()
        {
            var go = new GameObject("Planar Reflections",typeof(Camera));
            var cameraData = go.AddComponent(typeof(UniversalAdditionalCameraData)) as UniversalAdditionalCameraData;

            cameraData.requiresColorOption = CameraOverrideOption.Off;
            cameraData.requiresDepthOption = CameraOverrideOption.Off;
            cameraData.SetRenderer(1);

            var t = transform;
            var reflectionCamera = go.GetComponent<Camera>();
            reflectionCamera.transform.SetPositionAndRotation(t.position, t.rotation);
            reflectionCamera.depth = -10;
            reflectionCamera.enabled = false;
            go.hideFlags = HideFlags.HideAndDontSave;

            return reflectionCamera;
        }

        private void PlanarReflectionTexture(Camera cam)
        {
            if (_reflectionTexture == null)
            {
                var res = ReflectionResolution(cam, UniversalRenderPipeline.asset.renderScale);
                bool useHdr10 = RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
                RenderTextureFormat hdrFormat = useHdr10 ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;
                _reflectionTexture = RenderTexture.GetTemporary(res.x, res.y, 16,
                    GraphicsFormatUtility.GetGraphicsFormat(hdrFormat, true));
            }
            _reflectionCamera.targetTexture =  _reflectionTexture;
        }

        private int2 ReflectionResolution(Camera cam, float scale)
        {
            var x = (int)(cam.pixelWidth * scale * GetScaleValue());
            var y = (int)(cam.pixelHeight * scale * GetScaleValue());
            return new int2(x, y);
        }

        private void ExecutePlanarReflections(ScriptableRenderContext context, Camera camera)
        {
            if (camera.cameraType == CameraType.Reflection || camera.cameraType == CameraType.Preview)
                return;

            UpdateReflectionCamera(camera); 
            PlanarReflectionTexture(camera);

            var data = new PlanarReflectionSettingData();
            data.Set();

            BeginPlanarReflections?.Invoke(context, _reflectionCamera); 
            UniversalRenderPipeline.RenderSingleCamera(context, _reflectionCamera); 

            data.Restore();
            Shader.SetGlobalTexture(_planarReflectionTextureId, _reflectionTexture);
        }

        class PlanarReflectionSettingData
        {
            private readonly bool _fog;
            private readonly int _maxLod;
            private readonly float _lodBias;

            public PlanarReflectionSettingData()
            {
                _fog = RenderSettings.fog;
                _maxLod = QualitySettings.maximumLODLevel;
                _lodBias = QualitySettings.lodBias;
            }

            public void Set()
            {
                GL.invertCulling = true;
                RenderSettings.fog = false;
                QualitySettings.maximumLODLevel = 1;
                QualitySettings.lodBias = _lodBias * 0.5f;
            }

            public void Restore()
            {
                GL.invertCulling = false;
                RenderSettings.fog = _fog;
                QualitySettings.maximumLODLevel = _maxLod;
                QualitySettings.lodBias = _lodBias;
            }
        }
    }
}

Shader part

            v2f vert (appdata v)
            {
                v2f o = (v2f)0;
                o.vertex = TransformObjectToHClip(v.vertex.xyz);
                // Screen-space UV used to look up the planar reflection texture
                o.uv = ComputeScreenPos(o.vertex);
                return o;
            }

            half4 frag (v2f i) : SV_Target
            {
                // Perspective divide, then sample the reflection RT rendered by the C# script
                float2 screenUV = i.uv.xy / i.uv.w;
                half4 col = SAMPLE_TEXTURE2D(_PlanarReflectionTexture, sampler_PlanarReflectionTexture, screenUV);
                return col;
            }
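
The fragment shader above assumes _PlanarReflectionTexture is declared with URP's macros (the C# script sets it globally via Shader.SetGlobalTexture); a minimal declaration sketch with an assumed sampler name:

            TEXTURE2D(_PlanarReflectionTexture);
            SAMPLER(sampler_PlanarReflectionTexture);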

PlanarReflection

4. SSR

Reflection information is gathered by ray marching against the depth buffer, so only pixels that are visible on screen can be reflected. The shader below reads _CameraDepthTexture and _CameraOpaqueTexture, which requires Depth Texture and Opaque Texture to be enabled on the URP asset.

bool rayMarching(float3 o, float3 r, out float2 hitUV)
{
	float3 end = o;
	float stepSize = 0.5;
	float thickness = 0.1;
	float traveled = 0;
	int max_marching = 256;
	float max_distance = 500;

	hitUV = 0; // initialize the out parameter for the miss paths

	UNITY_LOOP
	for (int i = 1; i <= max_marching; ++i)
	{
		// Step along the reflection ray in view space
		end += r * stepSize;
		traveled += stepSize;

		if (traveled > max_distance)
			return false;

		// Negative means the ray point has gone behind the geometry in the depth buffer
		float depthDelta = compareWithDepth(end);
		if (depthDelta < 0)
		{
			if (abs(depthDelta) < thickness)
			{
				hitUV = ViewPosToCS(end);
				return true;
			}
			// Overshot: step back and refine with a smaller step
			end -= r * stepSize;
			traveled -= stepSize;
			stepSize *= 0.5;
		}
	}
	return false;
}

......
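
The listing omits the two helpers that rayMarching calls. The bodies below are only a sketch of what they might look like, not the author's code: they assume the ray lives in view space with negative z in front of the camera, and that _CameraDepthTexture is declared as in the fragment shader further down. In the full shader they must appear before rayMarching.

// Sketch only: assumed implementations of the helpers used by rayMarching.

// Project a view-space position to [0,1] screen UV.
float2 ViewPosToCS(float3 vsPos)
{
	float4 posCS = mul(unity_CameraProjection, float4(vsPos, 1.0));
	float2 uv = posCS.xy / posCS.w * 0.5 + 0.5;
	#if UNITY_UV_STARTS_AT_TOP
		uv.y = 1.0 - uv.y;
	#endif
	return uv;
}

// Compare a view-space ray point against the depth buffer.
// Returns (scene eye depth - ray eye depth); negative means the ray is behind geometry.
float compareWithDepth(float3 vsPos)
{
	float2 uv = ViewPosToCS(vsPos);
	float rawDepth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv);
	float sceneEyeDepth = LinearEyeDepth(rawDepth, _ZBufferParams);
	float rayEyeDepth = -vsPos.z; // assumption: view space looks down -Z
	return sceneEyeDepth - rayEyeDepth;
}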

v2f vert (appdata v)
{
	v2f o;
	o.vertex = TransformObjectToHClip(v.vertex.xyz);
	o.uv = v.uv;

	o.positionWS = TransformObjectToWorld(v.vertex.xyz);
	o.positionOS = v.vertex;

	// Clip-space position remapped to [0,1] screen coordinates
	float4 screenPos = TransformObjectToHClip(v.vertex.xyz);
	screenPos.xyz /= screenPos.w;
	screenPos.xy = screenPos.xy * 0.5 + 0.5;

	o.positionCS = screenPos;

	#if UNITY_UV_STARTS_AT_TOP
		o.positionCS.y = 1 - o.positionCS.y;
	#endif

	// View-space ray to the far plane, used to reconstruct the view-space position in the fragment shader
	float zFar = _ProjectionParams.z;
	float4 vsRay = float4(float3(o.positionCS.xy * 2.0 - 1.0, 1) * zFar, zFar);
	vsRay = mul(unity_CameraInvProjection, vsRay);

	o.vsRay = vsRay;
	return o;
}

float4 frag (v2f i) : SV_Target
{
	float4 screenPos = i.positionCS;

	// Linear [0,1] depth of the current pixel
	float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, screenPos.xy);
	depth = Linear01Depth(depth, _ZBufferParams);

	// Small animated noise to perturb the reflection normal
	float2 noiseTex = (SAMPLE_TEXTURE2D(_Noise, sampler_Noise, (i.uv * 5) + _Time.x).xy * 2 - 1) * 0.1;

	float3 wsNormal = normalize(float3(noiseTex.x, 1, noiseTex.y));
	float3 vsNormal = TransformWorldToViewDir(wsNormal);

	// Reconstruct the view-space position and reflect the view ray around the normal
	float3 vsRayOrigin = i.vsRay.xyz * depth;
	float3 reflectionDir = normalize(reflect(vsRayOrigin, vsNormal));

	float2 hitUV = 0;
	float3 col = SAMPLE_TEXTURE2D(_CameraOpaqueTexture, sampler_CameraOpaqueTexture, screenPos.xy).xyz;
	if (rayMarching(vsRayOrigin, reflectionDir, hitUV))
	{
		// Hit: add the color found at the intersection point
		float3 hitCol = SAMPLE_TEXTURE2D(_CameraOpaqueTexture, sampler_CameraOpaqueTexture, hitUV).xyz;
		col += hitCol;
	}
	else
	{
		// Miss: fall back to a skybox cubemap lookup
		float3 viewPosToWorld = normalize(i.positionWS.xyz - _WorldSpaceCameraPos.xyz);
		float3 reflectDir = reflect(viewPosToWorld, wsNormal);
		col = SAMPLE_TEXTURECUBE(_SkyBoxCubeMap, sampler_SkyBoxCubeMap, reflectDir).rgb;
	}

	return float4(col, 1);
}

ScreenSpaceReflection

5. SSPR

An improvement over SSR for planar surfaces with better performance. It relies on a compute shader, and some phones do not support writing to buffers/textures (UAVs) from shaders, which needs to be kept in mind.
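
Before enqueuing the feature it can be worth guarding against devices that cannot run compute shaders at all. A minimal sketch using Unity's SystemInfo API (how it is wired into the renderer feature below is left open):

using UnityEngine;

// Sketch: skip SSPR entirely on devices without compute shader support.
public static class SSPRSupport
{
    public static bool CanRunSSPR()
    {
        // The pass below dispatches a compute shader and writes RWTexture2D targets.
        return SystemInfo.supportsComputeShaders;
    }
}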

C# part

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

public class MobileSSPRRendererFeature : ScriptableRendererFeature
{
    public static MobileSSPRRendererFeature instance; 
    [System.Serializable]
    public class PassSettings
    {
        [Header("Settings")]
        public bool ShouldRenderSSPR = true;
        public float HorizontalReflectionPlaneHeightWS = 0.01f; 
        [Range(0.01f, 1f)]
        public float FadeOutScreenBorderWidthVerticle = 0.25f;
        [Range(0.01f, 1f)]
        public float FadeOutScreenBorderWidthHorizontal = 0.35f;
        [Range(0,8f)]
        public float ScreenLRStretchIntensity = 4;
        [Range(-1f,1f)]
        public float ScreenLRStretchThreshold = 0.7f;
        [ColorUsage(true,true)]
        public Color TintColor = Color.white;
        
        [Header("Performance Settings")]
        [Range(128, 1024)]
        [Tooltip("set to 512 or below for better performance, if visual quality lost is acceptable")]
        public int RT_height = 512;
        [Tooltip("can set to false for better performance, if visual quality lost is acceptable")]
        public bool UseHDR = true;
        [Tooltip("can set to false for better performance, if visual quality lost is acceptable")]
        public bool ApplyFillHoleFix = true;
        [Tooltip("can set to false for better performance, if flickering is acceptable")]
        public bool ShouldRemoveFlickerFinalControl = true;
        
        [Header("Danger Zone")]
        [Tooltip("You should always turn this on, unless you want to debug")]
        public bool EnablePerPlatformAutoSafeGuard = true;
    }
    public PassSettings Settings = new PassSettings();

    public class CustomRenderPass : ScriptableRenderPass
    {
        static readonly int _SSPR_ColorRT_pid = Shader.PropertyToID("_MobileSSPR_ColorRT");
        static readonly int _SSPR_PackedDataRT_pid = Shader.PropertyToID("_MobileSSPR_PackedDataRT");
        static readonly int _SSPR_PosWSyRT_pid = Shader.PropertyToID("_MobileSSPR_PosWSyRT");
        RenderTargetIdentifier _SSPR_ColorRT_rti = new RenderTargetIdentifier(_SSPR_ColorRT_pid);
        RenderTargetIdentifier _SSPR_PackedDataRT_rti = new RenderTargetIdentifier(_SSPR_PackedDataRT_pid);
        RenderTargetIdentifier _SSPR_PosWSyRT_rti = new RenderTargetIdentifier(_SSPR_PosWSyRT_pid);

        ShaderTagId lightMode_SSPR_sti = new ShaderTagId("MobileSSPR");

        const int SHADER_NUMTHREAD_X = 8; 
        const int SHADER_NUMTHREAD_Y = 8; 

        PassSettings settings;
        ComputeShader cs;
        public CustomRenderPass(PassSettings settings)
        {
            this.settings = settings;

            cs = (ComputeShader)Resources.Load("MobileSSPRComputeShader");
        }

        int GetRTHeight()
        {
            return Mathf.CeilToInt(settings.RT_height / (float)SHADER_NUMTHREAD_Y) * SHADER_NUMTHREAD_Y;
        }
        int GetRTWidth()
        {
            float aspect = (float)Screen.width / Screen.height;
            return Mathf.CeilToInt(GetRTHeight() * aspect / (float)SHADER_NUMTHREAD_X) * SHADER_NUMTHREAD_X;
        }

        bool ShouldUseSinglePassUnsafeAllowFlickeringDirectResolve()
        {
            if (settings.EnablePerPlatformAutoSafeGuard)
            {
               
                if (!SystemInfo.SupportsRenderTextureFormat(RenderTextureFormat.RInt))
                    return true;
                
                if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal)
                    return true;
#if UNITY_EDITOR
                //PC(DirectX) can use RenderTextureFormat.RInt + InterlockedMin() without any problem, use Non-Mobile path.
                //Non-Mobile path will NOT produce any flickering
                if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D11 || SystemInfo.graphicsDeviceType == GraphicsDeviceType.Direct3D12)
                    return false;
#elif UNITY_ANDROID
                //- samsung galaxy A70(Adreno612) will fail if use RenderTextureFormat.RInt + InterlockedMin() in compute shader
                //- but Lenovo S5(Adreno506) is correct, WTF???
                //because behavior is different between android devices, we assume all android are not safe to use RenderTextureFormat.RInt + InterlockedMin() in compute shader
                //so android always go mobile path
                return true;
#endif
            }
            
            return !settings.ShouldRemoveFlickerFinalControl;
        }
        
        public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
        {    
            RenderTextureDescriptor rtd = new RenderTextureDescriptor(GetRTWidth(), GetRTHeight(),RenderTextureFormat.Default, 0, 0);

            rtd.sRGB = false; 
            rtd.enableRandomWrite = true; 
            
            bool shouldUseHDRColorRT = settings.UseHDR;
            if (cameraTextureDescriptor.colorFormat == RenderTextureFormat.ARGB32)
                shouldUseHDRColorRT = false;
            rtd.colorFormat = shouldUseHDRColorRT ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32; 
            cmd.GetTemporaryRT(_SSPR_ColorRT_pid, rtd);
            
            if (ShouldUseSinglePassUnsafeAllowFlickeringDirectResolve())
            {
                rtd.colorFormat = RenderTextureFormat.RFloat;
                cmd.GetTemporaryRT(_SSPR_PosWSyRT_pid, rtd);
            }
            else
            {
                rtd.colorFormat = RenderTextureFormat.RInt;
                cmd.GetTemporaryRT(_SSPR_PackedDataRT_pid, rtd);
            }
        }
        
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            CommandBuffer cb = CommandBufferPool.Get("SSPR");

            int dispatchThreadGroupXCount = GetRTWidth() / SHADER_NUMTHREAD_X;
            int dispatchThreadGroupYCount = GetRTHeight() / SHADER_NUMTHREAD_Y;
            int dispatchThreadGroupZCount = 1;

            if (settings.ShouldRenderSSPR)
            {
                cb.SetComputeVectorParam(cs, Shader.PropertyToID("_RTSize"), new Vector2(GetRTWidth(), GetRTHeight()));
                cb.SetComputeFloatParam(cs, Shader.PropertyToID("_HorizontalPlaneHeightWS"), settings.HorizontalReflectionPlaneHeightWS);

                cb.SetComputeFloatParam(cs, Shader.PropertyToID("_FadeOutScreenBorderWidthVerticle"), settings.FadeOutScreenBorderWidthVerticle);
                cb.SetComputeFloatParam(cs, Shader.PropertyToID("_FadeOutScreenBorderWidthHorizontal"), settings.FadeOutScreenBorderWidthHorizontal);
                cb.SetComputeVectorParam(cs, Shader.PropertyToID("_CameraDirection"), renderingData.cameraData.camera.transform.forward);
                cb.SetComputeFloatParam(cs, Shader.PropertyToID("_ScreenLRStretchIntensity"), settings.ScreenLRStretchIntensity);
                cb.SetComputeFloatParam(cs, Shader.PropertyToID("_ScreenLRStretchThreshold"), settings.ScreenLRStretchThreshold);
                cb.SetComputeVectorParam(cs, Shader.PropertyToID("_FinalTintColor"), settings.TintColor);
                
                Camera camera = renderingData.cameraData.camera;
                Matrix4x4 VP = GL.GetGPUProjectionMatrix(camera.projectionMatrix, true) * camera.worldToCameraMatrix;
                cb.SetComputeMatrixParam(cs, "_VPMatrix", VP);

                if (ShouldUseSinglePassUnsafeAllowFlickeringDirectResolve())
                {
                    int kernel_MobilePathSinglePassColorRTDirectResolve = cs.FindKernel("MobilePathSinglePassColorRTDirectResolve");
                    cb.SetComputeTextureParam(cs, kernel_MobilePathSinglePassColorRTDirectResolve, "ColorRT", _SSPR_ColorRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_MobilePathSinglePassColorRTDirectResolve, "PosWSyRT", _SSPR_PosWSyRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_MobilePathSinglePassColorRTDirectResolve, "_CameraOpaqueTexture", new RenderTargetIdentifier("_CameraOpaqueTexture"));
                    cb.SetComputeTextureParam(cs, kernel_MobilePathSinglePassColorRTDirectResolve, "_CameraDepthTexture", new RenderTargetIdentifier("_CameraDepthTexture"));
                    cb.DispatchCompute(cs, kernel_MobilePathSinglePassColorRTDirectResolve, dispatchThreadGroupXCount, dispatchThreadGroupYCount, dispatchThreadGroupZCount);

                }
                else
                {
                   
                    int kernel_NonMobilePathClear = cs.FindKernel("NonMobilePathClear");
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathClear, "HashRT", _SSPR_PackedDataRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathClear, "ColorRT", _SSPR_ColorRT_rti);
                    cb.DispatchCompute(cs, kernel_NonMobilePathClear, dispatchThreadGroupXCount, dispatchThreadGroupYCount, dispatchThreadGroupZCount);
                    
                    int kernel_NonMobilePathRenderHashRT = cs.FindKernel("NonMobilePathRenderHashRT");
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathRenderHashRT, "HashRT", _SSPR_PackedDataRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathRenderHashRT, "_CameraDepthTexture", new RenderTargetIdentifier("_CameraDepthTexture"));

                    cb.DispatchCompute(cs, kernel_NonMobilePathRenderHashRT, dispatchThreadGroupXCount, dispatchThreadGroupYCount, dispatchThreadGroupZCount);
                    
                    int kernel_NonMobilePathResolveColorRT = cs.FindKernel("NonMobilePathResolveColorRT");
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathResolveColorRT, "_CameraOpaqueTexture", new RenderTargetIdentifier("_CameraOpaqueTexture"));
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathResolveColorRT, "ColorRT", _SSPR_ColorRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_NonMobilePathResolveColorRT, "HashRT", _SSPR_PackedDataRT_rti);
                    cb.DispatchCompute(cs, kernel_NonMobilePathResolveColorRT, dispatchThreadGroupXCount, dispatchThreadGroupYCount, dispatchThreadGroupZCount);
                }
                
                if(settings.ApplyFillHoleFix)
                {
                    int kernel_FillHoles = cs.FindKernel("FillHoles");
                    cb.SetComputeTextureParam(cs, kernel_FillHoles, "ColorRT", _SSPR_ColorRT_rti);
                    cb.SetComputeTextureParam(cs, kernel_FillHoles, "PackedDataRT", _SSPR_PackedDataRT_rti);
                    cb.DispatchCompute(cs, kernel_FillHoles, Mathf.CeilToInt(dispatchThreadGroupXCount / 2f), Mathf.CeilToInt(dispatchThreadGroupYCount / 2f), dispatchThreadGroupZCount);
                }
                
                cb.SetGlobalTexture(_SSPR_ColorRT_pid, _SSPR_ColorRT_rti);
                cb.EnableShaderKeyword("_MobileSSPR");
            }
            else
            {
                cb.DisableShaderKeyword("_MobileSSPR");
            }

            context.ExecuteCommandBuffer(cb);
            CommandBufferPool.Release(cb);

            DrawingSettings drawingSettings = CreateDrawingSettings(lightMode_SSPR_sti, ref renderingData, SortingCriteria.CommonOpaque);
            FilteringSettings filteringSettings = new FilteringSettings(RenderQueueRange.all);
            context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref filteringSettings);
        }
        public override void FrameCleanup(CommandBuffer cmd)
        {
            cmd.ReleaseTemporaryRT(_SSPR_ColorRT_pid);

            if(ShouldUseSinglePassUnsafeAllowFlickeringDirectResolve())
                cmd.ReleaseTemporaryRT(_SSPR_PosWSyRT_pid);
            else
                cmd.ReleaseTemporaryRT(_SSPR_PackedDataRT_pid);
        }
    }

    CustomRenderPass m_ScriptablePass;

    public override void Create()
    {
        instance = this;

        m_ScriptablePass = new CustomRenderPass(Settings);
        
        m_ScriptablePass.renderPassEvent = RenderPassEvent.AfterRenderingTransparents;
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        renderer.EnqueuePass(m_ScriptablePass);
    }
}

Compute shader part

#define NUMTHREAD_X 8
#define NUMTHREAD_Y 8

#define MAX_UINT 4294967295

#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"

SamplerState PointClampSampler;
SamplerState LinearClampSampler;

float2 _RTSize;
float _HorizontalPlaneHeightWS;
float _FadeOutScreenBorderWidthVerticle;
float _FadeOutScreenBorderWidthHorizontal; 
float3 _CameraDirection;

float4x4 _VPMatrix; 
float _ScreenLRStretchIntensity;
float _ScreenLRStretchThreshold;
float4 _FinalTintColor;

RWTexture2D<half4> ColorRT;
Texture2D<half4> _CameraOpaqueTexture;
Texture2D<float> _CameraDepthTexture;

RWTexture2D<uint> HashRT; 

RWTexture2D<float> PosWSyRT;

float3 ConvertScreenIDToPosWS(uint2 id)
{
	float2 screenUV = float2(id.x / (_RTSize.x), id.y / (_RTSize.y)); 
	float inputPixelRawDepth = _CameraDepthTexture.SampleLevel(PointClampSampler, screenUV, 0);
	
	float4 posCS = float4(screenUV * 2.0 - 1.0, inputPixelRawDepth, 1.0);
#if UNITY_UV_STARTS_AT_TOP
	posCS.y = -posCS.y;
#endif
	
	float4 posHWS = mul(UNITY_MATRIX_I_VP, posCS);
	float3 posWS = posHWS.xyz / posHWS.w;

	return posWS;
}
float3 MirrorPosWS(float3 inputPosWS)
{
	float3 reflectedPosWS = inputPosWS;
	reflectedPosWS.y -= _HorizontalPlaneHeightWS;
	reflectedPosWS.y *= -1;
	reflectedPosWS.y += _HorizontalPlaneHeightWS;

	return reflectedPosWS;
}
float2 ConvertReflectedPosWSToScreenUV(float3 reflectedPosWS)
{
	float4 reflectedPosCS = mul(_VPMatrix, float4(reflectedPosWS, 1));
	float2 reflectedPosNDCxy = reflectedPosCS.xy / reflectedPosCS.w;

	float2 reflectedScreenUV = reflectedPosNDCxy * 0.5 + 0.5;

	float Threshold = _ScreenLRStretchThreshold;
	float Intensity = _ScreenLRStretchIntensity;

	float HeightStretch = (abs(reflectedPosWS.y - _HorizontalPlaneHeightWS));
	float AngleStretch = (-_CameraDirection.y);
	float ScreenStretch = saturate(abs(reflectedScreenUV.x * 2 - 1) - Threshold);

	reflectedScreenUV.x = reflectedScreenUV.x * 2 - 1;
	reflectedScreenUV.x *= 1 + HeightStretch * AngleStretch * ScreenStretch * Intensity;
	reflectedScreenUV.x = saturate(reflectedScreenUV.x * 0.5 + 0.5);

#if UNITY_UV_STARTS_AT_TOP
	reflectedScreenUV.y = 1.0 - reflectedScreenUV.y;
#endif

	return reflectedScreenUV;
}
half ConvertOpaqueColorRTScreenUVToFadeAlphaParam(float2 screenUV, float reflectedPosWSy)
{
	half fadeoutAlpha = smoothstep(1, 1-_FadeOutScreenBorderWidthVerticle, screenUV.y);
	fadeoutAlpha *= smoothstep(1, 1 - _FadeOutScreenBorderWidthHorizontal * -reflectedPosWSy, abs(screenUV.x * 2 - 1));
	return fadeoutAlpha;
}

#pragma kernel NonMobilePathClear

[numthreads(NUMTHREAD_X, NUMTHREAD_Y, 1)]
void NonMobilePathClear(uint3 id : SV_DispatchThreadID)
{
	HashRT[id.xy] = MAX_UINT;
	ColorRT[uint2(id.xy)] = half4(0, 0, 0, 0);
}

#pragma kernel NonMobilePathRenderHashRT

[numthreads(NUMTHREAD_X,NUMTHREAD_Y,1)]
void NonMobilePathRenderHashRT(uint3 id : SV_DispatchThreadID)
{
	float3 posWS = ConvertScreenIDToPosWS(id);
	
	if(posWS.y <= _HorizontalPlaneHeightWS)
		return;
	
	float3 reflectedPosWS = MirrorPosWS(posWS);
	
	float2 reflectedScreenUV = ConvertReflectedPosWSToScreenUV(reflectedPosWS);
	
	float2 earlyExitTest = abs(reflectedScreenUV - 0.5);
	if (earlyExitTest.x >= 0.5 || earlyExitTest.y >= 0.5)
		return;
	uint2 reflectedScreenID = reflectedScreenUV * _RTSize;
	
	float2 screenUV = id.xy / _RTSize;
	half fadeoutAlpha = ConvertOpaqueColorRTScreenUVToFadeAlphaParam(screenUV, reflectedPosWS.y);

	// Pack the source pixel into one uint: y in bits 20-31, x in bits 8-19, fade alpha in bits 0-7.
	uint fadeoutAlphaInt = fadeoutAlpha * 255;
	uint hash = id.y << 20 | id.x << 8 | fadeoutAlphaInt;
	// InterlockedMin keeps the candidate with the smallest packed value (smallest source y wins),
	// so overlapping writes to the same reflected texel resolve deterministically.
	InterlockedMin(HashRT[reflectedScreenID], hash);
}

#pragma kernel NonMobilePathResolveColorRT

[numthreads(NUMTHREAD_X, NUMTHREAD_Y, 1)]
void NonMobilePathResolveColorRT(uint3 id : SV_DispatchThreadID)
{
	uint packedData = HashRT[id.xy];
	if (packedData == MAX_UINT) // nothing was written to this texel -> no reflection here
	{
		ColorRT[id.xy] = 0;
		return;
	}
	// Unpack the winning candidate written by NonMobilePathRenderHashRT: x = bits 8-19, y = bits 20-31, alpha = bits 0-7.
	uint2 sampleID = uint2((packedData >> 8) & 0xFFF, packedData >> 20);
	uint alphaAsInt = packedData & 0xFF;
	half alphaAsFloatingPoint = alphaAsInt / 255.0;

	float2 sampleUV = sampleID.xy / _RTSize;
	half3 sampledColor = _CameraOpaqueTexture.SampleLevel(LinearClampSampler, sampleUV, 0);

	half4 finalColor = half4(sampledColor, alphaAsFloatingPoint) * _FinalTintColor;
	finalColor.a = saturate(finalColor.a);
	ColorRT[id.xy] = finalColor;
}
#pragma kernel MobilePathSinglePassColorRTDirectResolve

[numthreads(NUMTHREAD_X,NUMTHREAD_Y,1)]
void MobilePathSinglePassColorRTDirectResolve(uint3 id : SV_DispatchThreadID)
{
    ColorRT[uint2(id.xy)] = half4(0,0,0,0);
    PosWSyRT[uint2(id.xy)] = 9999999;
	
	float3 posWS = ConvertScreenIDToPosWS(id);
	
	if(posWS.y <= _HorizontalPlaneHeightWS)
		return;
	
	float3 reflectedPosWS = MirrorPosWS(posWS);

	
	float2 reflectedScreenUV = ConvertReflectedPosWSToScreenUV(reflectedPosWS);
	
	float2 earlyExitTest = abs(reflectedScreenUV - 0.5);
	if (earlyExitTest.x >= 0.5 || earlyExitTest.y >= 0.5) 
		return;
	uint2 reflectedScreenID = reflectedScreenUV * _RTSize;
	
	
	if(posWS.y < PosWSyRT[reflectedScreenID])
	{
		float2 screenUV = id.xy / _RTSize;
		half3 inputPixelSceneColor = _CameraOpaqueTexture.SampleLevel(LinearClampSampler, screenUV, 0).rgb;

		half fadeoutAlpha = ConvertOpaqueColorRTScreenUVToFadeAlphaParam(screenUV, reflectedPosWS.y);
		
		half4 color = half4(inputPixelSceneColor,fadeoutAlpha) * _FinalTintColor;
		color.a = saturate(color.a);
		ColorRT[reflectedScreenID] = color;
		PosWSyRT[reflectedScreenID] = posWS.y;
	}
}

#pragma kernel FillHoles

[numthreads(NUMTHREAD_X, NUMTHREAD_Y, 1)]
void FillHoles(uint3 id : SV_DispatchThreadID)
{
	id.xy *= 2;
	
	half4 center = ColorRT[id.xy + uint2(0, 0)];
	half4 right = ColorRT[id.xy + uint2(0, 1)];
	half4 bottom = ColorRT[id.xy + uint2(1, 0)];
	half4 bottomRight = ColorRT[id.xy + uint2(1, 1)];

	half4 best = center;
	best = right.a > best.a + 0.5 ? right : best;
	best = bottom.a > best.a + 0.5 ? bottom : best;
	best = bottomRight.a > best.a + 0.5 ? bottomRight : best;

	ColorRT[id.xy + uint2(0, 0)] = best.a > center.a + 0.5 ? best : center;
	ColorRT[id.xy + uint2(0, 1)] = best.a > right.a + 0.5 ? best : right;
	ColorRT[id.xy + uint2(1, 0)] = best.a > bottom.a + 0.5 ? best : bottom;
	ColorRT[id.xy + uint2(1, 1)] = best.a > bottomRight.a + 0.5 ? best : bottomRight;
}

Shader part

            Varyings vert(Attributes IN)
            {
                Varyings OUT;
                OUT.positionHCS = TransformObjectToHClip(IN.positionOS.xyz);
                OUT.uv = TRANSFORM_TEX(IN.uv, _BaseMap) + _Time.y * _UV_MoveSpeed;
                OUT.screenPos = ComputeScreenPos(OUT.positionHCS);
                OUT.posWS = TransformObjectToWorld(IN.positionOS.xyz);
                return OUT;
            }

            half4 frag(Varyings IN) : SV_Target
            {
                half3 baseColor = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, IN.uv).rgb * _BaseColor.rgb;

                // Noise used to perturb the screen UV so the reflection looks like a rippled surface
                float2 noise = SAMPLE_TEXTURE2D(_SSPR_UVNoiseTex, sampler_SSPR_UVNoiseTex, IN.uv).xy;
                noise = noise * 2 - 1;
                noise.y = -abs(noise.y);
                noise.x *= 0.25;
                noise *= _SSPR_NoiseIntensity;

                // Fallback: reflection probe sample along the view direction mirrored about the horizontal plane
                half3 viewWS = normalize(IN.posWS - _WorldSpaceCameraPos);
                half3 reflectDirWS = viewWS * half3(1, -1, 1);
                half3 reflectionProbeResult = GlossyEnvironmentReflection(reflectDirWS, _Roughness, 1);

                // SSPR result rendered by the compute shader; its alpha acts as a validity/fade mask
                half2 screenUV = IN.screenPos.xy / IN.screenPos.w;
                half4 SSPRResult = SAMPLE_TEXTURE2D(_MobileSSPR_ColorRT, LinearClampSampler, screenUV + noise);
                half3 finalReflection = lerp(reflectionProbeResult, SSPRResult.rgb, SSPRResult.a * _BaseColor.a);

                // Mask which areas of the surface show the reflection
                half reflectionArea = SAMPLE_TEXTURE2D(_ReflectionAreaTex, sampler_ReflectionAreaTex, IN.uv).r;
                half3 finalRGB = lerp(baseColor, finalReflection, reflectionArea);

                return half4(finalRGB, 1);
            }

ScreenSpacePlanarReflection

The following are steps for building a glass shader in Unity URP with Shader Graph:

1. Create a new Shader Graph and name it "Glass".
2. In the Shader Graph, create a new Unlit Master node and name it "Glass".
3. Create a new input property named "Glass Texture", set its type to Texture 2D, and connect it to the Unlit Master node's Base Color input.
4. Create a new input property named "Glass Normal Map", set its type to Texture 2D, and connect it to the Unlit Master node's Normal input.
5. Create a new input property named "Glass Tint Color", set its type to Color, and connect it to the Unlit Master node's Base Color input.
6. Create a new input property named "Glass Refraction", set its type to Vector1, and connect it to the Unlit Master node's Refraction input.
7. Create a new Sample Texture 2D node named "Glass Texture Sample" and connect it to the "Glass Texture" input node.
8. Create a new Sample Texture 2D node named "Glass Normal Map Sample" and connect it to the "Glass Normal Map" input node.
9. Create a new Normal Map node named "Glass Normal Map" and connect it to the RGB output of the "Glass Normal Map Sample" node.
10. Create a new Multiply node named "Glass Normal Strength"; connect it to the output of the "Glass Normal Map" node and connect its other input to the "Glass Normal Strength" input node.
11. Create a new Lerp node named "Glass Refraction Lerp"; connect one input to the "Glass Refraction" input node, another to a constant of 1.0, and the third to a constant of 0.1.
12. Create a new Multiply node named "Glass Refraction Strength"; connect one input to the output of the "Glass Refraction Lerp" node and the other to a constant of 0.1.
13. Create a new Add node named "Glass Refraction Add"; connect one input to the output of the "Glass Refraction Strength" node and the other to a constant of 1.0.
14. Create a new Multiply node named "Glass Refraction Multiply"; connect one input to the output of the "Glass Refraction Add" node and the other to the Alpha output of the "Glass Texture Sample" node.
15. Create a new Lerp node named "Glass Tint Lerp"; connect one input to the "Glass Tint Color" input node, another to a constant of 1.0, and the third to a constant of 0.1.
16. Create a new Multiply node named "Glass Tint Strength"; connect one input to the output of the "Glass Tint Lerp" node and the other to a constant of 0.1.
17. Create a new Add node named "Glass Tint Add"; connect one input to the output of the "Glass Tint Strength" node and the other to a constant of 1.0.
18. Create a new Multiply node named "Glass Tint Multiply"; connect one input to the output of the "Glass Tint Add" node and the other to the RGB output of the "Glass Texture Sample" node.
19. Create a new Add node named "Glass Final Add"; connect one input to the output of the "Glass Tint Multiply" node and the other to the output of the "Glass Refraction Multiply" node.
20. Connect the "Glass Final Add" node to the Unlit Master node's Base Color input.
21. Create a Plane object in the scene and scale it to a suitable size.
22. Drag the glass texture onto the Plane object and set its material to "Glass".
23. Select the Main Camera and, in the Inspector, tick Show Glass on the Glass Ctrl component to see the result.