Unity HLSL Projector Shader V0.1

Purpose:

The built-in Projector component cannot be used in URP projects, so I implemented a simple projector myself. This article walks through the implementation of a projector in a Unity shader. (I'm a shader beginner, so corrections are very welcome.)

1. Create a Cube
A cute little Cube

2. Create the shader and a texture, then assign the resulting material to the Cube
3. Enable the camera's Depth Texture and Opaque Texture options (so _CameraDepthTexture and _CameraOpaqueTexture are generated)
ON!

4. Shader inputs

struct Attributes {
	float4 positionOS	: POSITION;
	float2 uv			: TEXCOORD0;
};
 
struct Varyings {
	float4 positionCS 	: SV_POSITION;
	float2 uv			: TEXCOORD0;
	float4 screenPos	: TEXCOORD1;
};
 
TEXTURE2D(_BaseMap);				SAMPLER(sampler_BaseMap);
TEXTURE2D(_CameraOpaqueTexture);	SAMPLER(sampler_CameraOpaqueTexture);
TEXTURE2D(_CameraDepthTexture);		SAMPLER(sampler_CameraDepthTexture);

5. Compute the screen position

OUT.screenPos	= ComputeScreenPos(OUT.positionCS);
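
A side note: since the vertex stage in the full shader below already calls GetVertexPositionInputs, the same value can also be taken from its positionNDC field instead of calling ComputeScreenPos separately. A minimal sketch, assuming your URP version's VertexPositionInputs exposes positionNDC:

// Sketch: equivalent screen position taken from GetVertexPositionInputs.
VertexPositionInputs positionInputs = GetVertexPositionInputs(IN.positionOS.xyz);
OUT.positionCS = positionInputs.positionCS;
OUT.screenPos  = positionInputs.positionNDC;   // same result as ComputeScreenPos(positionCS)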

6. Sample the depth at the current pixel

float2 uv = IN.screenPos.xy / IN.screenPos.ww;
float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv);
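
If you prefer URP's built-in helpers, the same depth sample can be obtained through DeclareDepthTexture.hlsl instead of declaring _CameraDepthTexture manually; a small sketch, assuming that include is available in your URP version:

// Sketch: sampling the scene depth via URP's helper include.
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/DeclareDepthTexture.hlsl"

float2 uv    = IN.screenPos.xy / IN.screenPos.w;  // perspective divide to screen-space UV
float depth  = SampleSceneDepth(uv);              // raw device depth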

7. Reconstruct the pixel's world-space position from that depth

float4 clipPos = float4(uv * 2 - 1, -depth * 2 + 1, 1);
float4 cameraSpacePos = mul(unity_CameraInvProjection, clipPos);
float4 worldSpacePos = mul(unity_MatrixInvV, cameraSpacePos);
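
The core render-pipeline library also ships a helper that folds this reconstruction into one call and is meant to handle the platform NDC/depth conventions internally. A hedged alternative, assuming your URP version defines UNITY_MATRIX_I_VP and Common.hlsl's ComputeWorldSpacePosition:

// Sketch: world-position reconstruction via the core library helper.
// It builds the clip-space position from uv + raw depth, applies the
// inverse view-projection matrix and performs the divide by w.
float3 positionWS = ComputeWorldSpacePosition(uv, depth, UNITY_MATRIX_I_VP);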

8. Transform the position into object space; for Unity's default Cube the local coordinates already lie in the -0.5 to 0.5 range

float4 localPos = mul(unity_WorldToObject, worldSpacePos);
localPos = localPos / localPos.w;
// Dividing by w is the homogeneous (perspective) divide, deferred from the inverse projection above.
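
If you used the ComputeWorldSpacePosition variant above (which already divides by w), the object-space transform can also go through the core SpaceTransforms helper; a sketch, not the author's original path:

// Sketch: world -> object with TransformWorldToObject, assuming positionWS
// already had its perspective divide applied.
float3 localPos = TransformWorldToObject(positionWS);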

9. Remap [-0.5, 0.5] to [0, 1] to use as UV coordinates

float2 projUV = localPos.xy + 0.5f;
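
Optionally, pixels that land outside the Cube's volume can be discarded so the projection does not smear past the projector bounds. This is a hypothetical addition, not part of the original code:

// Hypothetical addition: reject anything outside the unit Cube (|xyz| > 0.5)
// so the projected texture stays confined to the projector volume.
clip(0.5 - abs(localPos.xyz));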

10. Set the shader queue to 3000 (Transparent), set the Cube's Rotation X to 90, and test the result

half3 color = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, projUV).rgb;
return half4(color, color.r);
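
One detail about the queue: in ShaderLab the Queue tag is only honored at the SubShader level (a Queue entry inside a Pass's tags is ignored), so "queue 3000" is expressed like this; the complete shader in section 13 is written the same way:

// Transparent corresponds to render queue 3000.
SubShader {
	Tags { "RenderType"="Transparent" "Queue"="Transparent" "RenderPipeline"="UniversalPipeline" }
	// ... passes ...
}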

11. Approach summary
Sample the pixel's depth, use it to reconstruct the world-space position, then transform that position into the Cube's object space. This effectively treats the Cube as a camera: the Cube's center is the camera position and the Cube's bounds are its view volume. Finally, map the object-space position to UV coordinates and sample the texture with them.

12. A small effect; the code below is on the more 'artistic' side

float2 uv_effect = projUV - float2(0.5f, 0.5f);                    // recenter the UV around (0, 0)
float angle  = abs(atan2(uv_effect.x, uv_effect.y) * 1.0f/6.28f);  // angle normalized to [0, 0.5]
float radius = length(uv_effect) * 2;                              // 0 at the center, 1 at the inscribed circle
float area   = 1 - step(_Angle, angle);                            // keep the sector where angle < _Angle
float mask   = step(radius, 1) * area;                             // clip everything outside the unit circle

// Base Shape
float baseShape = (
	  radius * 0.1f
	+ smoothstep(0.98f, 1.0f, radius)
	+ (1 - smoothstep(0.0f, 0.003f, _Angle - angle))
) * mask;

// Flow Bound
float flowMask  = step(radius, _FlowBound % 4);
float flowShape = smoothstep(0.0f, _FlowBound % 4, radius) * mask;
float flowBound = flowShape * flowMask;

float2 uv_Polar  = float2(angle * floor(_Tighten), saturate(radius * _Scale - _Offset));
half effectShape = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, uv_Polar).r * mask;

// Final
half3 finalColor = _Color;
half  finalAlpha = effectShape + baseShape + flowBound;

return half4(finalColor, finalAlpha);
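
The _FlowBound % 4 term suggests the ring is meant to be animated from a script or timeline. A purely hypothetical variant drives it from shader time instead; _FlowSpeed here is an assumed extra property, not part of the original material:

// Hypothetical: animate the flow ring with _Time.y instead of scripting _FlowBound.
// _FlowSpeed would be an additional property, e.g. _FlowSpeed ("FlowSpeed", Range(0, 10)) = 1.
float flowT     = (_Time.y * _FlowSpeed) % 4;
float flowMask  = step(radius, flowT);
float flowShape = smoothstep(0.0f, flowT, radius) * mask;
float flowBound = flowShape * flowMask;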

13. Complete code

Shader "EffectProjector" {
	Properties {
		_BaseMap (" Texture", 2D) = "white" {}
		
		_Color ("Color", Color) = (1,1,1,1)
		
		_Tighten		("Tighten", Range(0, 100)) = 1
		_Scale			("Scale", Range(1, 10)) = 1
		_Offset			("Offset", Range(0, 10)) = 0
		_Angle			("Angle",   Range(0, 0.51))  = 0
		_FlowBound		("FlowBound", Range(0, 40)) = 0
	}
	SubShader {
		Tags { "RenderType"="Transparent" "RenderPipeline"="UniversalPipeline" }
 
		HLSLINCLUDE
			#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
 
			CBUFFER_START(UnityPerMaterial)
			half3 _Color;

			float _Scale;
			float _Offset;
			float _Tighten;
			float _Angle;
			float _FlowBound;
			CBUFFER_END
		ENDHLSL
 
		Pass {
			Name "Example"
			Tags { "LightMode"="UniversalForward" "Queue" = "Transparent"}
			Blend SrcAlpha OneMinusSrcAlpha
			ZWrite Off
			ZTest LEqual
 
			HLSLPROGRAM
			#pragma vertex vert
			#pragma fragment frag
 
			struct Attributes {
				float4 positionOS	: POSITION;
				float2 uv			: TEXCOORD0;
			};
 
			struct Varyings {
				float4 positionCS 	: SV_POSITION;
				float2 uv			: TEXCOORD0;
				float4 screenPos	: TEXCOORD1;
			};
 
			TEXTURE2D(_BaseMap);				SAMPLER(sampler_BaseMap);
			TEXTURE2D(_CameraOpaqueTexture);	SAMPLER(sampler_CameraOpaqueTexture);
			TEXTURE2D(_CameraDepthTexture);		SAMPLER(sampler_CameraDepthTexture);
			
			Varyings vert(Attributes IN) {
				Varyings OUT;
 
				VertexPositionInputs positionInputs = GetVertexPositionInputs(IN.positionOS.xyz);

				OUT.positionCS	= positionInputs.positionCS;
				OUT.uv			= IN.uv;
				OUT.screenPos	= ComputeScreenPos(OUT.positionCS);

				return OUT;
			}
 
			half4 frag(Varyings IN) : SV_Target {

				float2 uv = IN.screenPos.xy / IN.screenPos.ww;
				float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, sampler_CameraDepthTexture, uv);

				float4 clipPos = float4(uv * 2 - 1, -depth * 2 + 1, 1);
				float4 cameraSpacePos = mul(unity_CameraInvProjection, clipPos);
				float4 worldSpacePos = mul(unity_MatrixInvV, cameraSpacePos);
				
				float4 localPos = mul(unity_WorldToObject, worldSpacePos);
				localPos = localPos / localPos.w;

				float2 projUV = localPos.xy + 0.5f;
				//half3  color = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, projUV);
				//return half4(color, color.r);

				// Effect
				float2 uv_effect = projUV - float2(0.5f, 0.5f);
				float angle  = abs(atan2(uv_effect.x, uv_effect.y) * 1.0f/6.28f);
				float radius = length(uv_effect) * 2;
				float area   = 1 - step(_Angle, angle);
				float mask   = step(radius, 1) * area;

				// Base Shape
				float baseShape = (
					  radius * 0.1f
					+ smoothstep(0.98f, 1.0f, radius)
					+ (1 - smoothstep(0.0f, 0.003f, _Angle - angle))
				) * mask;

				// Flow Bound
				float flowMask  = step(radius, _FlowBound % 4);
				float flowShape = smoothstep(0.0f, _FlowBound % 4, radius) * mask;
				float flowBound = flowShape * flowMask;

				float2 uv_Polar  = float2(angle * floor(_Tighten), saturate(radius * _Scale - _Offset));
				half effectShape = SAMPLE_TEXTURE2D(_BaseMap, sampler_BaseMap, uv_Polar).r * mask;

				// Final
				half3 finalColor = _Color;
				half  finalAlpha = effectShape + baseShape + flowBound;

				return half4(finalColor, finalAlpha);
			}
			ENDHLSL
		}
	}
}