Three.js Post-Processing Outlines

A custom Three.js post-processing pass that draws outlines on the rendered image.

Vertex shader:

varying vec2 vUv;

void main() {
	vUv = uv;
	gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}

Fragment shader. Three edge-detection signals are used:

1. Sobel operator

float sobeldepthX = (-1.0) * getColor(-1, -1)
	+ (-2.0) * getColor(-1, 0)
	+ (-1.0) * getColor(-1, 1)
	+ getColor(1, -1)
	+ (2.0) * getColor(1, 0)
	+ getColor(1, 1);
float sobeldepthY = getColor(-1, -1)
	+ (2.0) * getColor(0, -1)
	+ getColor(1, -1)
	+ (-1.0) * getColor(-1, 1)
	+ (-2.0) * getColor(0, 1)
	+ (-1.0) * getColor(1, 1);

float G = abs(sobeldepthX) + abs(sobeldepthY);
float G2 = G > colorThreshold ? 1.0 : 0.0;

2. Depth

float leftDepth = abs(getPixelDepth(-1, 0) - getPixelDepth(0, 0));
float rightDepth = abs(getPixelDepth(1, 0) - getPixelDepth(0, 0));
float topDepth = abs(getPixelDepth(0, 1) - getPixelDepth(0, 0));
float bottomDepth = abs(getPixelDepth(0, -1) - getPixelDepth(0, 0));
float depth = saturate(leftDepth + rightDepth + topDepth + bottomDepth);

3. Normal vectors

float mynormalDiff = 0.0;
mynormalDiff += abs(distance(normal, getPixelNormal(-1, 0)));
mynormalDiff += abs(distance(normal, getPixelNormal(1, 0)));
mynormalDiff += abs(distance(normal, getPixelNormal(0, 1)));
mynormalDiff += abs(distance(normal, getPixelNormal(0, -1)));

Each of the three methods can produce outlines on its own; their pros and cons:

1. The Sobel operator gives a good overall outline, but it also outlines texture detail on flat surfaces.

2. The depth threshold is hard to tune: for surfaces whose depth change between neighboring pixels is close to that of an ordinary slanted plane, the outline has large errors.

3. The normal-based method fails for two touching objects with parallel surfaces, since their normals are identical and no edge is detected.

Since each signal has weaknesses on its own, the final pass combines all three. Full code:

MyCustomOutlinePass.js

import * as THREE from "three";
import { Pass } from "three/examples/jsm/postprocessing/Pass.js";


class myCustomOutlinePass extends Pass {
	constructor(resolution, scene, camera) {
		super();

		this.renderScene = scene;
		this.renderCamera = camera;
		this.resolution = new THREE.Vector2(resolution.x, resolution.y);

		this.fsQuad = new Pass.FullScreenQuad(null);
		this.fsQuad.material = this.createOutlinePostProcessMaterial();
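		// NOTE (depends on your three.js version): in newer releases, FullScreenQuad
		// is a separate named export of Pass.js ("import { Pass, FullScreenQuad } ...")
		// rather than a property of Pass.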

		// Create a buffer to store the normals of the scene onto
		const normalTarget = new THREE.WebGLRenderTarget(
			this.resolution.x,
			this.resolution.y
		);
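		// NOTE: THREE.RGBFormat has been removed in newer three.js releases;
		// THREE.RGBAFormat works as a replacement here.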
		normalTarget.texture.format = THREE.RGBFormat;
		normalTarget.texture.minFilter = THREE.NearestFilter;
		normalTarget.texture.magFilter = THREE.NearestFilter;
		normalTarget.texture.generateMipmaps = false;
		normalTarget.stencilBuffer = false;
		this.normalTarget = normalTarget;

		this.normalOverrideMaterial = new THREE.MeshNormalMaterial();
	}

	dispose() {
		this.normalTarget.dispose();
		this.fsQuad.dispose();
	}

	setSize(width, height) {
		this.normalTarget.setSize(width, height);
		this.resolution.set(width, height);

		this.fsQuad.material.uniforms.screenSize.value.set(
			this.resolution.x,
			this.resolution.y,
			1 / this.resolution.x,
			1 / this.resolution.y
		);
	}

	render(renderer, writeBuffer, readBuffer) {
		// Turn off writing to the depth buffer
		// because we need to read from it in the subsequent passes.
		const depthBufferValue = writeBuffer.depthBuffer;
		writeBuffer.depthBuffer = false;

		// 1. Re-render the scene to capture all normals in texture.
		// Ideally we could capture this in the first render pass along with
		// the depth texture.
		renderer.setRenderTarget(this.normalTarget);

		const overrideMaterialValue = this.renderScene.overrideMaterial;
		this.renderScene.overrideMaterial = this.normalOverrideMaterial;
		renderer.render(this.renderScene, this.renderCamera);
		this.renderScene.overrideMaterial = overrideMaterialValue;

		this.fsQuad.material.uniforms["depthBuffer"].value =
			readBuffer.depthTexture;
		this.fsQuad.material.uniforms[
			"normalBuffer"
		].value = this.normalTarget.texture;
		this.fsQuad.material.uniforms["sceneColorBuffer"].value =
			readBuffer.texture;

		// 2. Draw the outlines using the depth texture and normal texture
		// and combine it with the scene color
		if (this.renderToScreen) {
			// If this is the last effect, then renderToScreen is true.
			// So we should render to the screen by setting target null
			// Otherwise, just render into the writeBuffer that the next effect will use as its read buffer.
			renderer.setRenderTarget(null);
			this.fsQuad.render(renderer);
		} else {
			renderer.setRenderTarget(writeBuffer);
			this.fsQuad.render(renderer);
		}

		// Reset the depthBuffer value so we continue writing to it in the next render.
		writeBuffer.depthBuffer = depthBufferValue;
	}

	get vertexShader() {
		return `
			varying vec2 vUv;
			void main() {
				vUv = uv;
				gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
			}
			`;
	}
	get fragmentShader() {
		return `
			#include <packing>
			// The above include imports "perspectiveDepthToViewZ"
			// and other GLSL functions from ThreeJS we need for reading depth.
			uniform sampler2D sceneColorBuffer;
			uniform sampler2D depthBuffer;
			uniform sampler2D normalBuffer;
			uniform float cameraNear;
			uniform float cameraFar;
			uniform vec4 screenSize;
			uniform float colorThreshold;
			uniform float depthThreshold;
			uniform float colorScale;
			// uniform vec3 outlineColor;

			varying vec2 vUv;

			// Helper functions for reading from depth buffer.
			float readDepth (sampler2D depthSampler, vec2 coord) {
				float fragCoordZ = texture2D(depthSampler, coord).x;
				float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
				return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
			}
			// float getLinearDepth(vec3 pos) {
			// 	return -(viewMatrix * vec4(pos, 1.0)).z;
			// }

			// float getLinearScreenDepth(sampler2D map) {
			// 		vec2 uv = gl_FragCoord.xy * screenSize.zw;
			// 		return readDepth(map,uv);
			// }

			// Helper functions for reading normals and depth of neighboring pixels.
			float getPixelDepth(int x, int y) {
				return readDepth(depthBuffer, vUv + screenSize.zw * vec2(x, y));
			}
			vec3 getPixelNormal(int x, int y) {
				return texture2D(normalBuffer, vUv + screenSize.zw * vec2(x, y)).rgb;
			}

			// Clamp a value to the [0, 1] range.
			float saturate(float num) {
				return clamp(num, 0.0, 1.0);
			}

			// Sample the scene color at the given pixel offset and return its grayscale (channel-average) value.
			float getColor(int x,int y) {
				vec4 nowColor =  texture2D(sceneColorBuffer, vUv + screenSize.zw * vec2(x, y));
				return nowColor.x/3.0 + nowColor.y/3.0 + nowColor.z/3.0;
			}

			void main() {
				vec4 sceneColor = texture2D(sceneColorBuffer, vUv);
				// float depth = getPixelDepth(0, 0);
				vec3 normal = getPixelNormal(0, 0);

				// 1. Sobel operator on the grayscale scene color.
				float sobeldepthX = (-1.0)*getColor(-1, -1)
				+ (-2.0)*getColor(-1, 0) 
				+ (-1.0)*getColor(-1, 1)
				+ getColor(1, -1)
				+ (2.0)*getColor(1, 0)
				+ getColor(1, 1);
				float sobeldepthY = getColor(-1, -1)
				+ (2.0)*getColor(0, -1)
				+ getColor(1, -1)
				+ (-1.0)*getColor(-1, 1)
				+ (-2.0)*getColor(0, 1)
				+ (-1.0)*getColor(1, 1);

				float G = abs(sobeldepthX)+abs(sobeldepthY);
				float G2 = G>colorThreshold?1.0:0.0;
			
				// vec4 outlineColor = vec4(outlineColor, 1.0);
				// 2. Depth difference between the pixel and its four neighbors.
				float leftDepth = abs(getPixelDepth(-1,0) - getPixelDepth(0,0));
				float rightDepth = abs(getPixelDepth(1,0) - getPixelDepth(0,0));
				float topDepth = abs(getPixelDepth(0,1) - getPixelDepth(0,0));
				float bottomDepth = abs(getPixelDepth(0,-1) - getPixelDepth(0,0));
				float depth = saturate(leftDepth + rightDepth + topDepth + bottomDepth);

				// 3. Normal difference between the pixel and its four neighbors.
				float mynormalDiff = 0.0;
				mynormalDiff += abs(distance(normal, getPixelNormal(-1, 0)));
				mynormalDiff += abs(distance(normal, getPixelNormal(1, 0)));
				mynormalDiff += abs(distance(normal, getPixelNormal(0, 1)));
				mynormalDiff += abs(distance(normal, getPixelNormal(0, -1)));
				// Darkened version of the scene color used to draw the outline.
				vec4 darkColor = vec4(clamp(sceneColor.rgb - sceneColor.rgb / colorScale, 0.0, 1.0), 1.0);

				if (mynormalDiff != 0.0) {
					// Normal discontinuity: blend towards the dark color, driven by
					// both the Sobel edge mask and the normal difference.
					gl_FragColor = mix(sceneColor, darkColor, saturate(G2 + mynormalDiff));
				} else if (depth > depthThreshold) {
					// No normal discontinuity, but a depth discontinuity: full-strength outline.
					gl_FragColor = darkColor;
				} else {
					// No edge detected: keep the original scene color.
					gl_FragColor = sceneColor;
				}
				
			}
			`;
	}

	createOutlinePostProcessMaterial() {
		return new THREE.ShaderMaterial({
			uniforms: {
				sceneColorBuffer: {},
				depthBuffer: {},
				normalBuffer: {},
				colorThreshold: { value: 0.34 }, // Sobel edge threshold on the grayscale gradient
				colorScale: { value: 5.0 }, // how strongly the outline darkens the scene color
				depthThreshold: { value: 0.066 }, // depth-difference threshold
				// outlineColor: { value: new THREE.Color(0xffffff) },
				cameraNear: { value: this.renderCamera.near },
				cameraFar: { value: this.renderCamera.far },
				screenSize: {
					value: new THREE.Vector4(
						this.resolution.x,
						this.resolution.y,
						1 / this.resolution.x,
						1 / this.resolution.y
					),
				},
			},
			vertexShader: this.vertexShader,
			fragmentShader: this.fragmentShader,
		});
	}
}

export { myCustomOutlinePass };

Usage:

let customOutline = new myCustomOutlinePass(
	new THREE.Vector2(window.innerWidth, window.innerHeight),
	scene,
	camera
);
const uniforms = customOutline.fsQuad.material.uniforms;
uniforms.colorScale.value = 4.0;
uniforms.colorThreshold.value = 0.34;
// For a pure black outline: uniforms.colorScale.value = 1.0
uniforms.depthThreshold.value = 0.066;
composer.addPass(customOutline);
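
The pass reads readBuffer.depthTexture, so the composer used above must render into a target that has a depth texture attached. A minimal setup sketch, assuming renderer, scene and camera already exist:

import * as THREE from "three";
import { EffectComposer } from "three/examples/jsm/postprocessing/EffectComposer.js";
import { RenderPass } from "three/examples/jsm/postprocessing/RenderPass.js";

// Render target with an attached depth texture so later passes can read scene depth.
const depthTexture = new THREE.DepthTexture(window.innerWidth, window.innerHeight);
const renderTarget = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight, {
	depthTexture: depthTexture,
	depthBuffer: true,
});

const composer = new EffectComposer(renderer, renderTarget);
// The RenderPass fills the composer's read buffer with scene color and depth.
composer.addPass(new RenderPass(scene, camera));

// ...then create and add the outline pass as shown above, and call
// composer.render() in the animation loop instead of renderer.render().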