接着上一个讨论的话题,关于3.8.x的后效,今天来分享自定义后效来制作模糊效果,并将它应用到弹窗中做背景,话不多说开整。
一:最终效果
首先咱们来看官网自定义后效怎么搞的,从它的实例开始:自定义后效
二:定义PostProcessSettings给节点提供资源(通过编辑器修改参数的方式)
首先,自定义后效 pass 需要一个组件,让我们可以在编辑器中修改具体的参数、传入具体的数据,这就要用到 postProcess.PostProcessSetting 这个类:
import { _decorator, gfx, postProcess, Material, EffectAsset, renderer, rendering, Vec4, Camera, CachedArray, Sprite } from 'cc';
const { Format } = gfx
const { ccclass, property, menu, executeInEditMode } = _decorator;
/**
*
* 就是一个普通的组件 自定义后处理节点的资源和行为
*
*/
@ccclass('GaussianBlur')
@menu('PostProcess/GaussianBlur')
@executeInEditMode
export class GaussianBlur extends postProcess.PostProcessSetting {
    /** Camera of the 3D scene; used when capturing the blurred output into a RenderTexture. */
    @property(Camera)
    mainCamera: Camera = null;

    /** Sprite that receives the image produced by the post effect. */
    @property(Sprite)
    preview: Sprite = null;

    /** Blur shader asset; a Material is (re)built from it in the `effect` setter / `start`. */
    @property(EffectAsset)
    _effectAsset: EffectAsset | undefined

    @property(EffectAsset)
    get effect() {
        return this._effectAsset;
    }
    set effect(v) {
        // Build a material from the assigned blur-effect shader. Alternatively you
        // could drag a pre-made material (already bound to the blur shader) into the editor.
        this._effectAsset = v;
        if (this._effectAsset == null) {
            this._material = null;
        }
        else {
            if (this._material == null) {
                this._material = new Material();
            }
            this._material.reset({ effectAsset: this._effectAsset });
        }
        this.updateMaterial();
    }

    // Number of blur-x/blur-y iterations executed per frame by the pass.
    @property
    iterations = 3;

    // Blur radius (in texels), stored in _blurParams.x.
    @property
    get blurRadius() {
        return this._blurParams.x;
    }
    set blurRadius(v) {
        this._blurParams.x = v;
        this.updateMaterial();
    }

    private _material: Material;
    public get material(): Material {
        return this._material;
    }

    // x = blur radius; y/z/w are currently unused.
    @property
    private _blurParams: Vec4 = new Vec4(1.0, 0.0, 0.0, 0.0);
    public get blurParams(): Vec4 {
        return this._blurParams;
    }

    /** Push the current blur parameters into the material's uniform block. */
    updateMaterial() {
        if (!this._material) {
            return;
        }
        this._material.setProperty('blurParams', this.blurParams);
    }

    protected start(): void {
        // Rebuild the material on start in case the effect asset was deserialized
        // without going through the `effect` setter.
        if (this._effectAsset) {
            this._material = new Material();
            this._material.initialize({ effectAsset: this._effectAsset });
            this._material.setProperty('blurParams', this.blurParams);
        }
    }
}
三:定义接收输入定向输出的节点 SettingPass
既然是自定义管线,你做的效果总得有一个流入流出的节点吧,就相当于blender里面的材质节点,虚幻的蓝图,你当前的效果是需要依赖上一个流程中的输入才可以正常工作的,当然你处理完了还要将处理的结果返回到渲染管线当中去利用,再处理等等操作。所以现在需要定义这样一个节点,反映在cocos中就是SettingPass类:我们定义自己的SettingPass类
import { Camera, RenderTexture, SpriteFrame, Texture2D, UITransform, Vec2, Vec3, gfx, postProcess, renderer, rendering } from "cc";
import { GaussianBlur } from "./GaussianBlur";
/**
 * Custom post-process pass that applies an iterated, separable Gaussian blur:
 * each iteration runs a horizontal pass (shader pass "blur-x") into a temporary
 * target, then a vertical pass ("blur-y") into this pass's output slot.
 */
export class GaussianBlurPass extends postProcess.SettingPass {
    // Cached preview resources — the original code allocated a fresh
    // RenderTexture/SpriteFrame on every _blurPreview call, leaking GPU memory.
    private _previewTexture: RenderTexture | null = null;
    private _previewFrame: SpriteFrame | null = null;

    get setting() {
        return this.getSetting(GaussianBlur);
    }

    /**
     * Whether this post effect runs for the given camera: the base class must
     * enable it, the debug view must not disable post-processing, and the blur
     * material must exist.
     */
    checkEnable(camera: renderer.scene.Camera) {
        let enable = super.checkEnable(camera);
        if (postProcess.disablePostProcessForDebugView()) {
            enable = false;
        }
        return enable && this.setting.material != null;
    }

    name = 'GaussianBlurPass';
    outputNames = ['GaussianBlurMap'];

    /**
     * Capture the main camera's output into a RenderTexture and show it on the
     * preview Sprite. `targetTexture` only takes effect on the NEXT frame, so
     * the pickup is deferred via scheduleOnce; afterwards targetTexture must be
     * cleared or subsequent captures come out black.
     */
    private _blurPreview(camera: renderer.scene.Camera) {
        const setting = this.setting;
        const w = camera.window.width;
        const h = camera.window.height;
        // Lazily create and reuse the capture resources instead of leaking a
        // new RenderTexture/SpriteFrame per call.
        if (!this._previewTexture) {
            this._previewTexture = new RenderTexture();
            this._previewTexture.initialize({
                name: "s",
                width: w,
                height: h,
            });
            this._previewFrame = new SpriteFrame();
        }
        const texture = this._previewTexture;
        const frame = this._previewFrame!;
        // RenderTexture content is vertically flipped relative to UI space.
        setting.preview.node.setScale(new Vec3(1, -1, 1));
        setting.mainCamera.targetTexture = texture;
        setting.scheduleOnce(() => {
            frame.texture = texture;
            setting.preview.spriteFrame = frame;
            setting.mainCamera.targetTexture = null;
        }, 0);
    }

    public render(camera: renderer.scene.Camera, ppl: rendering.Pipeline): void {
        const setting = this.setting;
        if (!setting.material) {
            return;
        }
        // Shared pass-building context for this pipeline.
        let passContext = this.context;
        passContext.material = setting.material;
        const cameraID = this.getCameraUniqueID(camera);
        // Clear the target to opaque black before drawing.
        passContext.clearBlack();
        const format = gfx.Format.RGBA8;
        // Start from the previous pass's output resource in the pipeline.
        // (Removed the per-frame console.log that spammed the console every frame.)
        let input = this.lastPass!.slotName(camera, 0);
        for (let i = 0; i < setting.iterations; ++i) {
            // Horizontal blur: input -> GaussianBlurMap_TMP (shader pass 0, "blur-x").
            passContext
                .updatePassViewPort()
                .addRenderPass(`blur-x`, `blur-x${cameraID}`)
                .setPassInput(input, 'outputResultMap')
                .addRasterView('GaussianBlurMap_TMP', format)
                .blitScreen(0)
                .version();
            // Vertical blur: GaussianBlurMap_TMP -> this pass's output slot (pass 1, "blur-y").
            passContext
                .updatePassViewPort()
                .addRenderPass(`blur-y`, `blur-y${cameraID}`)
                .setPassInput('GaussianBlurMap_TMP', 'outputResultMap')
                .addRasterView(this.slotName(camera), format)
                .blitScreen(1)
                .version();
            // Next iteration blurs the already-blurred result.
            input = this.slotName(camera);
        }
        // this._blurPreview(camera);
    }
}
// A pass only executes once it is registered with the engine's pass list.
const pipelineBuilder = rendering.getCustomPipeline("Custom") as postProcess.PostProcessBuilder;
if (pipelineBuilder) {
    pipelineBuilder.insertPass(new GaussianBlurPass(), postProcess.BlitScreenPass);
}
四:引擎源码的几个关键方法理解
1:slotName
// 需要使用上一个处理流程结束后的画面内容
let input = this.lastPass!.slotName(camera, 0);
这段代码的作用是获取上一个 pass 输出的资源名,源码如下:
// Engine source: builds the unique resource name of this pass's output at
// `index` — outputName + pass name + pass id + camera unique id.
slotName (camera: Camera, index = 0) {
    const name = this.outputNames[index] + this.name;
    return `${name}_${this._id}_${getCameraUniqueID(camera)}`;
}
2: passContext
首先看一下源码pass-context.ts文件:跟pass相关的一系列操作,设置pass输入,向pass加入渲染队列准备去渲染,更新passViewport视口,添加renderpass
import { EDITOR } from 'internal:constants';
import { QueueHint, ResourceResidency, SceneFlags } from '../../custom/types';
import { ClearFlagBit, Color, Format, LoadOp, Rect, StoreOp, Viewport } from '../../../gfx';
import { Pipeline, RenderPassBuilder } from '../../custom/pipeline';
import { Camera } from '../../../render-scene/scene';
import { Material } from '../../../asset/assets';
import { PostProcess } from '../components';
import { getRenderArea } from '../../custom/define';
import { Vec4 } from '../../../core';
/**
 * Engine source (pass-context.ts): shared state and helper methods for building
 * post-process passes — setting pass inputs, adding render passes and render
 * targets, updating the pass viewport, and queuing the screen quad for drawing.
 */
export class PassContext {
    clearFlag: ClearFlagBit = ClearFlagBit.COLOR;
    clearColor = new Color()
    clearDepthColor = new Color()
    ppl: Pipeline | undefined               // the custom render pipeline
    camera: Camera | undefined
    material: Material | undefined
    pass: RenderPassBuilder | undefined     // the pass currently being built
    rasterWidth = 0
    rasterHeight = 0
    layoutName = ''
    shadingScale = 1;
    viewport = new Rect();                  // full render area (camera window sized)
    passViewport = new Rect();              // per-pass output viewport
    passPathName = '';
    passVersion = 0;
    isFinalCamera = false;
    isFinalPass = false;
    depthSlotName = '';
    shadowPass: any = undefined;
    forwardPass: any = undefined;
    postProcess: PostProcess | undefined;

    setClearFlag(clearFlag: ClearFlagBit) {
        this.clearFlag = clearFlag;
        return this;
    }

    setClearColor(x: number, y: number, z: number, w: number) {
        Vec4.set(this.clearColor, x, y, z, w);
        return this;
    }

    setClearDepthColor(x: number, y: number, z: number, w: number) {
        Vec4.set(this.clearDepthColor, x, y, z, w);
        return this;
    }

    // Append this pass to the accumulated path name and stamp its version
    // (skipped in the editor).
    version() {
        if (!EDITOR) {
            this.passPathName += `_${this.pass!.name}_${this.layoutName}`;
            this.pass!.setVersion(this.passPathName, this.passVersion);
        }
        return this;
    }

    // Clear to opaque black. NOTE: writes through the module-level
    // `passContext` singleton rather than `this` (they are the same object
    // in practice, since only the singleton is exported).
    clearBlack() {
        this.clearFlag = ClearFlagBit.COLOR;
        Vec4.set(passContext.clearColor, 0, 0, 0, 1);
    }

    // Create a render pass sized to the current passViewport. `layoutName`
    // matches the `pass:` name declared in the effect file; `passName` is a
    // debug label.
    addRenderPass(layoutName: string, passName: string) {
        const passViewport = this.passViewport;
        const pass = this.ppl!.addRenderPass(passViewport.width, passViewport.height, layoutName);
        pass.name = passName;
        this.pass = pass;
        this.layoutName = layoutName;
        this.rasterWidth = passViewport.width;
        this.rasterHeight = passViewport.height;
        pass.setViewport(new Viewport(passViewport.x, passViewport.y, passViewport.width, passViewport.height));
        return this;
    }

    // Recompute `viewport` from the camera window size, applying the
    // post-process shading scale when it is enabled.
    updateViewPort() {
        const camera = this.camera;
        if (!camera) {
            return;
        }
        let shadingScale = 1;
        if (this.postProcess && (!EDITOR || this.postProcess.enableShadingScaleInEditor)) {
            shadingScale *= this.postProcess.shadingScale;
        }
        this.shadingScale = shadingScale;
        const area = getRenderArea(camera, camera.window.width * shadingScale, camera.window.height * shadingScale, null, 0, this.viewport);
        area.width = Math.floor(area.width);
        area.height = Math.floor(area.height);
    }

    /**
     * Resize and reposition `passViewport` from `viewport` using the given
     * shading-scale (size) and offset-scale (position) factors.
     * @param {} shadingScale=1
     * @param {} offsetScale=0
     */
    updatePassViewPort(shadingScale = 1, offsetScale = 0) {
        this.passViewport.width = this.viewport.width * shadingScale;
        this.passViewport.height = this.viewport.height * shadingScale;
        this.passViewport.x = this.viewport.x * offsetScale;
        this.passViewport.y = this.viewport.y * offsetScale;
        return this;
    }

    // setViewport (x: number, y: number, w: number, h: number) {
    //     this.pass!.setViewport(new Viewport(x, y, w, h));
    //     return this;
    // }

    // Register (or update) the output resource `name` in the pipeline and
    // attach it to the current pass as a raster view (color or depth-stencil).
    addRasterView(name: string, format: Format, offscreen = true, residency?: ResourceResidency) {
        const ppl = this.ppl;
        const camera = this.camera;
        const pass = this.pass;
        if (!ppl || !camera || !pass) {
            return this;
        }
        if (!ppl.containsResource(name)) {
            if (format === Format.DEPTH_STENCIL) {
                ppl.addDepthStencil(name, format, this.rasterWidth, this.rasterHeight, ResourceResidency.MANAGED);
            } else if (offscreen) {
                ppl.addRenderTarget(name, format, this.rasterWidth, this.rasterHeight, residency || ResourceResidency.MANAGED);
            } else {
                // Render directly into the camera's window (on-screen).
                ppl.addRenderWindow(name, format, this.rasterWidth, this.rasterHeight, camera.window);
            }
        }
        if (format !== Format.DEPTH_STENCIL) {
            if (!offscreen) {
                ppl.updateRenderWindow(name, camera.window);
            } else {
                ppl.updateRenderTarget(name, this.rasterWidth, this.rasterHeight);
            }
        } else {
            ppl.updateDepthStencil(name, this.rasterWidth, this.rasterHeight);
        }
        // let view: RasterView;
        if (format === Format.DEPTH_STENCIL) {
            const clearFlag = this.clearFlag & ClearFlagBit.DEPTH_STENCIL;
            let loadOp = LoadOp.CLEAR;
            if (clearFlag === ClearFlagBit.NONE) {
                loadOp = LoadOp.LOAD;
            }
            pass.addDepthStencil(name, loadOp, StoreOp.STORE, this.clearDepthColor.x, this.clearDepthColor.y, clearFlag);
        } else {
            const clearColor = new Color();
            clearColor.copy(this.clearColor);
            const clearFlag = this.clearFlag & ClearFlagBit.COLOR;
            let loadOp = LoadOp.CLEAR;
            if (clearFlag === ClearFlagBit.NONE) {
                loadOp = LoadOp.LOAD;
            }
            // Add the rasterization render target (sets up the rasterView).
            pass.addRenderTarget(name, loadOp, StoreOp.STORE, clearColor);
        }
        return this;
    }

    // Feed a texture produced earlier in the pipeline into the current pass.
    setPassInput(inputName: string, shaderName: string) {
        // Only bind when the pipeline actually owns a resource with this name.
        if (this.ppl!.containsResource(inputName)) {
            // Bind the image resource to the shader's sampler for further processing.
            this.pass!.addTexture(inputName, shaderName);
        }
        return this;
    }

    // Queue a full-screen camera quad drawn with `material` at `passIdx`.
    blitScreen(passIdx = 0) {
        this.pass!.addQueue(QueueHint.RENDER_TRANSPARENT).addCameraQuad(
            this.camera!, this.material!, passIdx,
            SceneFlags.NONE,
        );
        return this;
    }
}
export const passContext = new PassContext();
里面的东西挺多的,捡重要的说几个 ppl,pass,viewport,passViewport, rasterWidth,rasterHeight,layoutName,
a: ppl 顾名思义就是Pipeline了,也就是渲染管线
那么它起的作用到底是什么呢?我们都知道cocos支持自定义pass到管线中去执行,那么ppl的作用当然是向管线注入各种pass了。还有什么呢?假如想从渲染管线输出数据到屏幕或别的缓冲区,这也是ppl干的事情,比如:
/** 离屏渲染 */
ppl.updateRenderTarget(name, this.rasterWidth, this.rasterHeight);
// 渲染到屏幕
ppl.updateRenderWindow(name, camera.window);
// 添加光栅化渲染目标 设置rasterView
pass.addRenderTarget(name, loadOp, StoreOp.STORE, clearColor);
还有最最重要的是怎么向当前pass注入上一个流程输入的图片信息呢:
// Engine source: feed a texture produced earlier in the pipeline into the
// current pass.
setPassInput(inputName: string, shaderName: string) {
    // Only bind when the pipeline actually owns a resource with this name.
    if (this.ppl!.containsResource(inputName)) {
        // Bind the image resource to the shader's sampler for further processing.
        this.pass!.addTexture(inputName, shaderName);
    }
    return this;
}
b: pass 渲染通道
那么pass从哪里来呢,看源码知道pass是通过addRenderPass来添加的,那么layoutName就对应shader文件的pass名字,来做多通道处理效果,passViewPort就是渲染通道输出的视图大小,viewPort一般是屏幕的尺寸大小,有了passViewPort可以增加输出图形的灵活性,可以由你来自由控制了。
// Engine source: create a render pass sized to the current passViewport.
// `layoutName` matches the `pass:` name declared in the effect file;
// `passName` is a debug label.
addRenderPass(layoutName: string, passName: string) {
    const passViewport = this.passViewport;
    const pass = this.ppl!.addRenderPass(passViewport.width, passViewport.height, layoutName);
    pass.name = passName;
    this.pass = pass;
    this.layoutName = layoutName;
    this.rasterWidth = passViewport.width;
    this.rasterHeight = passViewport.height;
    pass.setViewport(new Viewport(passViewport.x, passViewport.y, passViewport.width, passViewport.height));
    return this;
}
c: 一些passContext的方法说明
你一定注意到了,有这么一大串代码是干啥用的,看的头皮发麻,但是咱们一个一个来理解一下
passContext
.updatePassViewPort() // 相对分辨率大小
.addRenderPass(`blur-x`, `blur-x${cameraID}`) // 执行绘制流程 blur-x表示shader中的 pass名称
.setPassInput(input, 'outputResultMap') // input: 自定义管线资源分配的资源名 shaderName: 对应shader的 uniform sampler2D
.addRasterView('GaussianBlurMap_TMP', format) // 输出结果
.blitScreen(0) // 执行绘制 pass0
.version();
首先向通道里处理图片,你得设置passViewPort吧,也就是通道里面你想要的尺寸是什么样子的,这就是updatePassViewPort干的事情。
第二 你得把你当前的想要加入管线中的pass加入进去吧,这也是addRenderPass的作用,
其次你得向这个管线注入一些东西吧,比如上一个处理流程中图片,这就是setPassInput干的事情
再然后你想让这个pass输出什么类型的图片,大小是多少,临时输出是什么,addRasterView负责管理
最后当然是一切准备就绪开始渲染绘制pass的内容,blitScreen()负责渲染对应通道逻辑,然后该pass将处理的结果 “GaussianBlurMap_TMP” 输出到下一个pass,也就是下面这句代码:
passContext
.updatePassViewPort()
.addRenderPass(`blur-y`, `blur-y${cameraID}`)
.setPassInput('GaussianBlurMap_TMP', 'outputResultMap')
setPassInput的输入给谁用呢?也就是第二个参数 “outputResultMap”,对应shader中的 sampler2D 对象,这也是我们做高斯模糊的源图片。所以现在流程清楚了吧。
五:将后效渲染的图片给拿出来,当做弹窗的背景
前面讲了一大堆是怎么生成,以及生成的逻辑,现在我们要把它实用起来,要不然我们研究它干嘛是吧。那么接下来就讲怎么把自定义后效渲染出来的东西给显示出来,怎么显示呢,当然还是我们的老朋友RenderTexture,将它挂载到Camera上就可以保存每一帧的图片了,我们拿来用就可以了,最终有我们的_blurPreview方法
// Capture the main camera's output into a RenderTexture and display it on the
// preview Sprite.
private _blurPreview(camera: renderer.scene.Camera) {
    const setting = this.setting;
    let w, h;
    [w, h] = [camera.window.width, camera.window.height];
    // NOTE(review): a new SpriteFrame/RenderTexture is allocated on every call
    // and never destroyed — consider caching them to avoid leaking GPU memory.
    let frame = new SpriteFrame();
    let texture = new RenderTexture();
    texture.initialize({
        name: "s",
        width: w,
        height: h,
    });
    // RenderTexture content is vertically flipped relative to UI space.
    setting.preview.node.setScale(new Vec3(1, -1, 1));
    setting.mainCamera.targetTexture = texture;
    // targetTexture only takes effect on the NEXT frame, hence the deferred
    // pickup; clear it afterwards or later frames capture a black image.
    setting.scheduleOnce(() => {
        frame.texture = texture;
        setting.preview.spriteFrame = frame;
        setting.mainCamera.targetTexture = null;
    }, 0);
}
这里强调一点为什么加了延时操作呢,因为摄像机赋值targetTexture的时候不是当前帧生效的,而是在下一帧生效,于是才有scheduleOnce的操作,完成之后要将camera的targetTexture设置为null,要不然你下一帧捕获的画面就是黑乎乎的画面。
六:shader实现
下面贴出来具体的高斯模糊shader代码:
CCEffect %{
  techniques:
  - passes:
    # pass 0 (blur-x): horizontal blur
    - vert: blur-hor-vs
      frag: blur-fs
      pass: blur-x
      depthStencilState:
        depthTest: false
        depthWrite: false
    # pass 1 (blur-y): vertical blur
    - vert: blur-vert-vs
      frag: blur-fs
      pass: blur-y
      depthStencilState:
        depthTest: false
        depthWrite: false
}%

// Horizontal-blur vertex shader: emits the center UV plus four horizontally
// offset UVs so the fragment shader can do a 5-tap Gaussian in one pass.
CCProgram blur-hor-vs %{
  precision highp float;
  #include <legacy/input-standard>
  #include <builtin/uniforms/cc-global>
  #include <common/common-define>

  uniform MyConstants {
    vec4 blurParams; // x: blur radius in texels; y/z/w unused
  };

  out vec2 v_uv;
  out vec2 v_uv1;
  out vec2 v_uv2;
  out vec2 v_uv3;
  out vec2 v_uv4;

  void main () {
    StandardVertInput In;
    CCVertInput(In);
    CC_HANDLE_GET_CLIP_FLIP(In.position.xy);
    gl_Position = In.position;
    // (Removed the no-op `gl_Position.y = gl_Position.y;` left over from a Y-flip.)
    v_uv = a_texCoord;
    // Texel size from cc_nativeSize.zw.
    vec2 texelSize = cc_nativeSize.zw;
    float blurOffsetX = blurParams.x * texelSize.x;
    v_uv1 = v_uv + vec2(blurOffsetX * 1.0, 0.0);
    v_uv2 = v_uv - vec2(blurOffsetX * 1.0, 0.0);
    v_uv3 = v_uv + vec2(blurOffsetX * 2.0, 0.0);
    v_uv4 = v_uv - vec2(blurOffsetX * 2.0, 0.0);
  }
}%

// Vertical-blur vertex shader: same as above with vertical offsets.
CCProgram blur-vert-vs %{
  precision highp float;
  #include <legacy/input-standard>
  #include <builtin/uniforms/cc-global>
  #include <common/common-define>

  uniform MyConstants {
    vec4 blurParams; // x: blur radius in texels; y/z/w unused
  };

  out vec2 v_uv;
  out vec2 v_uv1;
  out vec2 v_uv2;
  out vec2 v_uv3;
  out vec2 v_uv4;

  void main () {
    StandardVertInput In;
    CCVertInput(In);
    CC_HANDLE_GET_CLIP_FLIP(In.position.xy);
    gl_Position = In.position;
    v_uv = a_texCoord;
    vec2 texelSize = cc_nativeSize.zw;
    float blurOffsetY = blurParams.x * texelSize.y;
    v_uv1 = v_uv + vec2(0.0, blurOffsetY * 1.0);
    v_uv2 = v_uv - vec2(0.0, blurOffsetY * 1.0);
    v_uv3 = v_uv + vec2(0.0, blurOffsetY * 2.0);
    v_uv4 = v_uv - vec2(0.0, blurOffsetY * 2.0);
  }
}%

// Shared fragment shader: 5-tap Gaussian along the direction the vertex
// shader encoded into v_uv1..v_uv4.
CCProgram blur-fs %{
  precision highp float;
  #include <builtin/uniforms/cc-global>

  in vec2 v_uv;
  in vec2 v_uv1;
  in vec2 v_uv2;
  in vec2 v_uv3;
  in vec2 v_uv4;

  // Source image; bound per pass by the pipeline via setPassInput.
  #pragma rate outputResultMap pass
  uniform sampler2D outputResultMap;

  layout(location = 0) out vec4 fragColor;

  void main () {
    // Gaussian weights for center, +-1 and +-2 taps; they sum to 1.0.
    vec3 weights = vec3(0.4026, 0.2442, 0.0545);
    vec3 sum = texture(outputResultMap, v_uv).rgb * weights.x;
    sum += texture(outputResultMap, v_uv1).rgb * weights.y;
    sum += texture(outputResultMap, v_uv2).rgb * weights.y;
    sum += texture(outputResultMap, v_uv3).rgb * weights.z;
    sum += texture(outputResultMap, v_uv4).rgb * weights.z;
    fragColor = vec4(sum, 1.0);
  }
}%
希望这篇博文对你有用,加油陌生人。