背景:3d场景可以通过render pass渲染到一张纹理上,然后在2d图片上展示。反过来,无法实现,本次探究了可行性。
1. 创建插件工程
先创建一个插件工程 Node2Dto3D。创建插件工程的步骤可参考《使用 Kanzi 开发仪表 HMI 插件》一文。
2. 实现插件功能
打开vs,修改node2dto3d.hpp
#ifndef NODE2DTO3D_HPP
#define NODE2DTO3D_HPP

#include <kanzi/kanzi.hpp>

class Node2Dto3D;
typedef kanzi::shared_ptr<Node2Dto3D> Node2Dto3DSharedPtr;

// 2D node that renders its child content into an offscreen texture, so that
// 3D materials (e.g. on a Plane/Box inside a Scene) can sample 2D UI content.
class NODE2DTO3D_API Node2Dto3D : public kanzi::Node2D
{
public:

    // Output property: the texture the children are rendered into.
    // 3D material texture slots (and test images) bind to this property.
    static kanzi::PropertyType<kanzi::ResourceSharedPtr> RenderTextureProperty;

    KZ_METACLASS_BEGIN(Node2Dto3D, Node2D, "Node2Dto3D")
        KZ_METACLASS_PROPERTY_TYPE(RenderTextureProperty)
    KZ_METACLASS_END()

    // Creates a Node2Dto3D.
    static Node2Dto3DSharedPtr create(kanzi::Domain* domain, kanzi::string_view name);

    virtual void onAttached() KZ_OVERRIDE;
    virtual void onDetached() KZ_OVERRIDE;
    void onNodePropertyChanged(kanzi::AbstractPropertyType propertyType, kanzi::PropertyNotificationReason reason) KZ_OVERRIDE;
    void renderOverride(kanzi::Renderer3D& renderer, kanzi::CompositionStack& compositionStack) KZ_OVERRIDE;

protected:

    // Constructor.
    explicit Node2Dto3D(kanzi::Domain* domain, kanzi::string_view name);

    void initialize();

    // (Re)creates the render-target texture when the node size changes.
    void createTexture();

    // Timer callback that re-publishes the texture during the first frames.
    void onTimeOutEventHandler();

private:

    // Native framebuffer handle of m_texture; -1 until the texture exists.
    int m_framebufferID = -1;
    kanzi::FramebufferSharedPtr m_framebuffer;
    // Default-constructed (empty) shared_ptr; do not initialize smart or raw
    // pointers with NULL in a C++11 codebase — use nullptr / default state.
    kanzi::TextureSharedPtr m_texture;
    kanzi::Renderer* m_renderer = nullptr;
    kanzi::Renderer3D* m_renderer3d = nullptr;
    int m_width = 0;
    int m_height = 0;
    kanzi::TimerSubscriptionToken timerid;
    // Counts rendered frames; the timer only re-publishes while this is small.
    unsigned int m_counter = 0;
};

#endif
修改node2dto3d.cpp
#include "node2dto3d.hpp"
using namespace kanzi;
Node2Dto3DSharedPtr Node2Dto3D::create(Domain* domain, string_view name)
{
    // Two-phase construction: the constructor stays minimal, and
    // initialize() runs once the shared_ptr owns the instance.
    Node2Dto3DSharedPtr node(new Node2Dto3D(domain, name));
    node->initialize();
    return node;
}
// Called when the node is attached to the scene graph.
// Starts a 20 ms repeating timer that re-publishes the render texture, so the
// bound image is not black before the first renderOverride() has run.
void Node2Dto3D::onAttached()
{
kzLogDebug(("onAttached~"));
Node2D::onAttached();
// Reset the frame counter so the timer pushes the texture again after a
// detach/attach cycle.
m_counter = 0;
timerid = addTimerHandler(this->getMessageDispatcher(), kanzi::chrono::milliseconds(20), KZU_TIMER_MESSAGE_MODE_REPEAT, bind(&Node2Dto3D::onTimeOutEventHandler, this));
}
// Called when the node is detached from the scene graph.
// Cancels the repeating timer registered in onAttached().
void Node2Dto3D::onDetached()
{
Node2D::onDetached();
removeTimerHandler(this->getMessageDispatcher(), timerid);
}
// Property-change hook. Currently only a placeholder for reacting to
// RenderTextureProperty updates; all real handling is in the base class.
void Node2Dto3D::onNodePropertyChanged(kanzi::AbstractPropertyType propertyType, kanzi::PropertyNotificationReason reason)
{
    if (propertyType == RenderTextureProperty)
    {
        // Intentionally empty: kept as a debugging hook.
        //kzLogDebug(("onNodePropertyChanged~"));
    }

    Node2D::onNodePropertyChanged(propertyType, reason);
}
// Second phase of construction, invoked by create() after the shared_ptr
// owns the instance.
void Node2Dto3D::initialize()
{
// Initialize base class.
Node2D::initialize();
}
// (Re)creates the offscreen render-target texture whenever the node's
// width/height properties change. Updates m_texture, m_framebufferID,
// m_width and m_height as side effects.
void Node2Dto3D::createTexture() {
    int w = getProperty(Node::WidthProperty);
    int h = getProperty(Node::HeightProperty);

    // NOTE(review): function-local statics are shared by ALL Node2Dto3D
    // instances; with more than one instance in the scene these should
    // become member variables.
    static int lastW = 0;
    static int lastH = 0;
    if (w == lastW && h == lastH) {
        return; // Size unchanged — keep the existing texture.
    }
    lastW = w;
    lastH = h;

    // Fall back to a 100x100 target while layout has not produced a size yet.
    m_width = w <= 0 ? 100 : w;
    m_height = h <= 0 ? 100 : h;
    kzLogDebug(("createTexture=,w={},h={}", w, h));

    kanzi::Texture::CreateInfoNode2DRenderTarget createInfo1(m_width, m_height, kanzi::GraphicsFormatR8G8B8A8_UNORM);

    // Release the previous texture. Do NOT "delete m_texture.get()" first:
    // the shared_ptr owns the object, and a manual delete followed by
    // reset() double-frees it.
    m_texture.reset();

    m_texture = kanzi::Texture::create(getDomain(), createInfo1, "color target 1");
    m_framebufferID = m_texture->getNativeFramebufferHandle();
    kzLogDebug(("m_framebufferID={}", m_framebufferID));
}
// Timer tick: during the first frames, keep pushing the texture to the
// output property so consumers see content even before anything changes
// on screen (renderOverride only runs when the scene is redrawn).
void Node2Dto3D::onTimeOutEventHandler()
{
    if (m_counter >= 10) {
        return; // Enough frames rendered; the binding is established.
    }
    setProperty(RenderTextureProperty, m_texture);
}
// Constructor: caches the 3D renderer and its core renderer from the domain.
Node2Dto3D::Node2Dto3D(kanzi::Domain* domain, kanzi::string_view name) :
kanzi::Node2D(domain, name)
{
kzLogDebug(("Node2Dto3D~"));
// m_renderer3d is null-initialized in the header, so this guard is always
// taken here; kept defensively.
if (!m_renderer3d) {
m_renderer3d = getDomain()->getRenderer3D();
m_renderer = m_renderer3d->getCoreRenderer();
}
}
// Renders this node's children into the offscreen texture's framebuffer and
// publishes the texture through RenderTextureProperty every frame.
void Node2Dto3D::renderOverride(Renderer3D& renderer, CompositionStack& compositionStack)
{
    // Make sure the offscreen target matches the current node size.
    createTexture();

    // Nothing to render into until the texture (and its framebuffer) exists.
    // Checking BEFORE touching the viewport avoids leaving the renderer in a
    // half-configured state on early exit (the original mutated the viewport
    // first and then bailed out).
    if (m_framebufferID == -1) {
        return;
    }

    // Remember the caller's viewport so it can be restored afterwards.
    ViewportRectangle previousViewport = m_renderer->getViewport();

    // Viewport origin is the bottom-left corner; render the full target.
    ViewportRectangle v = previousViewport;
    v.setX(0);
    v.setY(0);
    v.setWidth(m_width);
    v.setHeight(m_height);
    m_renderer->setViewport(v);

    // Redirect rendering into the texture's framebuffer and draw the children
    // with an identity transform so they fill the target.
    m_renderer->bindFramebuffer(m_framebufferID);
    m_renderer->setDefaultFramebuffer(m_framebufferID);
    m_renderer->clear();
    Matrix3x3 baseTransform = Matrix3x3::createIdentity();
    renderChildren(renderer, compositionStack, baseTransform);

    // NOTE(review): assumes the on-screen framebuffer is handle 0 — confirm
    // on targets where the system framebuffer handle differs.
    m_renderer->setDefaultFramebuffer(0);
    m_renderer->bindFramebuffer(0);
    m_renderer->setViewport(previousViewport);

    // Publish the freshly rendered texture for 3D materials to sample.
    setProperty(RenderTextureProperty, m_texture);
    m_counter++;
}
// Definition of the editor-visible texture output property.
// Default value is an empty ResourceSharedPtr; the editor metadata exposes it
// under the "Node2Dto3D" category with a texture value provider so material
// texture slots can bind to it in Kanzi Studio.
kanzi::PropertyType<ResourceSharedPtr> Node2Dto3D::RenderTextureProperty(kzMakeFixedString("Node2Dto3D.RenderTextureProperty"), ResourceSharedPtr(), 0, false,
KZ_DECLARE_EDITOR_METADATA
(
metadata.category = "Node2Dto3D";
metadata.displayName = "RenderTextureProperty";
metadata.tooltip = "RenderTextureProperty.";
metadata.valueProvider = "ProjectObject:Texture";
metadata.host = "Node2Dto3D:auto";
));
主要实现:
- 通过定时器实现初次显示画面(因为画面没有变化时不会触发 renderOverride,也就不会输出 RenderTextureProperty,导致图片为黑色)。
- createTexture 在节点宽高变化时重新创建纹理。
- renderOverride 会实时把子节点内容绘制到纹理对应的帧缓存里。
3. 使用插件
在插件节点下创建测试图片和测试按钮
- 节点宽高为 512。
- 屏幕高度为 720,减去节点高度后需要把 Y 坐标设置为 208,因为 viewport 坐标以左下角为原点。
- 在 Scene 下创建 Plane 和 Box,并把材质贴图绑定到 RenderTextureProperty。
- 为了方便测试,另外创建一张图片并绑定 RenderTextureProperty。
4. 优化
贴图有锯齿,在模型材质里使用高斯模糊优化锯齿
VertexPhongTextured_blur.frag.glsl
precision mediump float;
// Blend factor applied to the final fragment color.
uniform lowp float BlendIntensity;
// NOTE(review): Ambient, vNormal and vViewDirection are declared but unused
// in this fragment shader; presumably kept for interface compatibility with
// the vertex shader — confirm before removing.
uniform lowp vec4 Ambient;
varying lowp vec3 vNormal;
varying mediump vec3 vViewDirection;
// Size of the bound texture in pixels (Kanzi-provided uniform).
uniform mediump vec2 kzTextureSize0;
// Blur direction: typically (1,0) horizontal or (0,1) vertical.
uniform mediump vec2 BlurDirection;
// Blur radius in texels.
uniform lowp float BlurRadius;
varying mediump vec2 vTexCoord;
uniform sampler2D Texture;
// Applies a 9-tap one-dimensional Gaussian blur to the bound Texture.
// coord: texture coordinate of the fragment being shaded.
// dir:   blur direction, typically (1,0) horizontal or (0,1) vertical.
vec4 gaussianBlur(mediump vec2 coord, lowp vec2 dir)
{
    // One-dimensional Gaussian kernel with 9 samples (weights sum to ~1).
    // GLSL ES 1.00 has no constant array initializers, hence element-wise
    // assignment.
    float GAUSSIAN_KERNEL[9];
    GAUSSIAN_KERNEL[0] = 0.028532;
    GAUSSIAN_KERNEL[1] = 0.067234;
    GAUSSIAN_KERNEL[2] = 0.124009;
    GAUSSIAN_KERNEL[3] = 0.179044;
    GAUSSIAN_KERNEL[4] = 0.20236;
    GAUSSIAN_KERNEL[5] = 0.179044;
    GAUSSIAN_KERNEL[6] = 0.124009;
    GAUSSIAN_KERNEL[7] = 0.067234;
    GAUSSIAN_KERNEL[8] = 0.028532;

    // Size of one texel in normalized texture coordinates.
    vec2 texel = 1.0 / kzTextureSize0;
    // Per-tap offset, hoisted out of the loop: direction * texel * radius.
    // (Named tapStep to avoid shadowing the GLSL built-in step().)
    mediump vec2 tapStep = dir * texel * BlurRadius;

    vec4 sum = vec4(0.0);
    // Sample the texture 9 times, centered on tap 4.
    // The sample coordinate no longer shadows the 'coord' parameter, which
    // the original inner declaration did.
    for (int i = 0; i < 9; i++)
    {
        float pixelOffset = float(i) - 4.0;
        mediump vec2 sampleCoord = coord + pixelOffset * tapStep;
#if KANZI_SHADER_USE_BASECOLOR_TEXTURE
        sum += texture2D(Texture, sampleCoord) * GAUSSIAN_KERNEL[i];
#endif
    }
    // NOTE(review): if KANZI_SHADER_USE_BASECOLOR_TEXTURE is not defined the
    // sum stays vec4(0.0) and the output is black — confirm the material
    // always enables the base-color texture flag.
    return sum;
}
void main()
{
    // The blur pass already fetches the base color for every tap, so the
    // previous direct texture2D(Texture, vTexCoord) read here was dead code
    // costing one extra texture fetch per fragment; it has been removed.
    //gl_FragColor = texture2D(Texture, vTexCoord) * BlendIntensity; // unblurred variant for debugging
    gl_FragColor = gaussianBlur(vTexCoord, BlurDirection) * BlendIntensity;
}