Typical xr-frame Examples for WeChat Mini Programs
In my previous work I used the XR-Frame framework extensively for AR development and accumulated a number of examples and business code, covering 2D image recognition, hand-gesture recognition, glTF model loading, dynamic model loading, model animation, and more. The mini-program side is written in TypeScript, while the XR-Frame components are written in JavaScript. If you are learning XR-Frame, these examples may be useful references.
Corrections and feedback on anything I got wrong are welcome.
AR: 2D Marker (scan an image to play a video)
Overall flow
- After the AR scene has loaded, save the ar-system scene object onto the component instance (handleReady):
  <xr-scene ar-system="modes:Marker" bind:ready="handleReady">
  handleReady: function ({ detail }) { const xrScene = this.scene = detail.value; }
- Create an asset-load video resource and an asset-material resource (id: mat), pass the video texture into the material's uniforms as its base color map, and switch the camera to an AR camera:
  <xr-assets bind:progress="handleAssetsProgress" bind:loaded="handleAssetsLoaded">
    <xr-asset-load type="video-texture" asset-id="hikari" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/xr-frame-team/2dmarker/hikari-v.mp4" options="loop:true" />
    <!-- Pass the video texture to the material as its base color map; the material is then rendered onto the "mat" object. -->
    <xr-asset-material asset-id="mat" effect="simple" uniforms="u_baseColorMap: video-hikari" />
  </xr-assets>
- Create an ar-tracker and a mesh, and assign the mat material created above (the quad that shows the video) to the mesh's material property:
  <xr-ar-tracker mode="Marker" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/xr-frame-team/2dmarker/hikari.jpg" bind:ar-tracker-switch="handleTrackerSwitch">
    <!-- Once the tracker recognizes the image, the mat object (effectively the video) is shown. -->
    <xr-mesh node-id="mesh-plane" geometry="plane" material="mat" />
  </xr-ar-tracker>
  <!-- AR camera: https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/render/camera.html#AR%E7%9B%B8%E5%85%B3 -->
- Bind the tracker's tracking-state event (bind:ar-tracker-switch="handleTrackerSwitch") and use it to play or pause the video, as sketched below.
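A handler along these lines (it appears in full in the logic layer below) toggles playback based on the tracking state:

handleTrackerSwitch: function ({ detail }) {
  // true while the marker is being tracked, false once tracking is lost
  const active = detail.value;
  const video = this.scene.assets.getAsset('video-texture', 'hikari');
  active ? video.play() : video.stop();
}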
View layer
<!-- ar-system component: https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/ar/ -->
<!-- bind:ready saves the AR scene object once the AR scene has loaded -->
<xr-scene ar-system="modes:Marker" bind:ready="handleReady">
<!-- xr-assets asset system: https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/assets/ -->
<!-- handleAssetsProgress reports asset loading progress and can drive a progress bar -->
<!-- handleAssetsLoaded marks that all assets have loaded; here it is used to show the ar-tracker only after loading completes -->
<xr-assets bind:progress="handleAssetsProgress" bind:loaded="handleAssetsLoaded">
<!-- xr-asset-load:https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/assets/elements.html -->
<xr-asset-load type="video-texture" asset-id="hikari" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/xr-frame-team/2dmarker/hikari-v.mp4" options="loop:true" />
<!-- Pass the video texture to the material as its base color map; the material is then rendered onto the "mat" object. -->
<xr-asset-material asset-id="mat" effect="simple" uniforms="u_baseColorMap: video-hikari" />
</xr-assets>
<xr-node wx:if="{{loaded}}">
<!-- ar-tracker: https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/ar/tracker.html -->
<xr-ar-tracker mode="Marker" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/xr-frame-team/2dmarker/hikari.jpg" bind:ar-tracker-switch="handleTrackerSwitch">
<!-- Once the tracker recognizes the image, the mat object (effectively the video) is shown. -->
<xr-mesh node-id="mesh-plane" geometry="plane" material="mat" />
</xr-ar-tracker>
<!-- AR camera: https://developers.weixin.qq.com/miniprogram/dev/component/xr-frame/render/camera.html#AR%E7%9B%B8%E5%85%B3 -->
<xr-camera id="camera" node-id="camera" position="1 1 1" clear-color="0.925 0.925 0.925 1" background="ar" is-ar-camera />
</xr-node>
</xr-scene>
Logic layer
Component({
  behaviors: [require('../common/share-behavior').default],
  properties: {},
  data: {
    loaded: false, // show the ar-tracker (and its video) only after assets have finished loading
  },
  // properties stored directly on the component instance
  scene: null,
  // lifetimes
  lifetimes: {
    async attached() {
    },
  },
  // methods
  methods: {
    handleReady: function ({ detail }) {
      const xrScene = this.scene = detail.value;
      console.log('xr-scene', xrScene);
    },
    handleAssetsProgress: function ({ detail }) {
      // asset loading progress
      console.log('assets progress', detail.value);
    },
    handleAssetsLoaded: function ({ detail }) {
      // all assets loaded
      console.log('assets loaded', detail.value);
      this.setData({ loaded: true });
    },
    handleTrackerSwitch: function ({ detail }) {
      // fired whenever the tracker's recognition state changes
      const active = detail.value;
      // play or pause the video based on the recognition result
      const video = this.scene.assets.getAsset('video-texture', 'hikari');
      if (active) {
        video.play();
      } else {
        video.stop();
      }
    },
  },
})
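handleAssetsProgress above only logs the progress. As the comment in the view layer notes, it can also drive a progress indicator; a minimal sketch, reusing the wx.showLoading pattern from the comprehensive example at the end of this article:

handleAssetsProgress: function ({ detail }) {
  const { progress } = detail.value;
  // show loading progress to the user (sketch; any UI element works here)
  wx.showLoading({ title: `Loading ${Math.floor(progress * 100)}%` });
},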
AR: OSD Marker (object recognition)
OSD Marker produces an effect very similar to 2D Marker, and the code is largely the same.
Differences from 2D Marker
Recognition is fast, but the rendered model cannot follow the target as it moves. It is better suited to recognizing large objects from a specific angle, for example a skyscraper photographed from a fixed viewpoint. The feature is not mature yet, so use it sparingly.
In OSD (One-shot Detection) Marker mode, the tracker likewise uses the image passed via src or image (an image-type asset id, which takes priority) as the recognition feature. Unlike 2D Marker, however, this is a purely screen-space algorithm: it only affects the position and scale of the tracker's child nodes, never their rotation. It typically takes a photo of a real-world object as the recognition source and detects that object's 2D region on screen; the conversion into 3D space is handled for you, but you must ensure that the proportions of the model under the tracker match the recognition source. OSD mode works best on flat objects with distinct features, such as billboards.
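A minimal sketch of that last constraint, assuming the recognition image is roughly twice as wide as it is tall (the 2:1 ratio is an assumption, not taken from the demo; the comprehensive example at the end binds marker.width / marker.height in the same way):

<xr-ar-tracker mode="OSD" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/marker/osdmarker-test.jpg">
  <!-- Scale the plane so its proportions match the recognition image (the 2:1 ratio here is assumed). -->
  <xr-mesh geometry="plane" material="simple" scale="2 1 1" />
</xr-ar-tracker>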
Implementation
View layer
<xr-scene ar-system="modes:OSD" id="xr-scene" bind:ready="handleReady">
<xr-assets bind:progress="handleAssetsProgress" bind:loaded="handleAssetsLoaded">
<xr-asset-material asset-id="simple" effect="simple" />
<xr-asset-material asset-id="text-simple" effect="simple" />
</xr-assets>
<xr-node>
<xr-ar-tracker mode="OSD" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/marker/osdmarker-test.jpg" bind:ar-tracker-switch="handleToySwitch">
<xr-node wx:if="{{toyReady}}" rotation="0 180 0">
<xr-mesh node-id="text-wrap" position="0.9 0.4 0" rotation="90 0 0" scale="0.8 1 0.2" geometry="plane" material="simple" uniforms="u_baseColorFactor: 0.2 0.6 0.4 0.95" states="alphaMode: BLEND"></xr-mesh>
<xr-mesh node-id="text-wrap-sub" position="0.9 0.1 0" rotation="90 0 0" scale="0.8 1 0.4" geometry="plane" material="simple" uniforms="u_baseColorFactor: 0 0 0 0.95" states="alphaMode: BLEND"></xr-mesh>
<!-- Text is still in beta and incomplete: it only supports basic rendering with a standalone material and cannot be re-rendered after updates (fix in progress). -->
<xr-text node-id="text-name" position="0.7 0.36 0.01" scale="0.1 0.1 1" material="text-simple" value="牛年公仔"></xr-text>
<xr-text node-id="text-name" position="0.6 0.16 0.01" scale="0.06 0.06 1" material="text-simple" value="牛年发布的奶牛公仔"></xr-text>
<xr-text node-id="text-name" position="0.6 0.06 0.01" scale="0.06 0.06 1" material="text-simple" value="礼盒中还包含玩具盲盒"></xr-text>
</xr-node>
</xr-ar-tracker>
<xr-ar-tracker mode="OSD" src="https://mmbizwxaminiprogram-1258344707.cos.ap-guangzhou.myqcloud.com/xr-frame/demo/gz-tower/day.jpg" bind:ar-tracker-switch="handleDaySwitch">
<xr-node wx:if="{{gzDayReady}}" rotation="0 180 0">
<xr-mesh node-id="text-wrap" position="1 0.4 0" rotation="90 0 0" scale="1 1 0.2" geometry="plane" material="simple" uniforms="u_baseColorFactor: 0.2 0.6 0.4 0.95" states="alphaMode: BLEND"></xr-mesh>
<xr-mesh node-id="text-wrap-sub" position="1 0.1 0" rotation="90 0 0" scale="1 1 0.4" geometry="plane" material="simple" uniforms="u_baseColorFactor: 0 0 0 0.95" states="alphaMode: BLEND"></xr-mesh>
<xr-text node-id="text-name" position="0.85 0.36 0.01" scale="0.1 0.1 1" material="text-simple" value="广州塔"></xr-text>
<xr-text node-id="text-name" position="0.6 0.18 0.01" scale="0.05 0.05 1" material="text-simple" value="广州塔(英语:Canton Tower)"></xr-text>
<xr-text node-id="text-name" position="0.6 0.08 0.01" scale="0.05 0.05 1" material="text-simple" value="又称广州新电视塔,昵称小蛮腰"></xr-text>
<xr-text node-id="text-name" position="0.6 -0.02 0.01" scale="0.05 0.05 1" material="text-simple" value="海拔高程600米,距离珠江南岸125米"></xr-text>
</xr-node>
</xr-ar-tracker>
<xr-camera id="camera" node-id="camera" position="1 1 1" clear-color="0.925 0.925 0.925 1" far="2000" background="ar" is-ar-camera></xr-camera>
</xr-node>
<xr-node node-id="lights">
<xr-light type="ambient" color="1 1 1" intensity="0.3" />
<xr-light type="directional" rotation="30 60 0" color="1 1 1" intensity="1" />
</xr-node>
</xr-scene>
Logic layer
Component({
  behaviors: [require('../common/share-behavior').default],
  data: {
    loaded: false, // assets loaded (not used here yet)
    toyReady: false, // show the toy info panel
    gzDayReady: false, // show the Canton Tower info panel
  },
  lifetimes: {
    async attached() {
    }
  },
  methods: {
    handleReady({ detail }) {
      const xrScene = this.scene = detail.value;
      console.log('xr-scene', xrScene);
    },
    handleAssetsProgress: function ({ detail }) {
      console.log('assets progress', detail.value);
    },
    handleAssetsLoaded: function ({ detail }) {
      this.setData({ loaded: true });
    },
    handleToySwitch: function ({ detail }) {
      const active = detail.value;
      if (active) {
        this.setData({ toyReady: true });
      } else {
        this.setData({ toyReady: false });
      }
    },
    handleDaySwitch: function ({ detail }) {
      const active = detail.value;
      if (active) {
        this.setData({ gzDayReady: true });
      } else {
        this.setData({ gzDayReady: false });
      }
    },
  }
})
AR: Share (screenshot and sharing)
Overall flow
- Write a tap handler and check where the tap landed (here, the bottom-right corner of the screen) before capturing:
  handleShare(event) {
    const { clientX, clientY } = event.touches[0];
    const { frameWidth: width, frameHeight: height } = this.scene;
    if (clientY / height > 0.7 && clientX / width > 0.7) {
      this.scene.share.captureToFriends();
    }
  }
- Bind the handler once the AR scene has loaded:
  <xr-scene id="xr-scene" bind:ready="handleReady">
  handleReady({ detail }) {
    this.scene = detail.value;
    this.scene.event.add('touchstart', this.handleShare.bind(this));
  }
AR: Comprehensive image-and-video recognition example
The ar-tracker-2d page
View layer
<view class="page">
<xr-tracker-2d
disable-scroll
id="xr-frame"
width="{
{xrFrame.renderWidth}}"
height="{
{xrFrame.renderHeight}}"
style="width:{
{
xrFrame.width}}px;height:{
{
xrFrame.height}}px;display:block;"
/>
<view class="share">
<view class="share_button" bind:tap="share">分享画面</view>
</view>
</view>
Logic layer
// pages/ar-tracker-2d/index.ts
Page({
  // custom property stored on the page instance
  xrFrameInstance: null,
  // built-in Page options
data: {
xrFrame: {
width: 300,
height: 300,
renderWidth: 300,
renderHeight: 300,
}
},
onReady() {
this.xrFrameInstance = this.selectComponent("#xr-frame")
},
onLoad() {
    const { windowWidth, windowHeight, pixelRatio } = wx.getSystemInfoSync();
const xrFrame = {
width: windowWidth,
height: windowHeight,
renderWidth: windowWidth * pixelRatio,
renderHeight: windowHeight * pixelRatio,
}
    this.setData({ xrFrame });
},
share() {
this.xrFrameInstance.handleShare();
},
})
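The handleShare method called here belongs to the xr-tracker-2d component and is not included in the excerpt below; based on the Share section above, it is presumably little more than the following sketch (an assumption, not the actual component code):

// Sketch (assumption): exposed so the page's share button can trigger a capture directly.
handleShare() {
  if (this.scene) {
    this.scene.share.captureToFriends();
  }
},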
The xr-tracker-2d component
Config layer
{
"component": true,
"usingComponents": {
},
"renderer": "xr-frame"
}
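Note that the page embedding this component also has to register it under usingComponents in its own JSON config; the relative path below is a placeholder, not taken from the project:

{
  "usingComponents": {
    "xr-tracker-2d": "path/to/xr-tracker-2d/index"
  }
}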
View layer
<xr-scene id="xr-scene" ar-system="modes:Marker" bind:ready="handleReady">
<xr-assets bind:progress="handleAssetsProgress" bind:loaded="handleAssetsLoaded">
<xr-asset-load type="video-texture" asset-id="asset-video-flower" src="{
{video.src}}" options="loop:true" />
<xr-asset-material asset-id="mat" effect="simple" uniforms="u_baseColorMap: video-asset-video-flower" />
</xr-assets>
<xr-node>
<xr-ar-tracker mode="Marker" src="{
{marker.img}}" bind:ar-tracker-switch="handleTrackerSwitch">
      <xr-mesh
        wx:if="{{assetLoaded && video.loaded}}"
        node-id="mesh-plane"
        geometry="plane"
        material="mat"
        scale="{{marker.width}} 1 {{marker.height}}"
      />
</xr-ar-tracker>
<xr-camera id="camera" node-id="camera" position="1 1 1" background="ar" near="0.1" far="2000" clear-color="0.96 0.96 0.96 1" is-ar-camera />
</xr-node>
</xr-scene>
Logic layer
Component({
data: {
showLoading: false,
assetLoaded: false,
marker: {
img: "https://pic.amlab.com.cn/wechat/niao-chao-yi-shu/markerImg/haibaomarker.jpg",
width: 1,
height: 1,
},
video: {
src: "https://pic.amlab.com.cn/wechat/niao-chao-yi-shu/video/haibaomarker-video.mp4",
loaded: false,
},
},
lifetimes: {
},
methods: {
    // events
    handleReady({ detail }) {
      this.scene = detail.value;
      this.videoHandler();
    },
    handleAssetsProgress: function ({ detail }) {
      const { value: { progress } } = detail;
      // asset loading progress
      wx.showLoading({
        title: `资源加载中 ${progress * 100}%`,
      });
},
handleAssetsLoaded: function () {
this.setData({
assetLoaded: true,
});
wx.hideLoading(