概念
将真实场景融入到虚拟场景中的技术称为增强虚拟环境,主要研究视频与虚拟场景的关系。
视频融合技术是虚拟现实技术的一个分支,也可以说是虚拟现实的一个发展阶段。
视频融合技术指将一个或多个由视频采集设备采集的关于某场景或模型的图像序列视频与一个与之相关的虚拟场景加以融合,以生成一个新的关于此场景的虚拟场景或模型。
本文实现监控视频数据导入到倾斜摄影模型中,摄像头获取的实时视频流直接在三维场景中叠加。
目前Web端的三维GIS视频融合技术,可以在Cesium、Google Earth等三维平台上实现,
作者只在esri比赛官网上看到过这种效果,但并没有找到任何相关的技术思路。
esri2019年三维组比赛形式举例:http://contest.geoscene.cn/html/jingsaifenzu/sanweijianmoyushejizu/
步骤
首先ArcGIS API for JavaScript中支持第三方3D引擎(Three.js),由 Three.js创建外部渲染模型,
利用HTML5中video的特性,以动态纹理的方式实时动态的显示监控视频,
然后将外部渲染模型以ExternalRenderers接口导入到三维场景中。
此时视频的特征点与虚拟场景的特征点存在些许差距,需要用户手动微调,从而获得最佳视角下的最好视觉效果。
ExternalRenderersAPI参考:https://developers.arcgis.com/javascript/latest/api-reference/esri-views-3d-externalRenderers.html
一个很重要的demo:https://developers.arcgis.com/javascript/latest/sample-code/scene-external-renderer-threejs/
效果
代码
没想到从一个大项目中剥离出一个小功能还要花这么久。
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0">
<meta name="description" content="">
<meta name="author" content="ThemeBucket">
<link rel="shortcut icon" href="#" type="image/png">
<title>北斗园区</title>
<link rel="stylesheet" href="api/arcgis_js_v414_api/arcgis_js_api/library/4.14/esri/themes/dark-blue/main.css" />
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="js/html5shiv.js"></script>
<script src="js/respond.min.js"></script>
<![endif]-->
<style>
html,
body,
#ViewDiv
{
padding: 0;
margin: 0;
height: 100%;
width: 100%;
}
/* The video element is only a texture source; collapse it to zero size. */
#video {
position: absolute;
width: 0;
height: 0;
}
</style>
</head>
<body class="sticky-header">
<!-- BUG FIX: the <video> element was placed between </head> and <body>,
     which is invalid HTML; it now lives inside <body>. It feeds the
     three.js VideoTexture, so it stays in the DOM but is zero-sized. -->
<video id="video" autoplay loop muted controls>VID</video>
<!-- BUG FIX: removed a stray '>' that followed </div> and rendered as text. -->
<div id="ViewDiv"></div>
<!-- three.js library -->
<script src="api/libs/threejs/three104.min.js"></script>
<script src="api/libs/threejs/GPUParticleSystem.js"></script>
<!-- ArcGIS API library -->
<!--data config end-->
<!--js api start-->
<!--<script src="api/arcgis_js_v416_api/arcgis_js_api/library/4.16/init.js"></script>
<script src="js/scripts.js"></script>
-->
<!--common scripts for all pages-->
<script src="api/arcgis_js_v414_api/arcgis_js_api/library/4.14/init.js"></script>
</body>
</html>
var view;
var scene, webscene;
var dynamicCylinderLayer;
var videoCurrent,videoFlag;
<script type="text/javascript">
require([
"esri/WebScene",
"esri/config",
"esri/views/SceneView",
"esri/layers/WebTileLayer",
"esri/layers/KMLLayer",
"esri/layers/BaseDynamicLayer",
"esri/views/3d/externalRenderers",
"esri/geometry/SpatialReference",
"dojo/dom",
"dojo/on",
], function (
WebScene,
esriConfig,
SceneView,
WebTileLayer,
KMLLayer,
BaseDynamicLayer,
externalRenderers,
SpatialReference,
dom,
on,
) {
var x=0, y=0,z=0;
// 创建一副空的三维场景
// var scene = new WebScene({});
esriConfig.portalUrl="https://trail.arcgisonline.cn/portal";
scene = new WebScene({
portalItem: { // 自动创建一个PortalItem()
id: "c2ca6bd95b104890905f321d6db08174"
} });
webscene=scene;
view = new SceneView({
container: "ViewDiv",
map: scene,
//viewingMode: "global",
camera: {
position: [119.19470299543177, 36.69956091981171, 318.9954051710665 ],
heading: 359.3912760953511,
tilt: 48.94296290684675
},
environment: {
atmosphere: {
quality: "high"
},
lighting: {
date: new Date(),
directShadowsEnabled: true
}
},
});
// 开始定义一个自定义的外部渲染器Create our custom external renderer
var myFirstExternalRenderer = {
  renderer: null, // three.js renderer
  camera: null,   // three.js camera
  scene: null,    // three.js scene
  ambient: null,  // three.js ambient light source
  sun: null,      // three.js directional ("sun") light source
  /**
   * Setup function, called once by the ArcGIS JS API when the renderer is
   * registered. Creates the three.js renderer/scene and a two-triangle quad
   * textured with the live <video> element.
   */
  setup: function (context) {
    this.renderer = new THREE.WebGLRenderer({
      context: context.gl,          // share the SceneView's WebGL context
      premultipliedAlpha: false
    });
    this.renderer.setPixelRatio(window.devicePixelRatio);
    this.renderer.setViewport(0, 0, view.width, view.height);
    // Prevent three.js from clearing the buffers provided by the ArcGIS JS API.
    this.renderer.autoClearDepth = false;   // keep the depth buffer
    this.renderer.autoClearStencil = false; // keep the stencil buffer
    this.renderer.autoClearColor = false;   // keep the color buffer
    // The ArcGIS JS API renders to its own offscreen buffers; whenever
    // three.js unbinds a render target, rebind the API's target.
    var originalSetRenderTarget = this.renderer.setRenderTarget.bind(this.renderer);
    this.renderer.setRenderTarget = function (target) {
      originalSetRenderTarget(target);
      if (target == null) {
        context.bindRenderTarget();
      }
    };
    // basic setup end
    this.scene = new THREE.Scene();
    this.camera = new THREE.PerspectiveCamera(); // perspective-projection camera
    // scene lighting
    this.ambient = new THREE.AmbientLight(0xffffff, 0.5);
    this.scene.add(this.ambient);
    this.sun = new THREE.DirectionalLight(0xffffff, 0.5);
    this.scene.add(this.sun);
    // Four corners (lon, lat, elevation-in-meters) of the quad the video is
    // projected onto.
    // BUG FIX: `zgeoData` was assigned without `var` (implicit global) and the
    // corner labels used HTML `<!-- -->` comments inside JavaScript.
    var zgeoData = [
      [119.194612, 36.703352, 37.5], // corner 2
      [119.194656, 36.703288, 38],   // corner 3
      [119.194656, 36.703288, 43.3], // corner 4
      [119.194610, 36.703349, 43.5]  // corner 1
    ];
    var znum = zgeoData.length;
    var zx = [];
    var zy = [];
    var zz = [];
    for (var i = 0; i < znum; i++) {
      // Transform each geographic coordinate into the view's render
      // coordinate system; fromArray fills the 4x4 matrix from the result.
      var transform = new THREE.Matrix4();
      transform.fromArray(
        externalRenderers.renderCoordinateTransformAt(
          view, zgeoData[i], SpatialReference.WGS84, new Array(16)
        )
      );
      // Elements 12-14 are the translation part = render-space position.
      zx[i] = transform.elements[12];
      zy[i] = transform.elements[13];
      zz[i] = transform.elements[14];
    }
    // Build a quad (two triangles) from the four corner vertices.
    var geometry = new THREE.Geometry();
    var p = [];
    for (var j = 0; j < znum; j++) {
      p[j] = new THREE.Vector3(zx[j], zy[j], zz[j]);
    }
    geometry.vertices.push(p[0], p[1], p[2], p[3]);
    var normal = new THREE.Vector3(0, 0, 1); // face normal
    var face0 = new THREE.Face3(0, 1, 2, normal); // triangle 1 from vertices 0,1,2
    var face1 = new THREE.Face3(0, 2, 3, normal); // triangle 2 from vertices 0,2,3
    geometry.faces.push(face0, face1);
    // UV coordinates so the video texture spans the whole quad.
    var t0 = new THREE.Vector2(0, 0);
    var t1 = new THREE.Vector2(1, 0);
    var t2 = new THREE.Vector2(1, 1);
    var t3 = new THREE.Vector2(0, 1);
    geometry.faceVertexUvs[0].push([t0, t1, t2], [t0, t2, t3]);
    var material = new THREE.MeshBasicMaterial({ color: 0xff0000, side: THREE.DoubleSide });
    // create the mesh and add it to the scene
    var meshObj = new THREE.Mesh(geometry, material);
    this.scene.add(meshObj);
    // Swap the placeholder material for a live video texture driven by the
    // HTML5 <video> element (dynamic texture).
    var video = document.getElementById('video');
    var texture = new THREE.VideoTexture(video);
    var videoMaterial = new THREE.MeshBasicMaterial({ map: texture });
    meshObj.material = videoMaterial;
    // cleanup after ourselves
    context.resetWebGLState();
  },
  /**
   * Render function, called every frame by the ArcGIS JS API. Syncs the
   * three.js camera and lights with the SceneView, then draws the overlay.
   */
  render: function (context) {
    // update camera parameters
    var cam = context.camera;
    this.camera.position.set(cam.eye[0], cam.eye[1], cam.eye[2]);
    this.camera.up.set(cam.up[0], cam.up[1], cam.up[2]);
    this.camera.lookAt(
      new THREE.Vector3(cam.center[0], cam.center[1], cam.center[2])
    );
    // The projection matrix can be copied directly.
    this.camera.projectionMatrix.fromArray(cam.projectionMatrix);
    // Update lighting from the view's sun light.
    // BUG FIX: the original had bare `/` lines here, which are JavaScript
    // syntax errors (parsed as the start of a regex literal).
    // view.environment.lighting.date = Date.now();
    var l = context.sunLight;
    this.sun.position.set(l.direction[0], l.direction[1], l.direction[2]);
    this.sun.intensity = l.diffuse.intensity;
    this.sun.color = new THREE.Color(
      l.diffuse.color[0], l.diffuse.color[1], l.diffuse.color[2]
    );
    this.ambient.intensity = l.ambient.intensity;
    this.ambient.color = new THREE.Color(
      l.ambient.color[0], l.ambient.color[1], l.ambient.color[2]
    );
    // draw the scene
    this.renderer.state.reset();
    this.renderer.render(this.scene, this.camera);
    // Immediately request a re-render so the video texture keeps updating.
    externalRenderers.requestRender(view);
    // cleanup
    context.resetWebGLState();
  },
};
var videoSrc = "video/9hao.mp4";//新的视频播放地址
var cam = view.camera.clone();
document.getElementById("video").src=videoSrc ;
document.getElementById("video").play();
cam.heading= 73.967579887275, // face due east
cam.tilt= 76.59737032861595, // looking from a bird's eye view
cam.position = {
latitude: 36.70328541718525,
longitude: 119.194463268771,
z: 43.54298318736255 // altitude in meters
};
view.goTo(cam);
externalRenderers.add(view, myFirstExternalRenderer);
videoFlag=true;
});
</script>
声明:zqk原创,转载请注明出处。