itowns 2.42.1-next.20 → 2.42.1-next.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -69,7 +69,7 @@ export const deprecatedFeature2MeshOptions = options => {
  };
  export const deprecatedC3DEngineWebGLOptions = options => {
      if (options.isWebGL2 === false) {
-         console.warn('WebGL1 support (isWebGL2=false) is deprecated and will be removed in iTowns 2.43. This follows its deprecation by three.js. If you are impacted by this change, please discuss in the following issue: https://github.com/iTowns/itowns/issues/2152.');
+         console.error('WebGL1 support ended in 2.43.0. Falling-back to the WebGL2 renderer.');
      }
  };
  export default {};
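For downstream code this turns a soft deprecation into a hard fallback: the flag is no longer honoured, only reported. A minimal self-contained sketch of the new behaviour (a restatement of the helper above; its module path is not shown in this hunk):

const deprecatedC3DEngineWebGLOptions = (options) => {
    if (options.isWebGL2 === false) {
        console.error('WebGL1 support ended in 2.43.0. Falling-back to the WebGL2 renderer.');
    }
};

// A caller that used to opt out of WebGL2 now only triggers the error message;
// the engine proceeds with THREE.WebGLRenderer regardless (see the c3DEngine hunks below).
deprecatedC3DEngineWebGLOptions({ isWebGL2: false });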
@@ -74,14 +74,6 @@ class GlobeView extends View {
  * }
  * var view = new itowns.GlobeView(viewerDiv, placement);
  *
- * @example <caption><b>Enable WebGl 1.0 instead of WebGl 2.0.</b></caption>
- * var viewerDiv = document.getElementById('viewerDiv');
- * const placement = {
- *     coord: new itowns.Coordinates('EPSG:4326', 2.351323, 48.856712),
- *     range: 25000000,
- * }
- * var view = new itowns.GlobeView(viewerDiv, placement, { renderer: { isWebGL2: false } });
- *
  * @param {HTMLDivElement} viewerDiv - Where to attach the view and display it
  * in the DOM.
  * @param {CameraTransformOptions|Extent} placement - An object to place view
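With that @example removed, the only documented way to build a globe view is the plain constructor shown in the example kept above; assembled, the hedged minimal version reads:

var viewerDiv = document.getElementById('viewerDiv');
const placement = {
    coord: new itowns.Coordinates('EPSG:4326', 2.351323, 48.856712),
    range: 25000000,
};
// No renderer.isWebGL2 switch any more: the view is always backed by a WebGL2 renderer.
var view = new itowns.GlobeView(viewerDiv, placement);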
@@ -8,11 +8,6 @@ class PlanarView extends View {
  * @constructor
  * @extends View
  *
- * @example <caption><b>Enable WebGl 1.0 instead of WebGl 2.0.</b></caption>
- * var viewerDiv = document.getElementById('viewerDiv');
- * const extent = new Extent('EPSG:3946', 1837816.94334, 1847692.32501, 5170036.4587, 5178412.82698);
- * var view = new itowns.PlanarView(viewerDiv, extent, { renderer: { isWebGL2: false } });
- *
  * @example <caption><b>Instance with placement on the ground.</b></caption>
  * var viewerDiv = document.getElementById('viewerDiv');
  * const extent = new Extent('EPSG:3946', 1837816.94334, 1847692.32501, 5170036.4587, 5178412.82698);
@@ -55,12 +55,12 @@ export default function newTileGeometry(builder, params) {
      geometry.dispose = () => {
          geometry._count--;
          if (geometry._count <= 0) {
-             // To avoid remove index buffer and attribute buffer uv_0
+             // To avoid remove index buffer and attribute buffer uv
              // error un-bound buffer in webgl with VAO rendering.
              // Could be removed if the attribute buffer deleting is
              // taken into account in the buffer binding state (in THREE.WebGLBindingStates code).
              geometry.index = null;
-             delete geometry.attributes.uv_0;
+             delete geometry.attributes.uv;
              THREE.BufferGeometry.prototype.dispose.call(geometry);
              cacheTile.delete(south, params.level, bufferKey);
          }
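The comment explains why dispose() detaches the index and the (renamed) uv attribute before the real dispose: tile geometries are cached, shared and reference-counted, and deleting bound buffers under three's VAO binding state would leave dangling bindings. A hedged sketch of that pattern outside iTowns (only _count and the cache-eviction idea come from the hunk; the helper name and cache are illustrative):

import * as THREE from 'three';

// Illustrative ref-counted shared geometry: only the last user really frees it,
// and index/uv are detached first so the WebGL binding state stays consistent.
function acquireSharedGeometry(cache, key, build) {
    let geometry = cache.get(key);
    if (!geometry) {
        geometry = build();
        geometry._count = 0;
        geometry.dispose = () => {
            geometry._count--;
            if (geometry._count <= 0) {
                geometry.index = null;
                delete geometry.attributes.uv;
                THREE.BufferGeometry.prototype.dispose.call(geometry);
                cache.delete(key);
            }
        };
        cache.set(key, geometry);
    }
    geometry._count++;
    return geometry;
}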
@@ -9,7 +9,7 @@ export default function computeBuffers(params) {
      index: null,
      position: null,
      normal: null,
-     // 2 UV set per tile: wgs84 (uv_0) and pm (uv_1)
+     // 2 UV set per tile: wgs84 (uv[0]) and pm (uv[1])
      // - wgs84: 1 texture per tile because tiles are using wgs84 projection
      // - pm: use multiple textures per tile.
      // +-------------------------+
@@ -20,10 +20,6 @@ export default {
      return logDepthBufferSupported;
  },
  isFirefox,
- isInternetExplorer() {
-     const internetExplorer = false || !!document.documentMode;
-     return internetExplorer;
- },
  getMaxTextureUnitsCount() {
      return maxTexturesUnits;
  },
@@ -23,7 +23,8 @@ class TileGeometry extends THREE.BufferGeometry {
      this.setIndex(buffers.index);
      this.setAttribute('position', buffers.position);
      this.setAttribute('normal', buffers.normal);
-     for (let i = 0; i < buffers.uvs.length; i++) {
+     this.setAttribute('uv', buffers.uvs[0]);
+     for (let i = 1; i < buffers.uvs.length; i++) {
          this.setAttribute(`uv_${i}`, buffers.uvs[i]);
      }
      this.computeBoundingBox();
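Taken together with the uv_0 renames above, the first UV set now lives under three.js' conventional attribute name while extra sets keep their numbered names. A hedged stand-alone sketch of the resulting attribute layout (synthetic data; itemSize 1 for the second set mirrors the `attribute float uv_1` declaration in TileVS below):

import * as THREE from 'three';

const geometry = new THREE.BufferGeometry();
const uvs = [
    new THREE.BufferAttribute(new Float32Array([0, 0, 1, 0, 1, 1]), 2), // wgs84 set
    new THREE.BufferAttribute(new Float32Array([0, 0, 0]), 1),          // pm set
];
geometry.setAttribute('uv', uvs[0]);          // previously 'uv_0'
for (let i = 1; i < uvs.length; i++) {
    geometry.setAttribute(`uv_${i}`, uvs[i]); // additional sets keep numbered names
}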
package/lib/Core/View.js CHANGED
@@ -142,7 +142,6 @@ class View extends THREE.EventDispatcher {
  * a default one will be constructed. In this case, if options.renderer is an object, it will be used to
  * configure the renderer (see {@link c3DEngine}. If not present, a new &lt;canvas> will be created and
  * added to viewerDiv (mutually exclusive with mainLoop)
- * @param {boolean} [options.renderer.isWebGL2=true] - enable webgl 2.0 for THREE.js.
  * @param {boolean|Object} [options.webXR=false] - enable webxr button to switch on VR visualization.
  * @param {number} [options.webXR.scale=1.0] - apply webxr scale tranformation.
  * @param {?Scene} [options.scene3D] - [THREE.Scene](https://threejs.org/docs/#api/en/scenes/Scene) instance to use, otherwise a default one will be constructed
@@ -325,10 +324,6 @@ class View extends THREE.EventDispatcher {
  } else {
  return layer._reject(new Error(`Cant add color layer ${layer.id}: the maximum layer is reached`));
  }
- } else if (layer.isElevationLayer && layer.source.format == 'image/x-bil;bits=32') {
- var _this$renderer, _this$renderer$capabi;
- layer.source.networkOptions.isWebGL2 = (_this$renderer = this.renderer) === null || _this$renderer === void 0 ? void 0 : (_this$renderer$capabi = _this$renderer.capabilities) === null || _this$renderer$capabi === void 0 ? void 0 : _this$renderer$capabi.isWebGL2;
- parentLayer.attach(layer);
  } else {
  parentLayer.attach(layer);
  }
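The removed branch is Babel output for optional chaining; de-transpiled it read roughly as below (illustrative reconstruction, not shipped code). With the parser always producing R32F textures (next file), the flag no longer needs to be forwarded to the source:

} else if (layer.isElevationLayer && layer.source.format == 'image/x-bil;bits=32') {
    layer.source.networkOptions.isWebGL2 = this.renderer?.capabilities?.isWebGL2;
    parentLayer.attach(layer);
}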
@@ -1,4 +1,4 @@
- import { TextureLoader, DataTexture, RedFormat, FloatType, AlphaFormat } from 'three';
+ import { TextureLoader, DataTexture, RedFormat, FloatType } from 'three';
  const textureLoader = new TextureLoader();
  const SIZE_TEXTURE_TILE = 256;
  function checkResponse(response) {
@@ -16,16 +16,10 @@ const arrayBuffer = function (url) {
      });
  };
  function getTextureFloat(buffer) {
-     let isWebGL2 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true;
-     if (isWebGL2) {
-         const texture = new DataTexture(buffer, SIZE_TEXTURE_TILE, SIZE_TEXTURE_TILE, RedFormat, FloatType);
-         texture.internalFormat = 'R32F';
-         texture.needsUpdate = true;
-         return texture;
-     } else {
-         const texture = new DataTexture(buffer, SIZE_TEXTURE_TILE, SIZE_TEXTURE_TILE, AlphaFormat, FloatType);
-         return texture;
-     }
+     const texture = new DataTexture(buffer, SIZE_TEXTURE_TILE, SIZE_TEXTURE_TILE, RedFormat, FloatType);
+     texture.internalFormat = 'R32F';
+     texture.needsUpdate = true;
+     return texture;
  }

  /**
@@ -136,7 +130,7 @@ export default {
      let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
      return arrayBuffer(url, options).then(buffer => {
          const floatArray = new Float32Array(buffer);
-         const texture = getTextureFloat(floatArray, options.isWebGL2);
+         const texture = getTextureFloat(floatArray);
          return texture;
      });
  },
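Without the WebGL1 branch, 32-bit float rasters always end up in a single-channel R32F texture. A hedged stand-alone sketch of the same construction with plain three.js (256 mirrors SIZE_TEXTURE_TILE above; the data is synthetic):

import { DataTexture, RedFormat, FloatType } from 'three';

const SIZE = 256;
// Synthetic elevation values; a real tile would come from the decoded BIL buffer.
const data = new Float32Array(SIZE * SIZE).fill(42.0);

const texture = new DataTexture(data, SIZE, SIZE, RedFormat, FloatType);
texture.internalFormat = 'R32F'; // WebGL2 sized format, sampled as .r in the elevation chunk
texture.needsUpdate = true;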
@@ -1,8 +1,8 @@
  import * as THREE from 'three';
  /* babel-plugin-inline-import './Shader/TileVS.glsl' */
- const TileVS = "#include <itowns/WebGL2_pars_vertex>\n#include <itowns/precision_qualifier>\n#include <common>\n#include <itowns/project_pars_vertex>\n#include <itowns/elevation_pars_vertex>\n#include <logdepthbuf_pars_vertex>\nattribute vec2 uv_0;\n#if NUM_CRS > 1\nattribute float uv_1;\n#endif\nattribute vec3 normal;\n\nuniform mat4 modelMatrix;\nuniform bool lightingEnabled;\nvarying vec2 vHighPrecisionZW;\n\n#if MODE == MODE_FINAL\n#include <fog_pars_vertex>\nvarying vec3 vUv;\nvarying vec3 vNormal;\n#endif\nvoid main() {\n vec2 uv = vec2(uv_0.x, 1.0 - uv_0.y);\n\n #include <begin_vertex>\n #include <itowns/elevation_vertex>\n #include <itowns/geoid_vertex>\n #include <project_vertex>\n #include <logdepthbuf_vertex>\n vHighPrecisionZW = gl_Position.zw;\n#if MODE == MODE_FINAL\n #include <fog_vertex>\n #if NUM_CRS > 1\n vUv = vec3(uv_0, (uv_1 > 0.) ? uv_1 : uv_0.y); // set uv_1 = uv_0 if uv_1 is undefined\n #else\n vUv = vec3(uv_0, 0.0);\n #endif\n vNormal = normalize ( mat3( modelMatrix[0].xyz, modelMatrix[1].xyz, modelMatrix[2].xyz ) * normal );\n#endif\n}\n";
+ const TileVS = "#include <itowns/precision_qualifier>\n#include <common>\n#include <itowns/elevation_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#if NUM_CRS > 1\nattribute float uv_1;\n#endif\n\nuniform bool lightingEnabled;\nvarying vec2 vHighPrecisionZW;\n\n#if MODE == MODE_FINAL\n#include <fog_pars_vertex>\nvarying vec3 vUv;\nvarying vec3 vNormal;\n#endif\nvoid main() {\n #include <begin_vertex>\n #include <itowns/elevation_vertex>\n #include <itowns/geoid_vertex>\n #include <project_vertex>\n #include <logdepthbuf_vertex>\n vHighPrecisionZW = gl_Position.zw;\n#if MODE == MODE_FINAL\n #include <fog_vertex>\n #if NUM_CRS > 1\n vUv = vec3(uv, (uv_1 > 0.) ? uv_1 : uv.y); // set uv_1 = uv if uv_1 is undefined\n #else\n vUv = vec3(uv, 0.0);\n #endif\n vNormal = normalize ( mat3( modelMatrix[0].xyz, modelMatrix[1].xyz, modelMatrix[2].xyz ) * normal );\n#endif\n}\n";
  /* babel-plugin-inline-import './Shader/TileFS.glsl' */
- const TileFS = "#include <itowns/WebGL2_pars_fragment>\n#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#include <itowns/pitUV>\n#include <itowns/color_layers_pars_fragment>\n#if MODE == MODE_FINAL\n#include <itowns/fog_pars_fragment>\n#include <itowns/overlay_pars_fragment>\n#include <itowns/lighting_pars_fragment>\n#endif\n#include <itowns/mode_pars_fragment>\n\nuniform vec3 diffuse;\nuniform float opacity;\nvarying vec3 vUv; // uv_0.x/uv_1.x, uv_0.y, uv_1.y\nvarying vec2 vHighPrecisionZW;\n\nvoid main() {\n #include <logdepthbuf_fragment>\n\n#if MODE == MODE_ID\n\n #include <itowns/mode_id_fragment>\n\n#elif MODE == MODE_DEPTH\n\n #include <itowns/mode_depth_fragment>\n\n#else\n\n gl_FragColor = vec4(diffuse, opacity);\n\n uvs[0] = vec3(vUv.xy, 0.);\n\n#if NUM_CRS > 1\n uvs[1] = vec3(vUv.x, fract(vUv.z), floor(vUv.z));\n#endif\n\n vec4 color;\n #pragma unroll_loop\n for ( int i = 0; i < NUM_FS_TEXTURES; i ++ ) {\n color = getLayerColor( i , colorTextures[ i ], colorOffsetScales[ i ], colorLayers[ i ]);\n gl_FragColor.rgb = mix(gl_FragColor.rgb, color.rgb, color.a);\n }\n\n #if defined(DEBUG)\n if (showOutline) {\n #pragma unroll_loop\n for ( int i = 0; i < NUM_CRS; i ++) {\n color = getOutlineColor( outlineColors[ i ], uvs[ i ].xy);\n gl_FragColor.rgb = mix(gl_FragColor.rgb, color.rgb, color.a);\n }\n }\n #endif\n\n #include <itowns/fog_fragment>\n #include <itowns/lighting_fragment>\n #include <itowns/overlay_fragment>\n\n#endif\n}\n";
+ const TileFS = "#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#include <itowns/pitUV>\n#include <itowns/color_layers_pars_fragment>\n#if MODE == MODE_FINAL\n#include <itowns/fog_pars_fragment>\n#include <itowns/overlay_pars_fragment>\n#include <itowns/lighting_pars_fragment>\n#endif\n#include <itowns/mode_pars_fragment>\n\nuniform vec3 diffuse;\nuniform float opacity;\nvarying vec3 vUv; // uv.x/uv_1.x, uv.y, uv_1.y\nvarying vec2 vHighPrecisionZW;\n\nvoid main() {\n #include <logdepthbuf_fragment>\n\n#if MODE == MODE_ID\n\n #include <itowns/mode_id_fragment>\n\n#elif MODE == MODE_DEPTH\n\n #include <itowns/mode_depth_fragment>\n\n#else\n\n gl_FragColor = vec4(diffuse, opacity);\n\n uvs[0] = vec3(vUv.xy, 0.);\n\n#if NUM_CRS > 1\n uvs[1] = vec3(vUv.x, fract(vUv.z), floor(vUv.z));\n#endif\n\n vec4 color;\n #pragma unroll_loop\n for ( int i = 0; i < NUM_FS_TEXTURES; i ++ ) {\n color = getLayerColor( i , colorTextures[ i ], colorOffsetScales[ i ], colorLayers[ i ]);\n gl_FragColor.rgb = mix(gl_FragColor.rgb, color.rgb, color.a);\n }\n\n #if defined(DEBUG)\n if (showOutline) {\n #pragma unroll_loop\n for ( int i = 0; i < NUM_CRS; i ++) {\n color = getOutlineColor( outlineColors[ i ], uvs[ i ].xy);\n gl_FragColor.rgb = mix(gl_FragColor.rgb, color.rgb, color.a);\n }\n }\n #endif\n\n #include <itowns/fog_fragment>\n #include <itowns/lighting_fragment>\n #include <itowns/overlay_fragment>\n\n#endif\n}\n";
  import ShaderUtils from "./Shader/ShaderUtils.js";
  import Capabilities from "../Core/System/Capabilities.js";
  import RenderMode from "./RenderMode.js";
@@ -85,7 +85,7 @@ export const ELEVATION_MODES = {
  };
  let nbSamplers;
  const fragmentShader = [];
- class LayeredMaterial extends THREE.RawShaderMaterial {
+ class LayeredMaterial extends THREE.ShaderMaterial {
      #_visible = true;
      constructor() {
          let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
@@ -94,16 +94,16 @@ class LayeredMaterial extends THREE.RawShaderMaterial {
          nbSamplers = nbSamplers || [samplersElevationCount, getMaxColorSamplerUnitsCount()];
          this.defines.NUM_VS_TEXTURES = nbSamplers[0];
          this.defines.NUM_FS_TEXTURES = nbSamplers[1];
+         // TODO: We do not use the fog from the scene, is this a desired
+         // behavior?
          this.defines.USE_FOG = 1;
          this.defines.NUM_CRS = crsCount;
          CommonMaterial.setDefineMapping(this, 'ELEVATION', ELEVATION_MODES);
          CommonMaterial.setDefineMapping(this, 'MODE', RenderMode.MODES);
          CommonMaterial.setDefineProperty(this, 'mode', 'MODE', RenderMode.MODES.FINAL);
-         if (Capabilities.isLogDepthBufferSupported()) {
-             this.defines.USE_LOGDEPTHBUF = 1;
-             this.defines.USE_LOGDEPTHBUF_EXT = 1;
-         }
          this.vertexShader = TileVS;
+         // three loop unrolling of ShaderMaterial only supports integer bounds,
+         // see https://github.com/mrdoob/three.js/issues/28020
          fragmentShader[crsCount] = fragmentShader[crsCount] || ShaderUtils.unrollLoops(TileFS, this.defines);
          this.fragmentShader = fragmentShader[crsCount];

@@ -170,12 +170,6 @@ class LayeredMaterial extends THREE.RawShaderMaterial {
              }
          });
      }
-     onBeforeCompile(shader, renderer) {
-         if (renderer.capabilities.isWebGL2) {
-             this.defines.WEBGL2 = true;
-             shader.glslVersion = '300 es';
-         }
-     }
      getUniformByType(type) {
          return {
              layers: this.uniforms[`${type}Layers`],
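Moving from RawShaderMaterial to ShaderMaterial lets three.js prepend the GLSL version line, the precision qualifiers, the built-in attributes/uniforms (position, normal, uv, modelMatrix, projectionMatrix, ...) and the USE_LOGDEPTHBUF define, which is why the explicit log-depth defines, the onBeforeCompile hook and the WebGL2_pars_*/project_pars_vertex chunks disappear in this release. A hedged, generic sketch of the difference (not iTowns code):

import * as THREE from 'three';

// With RawShaderMaterial every attribute, matrix uniform and #version line had to be
// written into the shader strings; with ShaderMaterial three.js injects them.
const material = new THREE.ShaderMaterial({
    vertexShader: /* glsl */ `
        varying vec2 vUv;
        void main() {
            vUv = uv; // 'uv', 'position' and the matrices are provided by three.js
            gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
        }
    `,
    fragmentShader: /* glsl */ `
        varying vec2 vUv;
        void main() {
            gl_FragColor = vec4(vUv, 0.0, 1.0);
        }
    `,
});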
@@ -1,15 +1,15 @@
  import * as THREE from 'three';
  import Capabilities from "../Core/System/Capabilities.js";
  /* babel-plugin-inline-import './Shader/ProjectiveTextureVS.glsl' */
- const textureVS = "#include <itowns/WebGL2_pars_vertex>\n#include <itowns/precision_qualifier>\n#include <itowns/project_pars_vertex>\n#include <itowns/projective_texturing_pars_vertex>\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\nvarying vec3 vNormal;\nattribute vec3 normal;\n\nvoid main() {\n #include <begin_vertex>\n #include <project_vertex>\n vNormal = normal;\n #include <itowns/projective_texturing_vertex>\n #include <logdepthbuf_vertex>\n}\n";
+ const textureVS = "#include <itowns/precision_qualifier>\n#include <itowns/projective_texturing_pars_vertex>\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\nvarying vec3 vNormal;\n\nvoid main() {\n #include <begin_vertex>\n #include <project_vertex>\n vNormal = normal;\n #include <itowns/projective_texturing_vertex>\n #include <logdepthbuf_vertex>\n}\n";
  /* babel-plugin-inline-import './Shader/ProjectiveTextureFS.glsl' */
- const textureFS = "#include <itowns/WebGL2_pars_fragment>\n#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#include <itowns/projective_texturing_pars_fragment>\nvarying vec3 vNormal;\n\n#ifdef USE_BASE_MATERIAL\nstruct noPT {\n vec3 lightDirection;\n vec3 ambient;\n float opacity;\n};\n\nuniform noPT noProjectiveMaterial;\n#endif\n\nvoid main(void)\n{\n #include <logdepthbuf_fragment>\n #ifdef USE_BASE_MATERIAL\n float nDotVP = (max(0.1, dot(vNormal, normalize(noProjectiveMaterial.lightDirection))));\n vec4 color = vec4(noProjectiveMaterial.ambient + nDotVP, 0.0);\n #else\n vec4 color = vec4(0.0);\n #endif\n\n #pragma unroll_loop\n for (int i = 0; i < ORIENTED_IMAGES_COUNT; i++) {\n color = projectiveTextureColor(projectiveTextureCoords[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTextureDistortion[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTexture[ ORIENTED_IMAGES_COUNT - 1 - i ], mask[ORIENTED_IMAGES_COUNT - 1 - i], color);\n }\n\n #ifdef USE_BASE_MATERIAL\n color.a = color.a < 1.0 ? max(noProjectiveMaterial.opacity, color.a) : 1.0 ;\n gl_FragColor = vec4(color.rgb, color.a * opacity);\n #else\n gl_FragColor = vec4(color.rgb / color.a, opacity);\n #endif\n\n}\n";
+ const textureFS = "#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#include <itowns/projective_texturing_pars_fragment>\nvarying vec3 vNormal;\n\n#ifdef USE_BASE_MATERIAL\nstruct noPT {\n vec3 lightDirection;\n vec3 ambient;\n float opacity;\n};\n\nuniform noPT noProjectiveMaterial;\n#endif\n\nvoid main(void)\n{\n #include <logdepthbuf_fragment>\n #ifdef USE_BASE_MATERIAL\n float nDotVP = (max(0.1, dot(vNormal, normalize(noProjectiveMaterial.lightDirection))));\n vec4 color = vec4(noProjectiveMaterial.ambient + nDotVP, 0.0);\n #else\n vec4 color = vec4(0.0);\n #endif\n\n #pragma unroll_loop\n for (int i = 0; i < ORIENTED_IMAGES_COUNT; i++) {\n color = projectiveTextureColor(projectiveTextureCoords[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTextureDistortion[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTexture[ ORIENTED_IMAGES_COUNT - 1 - i ], mask[ORIENTED_IMAGES_COUNT - 1 - i], color);\n }\n\n #ifdef USE_BASE_MATERIAL\n color.a = color.a < 1.0 ? max(noProjectiveMaterial.opacity, color.a) : 1.0 ;\n gl_FragColor = vec4(color.rgb, color.a * opacity);\n #else\n gl_FragColor = vec4(color.rgb / color.a, opacity);\n #endif\n\n}\n";
  import ShaderUtils from "./Shader/ShaderUtils.js";
  const ndcToTextureMatrix = new THREE.Matrix4(1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 2, 0, 0, 0, 0, 2);
  const noMask = new THREE.DataTexture(new Uint8Array([255, 255, 255, 255]), 1, 1, THREE.RGBAFormat, THREE.UnsignedByteType);
  noMask.needsUpdate = true;
  const noTexture = new THREE.Texture();
- const rawShaderMaterial = new THREE.RawShaderMaterial();
+ const shaderMaterial = new THREE.ShaderMaterial();
  /**
  * @classdesc OrientedImageMaterial is a custom shader material used to do projective texture mapping.<br/>
  *
@@ -28,7 +28,7 @@ const rawShaderMaterial = new THREE.RawShaderMaterial();
  * <br/>
  * To get a more comprehensive support of camera Micmac models, you can consider using [three-photogrammetric-camera]{@link https://github.com/mbredif/three-photogrammetric-camera} instead.
  */
- class OrientedImageMaterial extends THREE.RawShaderMaterial {
+ class OrientedImageMaterial extends THREE.ShaderMaterial {
      /**
      * @constructor
      * @param { OrientedImageCamera[]} cameras - Array of {@link OrientedImageCamera}. Each camera will project a texture.
@@ -53,17 +53,17 @@ class OrientedImageMaterial extends THREE.RawShaderMaterial {
          options.transparent = options.transparent ?? true;
          options.opacity = options.opacity ?? 1;

-         // Filter the rawShaderMaterial options
-         const rawShaderMaterialOptions = {};
+         // Filter out non-ShaderMaterial options
+         const shaderMaterialOptions = {};
          for (const key in options) {
              if (Object.prototype.hasOwnProperty.call(options, key)) {
-                 const currentValue = rawShaderMaterial[key];
+                 const currentValue = shaderMaterial[key];
                  if (currentValue !== undefined) {
-                     rawShaderMaterialOptions[key] = options[key];
+                     shaderMaterialOptions[key] = options[key];
                  }
              }
          }
-         super(rawShaderMaterialOptions);
+         super(shaderMaterialOptions);
          this.defines.ORIENTED_IMAGES_COUNT = options.OrientedImagesCount ?? cameras.length;

          // verify that number of textures doesn't exceed GPU capabilities
@@ -107,19 +107,11 @@ class OrientedImageMaterial extends THREE.RawShaderMaterial {
              ambient: new THREE.Color(0.1, 0.1, 0.1),
              opacity: 0.75
          });
-         if (Capabilities.isLogDepthBufferSupported()) {
-             this.defines.USE_LOGDEPTHBUF = 1;
-             this.defines.USE_LOGDEPTHBUF_EXT = 1;
-         }
          this.vertexShader = textureVS;
+         // three loop unrolling of ShaderMaterial only supports integer bounds,
+         // see https://github.com/mrdoob/three.js/issues/28020
          this.fragmentShader = ShaderUtils.unrollLoops(textureFS, this.defines);
      }
-     onBeforeCompile(shader, renderer) {
-         if (renderer.capabilities.isWebGL2) {
-             this.defines.WEBGL2 = true;
-             shader.glslVersion = '300 es';
-         }
-     }

      /**
      * Set new textures and new position/orientation of the camera set.
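The constructor still screens caller options against a throwaway reference material so that only real ShaderMaterial properties reach super(); a hedged sketch of that filtering pattern in isolation (generic names, not the class above):

import * as THREE from 'three';

// Reference instance used only to probe which keys are genuine ShaderMaterial properties.
const reference = new THREE.ShaderMaterial();

function pickShaderMaterialOptions(options) {
    const filtered = {};
    for (const key in options) {
        if (Object.prototype.hasOwnProperty.call(options, key) && reference[key] !== undefined) {
            filtered[key] = options[key]; // e.g. transparent, opacity, side
        }
    }
    return filtered;
}

// Custom keys such as OrientedImagesCount stay out of the material constructor.
const materialOptions = pickShaderMaterialOptions({ transparent: true, OrientedImagesCount: 4 });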
@@ -1,9 +1,8 @@
  import * as THREE from 'three';
  /* babel-plugin-inline-import './Shader/PointsVS.glsl' */
- const PointsVS = "#include <itowns/WebGL2_pars_vertex>\n#include <itowns/precision_qualifier>\n#include <itowns/project_pars_vertex>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_vertex>\n#endif\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\n#define NB_CLASS 8.\n\nuniform float size;\nuniform float scale;\n\nuniform bool picking;\nuniform int mode;\nuniform float opacity;\nuniform vec4 overlayColor;\n\nuniform vec2 elevationRange;\nuniform vec2 intensityRange;\nuniform vec2 angleRange;\n\nuniform bool applyOpacityClassication;\n\nuniform sampler2D classificationTexture;\nuniform sampler2D discreteTexture;\nuniform sampler2D gradientTexture;\nuniform int sizeMode;\nuniform float minAttenuatedSize;\nuniform float maxAttenuatedSize;\n\nattribute vec3 color;\nattribute vec2 range;\nattribute vec4 unique_id;\nattribute float intensity;\nattribute float classification;\nattribute float pointSourceID;\n\nattribute float returnNumber;\nattribute float numberOfReturns;\nattribute float scanAngle;\n\n#if defined(NORMAL_OCT16)\nattribute vec2 oct16Normal;\n#elif defined(NORMAL_SPHEREMAPPED)\nattribute vec2 sphereMappedNormal;\n#else\nattribute vec3 normal;\n#endif\n\nvarying vec4 vColor;\n\n// see https://web.archive.org/web/20150303053317/http://lgdv.cs.fau.de/get/1602\n// and implementation in PotreeConverter (BINPointReader.cpp) and potree (BinaryDecoderWorker.js)\n#if defined(NORMAL_OCT16)\nvec3 decodeOct16Normal(vec2 encodedNormal) {\n vec2 nNorm = 2. * (encodedNormal / 255.) - 1.;\n vec3 n;\n n.z = 1. - abs(nNorm.x) - abs(nNorm.y);\n if (n.z >= 0.) {\n n.x = nNorm.x;\n n.y = nNorm.y;\n } else {\n n.x = sign(nNorm.x) - sign(nNorm.x) * sign(nNorm.y) * nNorm.y;\n n.y = sign(nNorm.y) - sign(nNorm.y) * sign(nNorm.x) * nNorm.x;\n }\n return normalize(n);\n}\n#elif defined(NORMAL_SPHEREMAPPED)\n// see http://aras-p.info/texts/CompactNormalStorage.html method #4\n// or see potree's implementation in BINPointReader.cpp\nvec3 decodeSphereMappedNormal(vec2 encodedNormal) {\n vec2 fenc = 2. * encodedNormal / 255. - 1.;\n float f = dot(fenc,fenc);\n float g = 2. * sqrt(1. - f);\n vec3 n;\n n.xy = fenc * g;\n n.z = 1. - 2. * f;\n return n;\n}\n#endif\n\nvoid main() {\n\n#if defined(NORMAL_OCT16)\n vec3 normal = decodeOct16Normal(oct16Normal);\n#elif defined(NORMAL_SPHEREMAPPED)\n vec3 normal = decodeSphereMappedNormal(sphereMappedNormal);\n#elif defined(NORMAL)\n // nothing to do\n#else\n // default to color\n vec3 normal = color;\n#endif\n\n if (picking) {\n vColor = unique_id;\n } else {\n vColor.a = opacity;\n if (applyOpacityClassication || mode == PNTS_MODE_CLASSIFICATION) {\n vec2 uv = vec2(classification/255., 0.5);\n vColor = texture2D(classificationTexture, uv);\n vColor.a *= opacity;\n }\n\n if (mode == PNTS_MODE_NORMAL) {\n vColor.rgb = abs(normal);\n } else if (mode == PNTS_MODE_COLOR) {\n // default to color mode\n vColor.rgb = mix(color, overlayColor.rgb, overlayColor.a);\n } else if (mode == PNTS_MODE_RETURN_NUMBER) {\n vec2 uv = vec2(returnNumber/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_TYPE) {\n float returnType;\n if (returnNumber > numberOfReturns) {\n returnType = 4.;\n } else if (returnNumber == 1.) {\n if (numberOfReturns == 1.) 
{\n // single\n returnType = 0.;\n } else {\n // first\n returnType = 1.;\n }\n } else {\n if (returnNumber == numberOfReturns) {\n // last\n returnType = 3.;\n } else {\n // intermediate\n returnType = 2.;\n }\n }\n vec2 uv = vec2(returnType/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_COUNT) {\n vec2 uv = vec2(numberOfReturns/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_POINT_SOURCE_ID) {\n vec2 uv = vec2(mod(pointSourceID, NB_CLASS)/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_SCAN_ANGLE) {\n float i = (scanAngle - angleRange.x) / (angleRange.y - angleRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_INTENSITY) {\n float i = (intensity - intensityRange.x) / (intensityRange.y - intensityRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_ELEVATION) {\n float i = (position.z - elevationRange.x) / (elevationRange.y - elevationRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n }\n }\n\n #include <begin_vertex>\n #include <project_vertex>\n\n gl_PointSize = size;\n\n if (sizeMode == PNTS_SIZE_MODE_ATTENUATED) {\n bool isPerspective = isPerspectiveMatrix(projectionMatrix);\n\n if (isPerspective) {\n gl_PointSize *= scale / -mvPosition.z;\n gl_PointSize = clamp(gl_PointSize, minAttenuatedSize, maxAttenuatedSize);\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n #include <itowns/projective_texturing_vertex>\n#endif\n #include <logdepthbuf_vertex>\n}\n";
+ const PointsVS = "#include <itowns/precision_qualifier>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_vertex>\n#endif\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\n#define NB_CLASS 8.\n\nuniform float size;\nuniform float scale;\n\nuniform bool picking;\nuniform int mode;\nuniform float opacity;\nuniform vec4 overlayColor;\n\nuniform vec2 elevationRange;\nuniform vec2 intensityRange;\nuniform vec2 angleRange;\n\nuniform bool applyOpacityClassication;\n\nuniform sampler2D classificationTexture;\nuniform sampler2D discreteTexture;\nuniform sampler2D gradientTexture;\nuniform int sizeMode;\nuniform float minAttenuatedSize;\nuniform float maxAttenuatedSize;\n\nattribute vec3 color;\nattribute vec2 range;\nattribute vec4 unique_id;\nattribute float intensity;\nattribute float classification;\nattribute float pointSourceID;\n\nattribute float returnNumber;\nattribute float numberOfReturns;\nattribute float scanAngle;\n\n#if defined(NORMAL_OCT16)\nattribute vec2 oct16Normal;\n#elif defined(NORMAL_SPHEREMAPPED)\nattribute vec2 sphereMappedNormal;\n#endif\n\nvarying vec4 vColor;\n\n// see https://web.archive.org/web/20150303053317/http://lgdv.cs.fau.de/get/1602\n// and implementation in PotreeConverter (BINPointReader.cpp) and potree (BinaryDecoderWorker.js)\n#if defined(NORMAL_OCT16)\nvec3 decodeOct16Normal(vec2 encodedNormal) {\n vec2 nNorm = 2. * (encodedNormal / 255.) - 1.;\n vec3 n;\n n.z = 1. - abs(nNorm.x) - abs(nNorm.y);\n if (n.z >= 0.) {\n n.x = nNorm.x;\n n.y = nNorm.y;\n } else {\n n.x = sign(nNorm.x) - sign(nNorm.x) * sign(nNorm.y) * nNorm.y;\n n.y = sign(nNorm.y) - sign(nNorm.y) * sign(nNorm.x) * nNorm.x;\n }\n return normalize(n);\n}\n#elif defined(NORMAL_SPHEREMAPPED)\n// see http://aras-p.info/texts/CompactNormalStorage.html method #4\n// or see potree's implementation in BINPointReader.cpp\nvec3 decodeSphereMappedNormal(vec2 encodedNormal) {\n vec2 fenc = 2. * encodedNormal / 255. - 1.;\n float f = dot(fenc,fenc);\n float g = 2. * sqrt(1. - f);\n vec3 n;\n n.xy = fenc * g;\n n.z = 1. - 2. * f;\n return n;\n}\n#endif\n\nvoid main() {\n\n#if defined(NORMAL_OCT16)\n vec3 normal = decodeOct16Normal(oct16Normal);\n#elif defined(NORMAL_SPHEREMAPPED)\n vec3 normal = decodeSphereMappedNormal(sphereMappedNormal);\n#elif defined(NORMAL)\n // nothing to do\n#else\n // default to color\n vec3 normal = color;\n#endif\n\n if (picking) {\n vColor = unique_id;\n } else {\n vColor.a = opacity;\n if (applyOpacityClassication || mode == PNTS_MODE_CLASSIFICATION) {\n vec2 uv = vec2(classification/255., 0.5);\n vColor = texture2D(classificationTexture, uv);\n vColor.a *= opacity;\n }\n\n if (mode == PNTS_MODE_NORMAL) {\n vColor.rgb = abs(normal);\n } else if (mode == PNTS_MODE_COLOR) {\n // default to color mode\n vColor.rgb = mix(color, overlayColor.rgb, overlayColor.a);\n } else if (mode == PNTS_MODE_RETURN_NUMBER) {\n vec2 uv = vec2(returnNumber/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_TYPE) {\n float returnType;\n if (returnNumber > numberOfReturns) {\n returnType = 4.;\n } else if (returnNumber == 1.) {\n if (numberOfReturns == 1.) 
{\n // single\n returnType = 0.;\n } else {\n // first\n returnType = 1.;\n }\n } else {\n if (returnNumber == numberOfReturns) {\n // last\n returnType = 3.;\n } else {\n // intermediate\n returnType = 2.;\n }\n }\n vec2 uv = vec2(returnType/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_COUNT) {\n vec2 uv = vec2(numberOfReturns/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_POINT_SOURCE_ID) {\n vec2 uv = vec2(mod(pointSourceID, NB_CLASS)/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_SCAN_ANGLE) {\n float i = (scanAngle - angleRange.x) / (angleRange.y - angleRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_INTENSITY) {\n float i = (intensity - intensityRange.x) / (intensityRange.y - intensityRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_ELEVATION) {\n float i = (position.z - elevationRange.x) / (elevationRange.y - elevationRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n }\n }\n\n #include <begin_vertex>\n #include <project_vertex>\n\n gl_PointSize = size;\n\n if (sizeMode == PNTS_SIZE_MODE_ATTENUATED) {\n bool isPerspective = isPerspectiveMatrix(projectionMatrix);\n\n if (isPerspective) {\n gl_PointSize *= scale / -mvPosition.z;\n gl_PointSize = clamp(gl_PointSize, minAttenuatedSize, maxAttenuatedSize);\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n #include <itowns/projective_texturing_vertex>\n#endif\n #include <logdepthbuf_vertex>\n}\n";
  /* babel-plugin-inline-import './Shader/PointsFS.glsl' */
- const PointsFS = "#include <itowns/WebGL2_pars_fragment>\n#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_fragment>\n#endif\n\nvarying vec4 vColor;\nuniform bool picking;\nuniform int shape;\n\nvoid main() {\n #include <logdepthbuf_fragment>\n //square shape does not require any change.\n if (shape == PNTS_SHAPE_CIRCLE) {\n //circular rendering in glsl\n if ((length(gl_PointCoord - 0.5) > 0.5) || (vColor.a == 0.0)) {\n discard;\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n vec4 color = vColor;\n if (!picking) {\n #pragma unroll_loop\n for (int i = 0; i < ORIENTED_IMAGES_COUNT; i++) {\n color = projectiveTextureColor(projectiveTextureCoords[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTextureDistortion[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTexture[ ORIENTED_IMAGES_COUNT - 1 - i ], mask[ORIENTED_IMAGES_COUNT - 1 - i], color);\n }\n gl_FragColor = vec4(color.rgb, color.a * opacity);\n } else {\n gl_FragColor = color;\n }\n#else\n gl_FragColor = vColor;\n#endif\n}\n";
- import Capabilities from "../Core/System/Capabilities.js";
+ const PointsFS = "#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_fragment>\n#endif\n\nvarying vec4 vColor;\nuniform bool picking;\nuniform int shape;\n\nvoid main() {\n #include <logdepthbuf_fragment>\n //square shape does not require any change.\n if (shape == PNTS_SHAPE_CIRCLE) {\n //circular rendering in glsl\n if ((length(gl_PointCoord - 0.5) > 0.5) || (vColor.a == 0.0)) {\n discard;\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n vec4 color = vColor;\n if (!picking) {\n #pragma unroll_loop\n for (int i = 0; i < ORIENTED_IMAGES_COUNT; i++) {\n color = projectiveTextureColor(projectiveTextureCoords[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTextureDistortion[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTexture[ ORIENTED_IMAGES_COUNT - 1 - i ], mask[ORIENTED_IMAGES_COUNT - 1 - i], color);\n }\n gl_FragColor = vec4(color.rgb, color.a * opacity);\n } else {\n gl_FragColor = color;\n }\n#else\n gl_FragColor = vColor;\n#endif\n}\n";
  import ShaderUtils from "./Shader/ShaderUtils.js";
  import CommonMaterial from "./CommonMaterial.js";
  import Gradients from "../Utils/Gradients.js";
@@ -254,7 +253,7 @@ function recomputeTexture(scheme, texture, nbClass) {
      }
      texture.needsUpdate = true;
  }
- class PointsMaterial extends THREE.RawShaderMaterial {
+ class PointsMaterial extends THREE.ShaderMaterial {
      /**
      * @class PointsMaterial
      * @param {object} [options={}] The options
@@ -383,14 +382,12 @@ class PointsMaterial extends THREE.RawShaderMaterial {
              this.defines.DEBUG_ALPHA_BORDER = oiMaterial.defines.DEBUG_ALPHA_BORDER;
              this.defines.USE_TEXTURES_PROJECTIVE = true;
              this.defines.USE_BASE_MATERIAL = true;
+             // three loop unrolling of ShaderMaterial only supports integer
+             // bounds, see https://github.com/mrdoob/three.js/issues/28020
              this.fragmentShader = ShaderUtils.unrollLoops(PointsFS, this.defines);
          } else {
              this.fragmentShader = PointsFS;
          }
-         if (Capabilities.isLogDepthBufferSupported()) {
-             this.defines.USE_LOGDEPTHBUF = 1;
-             this.defines.USE_LOGDEPTHBUF_EXT = 1;
-         }
      }
      recomputeClassification() {
          recomputeTexture(this.classificationScheme, this.classificationTexture, 32);
@@ -406,12 +403,6 @@ class PointsMaterial extends THREE.RawShaderMaterial {
              target: this.uniforms
          });
      }
-     onBeforeCompile(shader, renderer) {
-         if (renderer.capabilities.isWebGL2) {
-             this.defines.WEBGL2 = true;
-             shader.glslVersion = '300 es';
-         }
-     }
      copy(source) {
          super.copy(source);
          if (source.uniforms.projectiveTextureAlphaBorder) {
@@ -2,7 +2,7 @@ import * as THREE from 'three';
  /* babel-plugin-inline-import './Chunk/color_layers_pars_fragment.glsl' */
  const color_layers_pars_fragment = "struct Layer {\n int textureOffset;\n int crs;\n int effect_type;\n float effect_parameter;\n float opacity;\n bool transparent;\n};\n\n#include <itowns/custom_header_colorLayer>\n\nuniform sampler2D colorTextures[NUM_FS_TEXTURES];\nuniform vec4 colorOffsetScales[NUM_FS_TEXTURES];\nuniform Layer colorLayers[NUM_FS_TEXTURES];\nuniform int colorTextureCount;\n\nvec3 uvs[NUM_CRS];\n\nfloat getBorderDistance(vec2 uv) {\n vec2 p2 = min(uv, 1. -uv);\n return min(p2.x, p2.y);\n}\n\nfloat tolerance = 0.99;\n\nvec4 applyWhiteToInvisibleEffect(vec4 color) {\n float a = dot(color.rgb, vec3(0.333333333));\n if (a >= tolerance) {\n color.a = 0.0;\n }\n return color;\n}\n\nvec4 applyLightColorToInvisibleEffect(vec4 color, float intensity) {\n float a = max(0.05,1. - length(color.xyz - 1.));\n color.a *= 1.0 - pow(abs(a), intensity);\n color.rgb *= color.rgb * color.rgb;\n return color;\n}\n\n#if defined(DEBUG)\nuniform bool showOutline;\nuniform vec3 outlineColors[NUM_CRS];\nuniform float outlineWidth;\n\nvec4 getOutlineColor(vec3 outlineColor, vec2 uv) {\n float alpha = 1. - clamp(getBorderDistance(uv) / outlineWidth, 0., 1.);\n return vec4(outlineColor, alpha);\n}\n#endif\n\nuniform float minBorderDistance;\nvec4 getLayerColor(int textureOffset, sampler2D tex, vec4 offsetScale, Layer layer) {\n if ( textureOffset >= colorTextureCount ) return vec4(0);\n\n vec3 uv;\n // #pragma unroll_loop\n for ( int i = 0; i < NUM_CRS; i ++ ) {\n if ( i == layer.crs ) uv = uvs[ i ];\n }\n\n float borderDistance = getBorderDistance(uv.xy);\n if (textureOffset != layer.textureOffset + int(uv.z) || borderDistance < minBorderDistance ) return vec4(0);\n vec4 color = texture2D(tex, pitUV(uv.xy, offsetScale));\n if (layer.effect_type == 3) {\n #include <itowns/custom_body_colorLayer>\n } else {\n if (layer.transparent && color.a != 0.0) {\n color.rgb /= color.a;\n }\n\n if (layer.effect_type == 1) {\n color = applyLightColorToInvisibleEffect(color, layer.effect_parameter);\n } else if (layer.effect_type == 2) {\n color = applyWhiteToInvisibleEffect(color);\n }\n }\n color.a *= layer.opacity;\n return color;\n}\n";
  /* babel-plugin-inline-import './Chunk/elevation_pars_vertex.glsl' */
- const elevation_pars_vertex = "#if NUM_VS_TEXTURES > 0\n struct Layer {\n float scale;\n float bias;\n int mode;\n float zmin;\n float zmax;\n };\n\n uniform Layer elevationLayers[NUM_VS_TEXTURES];\n uniform sampler2D elevationTextures[NUM_VS_TEXTURES];\n uniform vec4 elevationOffsetScales[NUM_VS_TEXTURES];\n uniform int elevationTextureCount;\n uniform float geoidHeight;\n\n highp float decode32(highp vec4 rgba) {\n highp float Sign = 1.0 - step(128.0,rgba[0])*2.0;\n highp float Exponent = 2.0 * mod(rgba[0],128.0) + step(128.0,rgba[1]) - 127.0;\n highp float Mantissa = mod(rgba[1],128.0)*65536.0 + rgba[2]*256.0 +rgba[3] + float(0x800000);\n highp float Result = Sign * exp2(Exponent) * (Mantissa * exp2(-23.0 ));\n return Result;\n }\n\n float getElevationMode(vec2 uv, sampler2D tex, int mode) {\n if (mode == ELEVATION_RGBA)\n return decode32(texture2D( tex, uv ).abgr * 255.0);\n if (mode == ELEVATION_DATA || mode == ELEVATION_COLOR)\n #if defined(WEBGL2)\n return texture2D( tex, uv ).r;\n #else\n return texture2D( tex, uv ).w;\n #endif\n return 0.;\n }\n\n float getElevation(vec2 uv, sampler2D tex, vec4 offsetScale, Layer layer) {\n uv = uv * offsetScale.zw + offsetScale.xy;\n float d = clamp(getElevationMode(uv, tex, layer.mode), layer.zmin, layer.zmax);\n return d * layer.scale + layer.bias;\n }\n#endif\n";
+ const elevation_pars_vertex = "#if NUM_VS_TEXTURES > 0\n struct Layer {\n float scale;\n float bias;\n int mode;\n float zmin;\n float zmax;\n };\n\n uniform Layer elevationLayers[NUM_VS_TEXTURES];\n uniform sampler2D elevationTextures[NUM_VS_TEXTURES];\n uniform vec4 elevationOffsetScales[NUM_VS_TEXTURES];\n uniform int elevationTextureCount;\n uniform float geoidHeight;\n\n highp float decode32(highp vec4 rgba) {\n highp float Sign = 1.0 - step(128.0,rgba[0])*2.0;\n highp float Exponent = 2.0 * mod(rgba[0],128.0) + step(128.0,rgba[1]) - 127.0;\n highp float Mantissa = mod(rgba[1],128.0)*65536.0 + rgba[2]*256.0 +rgba[3] + float(0x800000);\n highp float Result = Sign * exp2(Exponent) * (Mantissa * exp2(-23.0 ));\n return Result;\n }\n\n float getElevationMode(vec2 uv, sampler2D tex, int mode) {\n if (mode == ELEVATION_RGBA)\n return decode32(texture2D( tex, uv ).abgr * 255.0);\n if (mode == ELEVATION_DATA || mode == ELEVATION_COLOR)\n return texture2D( tex, uv ).r;\n return 0.;\n }\n\n float getElevation(vec2 uv, sampler2D tex, vec4 offsetScale, Layer layer) {\n // Elevation textures are inverted along the y-axis\n uv = vec2(uv.x, 1.0 - uv.y);\n uv = uv * offsetScale.zw + offsetScale.xy;\n float d = clamp(getElevationMode(uv, tex, layer.mode), layer.zmin, layer.zmax);\n return d * layer.scale + layer.bias;\n }\n#endif\n";
  /* babel-plugin-inline-import './Chunk/elevation_vertex.glsl' */
  const elevation_vertex = "#if NUM_VS_TEXTURES > 0\n if(elevationTextureCount > 0) {\n float elevation = getElevation(uv, elevationTextures[0], elevationOffsetScales[0], elevationLayers[0]);\n transformed += elevation * normal;\n }\n#endif\n";
  /* babel-plugin-inline-import './Chunk/geoid_vertex.glsl' */
@@ -18,7 +18,7 @@ const lighting_pars_fragment = "uniform bool lightingEnabled;\nuniform vec3 ligh
  /* babel-plugin-inline-import './Chunk/mode_pars_fragment.glsl' */
  const mode_pars_fragment = "#if MODE == MODE_ID || MODE == MODE_DEPTH\n#include <packing>\n#endif\n\n#if MODE == MODE_ID\nuniform int objectId;\n#endif\n";
  /* babel-plugin-inline-import './Chunk/mode_depth_fragment.glsl' */
- const mode_depth_fragment = "#if defined(USE_LOGDEPTHBUF) && defined(USE_LOGDEPTHBUF_EXT)\ngl_FragColor = packDepthToRGBA(gl_FragDepthEXT);\n#else\nfloat fragCoordZ = 0.5 * vHighPrecisionZW[0] / vHighPrecisionZW[1] + 0.5;\ngl_FragColor = packDepthToRGBA(fragCoordZ);\n#endif";
+ const mode_depth_fragment = "#if defined(USE_LOGDEPTHBUF)\ngl_FragColor = packDepthToRGBA(gl_FragDepthEXT);\n#else\nfloat fragCoordZ = 0.5 * vHighPrecisionZW[0] / vHighPrecisionZW[1] + 0.5;\ngl_FragColor = packDepthToRGBA(fragCoordZ);\n#endif\n";
  /* babel-plugin-inline-import './Chunk/mode_id_fragment.glsl' */
  const mode_id_fragment = "// 16777216.0 == 256.0 * 256.0 * 256.0\ngl_FragColor = packDepthToRGBA(float(objectId) / 16777216.0);\n";
  /* babel-plugin-inline-import './Chunk/overlay_fragment.glsl' */
@@ -29,18 +29,12 @@ const overlay_pars_fragment = "uniform vec3 overlayColor;\nuniform float overla
  const pitUV = "vec2 pitUV(vec2 uv, vec4 pit)\n{\n return uv * pit.zw + vec2(pit.x, 1.0 - pit.w - pit.y);\n}\n\n";
  /* babel-plugin-inline-import './Chunk/precision_qualifier.glsl' */
  const precision_qualifier = "precision highp float;\nprecision highp int;\n";
- /* babel-plugin-inline-import './Chunk/project_pars_vertex.glsl' */
- const project_pars_vertex = "attribute vec3 position;\nuniform mat4 projectionMatrix;\nuniform mat4 modelViewMatrix;\n";
  /* babel-plugin-inline-import './Chunk/projective_texturing_vertex.glsl' */
  const projective_texturing_vertex = "for(int i = 0; i < ORIENTED_IMAGES_COUNT; ++i)\n projectiveTextureCoords[i] = projectiveTextureMatrix[i] * mvPosition;\n";
  /* babel-plugin-inline-import './Chunk/projective_texturing_pars_vertex.glsl' */
  const projective_texturing_pars_vertex = "uniform mat4 projectiveTextureMatrix[ORIENTED_IMAGES_COUNT];\nvarying vec4 projectiveTextureCoords[ORIENTED_IMAGES_COUNT];\n";
  /* babel-plugin-inline-import './Chunk/projective_texturing_pars_fragment.glsl' */
  const projective_texturing_pars_fragment = "uniform sampler2D projectiveTexture[ORIENTED_IMAGES_COUNT];\nuniform sampler2D mask[ORIENTED_IMAGES_COUNT];\nvarying vec4 projectiveTextureCoords[ORIENTED_IMAGES_COUNT];\nuniform float projectiveTextureAlphaBorder;\nuniform float opacity;\nuniform bool boostLight;\n\nstruct Distortion {\n vec2 size;\n#if USE_DISTORTION\n vec2 pps;\n vec4 polynom;\n vec3 l1l2;\n#endif\n};\n\nuniform Distortion projectiveTextureDistortion[ORIENTED_IMAGES_COUNT];\n\nfloat getAlphaBorder(vec2 p)\n{\n vec2 d = clamp(projectiveTextureAlphaBorder * min(p, 1. - p), 0., 1.);\n return min(d.x, d.y);\n}\n\n#if USE_DISTORTION\nvoid distort(inout vec2 p, vec4 polynom, vec2 pps)\n{\n vec2 v = p - pps;\n float v2 = dot(v, v);\n if (v2 > polynom.w) {\n p = vec2(-1.);\n }\n else {\n p += (v2 * (polynom.x + v2 * (polynom.y + v2 * polynom.z) ) ) * v;\n }\n}\n\nvoid distort(inout vec2 p, vec4 polynom, vec3 l1l2, vec2 pps)\n{\n if ((l1l2.x == 0.) && (l1l2.y == 0.)) {\n distort(p, polynom, pps);\n } else {\n vec2 AB = (p - pps) / l1l2.z;\n float R = length(AB);\n float lambda = atan(R) / R;\n vec2 ab = lambda * AB;\n float rho2 = dot(ab, ab);\n float r357 = 1. + rho2* (polynom.x + rho2* (polynom.y + rho2 * polynom.z));\n p = pps + l1l2.z * (r357 * ab + vec2(dot(l1l2.xy, ab), l1l2.y * ab.x));\n }\n}\n#endif\n\nvec4 mixBaseColor(vec4 aColor, vec4 baseColor) {\n #ifdef USE_BASE_MATERIAL\n baseColor.rgb = aColor.a == 1.0 ? aColor.rgb : mix(baseColor, aColor, aColor.a).rgb;\n baseColor.a = min(1.0, aColor.a + baseColor.a);\n #else\n baseColor.rgb += aColor.rgb * aColor.a;\n baseColor.a += aColor.a;\n #endif\n return baseColor;\n}\n\nvec4 projectiveTextureColor(vec4 coords, Distortion distortion, sampler2D tex, sampler2D mask, vec4 baseColor) {\n vec3 p = coords.xyz / coords.w;\n if(p.z * p.z < 1.) {\n#if USE_DISTORTION\n p.xy *= distortion.size;\n distort(p.xy, distortion.polynom, distortion.l1l2, distortion.pps);\n p.xy /= distortion.size;\n#endif\n\n float d = getAlphaBorder(p.xy) * texture2D(mask, p.xy).r;\n\n if(d > 0.) {\n\n#if DEBUG_ALPHA_BORDER\n vec3 r = texture2D(tex, p.xy).rgb;\n return mixBaseColor(vec4( r.r * d, r.g, r.b, 1.0), baseColor);\n#else\n vec4 color = texture2D(tex, p.xy);\n color.a *= d;\n if (boostLight) {\n return mixBaseColor(vec4(sqrt(color.rgb), color.a), baseColor);\n } else {\n return mixBaseColor(color, baseColor);\n }\n#endif\n\n }\n }\n return mixBaseColor(vec4(0.), baseColor);\n}\n";
- /* babel-plugin-inline-import './Chunk/WebGL2_pars_vertex.glsl' */
- const WebGL2_pars_vertex = "// Copy from GLSL 3.0 conversion for built-in materials and ShaderMaterial in THREE.WebGLProgram\n// https://github.com/mrdoob/three.js/blob/696d7836d1fc56c4702a475e6991c4adef7357f4/src/renderers/webgl/WebGLProgram.js#L682\n#if defined(WEBGL2)\n#define attribute in\n#define varying out\n#define texture2D texture\n#endif\n";
- /* babel-plugin-inline-import './Chunk/WebGL2_pars_fragment.glsl' */
- const WebGL2_pars_fragment = "// Copy from GLSL 3.0 conversion for built-in materials and ShaderMaterial in THREE.WebGLProgram\n// https://github.com/mrdoob/three.js/blob/696d7836d1fc56c4702a475e6991c4adef7357f4/src/renderers/webgl/WebGLProgram.js#L682\n#if defined(WEBGL2)\n#define varying in\nout highp vec4 pc_fragColor;\n#define gl_FragColor pc_fragColor\n#define gl_FragDepthEXT gl_FragDepth\n#define texture2D texture\n#define textureCube texture\n#define texture2DProj textureProj\n#define texture2DLodEXT textureLod\n#define texture2DProjLodEXT textureProjLod\n#define textureCubeLodEXT textureLod\n#define texture2DGradEXT textureGrad\n#define texture2DProjGradEXT textureProjGrad\n#define textureCubeGradEXT textureGrad\n#endif\n";
  const custom_header_colorLayer = '// no custom header';
  const custom_body_colorLayer = '// no custom body';
  const itownsShaderChunk = {
@@ -63,10 +57,7 @@ const itownsShaderChunk = {
      precision_qualifier,
      projective_texturing_vertex,
      projective_texturing_pars_vertex,
-     projective_texturing_pars_fragment,
-     project_pars_vertex,
-     WebGL2_pars_vertex,
-     WebGL2_pars_fragment
+     projective_texturing_pars_fragment
  };

  /**
@@ -15,7 +15,7 @@ class c3DEngine {
      constructor(rendererOrDiv) {
          let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
          deprecatedC3DEngineWebGLOptions(options);
-         const NOIE = !Capabilities.isInternetExplorer();
+
          // pick sensible default options
          if (options.antialias === undefined) {
              options.antialias = true;
@@ -24,10 +24,7 @@ class c3DEngine {
              options.alpha = true;
          }
          if (options.logarithmicDepthBuffer === undefined) {
-             options.logarithmicDepthBuffer = this.gLDebug || NOIE;
-         }
-         if (options.isWebGL2 === undefined) {
-             options.isWebGL2 = true;
+             options.logarithmicDepthBuffer = true;
          }

          // If rendererOrDiv parameter is a domElement, we use it as support to display data.
@@ -78,7 +75,7 @@ class c3DEngine {
          this.label2dRenderer = new Label2DRenderer();
          this.label2dRenderer.setSize(this.width, this.height);
          viewerDiv.appendChild(this.label2dRenderer.domElement);
-         this.renderer = renderer || new (options.isWebGL2 ? THREE.WebGLRenderer : THREE.WebGL1Renderer)({
+         this.renderer = renderer || new THREE.WebGLRenderer({
              canvas: document.createElement('canvas'),
              antialias: options.antialias,
              alpha: options.alpha,
@@ -88,30 +85,10 @@ class c3DEngine {
              this.renderer.domElement.style.zIndex = 0;
              this.renderer.domElement.style.top = 0;
          } catch (ex) {
-             const versionWebGL = options.isWebGL2 ? '2' : '1';
-             console.error(`Failed to create WebGLRenderer webGL ${versionWebGL}.`);
-             this.renderer = null;
-         }
-         if (!this.renderer) {
-             if (!WEBGL.isWebGLAvailable()) {
-                 viewerDiv.appendChild(WEBGL.getErrorMessage(1));
-             } else if (!WEBGL.isWebGL2Available()) {
+             if (!WEBGL.isWebGL2Available()) {
                  viewerDiv.appendChild(WEBGL.getErrorMessage(2));
              }
-             throw new Error('WebGL unsupported');
-         }
-         if (!renderer && options.logarithmicDepthBuffer) {
-             // We don't support logarithmicDepthBuffer when EXT_frag_depth is missing.
-             // So recreated a renderer if needed.
-             if (!this.renderer.capabilities.isWebGL2 && !this.renderer.extensions.get('EXT_frag_depth')) {
-                 this.renderer.dispose();
-                 this.renderer = new (options.isWebGL2 ? THREE.WebGLRenderer : THREE.WebGL1Renderer)({
-                     canvas: document.createElement('canvas'),
-                     antialias: options.antialias,
-                     alpha: options.alpha,
-                     logarithmicDepthBuffer: false
-                 });
-             }
+             throw ex;
          }

          // Let's allow our canvas to take focus
@@ -139,7 +116,7 @@ class c3DEngine {

      /**
      * return renderer THREE.js
-     * @returns {undefined|THREE.WebGLRenderer}
+     * @returns {THREE.WebGLRenderer}
      */
      getRenderer() {
          return this.renderer;
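The engine now assumes WebGL2 and, on failure, only reports its absence instead of retrying with a WebGL1 renderer. A hedged sketch of that check in isolation, using the same WEBGL capability helper the hunk calls (the import path follows three's examples; adjust to the copy iTowns bundles):

import * as THREE from 'three';
import WEBGL from 'three/addons/capabilities/WebGL.js';

const viewerDiv = document.getElementById('viewerDiv');

let renderer;
try {
    renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true, logarithmicDepthBuffer: true });
} catch (ex) {
    // No WebGL1 fallback: show the capability message and rethrow.
    if (!WEBGL.isWebGL2Available()) {
        viewerDiv.appendChild(WEBGL.getErrorMessage(2));
    }
    throw ex;
}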
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "itowns",
-   "version": "2.42.1-next.20",
+   "version": "2.42.1-next.21",
    "description": "A JS/WebGL framework for 3D geospatial data visualization",
    "type": "module",
    "main": "lib/Main.js",