@cognite/reveal 2.1.2 → 3.0.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/cad.d.ts +1 -2
- package/core/src/datamodels/cad/CadManager.d.ts +10 -10
- package/core/src/datamodels/cad/CadModelFactory.d.ts +10 -4
- package/core/src/datamodels/cad/createCadManager.d.ts +4 -5
- package/core/src/datamodels/cad/picking.d.ts +1 -1
- package/core/src/datamodels/cad/rendering/RenderAlreadyLoadedGeometryProvider.d.ts +2 -1
- package/core/src/datamodels/cad/styling/AssetNodeCollection.d.ts +8 -2
- package/core/src/datamodels/cad/styling/InvertedNodeCollection.d.ts +3 -2
- package/core/src/datamodels/cad/styling/PopulateIndexSetFromPagedResponseHelper.d.ts +10 -4
- package/core/src/datamodels/cad/styling/PropertyFilterNodeCollection.d.ts +5 -11
- package/core/src/datamodels/cad/styling/SinglePropertyFilterNodeCollection.d.ts +5 -14
- package/core/src/datamodels/pointcloud/PointCloudManager.d.ts +5 -4
- package/core/src/datamodels/pointcloud/PointCloudMetadataRepository.d.ts +6 -5
- package/core/src/datamodels/pointcloud/createPointCloudManager.d.ts +2 -4
- package/core/src/datamodels/pointcloud/index.d.ts +5 -0
- package/core/src/index.d.ts +1 -1
- package/core/src/internals.d.ts +15 -0
- package/core/src/public/RevealManager.d.ts +11 -8
- package/core/src/public/createRevealManager.d.ts +2 -3
- package/core/src/public/migration/Cognite3DModel.d.ts +20 -1
- package/core/src/public/migration/Cognite3DViewer.d.ts +30 -10
- package/core/src/public/migration/types.d.ts +36 -23
- package/core/src/public/types.d.ts +3 -1
- package/core/src/{public/migration → storage}/RevealManagerHelper.d.ts +7 -7
- package/core/src/utilities/Spinner.d.ts +4 -5
- package/core/src/utilities/ViewStateHelper.d.ts +2 -2
- package/core/src/utilities/worldToViewport.d.ts +1 -2
- package/extensions/datasource.d.ts +9 -0
- package/extensions/datasource.js +30 -0
- package/extensions/datasource.map +1 -0
- package/index.d.ts +2 -1
- package/index.js +214 -125
- package/index.map +1 -1
- package/package.json +6 -8
- package/packages/cad-geometry-loaders/index.d.ts +4 -11
- package/packages/cad-geometry-loaders/src/{CadModelSectorBudget.d.ts → CadModelBudget.d.ts} +3 -10
- package/packages/cad-geometry-loaders/src/CadModelUpdateHandler.d.ts +5 -7
- package/packages/cad-geometry-loaders/src/sector/SectorLoader.d.ts +7 -6
- package/packages/cad-geometry-loaders/src/sector/culling/ByScreenSizeSectorCuller.d.ts +23 -0
- package/packages/cad-geometry-loaders/src/sector/culling/ByVisibilityGpuSectorCuller.d.ts +3 -5
- package/packages/cad-geometry-loaders/src/sector/culling/WeightFunctionsHelper.d.ts +50 -0
- package/packages/cad-geometry-loaders/src/sector/culling/computeNdcAreaOfBox.d.ts +12 -0
- package/packages/cad-geometry-loaders/src/sector/culling/computeSectorCost.d.ts +7 -0
- package/packages/cad-geometry-loaders/src/sector/culling/createV8SectorCuller.d.ts +7 -0
- package/packages/cad-geometry-loaders/src/sector/culling/takensectors/TakenSectorMapBase.d.ts +11 -0
- package/packages/cad-geometry-loaders/src/sector/culling/takensectors/TakenV8SectorMap.d.ts +17 -0
- package/packages/cad-geometry-loaders/src/sector/culling/{TakenSectorTree.d.ts → takensectors/TakenV8SectorTree.d.ts} +6 -6
- package/packages/cad-geometry-loaders/src/sector/culling/takensectors/TakenV9SectorMap.d.ts +20 -0
- package/packages/cad-geometry-loaders/src/sector/culling/takensectors/index.d.ts +5 -0
- package/packages/cad-geometry-loaders/src/sector/culling/transformBoxToNDC.d.ts +5 -0
- package/packages/cad-geometry-loaders/src/sector/culling/types.d.ts +16 -4
- package/packages/cad-geometry-loaders/src/sector/rxSectorUtilities.d.ts +2 -2
- package/packages/cad-geometry-loaders/src/utilities/rxOperations.d.ts +3 -2
- package/packages/cad-geometry-loaders/src/utilities/types.d.ts +0 -23
- package/packages/cad-parsers/index.d.ts +7 -6
- package/packages/{cad-geometry-loaders/src/material-manager/rendering → cad-parsers/src/cad}/filterInstanceMesh.d.ts +1 -1
- package/packages/cad-parsers/src/cad/filterPrimitivesCommon.d.ts +5 -0
- package/packages/cad-parsers/src/cad/{filterPrimitives.d.ts → filterPrimitivesV8.d.ts} +0 -0
- package/packages/cad-parsers/src/cad/filterPrimitivesV9.d.ts +6 -0
- package/packages/cad-parsers/src/cad/primitiveGeometries.d.ts +1 -1
- package/packages/{cad-geometry-loaders/src/material-manager/rendering → cad-parsers/src/cad}/triangleMeshes.d.ts +1 -1
- package/packages/cad-parsers/src/cad/types.d.ts +2 -2
- package/packages/cad-parsers/src/metadata/CadModelMetadata.d.ts +10 -0
- package/packages/cad-parsers/src/metadata/CadModelMetadataRepository.d.ts +6 -6
- package/packages/cad-parsers/src/metadata/MetadataRepository.d.ts +3 -2
- package/packages/cad-parsers/src/metadata/parsers/CadMetadataParserGltf.d.ts +6 -0
- package/packages/cad-parsers/src/metadata/parsers/CadMetadataParserV8.d.ts +2 -47
- package/packages/cad-parsers/src/metadata/parsers/types.d.ts +60 -0
- package/packages/cad-parsers/src/metadata/types.d.ts +15 -4
- package/packages/{cad-geometry-loaders → cad-parsers}/src/sector/RootSectorNode.d.ts +1 -1
- package/packages/{cad-geometry-loaders → cad-parsers}/src/sector/SectorNode.d.ts +1 -1
- package/packages/cad-parsers/src/utilities/SectorSceneFactory.d.ts +2 -5
- package/packages/cad-parsers/src/utilities/computeBoundingBoxFromAttributes.d.ts +10 -0
- package/packages/{cad-geometry-loaders → cad-parsers}/src/utilities/float32BufferToMatrix.d.ts +0 -0
- package/packages/cad-parsers/src/utilities/types.d.ts +26 -0
- package/packages/cad-styling/index.d.ts +4 -0
- package/packages/cad-styling/src/CombineNodeCollectionBase.d.ts +4 -2
- package/packages/cad-styling/src/IntersectionNodeCollection.d.ts +4 -0
- package/packages/cad-styling/src/NodeAppearance.d.ts +23 -0
- package/packages/cad-styling/src/NodeAppearanceProvider.d.ts +7 -0
- package/packages/cad-styling/src/NodeCollectionBase.d.ts +7 -0
- package/packages/cad-styling/src/TreeIndexNodeCollection.d.ts +17 -1
- package/packages/cad-styling/src/UnionNodeCollection.d.ts +3 -0
- package/packages/cad-styling/src/prioritized/AreaCollection.d.ts +39 -0
- package/packages/cad-styling/src/prioritized/BoxClusterer.d.ts +29 -0
- package/packages/cad-styling/src/prioritized/ClusteredAreaCollection.d.ts +17 -0
- package/packages/cad-styling/src/prioritized/EmptyAreaCollection.d.ts +18 -0
- package/packages/cad-styling/src/prioritized/types.d.ts +7 -0
- package/packages/camera-manager/index.d.ts +3 -1
- package/packages/camera-manager/src/CameraManager.d.ts +84 -0
- package/packages/camera-manager/src/ComboControls.d.ts +60 -42
- package/packages/camera-manager/src/Keyboard.d.ts +10 -7
- package/packages/camera-manager/src/types.d.ts +64 -0
- package/packages/data-source/index.d.ts +6 -0
- package/packages/data-source/src/CdfDataSource.d.ts +19 -0
- package/packages/data-source/src/DataSource.d.ts +26 -0
- package/packages/data-source/src/LocalDataSource.d.ts +15 -0
- package/packages/metrics/index.d.ts +5 -0
- package/packages/metrics/src/MetricsLogger.d.ts +21 -0
- package/packages/metrics/src/types.d.ts +7 -0
- package/packages/modeldata-api/index.d.ts +8 -3
- package/packages/modeldata-api/src/{CdfModelDataClient.d.ts → CdfModelDataProvider.d.ts} +6 -6
- package/packages/modeldata-api/src/CdfModelIdentifier.d.ts +15 -0
- package/packages/modeldata-api/src/CdfModelMetadataProvider.d.ts +11 -27
- package/packages/modeldata-api/src/CdfModelOutputsProvider.d.ts +16 -0
- package/packages/modeldata-api/src/LocalModelDataProvider.d.ts +10 -0
- package/packages/modeldata-api/src/LocalModelIdentifier.d.ts +14 -0
- package/packages/modeldata-api/src/LocalModelMetadataProvider.d.ts +11 -14
- package/packages/modeldata-api/src/ModelIdentifier.d.ts +12 -0
- package/packages/modeldata-api/src/ModelMetadataProvider.d.ts +19 -0
- package/packages/modeldata-api/src/types.d.ts +19 -26
- package/packages/nodes-api/index.d.ts +0 -1
- package/packages/nodes-api/src/NodesApiClient.d.ts +16 -8
- package/packages/nodes-api/src/NodesCdfClient.d.ts +9 -4
- package/packages/nodes-api/src/NodesLocalClient.d.ts +10 -5
- package/packages/nodes-api/src/types.d.ts +0 -4
- package/packages/rendering/index.d.ts +17 -0
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/CadMaterialManager.d.ts +5 -5
- package/packages/rendering/src/GeometryBatchingManager.d.ts +30 -0
- package/packages/{cad-geometry-loaders → rendering}/src/InstancedMeshManager.d.ts +1 -1
- package/packages/{cad-geometry-loaders → rendering}/src/cameraconfig.d.ts +0 -0
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/EffectRenderManager.d.ts +28 -12
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/rendering}/NodeAppearanceTextureBuilder.d.ts +8 -7
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/RenderMode.d.ts +0 -0
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/createSimpleGeometryMesh.d.ts +1 -1
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/matCapTextureData.d.ts +0 -0
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/materials.d.ts +0 -0
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/primitives.d.ts +3 -3
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/shaders.d.ts +1 -1
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/types.d.ts +1 -1
- package/packages/{cad-geometry-loaders/src → rendering/src/sector}/CadNode.d.ts +18 -9
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/NodeTransformProvider.d.ts +1 -1
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/NodeTransformTextureBuilder.d.ts +1 -1
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/TransformOverrideBuffer.d.ts +3 -3
- package/packages/rendering/src/utilities/types.d.ts +26 -0
- package/packages/sector-loader/index.d.ts +6 -0
- package/packages/sector-loader/src/GltfSectorRepository.d.ts +14 -0
- package/packages/{cad-geometry-loaders/src/sector/Repository.d.ts → sector-loader/src/SectorRepository.d.ts} +2 -2
- package/packages/{cad-geometry-loaders/src/sector/CachedRepository.d.ts → sector-loader/src/V8SectorRepository.d.ts} +6 -6
- package/packages/{cad-geometry-loaders/src/sector → sector-loader/src/v8}/SimpleAndDetailedToSector3D.d.ts +4 -4
- package/packages/{cad-geometry-loaders/src/utilities → sector-loader/src/v8}/arrays.d.ts +0 -0
- package/packages/{cad-geometry-loaders/src/utilities → sector-loader/src/v8}/groupMeshesByNumber.d.ts +0 -0
- package/packages/sector-loader/src/v8/sectorUtilities.d.ts +16 -0
- package/packages/sector-parser/index.d.ts +5 -0
- package/packages/sector-parser/src/GltfSectorParser.d.ts +18 -0
- package/packages/sector-parser/src/reveal-glb-parser/GlbMetadataParser.d.ts +14 -0
- package/packages/sector-parser/src/reveal-glb-parser/primitiveGeometries.d.ts +12 -0
- package/packages/sector-parser/src/types.d.ts +98 -0
- package/packages/tools/index.d.ts +4 -1
- package/packages/tools/src/AxisView/AxisViewTool.d.ts +2 -2
- package/packages/tools/src/DebugLoadedSectorsTool.d.ts +1 -1
- package/packages/tools/src/Geomap/Geomap.d.ts +5 -2
- package/packages/tools/src/Geomap/GeomapTool.d.ts +5 -2
- package/packages/tools/src/HtmlOverlay/BucketGrid2D.d.ts +28 -0
- package/packages/tools/src/HtmlOverlay/HtmlOverlayTool.d.ts +160 -0
- package/packages/tools/src/Timeline/Keyframe.d.ts +39 -0
- package/packages/tools/src/Timeline/TimelineTool.d.ts +79 -0
- package/packages/tools/src/Timeline/types.d.ts +13 -0
- package/packages/utilities/index.d.ts +7 -3
- package/packages/utilities/src/NumericRange.d.ts +2 -1
- package/packages/utilities/src/RandomColors.d.ts +34 -0
- package/packages/utilities/src/WebGLRendererStateHelper.d.ts +1 -1
- package/packages/utilities/src/counterMap.d.ts +5 -0
- package/packages/utilities/src/datastructures/DynamicDefragmentedBuffer.d.ts +18 -5
- package/packages/utilities/src/events/InputHandler.d.ts +25 -0
- package/packages/utilities/src/events/index.d.ts +1 -0
- package/packages/utilities/src/indexset/IndexSet.d.ts +1 -1
- package/packages/utilities/src/three/isBox3OnPositiveSideOfPlane.d.ts +10 -0
- package/packages/utilities/src/three/toThreeBox3.d.ts +6 -0
- package/packages/utilities/src/three/visitBox3CornerPoints.d.ts +12 -0
- package/packages/utilities/src/types.d.ts +1 -0
- package/tools.js +77 -117
- package/tools.map +1 -1
- package/packages/cad-geometry-loaders/src/sector/sectorUtilities.d.ts +0 -19
- package/packages/cad-parsers/src/cad/computeBoundingBoxFromAttributes.d.ts +0 -9
- package/packages/modeldata-api/src/LocalModelDataClient.d.ts +0 -10
- package/packages/tools/src/HtmlOverlayTool.d.ts +0 -88
- package/packages/utilities/src/metrics.d.ts +0 -15
- package/packages/utilities/src/three/getBox3CornerPoints.d.ts +0 -5
package/index.js
CHANGED
|
@@ -1,392 +1,481 @@
|
|
|
1
|
-
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var n=t();for(var r in n)("object"==typeof exports?exports:e)[r]=n[r]}}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="https://apps-cdn.cogniteapp.com/@cognite/reveal-parser-worker/1.
|
|
1
|
+
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var n=t();for(var r in n)("object"==typeof exports?exports:e)[r]=n[r]}}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="https://apps-cdn.cogniteapp.com/@cognite/reveal-parser-worker/1.3.0/",n(n.s=66)}([function(e,t){e.exports=require("three")},function(e,t,n){"use strict";
|
|
2
2
|
/*!
|
|
3
3
|
* Copyright 2021 Cognite AS
|
|
4
4
|
*/
|
|
5
|
-
function r(e,t){if(t(e))for(let n=0;n<e.children.length;n++)r(e.children[n],t)}function o(e,t){if(void 0===e)return;const{position:n,target:r}=e;return n.applyMatrix4(t),r.applyMatrix4(t),{position:n,target:r}}
|
|
5
|
+
function r(e,t){if(t(e))for(let n=0;n<e.children.length;n++)r(e.children[n],t)}function o(e,t){if(void 0===e)return;const{position:n,target:r}=e;return n.applyMatrix4(t),r.applyMatrix4(t),{position:n,target:r}}n.d(t,"w",(function(){return r})),n.d(t,"v",(function(){return o})),n.d(t,"d",(function(){return d})),n.d(t,"f",(function(){return u})),n.d(t,"l",(function(){return p})),n.d(t,"i",(function(){return m})),n.d(t,"n",(function(){return h})),n.d(t,"e",(function(){return _})),n.d(t,"s",(function(){return T})),n.d(t,"c",(function(){return N})),n.d(t,"a",(function(){return R})),n.d(t,"b",(function(){return E})),n.d(t,"u",(function(){return B})),n.d(t,"q",(function(){return F})),n.d(t,"x",(function(){return z})),n.d(t,"r",(function(){return L})),n.d(t,"j",(function(){return k})),n.d(t,"k",(function(){return j})),n.d(t,"g",(function(){return q})),n.d(t,"h",(function(){return H})),n.d(t,"o",(function(){return K})),n.d(t,"t",(function(){return V})),n.d(t,"p",(function(){return X})),n.d(t,"m",(function(){return Y}));var i=n(0);
|
|
6
6
|
/*!
|
|
7
7
|
* Copyright 2021 Cognite AS
|
|
8
|
-
*/
|
|
8
|
+
*/class a{static color(e){const t=a._colors.get(e);if(void 0!==t)return t;const n=a.generateRandomColor();return a._colors.set(e,n),n}static colorRGB(e){const t=a.color(e);return[Math.floor(255*t.r),Math.floor(255*t.g),Math.floor(255*t.b)]}static colorCSS(e){const[t,n,r]=a.colorRGB(e);return`rgb(${t}, ${n}, ${r})`}static generateRandomColor(){const e=Math.random(),t=.4+.6*Math.random(),n=.3+.4*Math.random();return(new i.Color).setHSL(e,t,n)}}
|
|
9
9
|
/*!
|
|
10
10
|
* Copyright 2021 Cognite AS
|
|
11
|
-
*/
|
|
11
|
+
*/
|
|
12
|
+
function s(e,t){const n=t.getBoundingClientRect();if(e instanceof MouseEvent)return{offsetX:e.clientX-n.left,offsetY:e.clientY-n.top};if(e.changedTouches.length>0){const t=e.changedTouches[0];return{offsetX:t.clientX-n.left,offsetY:t.clientY-n.top}}return{offsetX:-1,offsetY:-1}}
|
|
13
|
+
/*!
|
|
14
|
+
* Copyright 2021 Cognite AS
|
|
15
|
+
*/a._colors=new Map;class d{constructor(){this._listeners=[]}subscribe(e){this._listeners.push(e)}unsubscribe(e){const t=this._listeners.indexOf(e);-1!==t&&this._listeners.splice(t,1)}unsubscribeAll(){this._listeners.splice(0)}fire(...e){this._listeners.forEach(t=>t(...e))}}var l=n(16),c=n.n(l);
|
|
12
16
|
/*!
|
|
13
17
|
* Copyright 2021 Cognite AS
|
|
14
18
|
*/
|
|
19
|
+
class u{constructor(e){this._events={click:new d,hover:new d},this.onHoverCallback=c()(e=>{this._events.hover.fire(s(e,this.domElement))},100),this.domElement=e,this.setupEventListeners()}on(e,t){switch(e){case"click":this._events.click.subscribe(t);break;case"hover":this._events.hover.subscribe(t);break;default:p(e)}}off(e,t){switch(e){case"click":this._events.click.unsubscribe(t);break;case"hover":this._events.hover.unsubscribe(t);break;default:p(e)}}setupEventListeners(){const{domElement:e}=this;let t=!1,n=0,r=!1;const o=new i.Vector2,a=i=>{this.handleClickEvent(i,o,t,r,n),t=!1,r=!1,e.removeEventListener("mouseup",a),e.removeEventListener("touchend",a),e.addEventListener("mousemove",this.onHoverCallback)},d=i=>{t=!0,r=!0,n=i.timeStamp;const{offsetX:d,offsetY:l}=s(i,e);o.set(d,l),e.addEventListener("mouseup",a),e.addEventListener("touchend",a),e.removeEventListener("mousemove",this.onHoverCallback)};e.addEventListener("mousedown",d),e.addEventListener("touchstart",d),e.addEventListener("mousemove",this.onHoverCallback)}isProperClick(e,t,n,r,o){const{offsetX:i,offsetY:a}=s(e,this.domElement),d=e.timeStamp-o,l=Math.abs(i-t.x)+Math.abs(a-t.y)>u.maxMoveDistance;return n&&r&&d<u.maxClickDuration&&!l}handleClickEvent(e,t,n,r,o){this.isProperClick(e,t,n,r,o)&&this._events.click.fire(s(e,this.domElement))}}
|
|
15
20
|
/*!
|
|
16
21
|
* Copyright 2021 Cognite AS
|
|
17
|
-
*/
|
|
22
|
+
*/
|
|
18
23
|
/*!
|
|
19
24
|
* Copyright 2021 Cognite AS
|
|
20
|
-
*/
|
|
25
|
+
*/
|
|
26
|
+
function p(e,t){throw new Error(t||"Unexpected object: "+e)}
|
|
21
27
|
/*!
|
|
22
28
|
* Copyright 2021 Cognite AS
|
|
23
|
-
*/
|
|
29
|
+
*/u.maxMoveDistance=8,u.maxClickDuration=250;class m{constructor(e,t){if(t<0)throw new Error("Range cannot have negative number of elements");this.from=e,this.count=t,this.toInclusive=e+t-1}static createFromInterval(e,t){return new m(e,t-e+1)}*values(){for(let e=this.from;e<=this.toInclusive;++e)yield e}toArray(){return Array.from(this.values())}equal(e){return this.from===e.from&&this.count===e.count}contains(e){return e>=this.from&&e<=this.toInclusive}intersects(e){return this.from<=e.toInclusive&&this.toInclusive>=e.from}intersectsOrCoinciding(e){return this.from<=e.toInclusive+1&&this.toInclusive+1>=e.from}intersectionWith(e){return this.intersects(e)?m.createFromInterval(Math.max(this.from,e.from),Math.min(this.toInclusive,e.toInclusive)):void 0}isInside(e){return this.from>=e.from&&this.toInclusive<=e.toInclusive}union(e){return m.createFromInterval(Math.min(this.from,e.from),Math.max(this.toInclusive,e.toInclusive))}forEach(e){for(let t=this.from;t<=this.toInclusive;++t)e(t)}toString(){return"("+this.from+", "+this.toInclusive+")"}static isNumericRange(e){if(!e)return!1;const t=e;return void 0!==t.from&&void 0!==t.count&&void 0!==t.toInclusive}}
|
|
30
|
+
/*!
|
|
31
|
+
* Copyright 2021 Cognite AS
|
|
32
|
+
*/function h(e){const t=Math.max(1,v(Math.sqrt(e)));return{width:t,height:Math.max(1,v(e/t))}}const f=Math.log(2);function v(e){return Math.pow(2,Math.ceil(Math.log(e)/f))}var x=n(3),g=n.n(x);class b{constructor(e,t){this.left=e,this.right=t,this.maxSubtreeDepth=Math.max(this.left.maxSubtreeDepth,this.right.maxSubtreeDepth)+1,this.range=m.createFromInterval(this.left.range.from,this.right.range.toInclusive),this.count=this.left.count+this.right.count}static fromIndexNodesAndBalance(e,t){return e.range.from>t.range.toInclusive+1?new b(t,e).balance():e.range.toInclusive+1<t.range.from?new b(e,t).balance():void g()(!1,"Internal error in IndexSet: Overlapping nodes")}traverse(e){this.left.traverse(e),this.right.traverse(e)}contains(e){return!!this.range.contains(e)&&(this.left.contains(e)||this.right.contains(e))}addRange(e){if(!e.intersectsOrCoinciding(this.range)){if(e.from<this.range.from){const t=this.left.addRange(e);return b.fromIndexNodesAndBalance(t,this.right)}{const t=this.right.addRange(e);return b.fromIndexNodesAndBalance(this.left,t)}}const t=e.intersectsOrCoinciding(this.left.range),n=e.intersectsOrCoinciding(this.right.range);if(t&&n){const[t,n]=this.left.soak(e),[r,o]=this.right.soak(e),i=n.union(o);if(void 0===t&&void 0===r)return new y(i);if(void 0===t&&void 0!==r)return r.addRange(i);if(void 0===r&&void 0!==t)return t.addRange(i);return b.fromIndexNodesAndBalance(t,r).addRange(i)}return t?b.fromIndexNodesAndBalance(this.left.addRange(e),this.right):n?b.fromIndexNodesAndBalance(this.left,this.right.addRange(e)):this.left.maxSubtreeDepth<this.right.maxSubtreeDepth?b.fromIndexNodesAndBalance(this.left.addRange(e),this.right):b.fromIndexNodesAndBalance(this.left,this.right.addRange(e))}removeRange(e){if(!e.intersects(this.range))return this;const[t,n]=this.soak(e);let r=void 0,o=void 0;if(n.from<e.from&&(r=m.createFromInterval(n.from,e.from-1)),n.toInclusive>e.toInclusive&&(o=m.createFromInterval(e.toInclusive+1,n.toInclusive)),void 0===t)return void 
0!==r&&void 0!==o?b.fromIndexNodesAndBalance(new y(r),new y(o)):null!=r?new y(r):null!=o?new y(o):void 0;{let e=t;return void 0!==r&&(e=e.addRange(r)),void 0!==o&&(e=e.addRange(o)),e}}balance(){const e=this.left.maxSubtreeDepth,t=this.right.maxSubtreeDepth;if(t+2<=e){const e=this.left.rotateSmallerRight();return new b(e,this.right).rotateRight().balance()}if(e+2<=t){const e=this.right.rotateSmallerLeft();return new b(this.left,e).rotateLeft().balance()}return this}clone(){return b.fromIndexNodesAndBalance(this.left.clone(),this.right.clone())}hasIntersectionWith(e){return!!e.range.intersects(this.range)&&(this.range.isInside(e.range)?e.hasIntersectionWith(this):!(!this.left.range.intersects(e.range)||!this.left.hasIntersectionWith(e))||!(!this.right.range.intersects(e.range)||!this.right.hasIntersectionWith(e)))}soak(e){let[t,n]=[this.left,e],[r,o]=[this.right,e];if(this.right.range.isInside(e)&&this.left.range.isInside(e))return[void 0,e];this.left.range.intersectsOrCoinciding(e)&&([t,n]=this.left.soak(e)),this.right.range.intersectsOrCoinciding(e)&&([r,o]=this.right.soak(e));const i=n.union(o);if(null==r)return[t,i];if(null==t)return[r,i];return[b.fromIndexNodesAndBalance(t,r),i]}rotateRight(){return"right"in this.left?new b(this.left.left,new b(this.left.right,this.right)):this}rotateLeft(){return"left"in this.right?new b(new b(this.left,this.right.left),this.right.right):this}rotateSmallerLeft(){if(this.left.maxSubtreeDepth>this.right.maxSubtreeDepth){let e=this.rotateRight();return e=e.rotateSmallerLeft(),e}return this}rotateSmallerRight(){if(this.right.maxSubtreeDepth>this.left.maxSubtreeDepth){let e=this.rotateLeft();return e=e.rotateSmallerRight(),e}return this}}
|
|
24
33
|
/*!
|
|
25
34
|
* Copyright 2021 Cognite AS../NumericRange
|
|
26
|
-
*/class
|
|
35
|
+
*/class y{constructor(e){this.range=e,this.maxSubtreeDepth=0,this.count=e.count}static fromInterval(e,t){return new y(m.createFromInterval(e,t))}traverse(e){e(this.range)}contains(e){return this.range.contains(e)}addRange(e){return this.range.intersectsOrCoinciding(e)?new y(this.range.union(e)):b.fromIndexNodesAndBalance(this,new y(e))}removeRange(e){if(!e.intersects(this.range))return this;if(this.range.isInside(e))return;let t=void 0,n=void 0;return this.range.from<e.from&&(t=m.createFromInterval(this.range.from,e.from-1)),this.range.toInclusive>e.toInclusive&&(n=m.createFromInterval(e.toInclusive+1,this.range.toInclusive)),null!=t&&null!=n?b.fromIndexNodesAndBalance(new y(t),new y(n)):null!=t?new y(t):null!=n?new y(n):void 0}hasIntersectionWith(e){return e.range.intersects(this.range)}soak(e){return this.range.intersectsOrCoinciding(e)?[void 0,this.range.union(e)]:[this,e]}clone(){return new y(this.range)}}class _{constructor(e){if(null==e)this.rootNode=void 0;else if(m.isNumericRange(e))this.addRange(e);else for(const t of e)this.add(t)}forEachRange(e){this.rootNode&&this.rootNode.traverse(e)}add(e){const t=new m(e,1);this.addRange(t)}addRange(e){this.rootNode?this.rootNode=this.rootNode.addRange(e):this.rootNode=new y(e)}remove(e){const t=new m(e,1);this.removeRange(t)}removeRange(e){this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))}contains(e){return!!this.rootNode&&this.rootNode.contains(e)}get count(){return this.rootNode?this.rootNode.count:0}toRangeArray(){const e=[];return this.forEachRange(t=>{e.push(t)}),e}toIndexArray(){const e=[];return this.rootNode&&this.forEachRange(t=>{t.forEach(t=>{e.push(t)})}),e}toPlainSet(){const e=this.toIndexArray();return new Set(e)}invertedRanges(){const e=this.toRangeArray(),t=[];for(let n=0;n<e.length-1;n++)e[n].toInclusive+1>=e[n+1].from||t.push(m.createFromInterval(e[n].toInclusive+1,e[n+1].from-1));return t}unionWith(e){return 
this.rootNode?e.forEachRange(e=>{this.rootNode=this.rootNode.addRange(e)}):this.rootNode=e.rootNode,this}differenceWith(e){return this.rootNode&&e.forEachRange(e=>{var t;this.rootNode=null===(t=this.rootNode)||void 0===t?void 0:t.removeRange(e)}),this}hasIntersectionWith(e){if(e instanceof _)return void 0!==this.rootNode&&void 0!==e.rootNode&&this.rootNode.hasIntersectionWith(e.rootNode);if(e instanceof Map){for(const t of e.keys())if(this.contains(t))return!0;return!1}for(const t of e)if(this.contains(t))return!0;return!1}intersectWith(e){if(this.rootNode&&e.rootNode){if(this.rootNode.range.from<e.rootNode.range.from){const t=m.createFromInterval(this.rootNode.range.from,e.rootNode.range.from-1);if(this.rootNode=this.rootNode.removeRange(t),!this.rootNode)return this}if(this.rootNode.range.toInclusive>e.rootNode.range.toInclusive){const t=m.createFromInterval(e.rootNode.range.toInclusive+1,this.rootNode.range.toInclusive);this.rootNode=this.rootNode.removeRange(t)}e.invertedRanges().forEach(e=>{this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))})}else this.rootNode&&(this.rootNode=void 0);return this}clear(){this.rootNode=void 0}clone(){const e=new _;return this.rootNode&&(e.rootNode=this.rootNode.clone()),e}}
|
|
27
36
|
/*!
|
|
28
37
|
* Copyright 2021 Cognite AS
|
|
29
|
-
*/function
|
|
38
|
+
*/function T(e,t,n){const r=P(e);if(0==r)return;const o=C(0,-e);let i=I(S(r));const a=r/w(i);a<1&&(i-=1),i+=127,t[n]=128*o+I(i*w(-1)),t[n+1]=128*M(i,2)+M(I(128*a),128),t[n+2]=I(M(I(a*w(15)),w(8))),t[n+3]=I(w(23)*M(a,w(-15)))}function C(e,t){return t<e?0:1}function w(e){return Math.pow(2,e)}function M(e,t){return e-t*I(e/t)}function I(e){return Math.floor(e)}function S(e){return Math.log(e)/Math.log(2)}function P(e){return Math.abs(e)}
|
|
30
39
|
/*!
|
|
31
40
|
* Copyright 2021 Cognite AS
|
|
32
|
-
*/class
|
|
41
|
+
*/class N{constructor(e,t){this._numFilled=0,this._batchIdCounter=0,this._batchMap=new Map,this._type=t;const n=Math.pow(2,Math.ceil(Math.log2(e)));this._bufferView=new t(n)}get length(){return this._numFilled}get bufferView(){return this._bufferView}add(e){let t=!1;if(this._numFilled+e.length>this._bufferView.length){const n=Math.pow(2,Math.ceil(Math.log2(this._numFilled+e.length)));this.allocateNewBuffer(n),t=!0}this._bufferView.set(e,this._numFilled);const n=this.createBatch(e),r=this._numFilled;this._numFilled+=e.length;return{batchId:n,bufferIsReallocated:t,updateRange:{byteOffset:r,byteCount:e.length}}}remove(e){const t=this._batchMap.get(e);if(!t)throw new Error("batch does not exist in buffer");this._bufferView.copyWithin(t.from,t.from+t.count,this._numFilled),this._numFilled-=t.count;const n=t.from,r=this._numFilled-t.from;this._currentTail===t&&(this._currentTail=t.prev);const o=t.prev,i=t.next;o&&(o.next=i),i&&(i.prev=o);let a=i;for(;a;)a.from-=t.count,a=a.next;return this._batchMap.delete(e),{updateRange:{byteOffset:n,byteCount:r}}}getRangeForBatchId(e){const t=this._batchMap.get(e);if(!t)throw new Error("batch does not exist in buffer");return{byteOffset:t.from,byteCount:t.count}}createBatch(e){const t={from:this._numFilled,count:e.length,prev:this._currentTail,next:void 0};this._currentTail&&(this._currentTail.next=t),this._currentTail=t;const n=this._batchIdCounter;return this._batchIdCounter++,this._batchMap.set(n,t),n}allocateNewBuffer(e){const t=new this._type(e);t.set(this._bufferView),this._bufferView=t}}
|
|
33
42
|
/*!
|
|
34
43
|
* Copyright 2021 Cognite AS
|
|
35
|
-
*/const
|
|
44
|
+
*/const A=new i.BufferGeometry;class R extends i.Group{constructor(){super(...arguments),this._isDisposed=!1,this._referenceCount=0}reference(){this.ensureNotDisposed(),this._referenceCount++}dereference(){if(this.ensureNotDisposed(),0===this._referenceCount)throw new Error("No references");0==--this._referenceCount&&this.dispose()}dispose(){this.ensureNotDisposed(),this._isDisposed=!0;const e=this.children.filter(e=>e instanceof i.Mesh).map(e=>e);for(const t of e)void 0!==t.geometry&&(t.geometry.dispose(),t.geometry=A)}ensureNotDisposed(){if(this._isDisposed)throw new Error("Already disposed/dereferenced")}}
|
|
36
45
|
/*!
|
|
37
46
|
* Copyright 2021 Cognite AS
|
|
38
|
-
*/const
|
|
47
|
+
*/const D={camPos:new i.Vector3,bounds:new i.Box3};class E extends i.Object3D{constructor(e){super(),this._activeLevel=0,this._levels=[],this.isLOD=!0,this.autoUpdate=!0,this._boundingBox=e.clone(),this.type="BoundingBoxLOD"}setBoundingBox(e){this._boundingBox.copy(e)}addLevel(e,t=0){this._levels.push({object:e,distance:Math.abs(t)}),this._levels.sort((e,t)=>t.distance-e.distance),e.visible=!1,this.add(e)}getCurrentLevel(){return this._levels.length>0?this._levels.length-this._activeLevel-1:0}update(e){this.updateCurrentLevel(e)}updateCurrentLevel(e){const t=this._levels,{camPos:n,bounds:r}=D;r.copy(this._boundingBox).applyMatrix4(this.matrixWorld);const o=e instanceof i.PerspectiveCamera?e.zoom:1;if(t.length>0){n.setFromMatrixPosition(e.matrixWorld);const i=r.distanceToPoint(n)/o;t[this._activeLevel].object.visible=!1,this._activeLevel=t.findIndex(e=>i>=e.distance),this._activeLevel=this._activeLevel>=0?this._activeLevel:t.length-1,t[this._activeLevel].object.visible=!0}}}
|
|
39
48
|
/*!
|
|
40
49
|
* Copyright 2021 Cognite AS
|
|
41
|
-
*/function
|
|
50
|
+
*/function B(e,t){return(t=null!=t?t:new i.Box3).min.set(e.min[0],e.min[1],e.min[2]),t.max.set(e.max[0],e.max[1],e.max[2]),t}
|
|
42
51
|
/*!
|
|
43
52
|
* Copyright 2021 Cognite AS
|
|
44
|
-
*/
|
|
53
|
+
*/
// Scratch vector shared by z(); callbacks receive the same instance every call
// and must not retain a reference to it.
const O={tmpVector:new i.Vector3};
// Invokes callback `t` once for each of the eight corners of Box3 `e`
// (min/max combinations of x, y, z), reusing a single scratch Vector3.
function z(e,t){const{tmpVector:n}=O;n.set(e.min.x,e.min.y,e.min.z),t(n),n.set(e.min.x,e.min.y,e.max.z),t(n),n.set(e.min.x,e.max.y,e.min.z),t(n),n.set(e.min.x,e.max.y,e.max.z),t(n),n.set(e.max.x,e.min.y,e.min.z),t(n),n.set(e.max.x,e.min.y,e.max.z),t(n),n.set(e.max.x,e.max.y,e.min.z),t(n),n.set(e.max.x,e.max.y,e.max.z),t(n)}
// Returns true when at least one corner of Box3 `e` lies on or in front of
// plane `t` (signed distance >= 0), i.e. the box is not entirely behind it.
function F(e,t){let n=!1;return z(e,e=>{n=n||t.distanceToPoint(e)>=0}),n}
|
|
45
54
|
/*!
|
|
46
55
|
* Copyright 2021 Cognite AS
|
|
47
|
-
*/
|
|
56
|
+
*/function L(){let e=!1;var t;return t=navigator.userAgent||navigator.vendor||window.opera,(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino|android|ipad|playbook|silk/i.test(t)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| 
)|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(t.substr(0,4)))&&(e=!0),e}
|
|
48
57
|
/*!
|
|
49
58
|
* Copyright 2021 Cognite AS
|
|
50
|
-
*/
|
|
59
|
+
*/
// Wrapper around a WebGLRenderer that records the original value of each piece
// of state the *first* time it is mutated through the helper, and restores all
// recorded state in resetState(). The spread order `{newValue, ...original}`
// deliberately lets existing keys win, so repeated mutations keep the value
// captured on the first call. resetState() also clears the recorded state so
// the helper can be reused.
class k{constructor(e){this._originalState={},this._renderer=e,this._originalState={}}setClearColor(e,t){this._originalState={clearColor:this._renderer.getClearColor(new i.Color),clearAlpha:this._renderer.getClearAlpha(),...this._originalState},this._renderer.setClearColor(e,t)}setSize(e,t){this._originalState={size:this._renderer.getSize(new i.Vector2),...this._originalState},this._renderer.setSize(e,t)}set localClippingEnabled(e){this._originalState={localClippingEnabled:this._renderer.localClippingEnabled,...this._originalState},this._renderer.localClippingEnabled=e}set autoClear(e){this._originalState={autoClear:this._renderer.autoClear,...this._originalState},this._renderer.autoClear=e}setRenderTarget(e){this._originalState={renderTarget:this._renderer.getRenderTarget(),...this._originalState},this._renderer.setRenderTarget(e)}resetState(){void 0!==this._originalState.autoClear&&(this._renderer.autoClear=this._originalState.autoClear),void 0!==this._originalState.clearColor&&this._renderer.setClearColor(this._originalState.clearColor,this._originalState.clearAlpha),void 0!==this._originalState.localClippingEnabled&&(this._renderer.localClippingEnabled=this._originalState.localClippingEnabled),void 0!==this._originalState.size&&this._renderer.setSize(this._originalState.size.width,this._originalState.size.height),void 0!==this._originalState.renderTarget&&this._renderer.setRenderTarget(this._originalState.renderTarget),this._originalState={}}}
// NOTE(review): presumably the comlink-style worker RPC module — G.wrap() is
// used further down to wrap parser workers; confirm against the bundle map.
var G=n(24);
|
|
51
60
|
/*!
|
|
52
61
|
* Copyright 2021 Cognite AS
|
|
53
|
-
*/
|
|
62
|
+
*/
// Public runtime configuration: when `publicPath` is set by the embedding
// application it overrides webpack's public path as the base URL from which
// the parser worker script is fetched (see the worker pool's createWorker(),
// which reads `V.publicPath || n.p`).
const V={publicPath:""};
|
|
54
63
|
/*!
|
|
55
64
|
* Copyright 2021 Cognite AS
|
|
56
|
-
*/
|
|
65
|
+
*/var U=n(10);
|
|
57
66
|
/*!
|
|
58
67
|
* Copyright 2021 Cognite AS
|
|
59
|
-
*/
|
|
68
|
+
*/class j{constructor(){this.workerList=[];const e=this.determineNumberOfWorkers();for(let t=0;t<e;t++){const e={worker:Object(G.wrap)(this.createWorker()),activeJobCount:0,messageIdCounter:0};this.workerList.push(e)}(async function(e){let t;try{t=await e.getVersion()}catch(e){t="1.1.0"}const n="1.3.0",[r,o,i]=n.split(".").map(e=>parseInt(e,10)),[a,s,d]=t.split(".").map(e=>parseInt(e,10)),l=`Update your local copy of @cognite/reveal-parser-worker. Required version is ${n}. Received ${t}.`;if(r!==a)throw new Error(l);if(s<o)throw new Error(l);if(s===o&&d<i)throw new Error(l)}
|
|
60
69
|
/*!
|
|
61
70
|
* Copyright 2021 Cognite AS
|
|
62
|
-
*/
|
|
71
|
+
*/)(this.workerList[0].worker).catch(e=>U.a.error(e)),this.workerObjUrl&&URL.revokeObjectURL(this.workerObjUrl)}static get defaultPool(){return j._defaultPool=j._defaultPool||new j,j._defaultPool}createWorker(){const e=(V.publicPath||n.p)+"reveal.parser.worker.js",t={name:"reveal.parser #"+this.workerList.length};if(function(e,t=location.origin){const n=e=>!e.match(/^.*\/\//);if(n(t))throw new Error("isTheSameDomain: the second argument must be an absolute url or omitted. Received "+t);if(n(e))return!0;try{const n=[e,t].map(e=>e.startsWith("//")?"https:"+e:e).map(e=>new URL(e));return n[0].host===n[1].host}catch(n){return console.error(`can not create URLs for ${e} and ${t}`,n),!1}}(e))return new Worker(e,t);if(!this.workerObjUrl){const t=new Blob([`importScripts(${JSON.stringify(e)});`],{type:"text/javascript"});this.workerObjUrl=URL.createObjectURL(t)}return new Worker(this.workerObjUrl,t)}async postWorkToAvailable(e){const t=this.workerList.reduce((e,t)=>e.activeJobCount>t.activeJobCount?t:e,this.workerList[0]);t.activeJobCount+=1;return await(async()=>{try{return await e(t.worker)}finally{t.activeJobCount-=1}})()}determineNumberOfWorkers(){return Math.max(2,Math.min(4,window.navigator.hardwareConcurrency||2))}}class W{constructor(e){this._value=e,this._lastAccessTime=Date.now()}get value(){return this.touch(),this._value}get lastAccessTime(){return this._lastAccessTime}touch(){this._lastAccessTime=Date.now()}}class q{constructor(e=50,t,n=10){this._data=new Map,this._maxElementsInCache=e,this._defaultCleanupCount=n,this._removeCallback=t}has(e){return this._data.has(e)}forceInsert(e,t){this.isFull()&&this.cleanCache(this._defaultCleanupCount),this.insert(e,t)}insert(e,t){if(!(this._data.size<this._maxElementsInCache))throw new Error("Cache full, please clean Cache and retry adding data");this._data.set(e,new W(t))}remove(e){if(void 0!==this._removeCallback){const t=this._data.get(e);void 0!==t&&this._removeCallback(t.value)}this._data.delete(e)}get(e){const 
t=this._data.get(e);if(void 0!==t)return t.value;throw new Error(`Cache element ${e} does not exist`)}isFull(){return!(this._data.size<this._maxElementsInCache)}cleanCache(e){const t=Array.from(this._data.entries());t.sort((e,t)=>t[1].lastAccessTime-e[1].lastAccessTime);for(let n=0;n<e;n++){const e=t.pop();if(void 0===e)return;this.remove(e[0])}}clear(){if(void 0!==this._removeCallback)for(const e of this._data.values())this._removeCallback(e.value);this._data.clear()}}
|
|
63
72
|
/*!
|
|
64
73
|
* Copyright 2021 Cognite AS
|
|
65
|
-
*/
|
|
66
|
-
const{VERSION:V,MIXPANEL_TOKEN:j}={VERSION:"2.1.2",WORKER_VERSION:"1.2.0",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d",IS_DEVELOPMENT_MODE:!1};let W=!0;const q={VERSION:V,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))};function H(e,t,n,r){W=e,W&&(U.a.init(j,{disable_cookie:!0,disable_persistence:!0,ip:!1,property_blacklist:["$city","$region","mp_country_code","$geo_source","$timezone","mp_lib","$lib_version","$device_id","$user_id","$current_url","$screen_width","$screen_height","$referrer","$referring_domain","$initial_referrer","$initial_referring_domain"]}),U.a.reset(),U.a.identify("reveal-single-user"),t&&(q.project=t),n&&(q.application=n),K("init",r))}function K(e,t){if(!W)return;const n={...q,...t};U.a.track(e,n)}function X(e,t){K("loadModel",{...e,modelIdentifier:t})}function Y(e,t){A.a.error(e),K("error",{message:e.message,name:e.name,stack:e.stack,...t})}function Z(e){K("cameraNavigated",e)}
|
|
74
|
+
*/
// Capacity-bounded cache that evicts the *least frequently retrieved* entries.
// A per-key retrieval count is kept in `_retrieves`; when over capacity,
// ensureWithinCapacity() drops the lowest-count keys (invoking the optional
// dispose callback for each evicted value) until the cache fits again.
// set() always inserts, then may immediately evict the new key; its boolean
// result reports whether the key survived eviction.
// NOTE(review): the fast-path condition `this._capacity<this._cache.size` in
// set() reads "already over capacity -> insert without evicting", which looks
// inverted (one would expect `this._cache.size<this._capacity`). Behavior
// still converges because the slow path calls ensureWithinCapacity() — confirm
// the intended semantics upstream before changing.
class H{constructor(e,t){this._cache=new Map,this._retrieves=new Map,this._capacity=e,this._disposeCallback=t}get(e){const t=this._retrieves.get(e)||0;return this._retrieves.set(e,t+1),this._cache.get(e)}set(e,t){return this._cache.has(e)||this._capacity<this._cache.size?(this._cache.set(e,t),!0):(this._cache.set(e,t),this.ensureWithinCapacity(),this._cache.has(e))}remove(e){this._retrieves.delete(e);const t=this._cache.get(e);return void 0!==t&&(void 0!==this._disposeCallback&&this._disposeCallback(t),this._cache.delete(e),!0)}clear(){if(void 0!==this._disposeCallback)for(const e of this._cache.values())this._disposeCallback(e);this._retrieves.clear(),this._cache.clear()}ensureWithinCapacity(){if(this._capacity>=this._cache.size)return;const e=Array.from(this._cache.keys()).map(e=>({key:e,retrivalCount:this._retrieves.get(e)||0})).sort((e,t)=>e.retrivalCount-t.retrivalCount).slice(0,this._cache.size-this._capacity).map(e=>e.key);for(const t of e)this.remove(t)}}
|
|
67
75
|
/*!
|
|
68
76
|
* Copyright 2021 Cognite AS
|
|
69
|
-
*/function
|
|
77
|
+
*/
// Tiny holder constructor for a nullable array reference (populated elsewhere).
function K(){this.array=null}
// Increments the reference count stored under key `t` in Map `e`,
// creating the entry at 1 when absent.
function X(e,t){const n=e.get(t);void 0===n?e.set(t,1):e.set(t,n+1)}
// Decrements the reference count under key `t` in Map `e`; asserts (via the
// bundled assert helper g) that the key exists and removes the entry once the
// count reaches zero.
function Y(e,t){const n=e.get(t);g()(void 0!==n),n<=1?e.delete(t):e.set(t,n-1)}
|
|
70
78
|
/*!
|
|
71
79
|
* Copyright 2021 Cognite AS
|
|
72
|
-
*/},function(e,t){
|
|
80
|
+
*/},function(e,t,n){"use strict";n.d(t,"a",(function(){return m})),n.d(t,"f",(function(){return x})),n.d(t,"e",(function(){return g})),n.d(t,"d",(function(){return b})),n.d(t,"b",(function(){return y})),n.d(t,"c",(function(){return h})),n.d(t,"m",(function(){return w})),n.d(t,"n",(function(){return M})),n.d(t,"o",(function(){return S})),n.d(t,"p",(function(){return I})),n.d(t,"l",(function(){return N})),n.d(t,"h",(function(){return A})),n.d(t,"s",(function(){return D})),n.d(t,"j",(function(){return z})),n.d(t,"v",(function(){return B})),n.d(t,"q",(function(){return k})),n.d(t,"u",(function(){return L})),n.d(t,"i",(function(){return R})),n.d(t,"t",(function(){return E})),n.d(t,"r",(function(){return G})),n.d(t,"g",(function(){return u})),n.d(t,"k",(function(){return V}));var r=n(0),o=n(1),i=n(26),a=n.n(i);
|
|
73
81
|
/*!
|
|
74
82
|
* Copyright 2021 Cognite AS
|
|
75
83
|
*/
|
|
76
|
-
class s{constructor(e){this.
|
|
84
|
+
// In-memory model of a CAD model's sector tree ("scene"): format version, max
// tree index, model unit, the root sector and an id -> sector map. The query
// methods traverse the tree with the shared depth-first helper `o.w`, pruning
// subtrees whose bounds fail the predicate (returning false stops descent).
// getBoundsOfMostGeometry() clusters leaf-sector corner points with k-means
// (k <= 4, via the bundled `a()` clusterer) and returns the merged bounds of
// the largest cluster plus any cluster intersecting it — a heuristic that
// ignores geometry outliers far away from the bulk of the model.
// NOTE(review): inside getBoundsOfMostGeometry the counts in `s` are
// incremented both in the `.map` and again in the `.forEach`; the dominant
// cluster `l` is computed before the second pass so results are unaffected,
// but the double counting looks accidental.
class s{constructor(e,t,n,r,o){this.version=e,this.maxTreeIndex=t,this.root=r,this.sectors=o,this.unit=n}get sectorCount(){return this.sectors.size}getSectorById(e){return this.sectors.get(e)}getAllSectors(){return[...this.sectors.values()]}getSectorsContainingPoint(e){const t=[];return Object(o.w)(this.root,n=>!!n.bounds.containsPoint(e)&&(t.push(n),!0)),t}getSectorsIntersectingBox(e){const t=[];return Object(o.w)(this.root,n=>!!n.bounds.intersectsBox(e)&&(t.push(n),!0)),t}getBoundsOfMostGeometry(){if(0===this.root.children.length)return this.root.bounds;const e=[],t=[];Object(o.w)(this.root,n=>(0===n.children.length&&(t.push(n.bounds.min.toArray(),n.bounds.max.toArray()),e.push(n.bounds,n.bounds)),!0));const n=Math.min(t.length,4),i=a()(t,n,"kmpp",10),s=new Array(i.idxs.length).fill(0),d=s.map(e=>new r.Box3);i.idxs.map(e=>s[e]++);const l=s.reduce((e,t,n)=>(t>e.count&&(e.count=t,e.idx=n),e),{count:0,idx:-1}).idx;i.idxs.forEach((t,n)=>{s[t]++,d[t].expandByPoint(e[n].min),d[t].expandByPoint(e[n].max)});const c=d.filter((e,t)=>!(t===l||!e.intersectsBox(d[l])));if(c.length>0){const e=d[l].clone();return c.forEach(t=>{e.expandByPoint(t.min),e.expandByPoint(t.max)}),e}return d[l]}getSectorsIntersectingFrustum(e,t){const n=(new r.Matrix4).multiplyMatrices(e,t),i=(new r.Frustum).setFromProjectionMatrix(n),a=[];return Object(o.w)(this.root,e=>!!i.intersectsBox(e.bounds)&&(a.push(e),!0)),a}}
|
|
77
85
|
/*!
|
|
78
86
|
* Copyright 2021 Cognite AS
|
|
79
|
-
*/
|
|
80
|
-
class u extends s{constructor(e){super(u.classToken),e instanceof a.e?this._treeIndices=e:(a.h,this._treeIndices=new a.e(e))}updateSet(e){this._treeIndices=e,this.notifyChanged()}clear(){this._treeIndices=new a.e,this.notifyChanged()}getIndexSet(){return this._treeIndices}get isLoading(){return!1}serialize(){return{token:this.classToken,state:this._treeIndices.toRangeArray()}}}u.classToken="TreeIndexNodeCollection",function(e){e[e.NoOutline=0]="NoOutline",e[e.White=1]="White",e[e.Black=2]="Black",e[e.Cyan=3]="Cyan",e[e.Blue=4]="Blue",e[e.Green=5]="Green",e[e.Red=6]="Red",e[e.Orange=7]="Orange"}(r||(r={}));const m={visible:!0,outlineColor:r.White},h={visible:!0,renderInFront:!0},p={Default:{visible:!0,renderGhosted:!1,renderInFront:!1,outlineColor:r.NoOutline,color:[0,0,0]},Outlined:m,Hidden:{visible:!1},InFront:h,Ghosted:{visible:!0,renderGhosted:!0},Highlighted:{...h,visible:!0,color:[100,100,255],...m}};
|
|
87
|
+
*/function d(e){const t=new Map,n=[];e.sectors.forEach(e=>{var o;const i=function(e){const t=function(e){if(!e.facesFile)return{quadSize:-1,coverageFactors:{xy:-1,yz:-1,xz:-1},recursiveCoverageFactors:{xy:-1,yz:-1,xz:-1},fileName:null,downloadSize:e.indexFile.downloadSize};return{...e.facesFile,recursiveCoverageFactors:e.facesFile.recursiveCoverageFactors||e.facesFile.coverageFactors}}(e),n=e.boundingBox,o=n.min.x,i=n.min.y,a=n.min.z,s=n.max.x,d=n.max.y,l=n.max.z;return{id:e.id,path:e.path,depth:e.depth,bounds:new r.Box3(new r.Vector3(o,i,a),new r.Vector3(s,d,l)),estimatedDrawCallCount:e.estimatedDrawCallCount||0,estimatedRenderCost:e.estimatedTriangleCount||0,indexFile:{...e.indexFile},facesFile:t,children:[]}}(e);t.set(e.id,i),n[e.id]=null!==(o=e.parentId)&&void 0!==o?o:-1});for(const e of t.values()){const r=n[e.id];if(-1===r)continue;t.get(r).children.push(e)}const o=t.get(0);if(!o)throw new Error("Root sector not found, must have ID 0");!function e(t,n){!function(e){return-1===e.facesFile.coverageFactors.xy}(t)?t.children.forEach(n=>e(n,t.facesFile)):(t.facesFile.coverageFactors.xy=n.recursiveCoverageFactors.xy,t.facesFile.coverageFactors.yz=n.recursiveCoverageFactors.yz,t.facesFile.coverageFactors.xz=n.recursiveCoverageFactors.xz,t.facesFile.recursiveCoverageFactors.xy=n.recursiveCoverageFactors.xy,t.facesFile.recursiveCoverageFactors.yz=n.recursiveCoverageFactors.yz,t.facesFile.recursiveCoverageFactors.xz=n.recursiveCoverageFactors.xz,t.children.forEach(t=>e(t,n)))}
|
|
81
88
|
/*!
|
|
82
89
|
* Copyright 2021 Cognite AS
|
|
83
|
-
*/
|
|
84
|
-
class f{constructor(){this._styledCollections=new Array,this._events={changed:new a.d,loadingStateChanged:new a.d}}on(e,t){switch(e){case"changed":this._events.changed.subscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.subscribe(t);break;default:Object(a.k)(e,`Unsupported event: '${e}'`)}}off(e,t){switch(e){case"changed":this._events.changed.unsubscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.unsubscribe(t);break;default:Object(a.k)(e,`Unsupported event: '${e}'`)}}assignStyledNodeCollection(e,t){const n=this._styledCollections.find(t=>t.nodeCollection===e);if(void 0!==n)n.appearance=t,this.handleNodeCollectionChanged(n);else{const n={nodeCollection:e,appearance:t,handleNodeCollectionChangedListener:()=>{this.handleNodeCollectionChanged(n)}};this._styledCollections.push(n),e.on("changed",n.handleNodeCollectionChangedListener),this.notifyChanged()}}unassignStyledNodeCollection(e){const t=this._styledCollections.findIndex(t=>t.nodeCollection===e);if(-1===t)throw new Error("NodeCollection not added");const n=this._styledCollections[t];this._styledCollections.splice(t,1),e.off("changed",n.handleNodeCollectionChangedListener),this.notifyChanged()}applyStyles(e){this._styledCollections.forEach(t=>{const n=t.nodeCollection.getIndexSet();e(n,t.appearance)})}clear(){for(const e of this._styledCollections){e.nodeCollection.off("changed",e.handleNodeCollectionChangedListener)}this._styledCollections.splice(0),this.notifyChanged()}get isLoading(){return this._styledCollections.some(e=>e.nodeCollection.isLoading)}notifyChanged(){this._events.changed.fire()}notifyLoadingStateChanged(){this._lastFiredLoadingState!==this.isLoading&&(this._lastFiredLoadingState=this.isLoading,this._events.loadingStateChanged.fire(this.isLoading))}handleNodeCollectionChanged(e){this.notifyChanged(),this.notifyLoadingStateChanged()}}
|
|
90
|
+
*/(o,o.facesFile);const i=null!==e.unit?e.unit:"Meters";return new s(e.version,e.maxTreeIndex,i,o,t)}function l(e){const t=new Map,n=[];e.sectors.forEach(e=>{var o;const i=function(e){const t=e.boundingBox,n=t.min.x,o=t.min.y,i=t.min.z,a=t.max.x,s=t.max.y,d=t.max.z;return{id:e.id,path:e.path,depth:e.depth,bounds:new r.Box3(new r.Vector3(n,o,i),new r.Vector3(a,s,d)),estimatedDrawCallCount:e.estimatedDrawCallCount,estimatedRenderCost:e.estimatedTriangleCount||0,downloadSize:e.downloadSize||0,maxDiagonalLength:e.maxDiagonalLength||0,minDiagonalLength:e.minDiagonalLength||0,sectorFileName:e.sectorFileName,children:[]}}
|
|
85
91
|
/*!
|
|
86
92
|
* Copyright 2021 Cognite AS
|
|
87
|
-
*/
|
|
93
|
+
*/(e);t.set(e.id,i),n[e.id]=null!==(o=e.parentId)&&void 0!==o?o:-1});for(const e of t.values()){const r=n[e.id];if(-1===r)continue;t.get(r).children.push(e)}const o=t.get(0);if(!o)throw new Error("Root sector not found, must have ID 0");const i=null!==e.unit?e.unit:"Meters";return new s(e.version,e.maxTreeIndex,i,o,t)}class c{parse(e){const t=e.version;switch(t){case 8:return d(e);case 9:return l(e);case void 0:throw new Error('Metadata must contain a "version"-field');default:throw new Error(`Version ${t} is not supported`)}}}
|
|
88
94
|
/*!
|
|
89
95
|
* Copyright 2021 Cognite AS
|
|
90
|
-
*/
|
|
91
|
-
class i extends r.Group{constructor(e,t,n){super(),this._lod=o.a.Discarded,this._updatedTimestamp=Date.now(),this.name=`Sector ${t} [id=${e}]`,this.sectorId=e,this.sectorPath=t,this.bounds=n,this.depth=function(e){let t=0;for(let n=0;n<e.length;++n)t+="/"===e[n]?1:0;return t-1}
|
|
96
|
+
*/
// Conversion factors from CAD model units to meters (used below to build the
// model matrix scale; unknown units fall back to 1 there).
const u=new Map([["Meters",1],["Centimeters",.01],["Millimeters",.001],["Micrometers",1e-6],["Kilometers",1e3],["Feet",.3048],["Inches",.0254],["Yards",.9144],["Miles",1609.34],["Mils",254e-7],["Microinches",2.54e-8]]);
// NOTE(review): presumably the module whose export `d` is the 3D output format
// enum — used below as `p.d.GltfCadModel` / `p.d.RevealCadModel`.
var p=n(5);
|
|
92
97
|
/*!
|
|
93
98
|
* Copyright 2021 Cognite AS
|
|
94
|
-
*/(t)
|
|
99
|
+
*/class m{constructor(e,t,n="scene.json"){this._currentModelIdentifier=0,this._cadSceneParser=new c,this._modelMetadataProvider=e,this._modelDataProvider=t,this._blobFileName=n}async loadData(e){const t=await this.getSupportedOutput(e),n=this._modelMetadataProvider.getModelUri(e,t),i=this._modelMetadataProvider.getModelMatrix(e,t.format),a=this._modelMetadataProvider.getModelCamera(e),s=await n,d=await this._modelDataProvider.getJsonFile(s,this._blobFileName),l=this._cadSceneParser.parse(d),c=function(e,t){var n;const o=null!==(n=u.get(e))&&void 0!==n?n:1;if(void 0===o)throw new Error(`Unknown model unit '${e}'`);return(new r.Matrix4).makeScale(o,o,o).multiply(t)}(l.unit,await i),p=(new r.Matrix4).copy(c).invert(),m=await a;return{modelIdentifier:""+this._currentModelIdentifier++,modelBaseUrl:s,geometryClipBox:null,format:t.format,formatVersion:t.version,modelMatrix:c,inverseModelMatrix:p,cameraConfiguration:Object(o.v)(m,c),scene:l}}async getSupportedOutput(e){const t=await this._modelMetadataProvider.getModelOutputs(e),n=[{format:p.d.GltfCadModel,version:9},{format:p.d.RevealCadModel,version:8}],r=t.filter(e=>n.some(t=>t.format===e.format&&t.version===e.version));if(0===r.length){const e=t.map(e=>`${e.format} v${e.version}`).join(", "),r=n.map(e=>`${e.format} v${e.version}`).join(", ");throw new Error(`Model does not contain any supported CAD model outputs, got [${e}], but only supports [${r}]`)}return r[0]}}var h,f=n(3),v=n.n(f);
|
|
95
100
|
/*!
|
|
96
101
|
* Copyright 2021 Cognite AS
|
|
97
102
|
*/
|
|
103
|
+
/**
 * Factory that builds a sector scene (`s`) from an already-constructed sector
 * tree. Only CAD metadata versions 8 and 9 are supported.
 */
class x {
  createSectorScene(e, t, n, r) {
    v()(e === 8 || e === 9, "Only version 8 and 9 is currently supported");
    // Index every sector in the tree by id so the scene gets O(1) lookups.
    const sectorsById = new Map();
    Object(o.w)(r, sector => {
      sectorsById.set(sector.id, sector);
      return true;
    });
    return new s(e, t, n, r, sectorsById);
  }
}
|
|
104
|
+
/*!
|
|
105
|
+
* Copyright 2021 Cognite AS
|
|
106
|
+
*/
// TypeScript-style reverse-mapped enum for a sector's level of detail:
// h.Discarded = 0, h.Simple = 1, h.Detailed = 2 (and h[0] === "Discarded", …).
!function(e){e[e.Discarded=0]="Discarded",e[e.Simple=1]="Simple",e[e.Detailed=2]="Detailed"}(h||(h={}));
|
|
98
107
|
/*!
|
|
99
108
|
* Copyright 2021 Cognite AS
|
|
100
109
|
*/
|
|
101
|
-
class
|
|
110
|
+
class g extends r.Group{constructor(e,t,n){super(),this._lod=h.Discarded,this._updatedTimestamp=Date.now(),this.name=`Sector ${t} [id=${e}]`,this.sectorId=e,this.sectorPath=t,this.bounds=n,this.depth=function(e){let t=0;for(let n=0;n<e.length;++n)t+="/"===e[n]?1:0;return t-1}
|
|
102
111
|
/*!
|
|
103
112
|
* Copyright 2021 Cognite AS
|
|
104
|
-
*/
|
|
113
|
+
*/(t)}get levelOfDetail(){return this._lod}get group(){return this._group}get updatedTimestamp(){return this._updatedTimestamp}updateGeometry(e,t){this.resetGeometry(),this._group=e,void 0!==this._group&&this._group.reference(),this._lod=t,this._updatedTimestamp=Date.now(),this.visible=this._lod!==h.Discarded,this.updateMatrixWorld(!0)}resetGeometry(){void 0!==this._group&&(this._group.dereference(),this.remove(this._group)),this._group=void 0,this._lod=h.Discarded,this._updatedTimestamp=Date.now()}}class b extends g{constructor(e){const t=e.scene.root.bounds.clone();t.applyMatrix4(e.modelMatrix),super(0,"/",t);const{scene:n,modelMatrix:r}=e;this.sectorNodeMap=new Map,function e(t,n,r,o){const i=t.bounds.clone();i.applyMatrix4(o);const a=new g(t.id,t.path,i);a.name="Sector "+t.id,n.add(a),a.matrixAutoUpdate=!1,a.updateMatrixWorld(!0),r.set(t.id,a);for(const n of t.children)e(n,a,r,o)}
|
|
105
114
|
/*!
|
|
106
115
|
* Copyright 2021 Cognite AS
|
|
107
|
-
*/
|
|
116
|
+
*/(n.root,this,this.sectorNodeMap,r),this.matrixAutoUpdate=!1,this.setModelTransformation(r)}setModelTransformation(e){this.matrix.copy(e),this.updateMatrixWorld(!0)}getModelTransformation(e=new r.Matrix4){return e.copy(this.matrix)}}class y{constructor(e=o.k.defaultPool){this.workerPool=e}parseI3D(e){return this.parseDetailed(e)}parseF3D(e){return this.parseSimple(e)}parseCTM(e){return this.parseCtm(e)}async parseSimple(e){return this.workerPool.postWorkToAvailable(async t=>t.parseQuads(e))}async parseDetailed(e){return this.workerPool.postWorkToAvailable(async t=>t.parseSector(e))}async parseCtm(e){return this.workerPool.postWorkToAvailable(async t=>t.parseCtm(e))}}var _=n(11),T=n(13);
|
|
108
117
|
/*!
|
|
109
118
|
* Copyright 2021 Cognite AS
|
|
110
119
|
*/
|
|
111
|
-
|
|
120
|
+
/**
 * Computes the per-element stride implied by an attribute layout: the largest
 * (offset + size) over all attribute descriptions in the map (0 when empty).
 */
function C(attributeMap) {
  let stride = 0;
  for (const spec of attributeMap.values()) {
    stride = Math.max(stride, spec.offset + spec.size);
  }
  return stride;
}
/**
 * Filters instanced primitives against a clip box using their "instanceMatrix"
 * attribute. A null clip box keeps everything (the input buffer is returned
 * unchanged).
 */
function w(values, attributeMap, outBounds, clipBox) {
  if (clipBox === null) {
    return values;
  }
  const matrixSpec = attributeMap.get("instanceMatrix");
  v()(matrixSpec !== undefined);
  const stride = C(attributeMap);
  const matrixOffset = matrixSpec.offset;
  return Object(T.a)(values, stride, clipBox, (out, index, element, extra) => {
    Object(_.d)(element, matrixOffset, index, out, outBounds, extra);
  });
}
/**
 * Filters cone/cylinder-like primitives against a clip box using two center
 * attributes plus two radius attributes (attribute names configurable).
 */
function M(values, attributeMap, clipBox, radiusAName = "radiusA", radiusBName = "radiusB") {
  if (clipBox === null) {
    return values;
  }
  const centerA = attributeMap.get("centerA");
  const centerB = attributeMap.get("centerB");
  const radiusA = attributeMap.get(radiusAName);
  const radiusB = attributeMap.get(radiusBName);
  v()(centerA !== undefined && centerB !== undefined && radiusA !== undefined && radiusB !== undefined);
  const stride = C(attributeMap);
  return Object(T.a)(values, stride, clipBox, (out, index, element, extra) => {
    Object(_.a)(element, centerA.offset, centerB.offset, radiusA.offset, radiusB.offset, index, out, extra);
  });
}
/**
 * Filters four-vertex (trapezium-style) primitives against a clip box.
 */
function I(values, attributeMap, clipBox) {
  if (clipBox === null) {
    return values;
  }
  const vertex1 = attributeMap.get("vertex1");
  const vertex2 = attributeMap.get("vertex2");
  const vertex3 = attributeMap.get("vertex3");
  const vertex4 = attributeMap.get("vertex4");
  v()(vertex1 !== undefined && vertex2 !== undefined && vertex3 !== undefined && vertex4 !== undefined);
  const stride = C(attributeMap);
  return Object(T.a)(values, stride, clipBox, (out, index, element, extra) => {
    Object(_.e)(vertex1.offset, vertex2.offset, vertex3.offset, vertex4.offset, element, index, out, extra);
  });
}
/**
 * Filters ellipse/general-cylinder-like primitives (center + two radii +
 * height) against a clip box. Note: unlike the helpers above, this passes the
 * attribute *descriptions* to the bound-computation helper, not raw offsets.
 */
function S(values, attributeMap, clipBox, hRadiusName = "horizontalRadius", vRadiusName = "verticalRadius") {
  if (clipBox === null) {
    return values;
  }
  const center = attributeMap.get("center");
  const hRadius = attributeMap.get(hRadiusName);
  const vRadius = attributeMap.get(vRadiusName);
  const height = attributeMap.get("height");
  v()(center !== undefined && hRadius !== undefined && vRadius !== undefined && height !== undefined);
  const stride = C(attributeMap);
  return Object(T.a)(values, stride, clipBox, (out, index, element, extra) => {
    Object(_.b)(center, hRadius, vRadius, height, element, index, out, extra);
  });
}
|
|
112
121
|
/*!
|
|
113
122
|
* Copyright 2021 Cognite AS
|
|
114
123
|
*/
|
|
115
|
-
|
|
124
|
+
// Scratch objects reused by N() to avoid per-call allocations.
const P = {
  p: new r.Vector3(),
  instanceMatrix: new r.Matrix4(),
  baseBounds: new r.Box3(),
  instanceBounds: new r.Box3()
};
/**
 * Filters an instanced-mesh payload against a geometry clip box.
 *
 * @param e flat vertex buffer (3 floats per vertex)
 * @param t triangle index buffer (3 vertex indices per triangle)
 * @param n instance-mesh payload: {triangleOffset, triangleCount,
 *          instanceMatrices (16 floats/instance), colors (4 bytes/instance),
 *          treeIndices}
 * @param r clip Box3 or null (null keeps everything)
 * @returns the original payload when nothing is culled, otherwise a copy with
 *          only the instances whose transformed base bounds intersect `r`.
 */
function N(e, t, n, r) {
  if (null === r) return n;
  const { p: o, instanceMatrix: i, baseBounds: a, instanceBounds: s } = P;
  // Compute the untransformed bounds of the referenced triangle range.
  a.makeEmpty();
  for (let tri = n.triangleOffset; tri < n.triangleOffset + n.triangleCount; ++tri) {
    for (let corner = 0; corner < 3; ++corner) {
      const vertexIndex = t[3 * tri + corner];
      // BUGFIX: vertices are packed 3 floats per vertex, so the float base is
      // 3 * vertexIndex; the original read e[vertexIndex + 0..2].
      o.set(e[3 * vertexIndex + 0], e[3 * vertexIndex + 1], e[3 * vertexIndex + 2]);
      a.expandByPoint(o);
    }
  }
  let kept = 0;
  const instanceCount = n.treeIndices.length;
  const filteredMatrices = new Float32Array(n.instanceMatrices.length);
  const filteredTreeIndices = new Float32Array(instanceCount);
  const filteredColors = new Uint8Array(4 * instanceCount);
  for (let idx = 0; idx < instanceCount; ++idx) {
    const m = n.instanceMatrices;
    // BUGFIX: matrices are packed 16 floats per instance (see the subarray
    // copies below); the original used base `idx` instead of `16 * idx`, so
    // for idx > 0 the culling test read a garbled mix of two matrices.
    const base = 16 * idx;
    // Column-major storage fed to the row-major Matrix4.set().
    i.set(
      m[base + 0], m[base + 4], m[base + 8], m[base + 12],
      m[base + 1], m[base + 5], m[base + 9], m[base + 13],
      m[base + 2], m[base + 6], m[base + 10], m[base + 14],
      m[base + 3], m[base + 7], m[base + 11], m[base + 15]
    );
    s.copy(a).applyMatrix4(i);
    if (r.intersectsBox(s)) {
      filteredMatrices.set(n.instanceMatrices.subarray(16 * idx, 16 * (idx + 1)), 16 * kept);
      filteredColors.set(n.colors.subarray(4 * idx, 4 * (idx + 1)), 4 * kept);
      filteredTreeIndices[kept] = n.treeIndices[idx];
      kept++;
    }
  }
  // Nothing culled: return the payload untouched (same object identity).
  if (instanceCount === kept) return n;
  return {
    triangleCount: n.triangleCount,
    triangleOffset: n.triangleOffset,
    instanceMatrices: filteredMatrices.slice(0, 16 * kept),
    colors: filteredColors.slice(0, 4 * kept),
    treeIndices: filteredTreeIndices.slice(0, kept)
  };
}
|
|
116
125
|
/*!
|
|
117
126
|
* Copyright 2021 Cognite AS
|
|
118
|
-
*/
|
|
127
|
+
*/const{boxGeometry:A,boxGeometryBoundingBox:R}=(()=>{const e=new r.BoxBufferGeometry(1,1,1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{boxGeometry:t,boxGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{quadGeometry:D,quadGeometryBoundingBox:E}=(()=>{const e=new r.PlaneBufferGeometry(1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{quadGeometry:t,quadGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{trapeziumGeometry:B,trapeziumGeometryBoundingBox:O}=(()=>{const e=[0,0,0,1,1,1,2,2,2,3,3,3];return{trapeziumGeometry:{index:new r.BufferAttribute(new Uint16Array([0,1,3,0,3,2]),1),position:new r.BufferAttribute(new Float32Array(e),3)},trapeziumGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{coneGeometry:z,coneGeometryBoundingBox:F}=(()=>{const e=[];e.push(-1,1,-1),e.push(-1,-1,-1),e.push(1,1,-1),e.push(1,-1,-1),e.push(1,1,1),e.push(1,-1,1);const t=new Uint16Array([1,2,0,1,3,2,3,4,2,3,5,4]);return{coneGeometry:{index:new r.BufferAttribute(t,1),position:new r.BufferAttribute(new Float32Array(e),3)},coneGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{torusLodGeometries:L}=(()=>{const e=(e,t)=>[e,2*t*Math.PI];return{torusLodGeometries:[{tubularSegments:9,radialSegments:18},{tubularSegments:5,radialSegments:12},{tubularSegments:4,radialSegments:5}].map(({tubularSegments:t,radialSegments:n})=>function(e,t,n=((e,t)=>[e,t,0])){const o=[],i=[],a=1/e,s=1/t;for(let r=0;r<=t;r++)for(let t=0;t<=e;t++){const[e,i,d]=n(t*a,r*s);o.push(e||0,i||0,d||0)}for(let n=1;n<=t;n++)for(let t=1;t<=e;t++){const r=(e+1)*n+t-1,o=(e+1)*(n-1)+t-1,a=(e+1)*(n-1)+t,s=(e+1)*n+t;i.push(r,o,s),i.push(o,a,s)}return{index:new r.Uint16BufferAttribute(i,1),position:new r.Float32BufferAttribute(o,3)}}(n,t,e))}})(),{nutGeometry:k,nutGeometryBoundingBox:G}=(()=>{const e=new 
r.CylinderBufferGeometry(.5,.5,1,6);try{e.applyMatrix4((new r.Matrix4).makeRotationX(-Math.PI/2));const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return{nutGeometry:t,nutGeometryBoundingBox:(new r.Box3).setFromArray(t.position.array)}}finally{e.dispose()}})();
|
|
119
128
|
/*!
|
|
120
129
|
* Copyright 2021 Cognite AS
|
|
121
130
|
*/
|
|
122
|
-
function
|
|
131
|
+
function V(e,t,n,i){const a=[],s=e.filter(e=>null===i||function(e,t){const{p:n,box:r}=U;r.makeEmpty();for(let t=0;t<e.vertices.length;t+=3)n.set(e.vertices[t+0],e.vertices[t+1],e.vertices[t+2]),r.expandByPoint(n);return t.intersectsBox(r)}
|
|
123
132
|
/*!
|
|
124
133
|
* Copyright 2021 Cognite AS
|
|
125
|
-
*/(e,
|
|
134
|
+
*/(e,i));for(const e of s){const i=new r.BufferGeometry,s=new r.Uint32BufferAttribute(e.indices.buffer,1).onUpload(o.o),d=new r.Float32BufferAttribute(e.vertices.buffer,3).onUpload(o.o),l=new r.Uint8BufferAttribute(e.colors.buffer,3).onUpload(o.o),c=new r.Float32BufferAttribute(e.treeIndices.buffer,1).onUpload(o.o);i.setIndex(s),i.setAttribute("color",l),i.setAttribute("position",d),i.setAttribute("treeIndex",c),i.boundingBox=t.clone(),i.boundingSphere=new r.Sphere,t.getBoundingSphere(i.boundingSphere);const u=new r.Mesh(i,n);u.name="Triangle mesh "+e.fileId,u.userData.treeIndices=new Map;for(const t of e.treeIndices)Object(o.p)(u.userData.treeIndices,t);a.push(u)}return a}const U={p:new r.Vector3,box:new r.Box3}},function(e,t){e.exports=require("assert")},function(e,t){e.exports=require("rxjs/operators")},function(e,t,n){"use strict";n.d(t,"a",(function(){return r})),n.d(t,"b",(function(){return o})),n.d(t,"c",(function(){return l})),n.d(t,"e",(function(){return p})),n.d(t,"f",(function(){return m})),n.d(t,"g",(function(){return h})),n.d(t,"d",(function(){return i}));class r{constructor(e){this.client=e,this.authenticationPromise=e.authenticate()}get headers(){return this.client.getDefaultRequestHeaders()}async getBinaryFile(e,t){const n=`${e}/${t}`,r={...this.client.getDefaultRequestHeaders(),Accept:"*/*"};return(await this.fetchWithRetry(n,{headers:r,method:"GET"})).arrayBuffer()}async getJsonFile(e,t){return(await this.client.get(`${e}/${t}`)).data}async fetchWithRetry(e,t,n=3){let r;for(let o=0;o<n;o++)try{await this.authenticationPromise;const n=await fetch(e,t);if(401===n.status){this.authenticationPromise=this.client.authenticate();continue}return n}catch(e){void 0!==r&&(r=e)}throw r}}class o{constructor(e,t){this.revealInternalId=Symbol(`${e}/${t}`),this.modelId=e,this.revisionId=t}toString(){return`${o.name} (${String(this.revealInternalId)})`}}var i,a=n(0);
|
|
126
135
|
/*!
|
|
127
136
|
* Copyright 2021 Cognite AS
|
|
128
|
-
|
|
129
|
-
const Q={p:new r.Vector3,instanceMatrix:new r.Matrix4,baseBounds:new r.Box3,instanceBounds:new r.Box3};
|
|
137
|
+
*/
// TypeScript-style string enum of 3D output formats known to CDF; AnyFormat
// ("all-outputs") is used as a wildcard when listing a model's outputs.
!function(e){e.EptPointCloud="ept-pointcloud",e.RevealCadModel="reveal-directory",e.GltfCadModel="gltf-directory",e.AnyFormat="all-outputs"}(i||(i={}));
|
|
130
138
|
/*!
|
|
131
139
|
* Copyright 2021 Cognite AS
|
|
132
140
|
*/
|
|
133
|
-
|
|
141
|
+
// CDF-to-three.js coordinate conversion: rotates z-up model space into y-up
// viewer space (swaps Y/Z and negates one axis).
const s = new a.Matrix4().set(
  1, 0, 0, 0,
  0, 0, 1, 0,
  0, -1, 0, 0,
  0, 0, 0, 1
);
/**
 * Pre-multiplies matrix `e` with the default coordinate conversion for the
 * given output format `t`. CAD formats get the z-up -> y-up rotation, EPT
 * point clouds are left untouched.
 * @throws Error for unrecognized formats.
 */
function d(e, t) {
  switch (t) {
    // BUGFIX: removed a duplicate leading `case i.RevealCadModel` branch whose
    // body was identical to this shared branch (it made the label below dead).
    case i.RevealCadModel:
    case i.GltfCadModel:
      e.premultiply(s);
      break;
    case i.EptPointCloud:
      break;
    default:
      // BUGFIX: the original message had an unterminated quote
      // ("Unknown model format '" + t).
      throw new Error(`Unknown model format '${t}'`);
  }
}
|
|
134
142
|
/*!
|
|
135
143
|
* Copyright 2021 Cognite AS
|
|
136
|
-
*/
|
|
137
|
-
// Sector transformer (removed-side code in this diff): converts parsed simple /
// detailed sector geometry into renderable objects using the model's materials.
// C() is an assert helper; J and $ are geometry converters defined outside this chunk.
class ee{constructor(e){this.materialManager=e}transformSimpleSector(e,t,n,r){const o=this.materialManager.getModelMaterials(e);return C()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve(J(n,t.bounds,o,r))}transformDetailedSector(e,t,n,r){const o=this.materialManager.getModelMaterials(e);return C()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve($(n,t,o,r))}}
|
|
144
|
+
*/class l{constructor(e){this._client=e}async getModelMatrix(e,t){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:n,revisionId:r}=e,i=await this._client.revisions3D.retrieve(n,r),s=new a.Matrix4;return i.rotation&&s.makeRotationFromEuler(new a.Euler(...i.rotation)),d(s,t),s}async getModelCamera(e){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:t,revisionId:n}=e,r=await this._client.revisions3D.retrieve(t,n);if(r.camera&&r.camera.position&&r.camera.target){const{position:e,target:t}=r.camera;return{position:new a.Vector3(e[0],e[1],e[2]),target:new a.Vector3(t[0],t[1],t[2])}}}async getModelUri(e,t){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);return`${this._client.getBaseUrl()}${this.getRequestPath(t.blobId)}`}async getModelOutputs(e){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:t,revisionId:n}=e,r=`/api/v1/projects/${this._client.project}/3d/models/${t}/revisions/${n}/outputs`,a=await this._client.get(r,{params:{format:i.AnyFormat}});if(200===a.status)return a.data.items.filter(e=>Object.values(i).includes(e.format));throw new Error(`Unexpected response ${a.status} (payload: '${a.data})`)}getRequestPath(e){return`/api/v1/projects/${this._client.project}/3d/files/${e}`}}
|
|
138
145
|
/*!
|
|
139
146
|
* Copyright 2021 Cognite AS
|
|
140
|
-
*/
|
|
147
|
+
*/var c=n(20);
|
|
141
148
|
/*!
|
|
142
149
|
* Copyright 2021 Cognite AS
|
|
143
|
-
*/function
|
|
150
|
+
*/async function u(e){const t=await fetch(e);if(!t.ok){const e={};throw t.headers.forEach((t,n)=>{e[t]=n}),new c.HttpError(t.status,t.body,e)}return t}
|
|
144
151
|
/*!
|
|
145
152
|
* Copyright 2021 Cognite AS
|
|
146
|
-
*/
|
|
153
|
+
*/class p{get headers(){return{}}async getBinaryFile(e,t){return(await u(`${e}/${t}`)).arrayBuffer()}async getJsonFile(e,t){return(await u(`${e}/${t}`)).json()}}
|
|
147
154
|
/*!
|
|
148
155
|
* Copyright 2021 Cognite AS
|
|
149
|
-
*/class
|
|
156
|
+
*/class m{constructor(e){this.revealInternalId=Symbol(e),this.localPath=e}toString(){return`${m.name} (${this.localPath})`}}
|
|
150
157
|
/*!
|
|
151
158
|
* Copyright 2021 Cognite AS
|
|
152
|
-
*/
|
|
159
|
+
*//* Metadata provider for locally hosted models: derives the model URI from
location.origin and probes scene.json / ept.json to detect the stored format
(CAD v8 / v9 or EPT point cloud). getModelOutputs tries the CAD probe first and
falls back to the point-cloud probe. NOTE(review): the thrown message has a
stray trailing ')'; kept byte-identical here since runtime strings are code. */class h{getModelUri(e){if(!(e instanceof m))throw new Error(`Model must be a ${m.name}, but got ${e.toString()}`);return Promise.resolve(`${location.origin}/${e.localPath}`)}async getModelMatrix(e){if(!(e instanceof m))throw new Error(`Model must be a ${m.name}, but got ${e.toString()}`);const t=new a.Matrix4;return d(t,i.RevealCadModel),t}getModelCamera(e){if(!(e instanceof m))throw new Error(`Model must be a ${m.name}, but got ${e.toString()}`);return Promise.resolve(void 0)}/* First inline async fn probes scene.json (CAD, version 8 or 9); second probes ept.json (point cloud); both swallow fetch/parse errors and return undefined. */async getModelOutputs(e){var t;const n=await this.getModelUri(e),r=null!==(t=await async function(e){let t;try{t=(await(await u(e+"/scene.json")).json()).version}catch(e){return}switch(t){case 8:return Promise.resolve({blobId:-1,format:i.RevealCadModel,version:8});case 9:return Promise.resolve({blobId:-1,format:i.GltfCadModel,version:9});default:return}}(n))&&void 0!==t?t:await async function(e){let t;try{t=await(await u(e+"/ept.json")).json()}catch(e){return}return t?Promise.resolve({blobId:-1,format:i.EptPointCloud,version:-1}):void 0}(n);if(r)return[r];throw new Error("Only point cloud or CAD models (version 8 and 9) are supported)")}}
|
|
153
160
|
/*!
|
|
154
161
|
* Copyright 2021 Cognite AS
|
|
155
|
-
*/
|
|
162
|
+
*/},function(e,t){e.exports=require("glslify")},function(e,t){e.exports=require("rxjs")},function(e,t,n){"use strict";n.d(t,"a",(function(){return d}));var r=n(14),o=n.n(r),i=n(10);/* Build-time constants inlined by the bundler; only VERSION and MIXPANEL_TOKEN are destructured. */const{VERSION:a,MIXPANEL_TOKEN:s}={VERSION:"3.0.0-alpha.0",WORKER_VERSION:"1.3.0",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d",IS_DEVELOPMENT_MODE:!1};/* MetricsLogger: anonymized Mixpanel telemetry (cookies/persistence disabled, geo and device properties blacklisted), one random v4-style session id per page, exposed as a singleton via globalThis.revealMetricsLogger. */class d{constructor(e,t,n){o.a.init(s,{disable_cookie:!0,disable_persistence:!0,ip:!1,property_blacklist:["$city","$region","mp_country_code","$geo_source","$timezone","mp_lib","$lib_version","$device_id","$user_id","$current_url","$screen_width","$screen_height","$referrer","$referring_domain","$initial_referrer","$initial_referring_domain"]}),o.a.reset(),o.a.identify("reveal-single-user"),this._sessionProps={VERSION:a,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))},e&&(this._sessionProps.project=e),t&&(this._sessionProps.application=t),this.innerTrackEvent("init",n)}static init(e,t,n,r){if(void 0===globalThis.revealMetricsLogger&&e){const e=new d(t,n,r);globalThis.revealMetricsLogger={metricsLogger:e}}}innerTrackEvent(e,t){const n={...this._sessionProps,...t};o.a.track(e,n)}static trackEvent(e,t){globalThis.revealMetricsLogger&&globalThis.revealMetricsLogger.metricsLogger.innerTrackEvent(e,t)}static trackCreateTool(e){d.trackEvent("toolCreated",{toolName:e})}static trackLoadModel(e,t){d.trackEvent("loadModel",{...e,modelIdentifier:t})}static trackCadModelStyled(e,t){d.trackEvent("cadModelStyleAssigned",{nodeCollectionClassToken:e,style:t})}static trackError(e,t){i.a.error(e),void 0!==e?this.trackEvent("error",{message:e.message||e,name:e.name,stack:e.stack,...t}):this.trackEvent("error",{name:"unknown",...t})}static trackCameraNavigation(e){d.trackEvent("cameraNavigated",e)}}
|
|
156
163
|
/*!
|
|
157
164
|
* Copyright 2021 Cognite AS
|
|
158
|
-
*/
|
|
159
|
-
// Shader source table (removed-side code in this diff): glslify'd fragment/vertex
// pairs for sector meshes, each CAD primitive type, and the post-processing passes
// (composition xe, FXAA ge, SSAO ye, SSAO-blur be, _e). Module ids map to shader
// files resolved by the bundler.
const fe={defines:{COGNITE_COLOR_BY_TREE_INDEX:!1}},ve={simpleMesh:{fragment:pe()(n(26).default),vertex:pe()(n(27).default)},detailedMesh:{fragment:pe()(n(28).default),vertex:pe()(n(29).default)},instancedMesh:{fragment:pe()(n(30).default),vertex:pe()(n(31).default)},boxPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},circlePrimitive:{fragment:pe()(n(32).default),vertex:pe()(n(33).default)},conePrimitive:{fragment:pe()(n(34).default),vertex:pe()(n(35).default)},eccentricConePrimitive:{fragment:pe()(n(36).default),vertex:pe()(n(37).default)},ellipsoidSegmentPrimitive:{fragment:pe()(n(38).default),vertex:pe()(n(39).default)},generalCylinderPrimitive:{fragment:pe()(n(40).default),vertex:pe()(n(41).default)},generalRingPrimitive:{fragment:pe()(n(42).default),vertex:pe()(n(43).default)},nutPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},quadPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},torusSegmentPrimitive:{fragment:pe()(n(44).default),vertex:pe()(n(45).default)},trapeziumPrimitive:{fragment:pe()(n(46).default),vertex:pe()(n(47).default)}},xe={fragment:pe()(n(48).default),vertex:pe()(n(49).default)},ge={fragment:pe()(n(50).default),vertex:pe()(n(51).default)},ye={fragment:pe()(n(52).default),vertex:pe()(n(19).default)},be={fragment:pe()(n(53).default),vertex:pe()(n(19).default)},_e={fragment:pe()(n(54).default),vertex:pe()(n(55).default)};var Te=n(63);
|
|
165
|
+
*/},function(e,t){e.exports=require("@cognite/potree-core")},function(e,t,n){"use strict";var r=n(25);
|
|
160
166
|
/*!
|
|
161
167
|
* Copyright 2021 Cognite AS
|
|
162
|
-
*//* Effect render manager (removed-side code in this diff): orchestrates the
multi-target CAD render pipeline — splits the scene into normal / in-front /
ghosted / custom-object passes rendered to separate targets, composites them
with outline detection, then applies optional SSAO and FXAA post-processing.
Kept byte-identical: the pass ordering and render-target state are too
order-dependent to restyle safely. */class Ce{constructor(e,t,n,o){var i,a;this._lastFrameSceneState={hasBackElements:!0,hasInFrontElements:!0,hasGhostElements:!0,hasCustomObjects:!0},this._rootSectorNodeBuffer=new Set,this._outlineTexelSize=2,this._autoSetTargetSize=!1,this._uiObjects=[],this._renderer=e,this._renderOptions=o,this._materialManager=n,this._orthographicCamera=new r.OrthographicCamera(-1,1,1,-1,-1,1),this._renderTarget=null,this._originalScene=t,this._cadScene=new r.Scene,this._cadScene.autoUpdate=!1,this._normalScene=new r.Scene,this._normalScene.autoUpdate=!1,this._inFrontScene=new r.Scene,this._inFrontScene.autoUpdate=!1,this._compositionScene=new r.Scene,this._compositionScene.autoUpdate=!1,this._fxaaScene=new r.Scene,this._fxaaScene.autoUpdate=!1,this._ssaoScene=new r.Scene,this._ssaoScene.autoUpdate=!1,this._ssaoBlurScene=new r.Scene,this._ssaoBlurScene.autoUpdate=!1,this._emptyScene=new r.Scene,this._emptyScene.autoUpdate=!1;const s=e.capabilities.isWebGL2,d=this.createOutlineColorTexture();this._inFrontRenderedCadModelTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._inFrontRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._inFrontRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._inFrontRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._normalRenderedCadModelTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._normalRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._normalRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._normalRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._ghostObjectRenderTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._ghostObjectRenderTarget.depthTexture=new 
r.DepthTexture(0,0),this._ghostObjectRenderTarget.depthTexture.format=r.DepthFormat,this._ghostObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._customObjectRenderTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._customObjectRenderTarget.depthTexture=new r.DepthTexture(0,0),this._customObjectRenderTarget.depthTexture.format=r.DepthFormat,this._customObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._compositionTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._compositionTarget.depthTexture=new r.DepthTexture(0,0),this._compositionTarget.depthTexture.format=r.DepthFormat,this._compositionTarget.depthTexture.type=r.UnsignedIntType,this._ssaoTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoTarget.depthTexture.format=r.DepthFormat,this._ssaoTarget.depthTexture.type=r.UnsignedIntType,this._ssaoBlurTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoBlurTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoBlurTarget.depthTexture.format=r.DepthFormat,this._ssaoBlurTarget.depthTexture.type=r.UnsignedIntType,this._combineOutlineDetectionMaterial=new r.ShaderMaterial({vertexShader:xe.vertex,fragmentShader:xe.fragment,uniforms:{tFront:{value:this._inFrontRenderedCadModelTarget.texture},tFrontDepth:{value:this._inFrontRenderedCadModelTarget.depthTexture},tBack:{value:this._normalRenderedCadModelTarget.texture},tBackDepth:{value:this._normalRenderedCadModelTarget.depthTexture},tCustom:{value:this._customObjectRenderTarget.texture},tCustomDepth:{value:this._customObjectRenderTarget.depthTexture},tGhost:{value:this._ghostObjectRenderTarget.texture},tGhostDepth:{value:this._ghostObjectRenderTarget.depthTexture},tOutlineColors:{value:d},resolution:{value:new r.Vector2(0,0)},texelSize:{value:new 
r.Vector2(0,0)},cameraNear:{value:.1},cameraFar:{value:1e4},edgeStrengthMultiplier:{value:2.5},edgeGrayScaleIntensity:{value:.1}},extensions:{fragDepth:!0},defines:{EDGES:null!==(a=null===(i=this._renderOptions.edgeDetectionParameters)||void 0===i?void 0:i.enabled)&&void 0!==a?a:le.edgeDetectionParameters.enabled}});const l=this.createNoiseTexture(),c=this.ssaoParameters(this._renderOptions),u=c.sampleSize,m=this.createKernel(u),h=c.sampleRadius,p=c.depthCheckBias;this._ssaoMaterial=new r.ShaderMaterial({uniforms:{tDepth:{value:this._compositionTarget.depthTexture},tNoise:{value:l},kernel:{value:m},sampleRadius:{value:h},bias:{value:p},projMatrix:{value:new r.Matrix4},inverseProjectionMatrix:{value:new r.Matrix4},resolution:{value:new r.Vector2}},defines:{MAX_KERNEL_SIZE:u},vertexShader:ye.vertex,fragmentShader:ye.fragment}),this._ssaoBlurMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:this._compositionTarget.texture},tAmbientOcclusion:{value:this._ssaoTarget.texture},resolution:{value:new r.Vector2}},vertexShader:be.vertex,fragmentShader:be.fragment});const f=this.supportsSsao(c)?this._ssaoBlurTarget.texture:this._compositionTarget.texture;this._fxaaMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:f},tDepth:{value:this._compositionTarget.depthTexture},resolution:{value:new r.Vector2},inverseResolution:{value:new r.Vector2}},vertexShader:ge.vertex,fragmentShader:ge.fragment,extensions:{fragDepth:!0}}),this.setupCompositionScene(),this.setupSsaoScene(),this.setupSsaoBlurCombineScene(),this.setupFxaaScene(),this._normalSceneBuilder=new Me(this._normalScene),this._inFrontSceneBuilder=new Me(this._inFrontScene)}set renderOptions(e){const t=this.ssaoParameters(e),n={...t};this.setSsaoParameters(n),this._renderOptions={...e,ssaoRenderParameters:{...t}}}addUiObject(e,t,n){this._uiObjects.push({object:e,screenPos:t,width:n.x,height:n.y})}removeUiObject(e){this._uiObjects=this._uiObjects.filter(t=>{const n=t.object;return e!==n})}ssaoParameters(e){var 
t;return null!==(t=null==e?void 0:e.ssaoRenderParameters)&&void 0!==t?t:{...le.ssaoRenderParameters}}get antiAliasingMode(){const{antiAliasing:e=le.antiAliasing}=this._renderOptions;return e}get multiSampleCountHint(){const{multiSampleCountHint:e=le.multiSampleCountHint}=this._renderOptions;return e}supportsSsao(e){return!Object(s.q)()&&(this._renderer.capabilities.isWebGL2||this._renderer.extensions.has("EXT_frag_depth"))&&e.sampleSize!==de.None}renderDetailedToDepthOnly(e){const t={renderMode:this._materialManager.getRenderMode()},n=new s.i(this._renderer);this._materialManager.setRenderMode(Te.a.DepthBufferOnly);try{n.setRenderTarget(this._renderTarget),this.setVisibilityOfSectors(o.a.Simple,!1),this.traverseForRootSectorNode(this._originalScene),this.extractCadNodes(this._originalScene),this.clearTarget(this._renderTarget);const{hasBackElements:r,hasInFrontElements:i,hasGhostElements:a}=this.splitToScenes();r&&!a?this.renderNormalCadModelsFromBaseScene(e,this._renderTarget):r&&a&&(this.renderNormalCadModels(e,this._renderTarget),this._normalSceneBuilder.restoreOriginalScene()),i&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene())}finally{this._materialManager.setRenderMode(t.renderMode),n.resetState(),this.restoreCadNodes(),this.setVisibilityOfSectors(o.a.Simple,!0)}}render(e){const t=this._renderer,n=this._originalScene,r=new 
s.i(t),o={autoClear:t.autoClear,clearAlpha:t.getClearAlpha(),renderMode:this._materialManager.getRenderMode()};t.info.autoReset=!1,t.info.reset(),r.autoClear=!1;try{r.setRenderTarget(this._renderTarget),this.updateRenderSize(t),t.info.autoReset=!1,t.info.reset(),r.autoClear=!1,this.traverseForRootSectorNode(n),this.extractCadNodes(n),this.clearTarget(this._ghostObjectRenderTarget),this.clearTarget(this._compositionTarget),this.clearTarget(this._customObjectRenderTarget),t.setClearAlpha(0),this.clearTarget(this._normalRenderedCadModelTarget),this.clearTarget(this._inFrontRenderedCadModelTarget),t.setClearAlpha(o.clearAlpha);const i={...this._lastFrameSceneState},{hasBackElements:a,hasInFrontElements:s,hasGhostElements:d}=this.splitToScenes(),l=n.children.length>0;this._lastFrameSceneState={hasBackElements:a,hasInFrontElements:s,hasGhostElements:d,hasCustomObjects:l},a&&!d?this.renderNormalCadModelsFromBaseScene(e):a&&d?(this.renderNormalCadModels(e),this._normalSceneBuilder.restoreOriginalScene(),this.renderGhostedCadModelsFromBaseScene(e)):!a&&d&&this.renderGhostedCadModelsFromBaseScene(e),s&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene()),l&&this.renderCustomObjects(n,e),t.capabilities.isWebGL2&&(!a&&i.hasBackElements&&this.explicitFlushRender(e,this._normalRenderedCadModelTarget),!d&&i.hasGhostElements&&this.explicitFlushRender(e,this._ghostObjectRenderTarget),!s&&i.hasInFrontElements&&this.explicitFlushRender(e,this._inFrontRenderedCadModelTarget),!l&&i.hasInFrontElements&&this.explicitFlushRender(e,this._customObjectRenderTarget));const c=this.supportsSsao(this.ssaoParameters(this._renderOptions));switch(this.antiAliasingMode){case se.FXAA:this.renderComposition(t,e,this._compositionTarget),r.autoClear=o.autoClear,c&&(this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._ssaoBlurTarget,this._ssaoBlurScene)),this.renderPostProcessStep(t,this._renderTarget,this._fxaaScene);break;case 
se.NoAA:t.autoClear=o.autoClear,c?(this.renderComposition(t,e,this._compositionTarget),this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._renderTarget,this._ssaoBlurScene)):this.renderComposition(t,e,this._renderTarget);break;default:throw new Error("Unsupported anti-aliasing mode: "+this.antiAliasingMode)}}finally{r.resetState(),this._materialManager.setRenderMode(o.renderMode),this.restoreCadNodes()}}restoreCadNodes(){this._rootSectorNodeBuffer.forEach(e=>{e[1].add(e[0])}),this._rootSectorNodeBuffer.clear()}extractCadNodes(e){this._rootSectorNodeBuffer.forEach(t=>{if(t[1].parent!==e&&null!==t[1].parent&&t[1].parent.parent!==e)throw new Error("CadNode must be put at scene root");this._cadScene.add(t[0])})}setRenderTarget(e){this._renderTarget=e}getRenderTarget(){return this._renderTarget}setRenderTargetAutoSize(e){this._autoSetTargetSize=e}getRenderTargetAutoSize(){return this._autoSetTargetSize}clearTarget(e){this._renderer.setRenderTarget(e),this._renderer.clear()}explicitFlushRender(e,t){this._renderer.setRenderTarget(t),this._renderer.render(this._emptyScene,e)}splitToScenes(){const e={hasBackElements:!1,hasInFrontElements:!1,hasGhostElements:!1};this._rootSectorNodeBuffer.forEach(t=>{const n=t[1],r=this._materialManager.getModelBackTreeIndices(n.cadModelMetadata.modelIdentifier),o=this._materialManager.getModelInFrontTreeIndices(n.cadModelMetadata.modelIdentifier),i=this._materialManager.getModelGhostedTreeIndices(n.cadModelMetadata.modelIdentifier),a=r.count>0,s=o.count>0,d=i.count>0;e.hasBackElements=e.hasBackElements||a,e.hasInFrontElements=e.hasInFrontElements||s,e.hasGhostElements=e.hasGhostElements||d});const{hasBackElements:t,hasInFrontElements:n,hasGhostElements:o}=e;return this._rootSectorNodeBuffer.forEach(e=>{const i=e[0],a=e[1],s=this._materialManager.getModelBackTreeIndices(a.cadModelMetadata.modelIdentifier),d=this._materialManager.getModelInFrontTreeIndices(a.cadModelMetadata.modelIdentifier),l=new 
r.Object3D;l.applyMatrix4(i.matrix),t&&o&&this._normalScene.add(l);const c=new r.Object3D;c.applyMatrix4(i.matrix),n&&this._inFrontScene.add(c);const u=[e[0]];for(;u.length>0;){const e=u.pop(),r=e.userData.treeIndices;r?(n&&d.hasIntersectionWith(r)&&this._inFrontSceneBuilder.addElement(e,c),t&&!o||o&&s.hasIntersectionWith(r)&&this._normalSceneBuilder.addElement(e,l)):u.push(...e.children)}}),e}renderNormalCadModels(e,t=this._normalRenderedCadModelTarget){this._normalSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._renderer.render(this._normalScene,e)}renderNormalCadModelsFromBaseScene(e,t=this._normalRenderedCadModelTarget){this._renderer.setRenderTarget(t),this._renderer.render(this._cadScene,e)}renderInFrontCadModels(e,t=this._inFrontRenderedCadModelTarget){this._inFrontSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._materialManager.setRenderMode(Te.a.Effects),this._renderer.render(this._inFrontScene,e)}renderGhostedCadModelsFromBaseScene(e){this._renderer.setRenderTarget(this._ghostObjectRenderTarget),this._materialManager.setRenderMode(Te.a.Ghost),this._renderer.render(this._cadScene,e)}renderCustomObjects(e,t){this._renderer.setRenderTarget(this._customObjectRenderTarget),this._renderer.render(e,t)}updateRenderSize(e){const t=new r.Vector2;return e.getSize(t),this._renderTarget&&this._autoSetTargetSize&&t.x!==this._renderTarget.width&&t.y!==this._renderTarget.height&&this._renderTarget.setSize(t.x,t.y),t.x===this._normalRenderedCadModelTarget.width&&t.y===this._normalRenderedCadModelTarget.height||(this._normalRenderedCadModelTarget.setSize(t.x,t.y),this._inFrontRenderedCadModelTarget.setSize(t.x,t.y),this._customObjectRenderTarget.setSize(t.x,t.y),this._ghostObjectRenderTarget.setSize(t.x,t.y),this._compositionTarget.setSize(t.x,t.y),this._ssaoTarget.setSize(t.x,t.y),this._ssaoBlurTarget.setSize(t.x,t.y),this._combineOutlineDetectionMaterial.uniforms.texelSize.value=new 
r.Vector2(this._outlineTexelSize/t.x,this._outlineTexelSize/t.y),this._combineOutlineDetectionMaterial.uniforms.resolution.value=t,this._ssaoMaterial.uniforms.resolution.value=t,this._ssaoBlurMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.inverseResolution.value=new r.Vector2(1/t.x,1/t.y)),t}renderComposition(e,t,n){this._combineOutlineDetectionMaterial.uniforms.cameraNear.value=t.near,this._combineOutlineDetectionMaterial.uniforms.cameraFar.value=t.far,this.renderPostProcessStep(e,n,this._compositionScene)}setSsaoParameters(e){var t;const n=le.ssaoRenderParameters;if(this._ssaoMaterial.uniforms.sampleRadius.value=e.sampleRadius,this._ssaoMaterial.uniforms.bias.value=e.depthCheckBias,e.sampleSize!==this.ssaoParameters(this._renderOptions).sampleSize){const r=null!==(t=null==e?void 0:e.sampleSize)&&void 0!==t?t:n.sampleSize,o=this.createKernel(r);this._fxaaMaterial.uniforms.tDiffuse.value=e.sampleSize!==de.None?this._ssaoBlurTarget.texture:this._compositionTarget.texture,this._ssaoMaterial.uniforms.kernel.value=o,this._ssaoMaterial.defines={MAX_KERNEL_SIZE:r},this._ssaoMaterial.needsUpdate=!0}}renderPostProcessStep(e,t,n){if(e.setRenderTarget(t),e.render(n,this._orthographicCamera),t===this._renderTarget){const t=e.getSize(new r.Vector2),n=new r.Vector2(e.domElement.clientWidth,e.domElement.clientHeight),o=new r.Vector2(t.x/n.x,t.y/n.y);e.autoClear=!1,this._uiObjects.forEach(t=>{const n=new r.Scene;n.add(t.object);const i=t.screenPos.clone().multiply(o),a=t.width*o.x,s=t.height*o.y;e.setViewport(i.x,i.y,a,s),e.clearDepth(),e.render(n,this._orthographicCamera)}),e.setViewport(0,0,t.x,t.y),e.autoClear=!0}}renderSsao(e,t,n){this._ssaoMaterial.uniforms.inverseProjectionMatrix.value=n.projectionMatrixInverse,this._ssaoMaterial.uniforms.projMatrix.value=n.projectionMatrix,this.renderPostProcessStep(e,t,this._ssaoScene)}createOutlineColorTexture(){const e=new Uint8Array(32),t=new r.DataTexture(e,8,1);return 
we(t.image.data,me.e.White,ce.White),we(t.image.data,me.e.Black,ce.Black),we(t.image.data,me.e.Cyan,ce.Cyan),we(t.image.data,me.e.Blue,ce.Blue),we(t.image.data,me.e.Green,ue.Green),we(t.image.data,me.e.Red,ue.Red),we(t.image.data,me.e.Orange,ce.Orange),t}setupCompositionScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._combineOutlineDetectionMaterial);this._compositionScene.add(t)}setupFxaaScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._fxaaMaterial);this._fxaaScene.add(t)}setupSsaoScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._ssaoMaterial);this._ssaoScene.add(t)}setupSsaoBlurCombineScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._ssaoBlurMaterial);this._ssaoBlurScene.add(t)}createNoiseTexture(){const e=new Float32Array(65536);for(let t=0;t<16384;t++){const n=4*t,r=2*Math.random()-1,o=2*Math.random()-1,i=2*Math.random()-1;e[n]=r,e[n+1]=o,e[n+2]=i,e[n+3]=1}const t=new r.DataTexture(e,128,128,r.RGBAFormat,r.FloatType);return t.wrapS=r.RepeatWrapping,t.wrapT=r.RepeatWrapping,t}createKernel(e){const t=[];for(let o=0;o<e;o++){const i=new r.Vector3;for(;i.length()<.5;)i.x=2*Math.random()-1,i.y=2*Math.random()-1,i.z=Math.random();i.normalize();let a=o/e;a=n(.1,1,a*a),i.multiplyScalar(a),t.push(i)}return t;function n(e,t,n){return e+(t-e)*(n=(n=n<0?0:n)>1?1:n)}}createRenderTriangle(){const e=new r.BufferGeometry,t=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),n=new Float32Array([0,0,2,0,0,2]);return e.setAttribute("position",new r.BufferAttribute(t,3)),e.setAttribute("uv",new r.BufferAttribute(n,2)),e}traverseForRootSectorNode(e){const t=[e];for(;t.length>0;){const e=t.pop();if(e instanceof a){const t=e.parent;t.visible&&this._rootSectorNodeBuffer.add([e,t])}else e instanceof r.Group||t.push(...e.children)}}setVisibilityOfSectors(e,t){this._originalScene.traverse(n=>{n instanceof i&&n.levelOfDetail===e&&(n.visible=t)})}}/* Creates a render target; multisampled when WebGL2 is available and the sample count hint exceeds 1. */function Ie(e,t,n){if(e&&t>1){const e=new r.WebGLMultisampleRenderTarget(0,0,n);return 
e.samples=t,e}return new r.WebGLRenderTarget(0,0,n)}/* Writes one RGBA outline color into the 8x1 outline palette texture at the given index. */function we(e,t,n){e[4*t+0]=Math.floor(255*n.r),e[4*t+1]=Math.floor(255*n.g),e[4*t+2]=Math.floor(255*n.b),e[4*t+3]=255}/* Temporary-scene builder: moves objects into a pass-specific scene and restores them to their original parents afterwards. */class Me{constructor(e){this.buffer=[],this.temporaryScene=e}addElement(e,t){this.buffer.push({object:e,parent:e.parent,sceneParent:t})}populateTemporaryScene(){this.buffer.forEach(e=>e.sceneParent.add(e.object))}restoreOriginalScene(){this.buffer.forEach(e=>{e.parent.add(e.object)}),this.buffer.length=0,this.temporaryScene.remove(...this.temporaryScene.children)}}
|
|
168
|
+
*/t.a=r},function(e,t,n){"use strict";n.d(t,"a",(function(){return i})),n.d(t,"e",(function(){return s})),n.d(t,"d",(function(){return l})),n.d(t,"c",(function(){return u})),n.d(t,"b",(function(){return p}));var r=n(0);
|
|
163
169
|
/*!
|
|
164
170
|
* Copyright 2021 Cognite AS
|
|
165
|
-
*/
|
|
171
|
+
*/
|
|
172
|
+
// Bounding-box helpers for packed primitive attribute buffers. Each function
// reads attribute values via byte offsets into a flat Float32Array view of one
// primitive record and writes that primitive's bounds into the supplied Box3.
// Module-level scratch objects (o, a, d, c) avoid per-call allocations. The
// trailing `,function(...)` entries are webpack module glue, left untouched.
const o={centerA:new r.Vector3,centerB:new r.Vector3,sphere:new r.Sphere,box:new r.Box3};function i(e,t,n,r,i,a,s,d){const{centerA:l,centerB:c,sphere:u,box:p}=o;function m(t,n=0){const r=(s*a+t)/e.BYTES_PER_ELEMENT;return e[r+n]}l.set(m(t,0),m(t,1),m(t,2)),c.set(m(n,0),m(n,1),m(n,2));const h=m(r),f=m(i);return u.set(l,h),u.getBoundingBox(d),u.set(c,f),u.getBoundingBox(p),d.expandByPoint(p.min),d.expandByPoint(p.max),d}const a={vertex1:new r.Vector3,vertex2:new r.Vector3,vertex3:new r.Vector3,vertex4:new r.Vector3};function s(e,t,n,r,o,i,s,d){const{vertex1:l,vertex2:c,vertex3:u,vertex4:p}=a;function m(e,t=0){const n=(s*i+e)/o.BYTES_PER_ELEMENT;return o[n+t]}return l.set(m(e,0),m(e,1),m(e,2)),c.set(m(t,0),m(t,1),m(t,2)),u.set(m(n,0),m(n,1),m(n,2)),p.set(m(r,0),m(r,1),m(r,2)),d.setFromPoints([l,c,u,p]),d}const d={instanceMatrix:new r.Matrix4};function l(e,t,n,r,o,i){const{instanceMatrix:a}=d,s=(r*n+t)/e.BYTES_PER_ELEMENT;return a.set(e[s+0],e[s+4],e[s+8],e[s+12],e[s+1],e[s+5],e[s+9],e[s+13],e[s+2],e[s+6],e[s+10],e[s+14],e[s+3],e[s+7],e[s+11],e[s+15]),i.copy(o).applyMatrix4(a),i}const c={center:new r.Vector3,size:new r.Vector3};function u(e,t,n,r,o){const{size:i}=c,a=2*Math.max(e,t,n);return i.set(a,a,a),o.setFromCenterAndSize(r,i),o}function p(e,t,n,r,o,i,a,s){const{center:d}=c;function l(e,t=0){const n=(a*i+e.offset)/o.BYTES_PER_ELEMENT;return o[n+t]}const p=l(t),m=l(n),h=l(r);return d.set(l(e,0),l(e,1),l(e,2)),u(p,m,h,d,s)}},function(e,t){e.exports=require("@tweenjs/tween.js")},function(e,t,n){"use strict";n.d(t,"a",(function(){return o}));var r=n(0);
|
|
166
173
|
/*!
|
|
167
174
|
* Copyright 2021 Cognite AS
|
|
168
|
-
*/
|
|
175
|
+
*/
|
|
176
|
+
// Filters fixed-stride primitive records from a Uint8Array: for each record,
// callback `o` computes its bounds into a scratch Box3; records whose bounds
// intersect clip box `n` are compacted into a new Uint8Array that is returned.
// `t` is the record stride in bytes — presumably; confirm against callers.
// The trailing `,function(...)` entries are webpack module glue, left untouched.
function o(e,t,n,o){const i=e.length/t,a=new Float32Array(e.buffer,e.byteOffset,e.byteLength/Float32Array.BYTES_PER_ELEMENT),s=new r.Box3,d=new Uint8Array(e.length);let l=0;for(let r=0;r<i;++r)if(o(r,t,a,s),n.intersectsBox(s)){const n=e.subarray(r*t,(r+1)*t);d.set(n,l*t),l++}return d.slice(0,l*t)}},function(e,t){e.exports=require("mixpanel-browser")},function(e,t,n){"use strict";n.d(t,"a",(function(){return i})),n.d(t,"b",(function(){return a}));var r=n(0);
|
|
169
177
|
/*!
|
|
170
178
|
* Copyright 2021 Cognite AS
|
|
171
179
|
*/
|
|
172
|
-
|
|
180
|
+
const o={renderSize:new r.Vector2,position:new r.Vector3};function i(e,t,n=new r.Vector3){const{position:i}=o;i.copy(t),i.project(e);const a=(i.x+1)/2,s=(1-i.y)/2;return n.set(a,s,i.z)}function a(e,t,n,a=new r.Vector3){i(t,n,a);const{renderSize:s}=o;e.getSize(s);const d=e.domElement,{width:l,height:c}=d.getBoundingClientRect();return a.x=Math.round(a.x*l),a.y=Math.round(a.y*c),a}},function(e,t){e.exports=require("lodash/debounce")},function(e,t){e.exports=require("lodash/cloneDeep")},function(e,t){e.exports=require("lodash/range")},,function(e,t){e.exports=require("@cognite/sdk-core")},function(e,t,n){"use strict";var r=n(15);n.d(t,"d",(function(){return r.b}));var o=n(22);n.d(t,"a",(function(){return o.a}));var i=n(1);n.d(t,"c",(function(){return i.l})),n.d(t,"b",(function(){return i.d}));n(5);
|
|
173
181
|
/*!
|
|
174
182
|
* Copyright 2021 Cognite AS
|
|
175
|
-
*/(e);this._transformOverrideIndexTexture=n.transformOverrideIndexTexture,this._transformOverrideBuffer=new Re(this.handleNewTransformTexture.bind(this)),this._transformProvider=t,this._transformProvider.on("changed",this._handleTransformChangedBound)}dispose(){this._transformOverrideBuffer.dispose(),this._transformOverrideIndexTexture.dispose(),this._transformProvider.off("changed",this._handleTransformChangedBound)}get needsUpdate(){return this._needsUpdate}get overrideTransformIndexTexture(){return this._transformOverrideIndexTexture}get transformLookupTexture(){return this._transformOverrideBuffer.dataTexture}build(){this._needsUpdate=!1}setNodeTransform(e,t){const n=this._transformOverrideBuffer.addOverrideTransform(e.from,t);e.forEach(e=>this.setOverrideIndex(e,n)),this._needsUpdate=!0}resetNodeTransform(e){this._transformOverrideBuffer.removeOverrideTransform(e.from),e.forEach(e=>this.setOverrideIndex(e,-1)),this._needsUpdate=!0}setOverrideIndex(e,t){const n=this._transformOverrideIndexTexture.image.data;n[3*e+0]=t+1>>16,n[3*e+1]=t+1>>8,n[3*e+2]=t+1>>0,this._transformOverrideIndexTexture.needsUpdate=!0}handleNewTransformTexture(){this._needsUpdate=!0}handleTransformChanged(e,t,n){switch(e){case"set":this.setNodeTransform(t,n);break;case"reset":this.resetNodeTransform(t);break;default:Object(s.k)(e,`Unexpected change type '${e}'`)}}}const De=new 
Image;De.src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAABgGlDQ1BJQ0MgcHJvZmlsZQAAKJFjYGCqSCwoyGFhYGDIzSspCnJ3UoiIjFJgv8PAzcDDIMRgxSCemFxc4BgQ4MOAE3y7xsAIoi/rgsxqOqd2d+pGwehjat+yq+1cc3DrAwPulNTiZAYGRg4gOyWlODkXyAbp0UsuKCoBsucA2brlJQUg9hkgW6QI6EAg+wGInQ5hfwGxk8BsJg6wmpAgZyBbBsgWSIKwdUDsdAjbBsROzkhMAbJB/tKBuAEMuIJdFAzNDXx1HQk4nFSQm1MKswMUWjypeaHBQFoIiGUYghlcGBQYDBnMGQwYfBl0GYCWl6RWlIAUO+cXVBZlpmeUKDgCQzdVwTk/t6C0JLVIR8EzL1lPR8HIwNAApA4UbxDjPweBbWAUO48Qy5rMwGDxhoGBuQohlrKcgWGLPQODeDBCTH020EnvGRh2hBckFiXCHc/4jYUQvzjN2AjC5nFiYGC99///ZzUGBvZJDAx/J/7//3vR//9/FwPtv8PAcCAHALbUa33lfYEHAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5AobCyAEEhU0UQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAACAASURBVHjalV3bkiLXESzoC9cZdle7lixF7IMj7Ad/qn/RVliybMmyZhkGGKCBBj+s6ig7yTqNJmJjbzPN6XOpysrKqjP429/+djUzu16vNhgMbDAY2HA4tLIsbTKZWFmW1rat7fd7O5/P1ratXS4Xw5+5Xq82HA47v1+vVzOz9Hcz6/zc5XKxtm07z/Ev/34elz9vOBzaYDCwtm3tfD7b6XSy6/Vq5/M5fU9RFFbXtZVlaXVdm5mlZ1wuFyvLMn2GP78oCjMzO51Odjqd0rNxrP5VFEXnl7+nf4bP4+VySe/jn+Hj97nzMU+nUyuKwo7HY3onfGZRFDabzWyxWNjpdLLVamVN06Tvw3nHL5xb/LfL5WIlDsJ/90Vu29bKsrTr9Wpt23YWHxfocrl0Jtf/HQeE/68GpDaBP5tfwsdzPp/tcrnYcDi0qqpsNptZVVVW17UVRZEWwZ91PB7TJuHF8gnxBR0MBukQ+Lh9kx2Px84iFUVhZVlaWZbpM3Hjq7nCeTezNObL5WLn81keoLqubTqd2vl8ts1mY03TpHGphY/mFte69P/gTaBOLf7CBR0Oh9a2rQ2Hw5tF8pfljYaf65/hz8Hv8/8/nU5p0cuytNFoZKPRyKqqSiceFxInECfRTzlaFPydLZSP1Z8zHo9tNBqlzXA6naxpGjsej3Y4HNJmqOu680z/bJ7H6/WaNu3lcrHD4ZDmDdfBF9/MbL1e236/l3OFm1ptAt4MJU4Un27frfhwPvG+0GaWTheamMgMqQ2AL+QL0DSNnc/nNPnT6dTG47FVVWWXy+XmM/CXLxr/P1shXxh2P/w8f0devNFolMbcNI01TWObzSZtVLcqfDh83t1FuYXiuSrL0sbjsRVFYa+vr2lO+DRHi8//jgei4wLYLKEFwIfipsAHKlPEbsD/7j/DE3q9Xu10OtnxeEy+ej6fp5PuC+obEn0qnlb/HceMC41+GBfbD4JbFDTZuFkQE7jFGQwGaZO6RXh9fU2bt67rjoVyrDUYDOxwOEi/79akqiprmsb2+33CJYy5/J0iF8DWouMCcKH5FLA7wMlmP+0P9wnCF1bPRdxxOp3scDiYmSVTy2bUzb0vBO9uXEj8N5wsPgkM3HDh8eTi+7CP5w3sG3c6nVrTNHY4HGy321ld1+k0O25wPKHG65vEN5SDPv8+drO5Lwad1+vVSpw0BSR48ZWJ50lEk46m3jcLY4jz+Wz7/d7MzCaTiY3H405UgSi9LEuJotmPIwD033E8
yiziBLKlUT4Ux6Asqf8aj8c2Ho/tfD6nk+6Wwt0cR09u2aqqSt9zPB5vcJjCWJEVUAel7DMXKkrAk4MbQ/0M/r/7bNwg+/3ertdr8u24iH7a/d9wlzv6xs9UoSJvCPb/yl/ySeF/ZzPt74KuyDcbbtCyLG02m9nxeLTlcmnr9boTauM40d0cDoe0+BGuUqFzzhI44C5VWKJMmzrx/H3RTvOXc2DXtq3tdrt0OqbT6c3pcx/M5h9NI5rhnAnk08LjRovC40Bcge+IGwMxA4NqPiyOazzeX61WVlVVQvg4FneLDo755OM40MKyJYxcQbIA/NDIfCCCxYHyKcdFcfLIB7jf761pGiuKwubzeQJG7mv9xLMlYB/NII5fjMOo6KSrU65cCm9qhSXYwuAYGF9UVWXv3r2zpmlsuVzadrtNmGE0Gtn5fLamaTqRUUTm8EG455397yWHfAzsGDUiaFOxpm8QRpwO8Nq2tel0mtCvLzqSKMpcqZBMvVhkFpU7UBtMPVtZumizoFtSB8ldl5/4wWCQNsJ6vbbL5WJffPGFtW1rx+Mx+5loYdW7RiAZfy/ZxOMH9G0ChejdAvjv/iL7/d6KorDFYiERvZ98NGEK6KD5V0RPNFF4Chn0KbPPoFfNQy7WZvCLOMCt3uFw6LiFN2/e2MvLi/3jH/+wh4eHRA5dr1cry7IzN7zROWTNYRt0ryX7cwYZiNpVjM+cAVoBZ7aOx2MCOz7JfuJ9UdD0c6zOuABPm0LCjD944ftOewRsmRPhRVbg0RfFf7aqKptMJtY0TWfDOU56//697XY7Wy6XKWR0V6rcNLu6vq8bkolPj4rpmRLOoU8ndy6Xi+12O2vb1h4fH7t+B0I5PNFMaiDVrHw/x+/3+EF2MfckTnI4QJ0wB4Uq9h6Px50owefVreBkMkkU99PTk202G5vP5535UGPhOWGgGkUGQ4WIOeTLgUIflLNTnkXbbDZmZvbw8NAhNHzx0fw7y4exPsb+vFkQATMw5BOuACQyg8oN8IRGG94XmRlK3NA4Vmf0PBJifsPNvLOg7969s7qubbPZdDKnmLBjfKBctTqk/ueSzRxSnNGu4ZPmg3FSZ7PZWF3XNp/POz4nonHVCceQBicqOtF8Mpmq7ov1owhIAVLlShStnOjWX13faDRKC8zP8twGxvxuPXe7na3X60SJq3dCfKA4EA7fE/ZQ8TsnaCIziMSOZ8VeX19TbO8vzwvINGeExvHvjq5V7K8yYYrQ4mRQxI71bYK+UJFdElq/3W53Q/e6yXdSrCiK5PNPp1PSCaxWK5tOpwkcKmJKhZ6RBTCz3yxAn+lQdDCe+uPxaNvt1kajkU0mk87i4knHiVfiCPydQV4U4qgdz36Pn63Qfk5U0cccKs4BN3pVVck94pdbShfc8OINh0M7nU42Go3s7du39unTJ5vNZglLME7jw9PHGg45XufJZ3yA3+chXtM0tt1uE4/vQM99Gy40U7ycuUOqGK0RAy4kXSLTG0UtDB7v/cq5iQhLIFbwhA/O8WKxsMFgkEgfprI9RHRxzvv37ztJIU6JMwvIhwA35WAwsKECCYzWVU7AxRCHw8HW63VKeLgUC+N7NufI/CkWUqmJWJiCoCjiANRpxZdX38efo/QRymVE6Nx/d5CM3/P4+GiTycS2220KC3EhfQ18jtq2taIo7O3bt0kXwGAwEogo63S9Xm0YERbKr+BEOFW52WxSrtuFCxzjq8VnQQbG9Zg34J3tCFqZP/+ZSCSCAhTMp0fRzT0hJIet/Hmoe8D/H4/H9vDwYOv12g6Hww2NzVwJsozD4dDevXtnu92uIx9Dy+hzhO+KVH7CJzn5EJ9G1sVtt1ur69pGo1FSv6DpR9dRVVVnctACKHGo2tEMSO/RwanwMQJFvFlzz+bf2TqwtcS/V1Vlj4+PdjgcUrgcRV58iHy8ZVnaYrGw19fXThTmohJXL+XY3Ov1+hkDRKecdw/G+a+vr4m4iDRwatAe9+Mm
UdIuJoFyC6iUOQr98gJFGUx10nPkl+JReBPg5vcIabVadUx4n7ADuRNUS202Gzufz8nKsAVQhziNF3eof7CLL9mMuOnfbrdmZimp4+ZfWQ5W1Mznc/v48aN98cUXHTm1ynqxG1J4RAFAxQJGGkUlO+9zAyqrxtR0pGr2uVqv1wkXMKGj8hN+WPCzLpdLcr/r9TqtmzrpYRiLA2X/iYvvO8w1aXjy0VfxwB3I+NdoNLIPHz7YH/7wh5scO6pwGRiyzDuaZLWwyqzzpEesopKOq39ncKkApH/PbrfroPhc3p6BNOZO6rq26/VqDw8PNhgMkjtQqqUIGJd9oQ9ugP1+b/v9PpERTO0qMoTB0Xa7tR9//DHt4FyYxSpkxA/olljVq15YkUIKKKq6BUVQRZOMIR4mgvDnnOqNSKvISiFuSlRuWdrpdLLFYmHPz89Ja8GgV2kkzew3RVBfbYCreKqqsqqqOmlNJngY6WOat21b+/nnn+/i4BlguctAMKjSooq34CKRKNcfbQLFlnKaWlkdfj4DwkjHwGOPimNQiPL4+Jjk6FhowuPthPosjMBkA7oGlzdPp9MbAsd3Ji4UZsMGg4FVVSUFlszIKbUNF1cwsFHWR20Qj6Mj/95HG7MUS/ECnEhjidY9GUac25zcnj/XaxT8oPr7qkKT5AI4z81MHypS5/N5h+hRIQqKOB28RJOAAFBtRD5VeLr4pRTZhAQNppvVZKiSN1VIweOLijAUd8CJNrXhlb7BP8utB6eGHWedz2ebTCZJQj4ejyXNjfNTIkGhTpGHfF6Nw3G+CtkYbEVaOvWiKoOnJNwKP/wenj4KD9Umjfw8vw9L3vkU42HDwlGmw/F70b36IiMmYJbwfD7bbDZLHA1GYgpwllEs7IN+fX1NzBXLtPGlMQRyfX9RFNY0Ta+eD/16tBH59KF/yxE3OZDFG6Ev0xidzD4pNi42RywqJa3EppEa2zcBbgbXHbgMzzcHR0+yLgB9//F4tLZtUx4aK2AVJ+6boCgK+/rrr62ua/v3v/+deO5cJQ/69hz1yieFcUWfNPwegicifaJows2zqoD2PzuvojaHUjnl1Fio2/C/Y2n85XKx+Xxuz8/PxhaesVfJOx+R6uFwSFW4zuDxALlmzwd0OBxukkkKAEbmkpU8+L2cM2DSKYrFGZmzrDqXSYwKY1URLZ9YP4G4cLl6hByHgGNgsanPjeOj6XRq2+3WHh4e0uexrqLkl/IvL2FySZcvPgM+9QKXy8U+ffpkq9WqUzShkK7K0eOCKgEkAz6up1eIPIoM1AZVglgGyQpIKq0CLzjTvlEdZq5Gg78HRbIOqN0K7HY7Ox6PnSYZiKFKPAFu/p308eJM1tPhLlLNFdCSMBOYyzxGNGu0qBiKRdnLKAZWYZwSkOQKZHJ6AVUHGdG9rEvgRY4KeCMXgfPpGUcPw7mQZOgL5pkjL832ej1H/FzCzaDPQz6UfXHlkKrdizQA+GL4ghxloOBEFZDk6GEUdXJFUo4+jYpJ8d9yMi0Oadn8I7B2F9u38dSzXE42HA5vCkvTBmDT7QWbTiQoAQW/HCZ1ol2uMm7RIvVJuHmTOTuZE2gqmRZPsHJlKr/PHUmQ/FK4Rsm1ImuXqnZ/xU+oc2D8owplMNLwn314eEjtZLzeMM0xL5JnqNxkqDx8TjSCp52zd7nYnXsLsNWI2D/k2ZUKKccPRMkfLjyJ0txK4II1D4qLjwpsWEDqgLFv4VVvJnw39P++CXBDDdn0+A9wvR738olKoZSIFF0BLti9ihs+wVztg1qDvuyXeo/xeJxCXdxAqstIJJ5ly5UTpkaAlCloLiqN8hNctcz5icvlYrPZrNOEwjfBkJFq27Y2m81SzI9FnKr4k09nTnuPANF9HLdZi4Af73LvG6QYOHXKIzc2mUzsm2++sY8fP9r79+9vElzRxnTMpLALS+Aj8osl8vyeyrQr/V+fwPV8PicrwBT6EHUA3toENfuo7OU0YyS85EVQ
fLyLF1QuXrkS1NBj6OoRBkqhFL+vNoNXLj0+PtpoNLLFYtHBEpjb4OTX27dv7e3bt1KDF9U25MApWzelYWSQGPH8OF6k7Ou67qiGLpfL5w2AqNETCKpok30sPihqRKTy2MhmqdJmBk94ir755hv78ssvE17Bz0Okq1yIcguo1nVtvppkNO2TycQ+fvxoHz9+tIeHh45Qlq1AZL6VGIWbS/KpVwkrnD+2eniYL5eLjUajToOptm2tdPPvyNBjf25r1hcWqTCNY2EVEqk+A5z08e9tmsZWq5X99a9/tePxaE9PTx2dgT+PGT8mlHDCttut/fTTTzYcDlPnTbeInA7m/LoTZOwOo3K1HL5BjMRKZ5U5RGV0LoLCVjWj0cg2m42dTqfkEko8PQ6ClJ9SG4CTFUxCqMRJLn8fsX44hv/85z82Ho/t3bt39vT0ZOfzuZPt8g2Ai5ir9vGNxCdOjccnc7vd2g8//GBFUdh6vb5h5WazmRVFkbSTPJaorgC/T4leGOzhZlel6Vym3ratjUajBPRTcaj/53Q6vYnvlc9X1Tw5LNCnsuWBcg6ew6h//vOfKdvogBA3gdI15j6L2TvF6jHj6d08eB6qqrJvvvnG6rq2H3/80ZbL5Q0oVJlXThMrMQlrJllWx+6DtY9t26YsoYPoIZoJ7JEbMVY5ti0nwuTkiWK2nNRBkz2ZTGyxWNycYG+Vys/B2FmVl+XUOWhyVeZSpWN5wh2bIJnDTa9U+KiqozgyQFwRFb6oSiZ059gAMymCfGe4egcnU1GcESUZDYw7kESly6rJw/l8tj/96U+22+3su+++k/42om+jvn8Rr99X94d/5kaYPkeHw8H+9a9/2XQ6TU0vPV1clmVi5fb7fW/TB6VC4p5AUSfXKA/ifQp8zVNdgNenI9qPkjLKX7IuPurioWrxVaoVZU7ffvutDQYDm8/nlqtl5LHhaYmaODKxpXL9GAarDcWfsd/vbblcpnJvfLfRaJTK55yKVhY3IqIi6xm5VWy84e9eVVWKdkrfTd54IOpEGZUtMUGEZicqcmBLgKbVuQh/OW+Z9t1333W0hgrxRp/HYaoytRhD89hUY0iuWeBwTG3Q0+lkr6+v6fmLxcI+fPhgm83Gfv755xSJMf7hpJhqapWzIPxOvtZt237WBHJm7J48uiIrlComCoEUIsZCUew+ggWWub44qh4hkorj5Kl4H8uz2TJhroK7lUZ0uP/ZS+q8NNwrhI/Ho/3yyy/SKubo9ahcTrXDZxDZtq0N3Tep7FfUCJonCZUmyryyKe5rxcKgyReeFcecgeOxMIHD1icaC5pNVSfJERFHC6qFjurA4l1APW/Pc8RiD6y8Vhb4HlzDVmvojQc6JcPEnSsNXh9B5C/uL6nSxrmmCqpejtO5yhqoTKBqOq1MJfcGwA2FixlpFKOmknyC/Tnr9bqjl1SSfC7OURgod2VMpIN0fFWi2eXYkxdN9aTJJWFUzV8uWuC4FjckM4ZRkiWXjkZlLH6WWlgUuJ5Op5s5wGiAMcY9m2AwGNhms7GffvrJ5vO5LZdLKaHHknyl+mG2U9HFqtWvg/6SgQTGieoyBv4QlexQdWgsBlVsHy+a6nkTpWijsA+tgCJSfOH9RKCF8EnCfAEjcUzecASB4FBxCOfz2Z6enmy5XIbKKAdseEVM1KJXsYaROAf0C+VNOOSnA82OYth4QfvKnnIl3oo8Yl5A8d3qciYOz9j/4+8fPnywtm3t06dPYbNJb9TEuQDFGCr3qe4pYHenzDluOi/66NMWqjnCMfszXPMxVHIuJaDgU88Lyrl9lk0pWravSZOipZX1iEIh1WeIy8j++Mc/2rt37zob5i9/+Yu9efMmxfGYUWM6XJVzRxFErhZBgUzPVXg1MfdaUA0gmAuJtJZwZU0pmaNIY8YxMwMl3iSqzInDtIjUQD+NUYYCVaoalyMb9e9///vfb3ICP/74ox0OBxsOh/bnP//ZvvvuO1utVimdiu3yFcmk
4vGcDEyFocj6cTpYpYdz8jAFpr3DSMlgSBVH8I5V3b8UMkZRI4d47ttQXaNEoz4ZKGTEujj+xWxjdLWN/9mrnvH7PIEzHA7t+++/t6ZprK7rRON++eWX9vT0ZKvV6qZzuYPG6OJIRSQp8KzawOKBVA20ouqoTjEo0P2DweC3W8OiFit9SQYEWHxrBloJVMx4MYdvvijs5CSN6tatiis5KuCESFQMwwLO6/Vqz8/PKWxaLpf21Vdf2ddff935P3w/DN0QtEUWLFfYij6c8QBGNFHSTV31w1xI6R0mMPnDk6PoUMwaMv8eTSbHuXhd6n6/T63Trtdr6nRRlmXKq/PNWhENnLsoMkoOcRcPlGe7fM2tw7fffmur1aqzkeu67txxyFp+VgZxqjfXtj833qiLisIFLCsbDoefK4PQz6LWTlXiRMSL6mad6zjuz/nw4UNqjb5YLOzt27epIcVkMrGqquz5+TnRpNgSjYWbDNhU+baiVVWXMt/MHP7t93v74YcfOnODdyqrOgB2P6p1m7pfKMfwKc0gWjoFNPH5/n2lDz7qzRu1M3HFMJssjKXV9XH4fGfDvIs2K1mc+mT1S9QyTlX9qMZXbNX4NCnwxBskYt0U6IpOqNoErDFQtRSKEIoSUMql4zNKVI0yv85EB1fBqElFLgE3QVQfuFwuOy3QvQml3yh2PB5tt9vdtJFXyZwPHz4kda/n5r///nvJYURavQitRz2Vo43FFoIvlc718EdXzCrhnKYhCuf5Kh/8zBIJBqXaiQonUQnLDCBe8Ij6PA5z/FQ4BtlsNqkhRV3X9t///je1qOGaObYsZpawgn+/X7qUaxAZ9SpWrKfCRlFDZgav6FZyIDC3WH3C0qgRVC4LW+IV7JiocGsQ5feRo+b+OyigZI2Ain85MnAFMHbQVoWYLBnf7Xa23++TyDOq2GVwmkue5Pj9SHmj7jFWlkOF3SpiUalsdb08W6Xo4quOK+eXUehU7XjuUKGUQmx6o4nHGN7DySipoZA6jo1Lzxj5Rr4wEoIq/35P7X5UnMJzGgFCdVpz4bmy3HiAWQ/gn1MyEMKya9XESN0BHHX1iLpx5nwZRwxsrlk+heCQQafKOjIzqQQUTMsqnkQdHLWJIqAYbRw0/4zq++4GRveIa4J9gnAtzufz5zCQrxyNBAfRtaROK7JQgweh2DA+obw4yuRyiKbAmbe25WtquPBC1R2quFkBPEVDR907VPUuz4OqnVCp+JzVUdfVMW2P710q/R/XnDMAyl1IpGJgpdRljkC1heer3DHJxI2YFOePqV60ZNyePmqujIvIUvMINHI4misHj274VMmbeyMOjBpUcgnnpm3b35pFs89TDQ4RJGKygs04mxp1kll5rFgsjCjw55TPYyHlzYvSFfKRikaNQ+kf7uldFNVERh1B1FgiU88/E3USVXON71Fyu1a+sJHboirqFHco07C4ee5pdcIhFPtADFm9OJSRME8IF22qhheRwkhpJVU4qWRyUcZPAVyOTriXT1RxHc1hdKkF/l8qDkV5U0Qm5O74UbsZJx19cZ9ciSMNJn4Y8CgaV4E0Vvuo+r9IucMKKRaV8viQ94hYuYgLUPqLaIMqljJXycQ5m+v1+pkH8ELBXK99VQShysQU2ENAGCUqolIybHTI2r2ojDsq4+IcRVSmpWTZWLeg5GdcFNt3e6dqO8/AOXcJBrfr442jxoAZRD8Upep8He20qMqHVbjYPFLRsDnyRWUhWX+HJ00JKRR6jpizHMOmQF6OGIquuFPm+vemhu9RPXEndbd6qgQ/9QfwH4yuU8stvAJxDIByeepIIBFZiqjMC8eGiS2+kCqaVMVrRGPmLGOkQFI0sYpU+m4qVcomfAa/nyLGVFrYN0ip2r2xsvXmssGgxRuqilUVCw5Y8QlKIJmbVL+N0wkirgdkd6FSshwb9/nsXDvZvqIM1UcwKrfrE3qqht2RNVRVRe4KS6xcjV5E9eBVSRHuAaTQOde+K8CV
09Mz2VEUhX311Ve22+3s5eUl1DKiKeT7CCIAmMv65ZIykc+/B7UrwWxf63ylKFYVUS4yLYoiXWBdeqjG3bE4BMtJxHjCOe2p+HgGJBGAi7qH+vdVVZVu3PYaO7QyKu7nEDcqCv09JjqSzvWJXxXax0qg6OLN6Fq4SCugoqK6rq3EzlH4gGjxlXiRlSxR2ZJqO6N6/uViVwasl8vFnp6eUvrZXYK6twdZRSVAUQg9KtDM5RlyTF/OiiiNAja84LsVVBeQHPDFNUobwGvF8Yvr4SNRJWfcUMjQNwGK31d3BEcnxuldNO+uK4hy9pxhU1fZRMUhCh/0RS8qNOy70DraCEx5K7wRqaT4GS5tL8vSht4XL+nEodWp6qun/FtEVXJcmruOViUz7hVEKCTvZg41hMqn8wmLFiPXJY0roFV003dbiapm5iwq9xlSBFhUiIMXgHhziF/HPEwdI1AIijtG3UjJiD2K9bnIUXHYSrBxT8UrdtbKNV7Kxd65f0MxSw6kjkajpG7OJWByBTAqP8CAMAoh1cbmZ/lt706hpwsjfACOCvF+Geb4FVDDU6dSyhHTpvxylAlD369YQdWbUJnqKOLIMYcMYplg8hYy3jDS9YxKvxhFBSqncE+95b38AY+j0ybOY2inObFYg5tF5VDy7yE41G5lV6C6hUfAUNXvRT0M2R+r5JdyWYqS9XE0TZNKyTysvsfUR+xkFMfnXGFOBIpaSrdq3uByiD1lPUbMceqqrRmb+SjH3vcrwheKo1cNqxEdqz7B3JQ6QtC5tiyqZOt8Ptt6vU79f9R7KCwUldRFSap7Fj4ShvqzveAmrTveTOFuICpsiDaDMrNRWzmVD1d3DnCLWbUYSpiRk3vnJOARWo+IHbZOrkC+p01LBDKjjmdRoqyvJ5HKRzRNk3I1g8Hg840hTqhgtkvJm3Pp29yt2arFuco55HrxR9agj77NTaA6QUgcKUsX3R8YgVfl76PkUK4WIbpmPqdm4s3tEY/7/4QBMJTZ7Xb28PDQAYNcsKAaL0R1A4rDz5n7KL2r0tT3mMSofCxqdhHJp3MW5B5iJ9rYUeY191m5cJjdMeZo/E5h0gj+NkF1XSc3wFe4YciF+YEoXle7mJMYkUmMmlUpsibCGSqppNLXCjhFobA6ibnCE/WluPvIGuQkYQyGcyGv/7lpms4NsKk83L/cDex2O5tMJp2SJoxVc02fVLMGJpSiiWYqNgeaIq18X46dpdNqQXONnbiqKZJ5Ky5enWKlpVTRj5KeM15T7tijG7/C9+Zyb5zwtm1tPB6nDaAWgAs91WlU1Tw8aWweuQeuyoJFDFvuPh3+d59kLC9jNTKGhHi6WFiBCTPmKaJ6h5wkLHJBCnQyDxM9A83/aDS6vZ+YpVIOEA6Hw02Ha97BUf/AiJDgMrH5fJ4aJHJ+PufjVReNnFtRVCu+W45lU3oFZAlzl0UpU50Doip05HSuqrrKVWYNh0M7HA6J6r/BReyf/V6e3W4X1qlH9fY5lk0t0HQ6tTdv3nQuprynK7mimqN2qY58lSI2pwdkqlbpDFi4mqNpVYlYdFCiSuZc7iWnMt7tdlbX9U29x2Aw+CwI4YUejUbWNE26RURl9iKdWa6aGF+wbVt7eXnpXFDJNQiRHu6epBTToEhVq7Erqll1GlWtc90VcVCcmwAACrxJREFUcL2EqkTKZRBzQFCRT9GB8Dkuy9Kapkl3QWFpXcIBKtvlzRk2m83NzlftYLGtKcf8zA7iRluv1/by8pImq67rTtuZnNQqyjoq/psxB/t9ZX1UTWOuKIOtjGoBn5PC33PrKiuBVM9F7u+42WzS/UBSpqZiXu9rb/a5QyXX1qnMH19+hKlYXBQmSBCFs8o36uARsWtoPXCRlannTF/fla7chk3VCCpuoS+TmOt6nsMG7Io4GVZVVSqv9w2AUYY/o1SXGPnkTSYT2263aTPwSWFTGpV95+hf1P+rCc6FjSpe9uew9IuRvVqAqL8P9zJW
dDdrHVUxS870MyBFF8J4Symv2PWWZWmfPn36LesHFqkTskdKF9T3bzab1C2rz4z1UZJ9SFhlBXMKl6jxgqKwOcyMmLk+di66mSPi4/siDH+uK7F4kRWNzkQdvmNVVbZerztrGLahy+n9zMym06nt9/tOKzmFUrmnPi+OuiRSmWVG50p8krs9O6pEVmxlpFKK8EYkic/5dNVeNqoSqus69fCNGL7IdWC3Fr8BHq+miSKPYU7m5AObTCa2Wq06N4pHcW7OOkSMW8TIqYxWdJ2NUteoNHJkbfqijOhEq8ZPqr5PYQR8Vl3XNp1OO4DNDxsKXyJq29+nLEtbrVZWlmVqvhX1D7xer583gKqwwZZxzgp6Sze+4p2JHvbj2EOPTZfKl0eFFLk7DJX5VUxkBEpzXEaEG/pqGThSyfVVwK5gLr9TuYic7tD5G2d08eq6CD+VuZOGoGg+n9vz87ONRqMU80aFnkwv50qjWHUU+SpFJed4dSX3UpEEbiZ1KUVfZjJK5UZ8Apd3I/D2Cx2jqMK1fCyju1wuKXez3W5tNptJt6jmpezrlIUuYjKZ2HK5tPfv33dMlCpcVNfAcZ4g0ujxhsmlaCPhQy4yyYlVuNlUbnHVpsuVdedqIvF6HRVq8vO5OqooCvvll19sOp3enPpcTeJQyb7xA/Dk+HWt7mOwQTKaV9XiPIcbchp8Zaa5aVVOcRRtLKWZU9fh3KO94xI0/gzuzqVOIp78XHdTzgU4abdcLq0sS5tOp507oHNjTyAwusJN9brx8isPDVkurnLUEUDknZm701eRK4rDiKIZhQ8Uq5kDsrmMmxqzN6pi8Mr1+pEWIddxDAmf5+dnO5/PNp1ObyKjHLk1GAw+9wiKZMiqD79fcbbdbq1pmrQJItZNycmia1lytQB48nM5/z7tnGqIpU5ZjuPIJXUUJ6HqF3jTYKgcye/w5Ltia7PZJL8fSe1yX8Mo/FKCD5y42Wxmq9UqScndEmB/2wikReRNn8lVdGjU0JHj9Rxaxw2YUzf16e+jcJjzFZFiKgqVOc+CiZ7VamXT6TTUL0YbIR1s1vb1vTzKyCeTiT0/P3d680Vl11wZpPLbOc38PVU2fRKzqJomB/b6yuD6tP+sqWQ3k0uW4S88YF7csVqtbDKZhAdC3aF4g/VyA4/MOJIX4/HYPn36FE6kMl8RSufIQW08NUG/pwVLpHO8J5uYk55xJBRtXGY7VSWVqkvgQtjn52cbj8dJ5BlhJqar+d6mYdSaVeW+Fdjy27D9/pwoFOpD9VGGL7rWJaex6xOM5iYqp8LN5e9VQUjUExA3nUr2cGYUw1JefGYEo45hkcCljKpflDhCoW7v6z8YDOz5+dnevHmTyqO4x4ACWmoDclu5HE6IFiXCNUq4mdMf5OoK1bWuOYzAvRhVG1hOqfv/1XVt+/3eXl5ebDabpZOv7nlmJXbOIpWR7+U+/1wQya3NncN+eXmxh4cHq+s6AURWDLPPUyxd1FiSFwzVRMpMcx1AdIVaX/dwRZnnupcoSZjqHRxlD/HZTvFuNpvO4vdZS9WxhX+mvAeJRxODcWxRFDYajWw4HNp6vbaHh4d0zx76H54wXxyWROcqd1XpuUoCRQ2ZFXGkZNX3CDgjyTdrG6LOnbzo3PK2qirbbDa22Wzs8fGxo0Jm85+7H5FTyGkDRL6PW8FGBQ58Unx3bjYbO5/PNpvNkjqIawNY0x6d4qhrR7Qhojau0QbpSwblqn4iV6O6nEXEjkqQ+aZ+eXmxpmns8fGxk91TafHcGDEqkMkg7sHTp2Pnxccd5pLv3W5n5/PZFovFjQZfgSN1+yX7RwVMWcmjJkmZXmVdVDzdF3bmLABzIuqwqXa9g8HAVquVtW1r8/m8c/Lv6Q+g6juVdH2owj9uL9ZHKnCrWD/ps9ksXQzl5ozJEG51rkIgJELUBY2KZ8iVb0Wtb6IoQOUT+qxErjsp5wkwDewxvl9mvVgskqRbnXh+h2itIkay7CNbOGd9
j1XAD59Op3Y8Hu35+dlms5nNZrMOYo40dgzwVA6BrQbr6iNFM+sDI+4i14uP9ZN9rWf58xURVJalrddra5rGZrNZquML1TwBfZ/bqNgsItUF9PEAaNpzzYgVZ+9hYlVVtt/vrWkaWywWhjeWRrdps2nnhApLtSMBqQJeEXmUGwsveqT46esn7OJV7HjWtq09PT3ZYDCw+Xze6besOpfcE+Jx5MWHqCiKWz2A6pLdRyxwLaAqw/YXOxwOtlwubTqd2mw2kyVOqmBD5erZp6F5QyyTYxoVgRLJp7CiKCKeGIfkxJ0+9u12mzR8k8kkdWtTauWoiimX9IqIrMvl8tkF5KRPLFmKwoyopy6bYb8O1q2BcwaqVQuesoho6Us65W4fizaYAlJ9esIICPL1tNjy7XA4pBK8+XzeEXDyHCh53D2ZyQivpeqhCLhwL/uIo88lYlRxhZNGdV3b4XCw9XptVVUlgiMifBSC5jHeZLqCzmbMeEY99nJJHQaz7KZUHsRPddM09vr6mrR7KAePxDmRVcoplCOLgPihZFKGOeiI5eoTSOQ2B1LIzhhuNhsrisKm02mnjJk7fkeiFS5Nd1PN16hFt33mJGTKKkUdRqIWMd6Ea7fb2eVySQvPsvFIxaT+LZd3ENKvGzY3WYC+Xvi5+3RyjOE9HILr2cbjsZ1OJ3t9fbXdbmfT6TS5C9fKqVtKWCTCpEeuVWzU01+BwL7iFnUZtYtlmqZJal1vKslhncp9qL7DvQKP4F4mNP2dBhG5erbcxN3LDCr0GsXW4/HYRqORnc9ne319tfV6bePxONHKHisr3MGXQTGZpe7tU5MaXegUsYUM8HxDe/Vzasv+q16Pr7/Dkxm1z4k4GLbWag1yjGGHCu7jAiJFLg+QO4iwv1b4gusFPGw8nU623+/tf//7n5VlabPZrAOUONpgppEznNgGv09MosSuSrGMFul4PHbuPB4Oh+m0R00ocxRuX0kZ1yoii6i6qysLkiWC+pJDSqacawKVa7nGhEsiKn7lCxwnOIj0AhVPQPmFF03TdBZQET54jWr0/qq6B822T/7pdLLD4dBxU2VZdnw8/pzqtJbLQEYUL46Hay9YNa2ypZ0NoNQsfRyzMj0KZavFjSwJ3+zpP1NVVZpUpFAxjPJNgTeg4Wd7Qkq5O743gMeHoahvxuPxmGRaZVmmghnVERSf7xs10laouxiiu5zQEqhEUt9NJWbWZQL7Up4KKEZ3/+QAS3TTJpeFMRPpk435hsvlkjbCer3u+NeyLFPTiaIoUl0DXlLBp7pt29RV43g82vF4TKebGUH06RGhxuKPXIUx8wf3MH2clbw3UeSf+X/9B04mXw6cfAAAAABJRU5ErkJggg==";var ze=De;
|
|
183
|
+
*/},function(e,t,n){"use strict";n.d(t,"a",(function(){return o}));var r=n(0);
|
|
176
184
|
/*!
|
|
177
185
|
* Copyright 2021 Cognite AS
|
|
178
|
-
*/
|
|
186
|
+
*/
|
|
187
|
+
class o{constructor(e){this._clippingPlanes=[new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane],this._box=e||new r.Box3,this.updatePlanes()}set minX(e){this._box.min.x=e,this.updatePlanes()}get minX(){return this._box.min.x}set minY(e){this._box.min.y=e,this.updatePlanes()}get minY(){return this._box.min.y}set minZ(e){this._box.min.z=e,this.updatePlanes()}get minZ(){return this._box.min.z}set maxX(e){this._box.max.x=e,this.updatePlanes()}get maxX(){return this._box.max.x}set maxY(e){this._box.max.y=e,this.updatePlanes()}get maxY(){return this._box.max.y}set maxZ(e){this._box.max.z=e,this.updatePlanes()}get maxZ(){return this._box.max.z}updatePlanes(){this._clippingPlanes[0].setFromNormalAndCoplanarPoint(new r.Vector3(1,0,0),new r.Vector3(this.minX,0,0)),this._clippingPlanes[1].setFromNormalAndCoplanarPoint(new r.Vector3(-1,0,0),new r.Vector3(this.maxX,0,0)),this._clippingPlanes[2].setFromNormalAndCoplanarPoint(new r.Vector3(0,1,0),new r.Vector3(0,this.minY,0)),this._clippingPlanes[3].setFromNormalAndCoplanarPoint(new r.Vector3(0,-1,0),new r.Vector3(0,this.maxY,0)),this._clippingPlanes[4].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,1),new r.Vector3(0,0,this.minZ)),this._clippingPlanes[5].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,-1),new r.Vector3(0,0,this.maxZ))}get clippingPlanes(){return this._clippingPlanes}}},,function(e,t){e.exports=require("comlink")},function(e,t){e.exports=require("loglevel")},function(e,t){e.exports=require("skmeans")},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = 
abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode 
== RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n 
return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nin float v_treeIndex;\nin vec3 v_normal;\nin vec3 v_color;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nin vec3 vViewPosition;\n\nvoid main() {\n NodeAppearance 
appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance); \n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = 
texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin mat4 a_instanceMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\n\nout float v_treeIndex;\nout vec3 v_normal;\nout vec3 v_color;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = a_treeIndex;\n v_color = 
a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * a_instanceMatrix * vec4(normalize(normal), 0.0)).xyz;\n //v_normal = normal;\n\n vec3 transformed = (a_instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n vViewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}"},function(e,t){e.exports=require("@cognite/sdk")},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nout vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t){e.exports=require("lodash/omit")},function(e,t){e.exports=require("lodash/chunk")},function(e,t){e.exports=require("lodash/throttle")},,function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int 
RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = 
packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in 
binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance 
nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform int renderMode;\n\nin vec3 vViewPosition;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n updateFragmentColor(renderMode, color, v_treeIndex, v_normal, gl_FragCoord.z, matCapTexture, GeometryType.Quad);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b 
* 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], 
matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin vec3 color;\nin float treeIndex;\nin vec4 matrix0;\nin vec4 matrix1;\nin vec4 matrix2;\nin vec4 matrix3;\n\nout float v_treeIndex;\nout vec3 v_color;\nout vec3 v_normal;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n \n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = treeIndex;\n v_color = color;\n v_normal = normalize(normalMatrix * (inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(normal), 0.0)).xyz);\n mat4 instanceMatrix = mat4(matrix0, matrix1, matrix2, matrix3);\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), 
q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == 
RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n 
return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main()\n{\n NodeAppearance appearance = 
determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n if (isClipped(appearance, v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.TriangleMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = 
texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nin vec3 color;\nin float treeIndex; \n\nout vec3 v_color;\nout float v_treeIndex;\nout vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_color = color;\n v_treeIndex = treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * 
vec4(position, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid 
updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = 
textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n 
}\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n if (isClipped(appearance, v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.InstancedMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D 
transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nin mat4 a_instanceMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\n\nout float 
v_treeIndex;\nout vec3 v_color;\nout vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_color = a_color;\n\n vec3 transformed = (a_instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * modelMatrix * vec4(transformed, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n v_treeIndex = a_treeIndex;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int 
RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType 
!= GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance 
determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n 
vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nin float v_treeIndex;\nin vec2 v_xy;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nin vec3 vViewPosition;\n\nvoid main() {\n float dist = dot(v_xy, v_xy);\n vec3 normal = normalize( v_normal );\n if (dist > 0.25)\n discard;\n \n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n 
float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n 
matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin mat4 a_instanceMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\nin vec3 a_normal;\n\nout vec2 v_xy;\nout vec3 v_color;\nout vec3 v_normal;\nout float v_treeIndex;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_xy = vec2(position.x, position.y);\n v_treeIndex = a_treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (a_instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n 
float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst 
int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int 
RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 
1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nin vec4 v_centerB;\n\nin vec4 v_W;\nin vec4 v_U;\n\nin float v_angle;\nin float v_arcAngle;\n\nin vec4 v_centerA;\nin vec4 v_V;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, 
treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n vec3 normal = normalize( v_normal );\n vec4 color = determineColor(v_color, appearance);\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = v_V;\n float height = length(v_centerA.xyz - v_centerB.xyz);\n float R2 = v_centerA.w;\n float dR = R2 - R1;\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n #if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n #else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n #endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR / height;\n float dRdRL2Inv = dRLInv * dRLInv;\n a -= D.z * D.z * dRdRL2Inv;\n b -= dRLInv * (E.z * D.z * dRLInv + R1 * D.z);\n c -= dRLInv * (E.z * E.z * dRLInv + 2.0 * R1 * E.z);\n }\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0) {\n discard;\n }\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n bool isInner = false;\n\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isClipped(appearance, p)\n ) {\n // Missed the first 
point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isClipped(appearance, p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n #if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n if (R1 != R2)\n {\n // Find normal vector\n vec3 n = -normalize(W.xyz);\n vec3 P1 = v_centerB.xyz;\n vec3 P2 = v_centerA.xyz;\n vec3 A = cross(P1 - p, P2 - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = P1 + R1 * t;\n vec3 o2 = P2 + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n }\n else\n {\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n }\n #endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 
255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], 
matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin float a_treeIndex;\nin vec3 a_centerA;\nin vec3 a_centerB;\nin float a_radiusA;\nin float a_radiusB;\nin vec3 a_color;\n// segment ins\nin vec3 a_localXAxis;\nin float a_angle;\nin float a_arcAngle;\n\nout float v_treeIndex;\n// We pack the radii into w-components\nout vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 out vec4 registers.\nout vec4 v_U;\nout vec4 v_W;\n\nout vec4 v_centerA;\nout vec4 v_V;\n\nout float v_angle;\nout float v_arcAngle;\n\nout vec3 v_color;\nout vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float maxRadius = max(a_radiusA, a_radiusB);\n float leftUpScale = maxRadius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = 
normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (maxRadius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // out data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0.0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n float radiusB = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusB, 0.0)).xyz);\n float radiusA = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusA, 0.0)).xyz);\n\n // We pack radii as w-components of v_centerB\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radiusB;\n\n v_V.xyz = -cross(v_U.xyz, v_W.xyz);\n v_V.w = surfacePoint.y;\n\n v_centerA.xyz = mul3(modelViewMatrix, centerA);\n v_centerA.w = radiusA;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" 
number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat 
floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int 
RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 
4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = 
geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nin vec4 U;\nin vec4 V;\nin vec4 axis;\n\nin vec4 v_centerA;\nin vec4 v_centerB;\nin float height;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() 
{\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n vec3 normal = normalize( v_normal );\n mat3 basis = mat3(U.xyz, V.xyz, axis.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, axis.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerA.xyz;\n vec3 E = diff * basis;\n float L = height;\n vec3 D = rayDirection * basis;\n\n float R1 = v_centerA.w;\n float R2 = v_centerB.w;\n float dR = R2 - R1;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy)-R1*R1;\n float L2Inv = 1.0/(L*L);\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR/L;\n float dRdRL2Inv = dRLInv*dRLInv;\n a -= D.z*D.z*dRdRL2Inv;\n b -= dRLInv*(E.z*D.z*dRLInv + R1*D.z);\n c -= dRLInv*(E.z*E.z*dRLInv + 2.0*R1*E.z);\n }\n\n // Additional terms when one of the center points is displaced orthogonal to normal vector\n vec2 displacement = ((v_centerB.xyz-v_centerA.xyz)*basis).xy; // In the basis where displacement is in XY only\n float displacementLengthSquared = dot(displacement, displacement);\n a += D.z*(D.z*displacementLengthSquared - 2.0*L*dot(D.xy, displacement))*L2Inv;\n b += (D.z*E.z*displacementLengthSquared - L*(D.x*E.z*displacement.x + D.y*E.z*displacement.y + D.z*E.x*displacement.x + D.z*E.y*displacement.y))*L2Inv;\n c += E.z*(E.z*displacementLengthSquared - 2.*L*dot(E.xy, displacement))*L2Inv;\n\n // Calculate a dicriminant of the above quadratic equation (factor 2 removed from all b-terms above)\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long eccentric cone\n if (d < 0.0) {\n discard;\n }\n float sqrtd = sqrt(d);\n 
float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n // Check the smallest root, it is closest camera. Only test if the z-component is outside the truncated eccentric cone\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n bool isInner = false;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isClipped(appearance, p)\n ) {\n // Either intersection point is behind starting point (happens inside the cone),\n // or the intersection point is outside the end caps. This is not a valid solution.\n isInner = true;\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isClipped(appearance, p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector\n vec3 n = normalize(-axis.xyz);\n vec3 v_centerA = v_centerA.xyz;\n vec3 v_centerB = v_centerB.xyz;\n vec3 A = cross(v_centerA - p, v_centerB - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = v_centerA + R1 * t;\n vec3 o2 = v_centerB + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 
* mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / 
transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin float a_treeIndex;\nin vec3 a_centerA;\nin vec3 a_centerB;\nin float a_radiusA;\nin float a_radiusB;\nin vec3 a_normal;\nin vec3 a_color;\n\nout float v_treeIndex;\n// We pack the radii into w-components\nout vec4 v_centerA;\nout vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 out vec4 registers.\nout vec4 U;\nout vec4 V;\nout vec4 axis;\nout float height;\n\nout vec3 v_color;\nout vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 normalWithOffset = normalize((modelTransformOffset * vec4(a_normal, 0)).xyz);\n\n float uniformScaleFactor = length(mul3(modelMatrix, normalize(vec3(1.0))));\n\n height = dot(centerA - 
centerB, normalWithOffset) * uniformScaleFactor;\n\n vec3 lDir;\n vec3 center = 0.5 * (centerA + centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n // Find the coordinates of centerA and centerB projected down to the end cap plane\n vec3 maxCenterProjected = centerA - dot(centerA, normalWithOffset) * normalWithOffset;\n vec3 minCenterProjected = centerB - dot(centerB, normalWithOffset) * normalWithOffset;\n float distanceBetweenProjectedCenters = length(maxCenterProjected - minCenterProjected);\n\n lDir = normalWithOffset;\n float dirSign = 1.0;\n if (dot(objectToCameraModelSpace, lDir) < 0.0) { // direction vector looks away, flip it\n dirSign = -1.0;\n lDir *= -1.;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n // compute basis for cone\n axis.xyz = -normalWithOffset;\n U.xyz = cross(objectToCameraModelSpace, axis.xyz);\n V.xyz = cross(U.xyz, axis.xyz);\n // Transform to camera space\n axis.xyz = normalize(normalMatrix * axis.xyz);\n U.xyz = normalize(normalMatrix * U.xyz);\n V.xyz = normalize(normalMatrix * V.xyz);\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radiusA * (position.x + 1.0) * 0.0025 / height);\n#endif\n\n v_centerA.xyz = mul3(viewMatrix, mul3(modelMatrix, centerA));\n v_centerB.xyz = mul3(viewMatrix, mul3(modelMatrix, centerB));\n\n float radiusA = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusA, 0.0)).xyz);\n float radiusB = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusB, 0.0)).xyz);\n\n // Pack radii as w components of v_centerA and v_centerB\n v_centerA.w = radiusA;\n v_centerB.w = radiusB;\n\n float radiusIncludedDisplacement = 0.5*(2.0*max(a_radiusA, a_radiusB) + 
distanceBetweenProjectedCenters);\n vec3 surfacePoint = center + mat3(0.5 * height * lDir * (1.0 / uniformScaleFactor), radiusIncludedDisplacement*left, radiusIncludedDisplacement*up) * newPosition;\n vec3 transformed = surfacePoint;\n\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n axis.w = surfacePoint.z;\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // 
Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool 
renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int 
InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if 
(renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\nin vec4 center;\nin float hRadius;\nin float height;\n\nin vec4 U;\nin vec4 V;\nin vec4 sphereNormal;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n vec3 normal = normalize(sphereNormal.xyz);\n\n float vRadius = center.w;\n float ratio = vRadius / hRadius;\n mat3 basis = mat3(U.xyz, V.xyz, sphereNormal.xyz);\n mat3 scaledBasis = mat3(ratio * U.xyz, ratio * V.xyz, sphereNormal.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, sphereNormal.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - center.xyz;\n vec3 E = diff * scaledBasis;\n vec3 D = rayDirection * scaledBasis;\n\n float 
a = dot(D, D);\n float b = dot(E, D);\n float c = dot(E, E) - vRadius*vRadius;\n\n // discriminant of sphere equation (factor 2 removed from b above)\n float d = b*b - a*c;\n if(d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isClipped(appearance, p)\n ) {\n // Missed the first point, check the other point\n\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isClipped(appearance, p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector in local space\n normal = vec3(p - center.xyz) * basis;\n normal.z = normal.z * (hRadius / vRadius) * (hRadius / vRadius);\n // Transform into camera space\n normal = normalize(basis * normal);\n if (dot(normal, rayDirection) > 0.) 
{\n normal = -normal;\n }\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 
determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform 
mat4 inverseModelMatrix;\nuniform mat4 inverseNormalMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\nin vec3 a_center;\nin vec3 a_normal;\nin float a_horizontalRadius;\nin float a_verticalRadius;\nin float a_height;\n\nout float v_treeIndex;\n// We pack vRadius as w-component of center\nout vec4 center;\nout float hRadius;\nout float height;\n\n// U, V, axis represent the 3x3 sphere basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 out vec4 registers.\nout vec4 U;\nout vec4 V;\nout vec4 sphereNormal;\n\nout vec3 v_color;\nout vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerWithOffset = mul3(modelTransformOffset, a_center).xyz;\n\n vec3 normalWithOffset = (modelTransformOffset * vec4(a_normal, 0)).xyz;\n\n vec3 lDir;\n float distanceToCenterOfSegment = a_verticalRadius - a_height * 0.5;\n vec3 centerOfSegment = centerWithOffset + normalWithOffset * distanceToCenterOfSegment;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 objectToCameraModelSpace = inverseNormalMatrix * vec3(0.0, 0.0, 1.0);\n#else\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - centerOfSegment;\n#endif\n\n vec3 newPosition = position;\n\n float bb = dot(objectToCameraModelSpace, normalWithOffset);\n if (bb < 0.0) { // direction vector looks away, flip it\n lDir = -normalWithOffset;\n } else { // direction vector already looks in my direction\n lDir = normalWithOffset;\n }\n\n vec3 left = 
normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_verticalRadius * (position.x + 1.0) * 0.0025 / a_height);\n#endif\n\n // Negative angle means height larger than radius,\n // so we should have full size so we can render the largest part of the ellipsoid segment\n float ratio = max(0.0, 1.0 - a_height / a_verticalRadius);\n // maxRadiusOfSegment is the radius of the circle (projected ellipsoid) when ellipsoid segment is seen from above\n float maxRadiusOfSegment = a_horizontalRadius * sqrt(1.0 - ratio * ratio);\n\n vec3 displacement = vec3(newPosition.x*a_height*0.5, maxRadiusOfSegment*newPosition.y, maxRadiusOfSegment*newPosition.z);\n vec3 surfacePoint = centerOfSegment + mat3(lDir, left, up) * displacement;\n vec3 transformed = surfacePoint;\n\n v_treeIndex = a_treeIndex;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n center.xyz = mul3(modelViewMatrix, centerWithOffset);\n center.w = a_verticalRadius; // Pack radius into w-component\n hRadius = a_horizontalRadius;\n height = a_height;\n v_color = a_color;\n\n // compute basis\n sphereNormal.xyz = normalMatrix * normalWithOffset;\n U.xyz = normalMatrix * up;\n V.xyz = normalMatrix * left;\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n sphereNormal.w = surfacePoint.z;\n\n // TODO should perhaps be a different normal?\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n'},function(e,t,n){"use strict";n.r(t),t.default='precision highp float;\n#define GLSLIFY 1\n\n#define texture2D texture\n#define gl_FragDepthEXT gl_FragDepth \n\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, 
\n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat 
updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = 
floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int 
Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n outputColor = 
vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\n// TODO general cylinder and cone are very similar and used\n// the same shader in the old code. 
Consider de-duplicating\n// parts of this code\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform float dataTextureWidth;\nuniform float dataTextureHeight;\nuniform mat4 projectionMatrix;\n\nin vec4 v_centerB;\n\nin vec4 v_W;\nin vec4 v_U;\n\nin float v_angle;\nin float v_arcAngle;\n\nin float v_surfacePointY;\n\nin vec4 v_planeA;\nin vec4 v_planeB;\n\nin float v_treeIndex;\n\nin vec3 v_color;\n\nin vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance); \n vec3 normal = normalize( v_normal );\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = vec4(normalize(cross(W.xyz, U.xyz)), v_surfacePointY);\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, 
intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n vec3 planeACenter = vec3(0.0, 0.0, v_planeA.w);\n vec3 planeANormal = v_planeA.xyz;\n vec3 planeBCenter = vec3(0.0, 0.0, v_planeB.w);\n vec3 planeBNormal = v_planeB.xyz;\n bool isInner = false;\n\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle ||\n isClipped(appearance, p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle || isClipped(appearance, p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n#define texture2D texture\n\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n 
float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, 
overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\nuniform mat4 modelMatrix;\nuniform mat4 viewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\n\nin vec3 position;\nin vec3 normal;\n\nin float a_treeIndex;\n\nin vec3 a_centerA;\nin vec3 a_centerB;\n\nin float a_radius;\n\nin vec3 a_color;\n// slicing plane attributes\nin vec4 a_planeA;\nin vec4 a_planeB;\n// segment attributes\nin vec3 a_localXAxis;\nin float a_angle;\nin float a_arcAngle;\n\nout float v_treeIndex;\n// We pack the radii into w-components\nout vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nout vec4 v_U;\nout vec4 v_W;\n\nout vec4 v_planeA;\nout vec4 v_planeB;\n\nout float v_surfacePointY;\n\nout float v_angle;\nout float v_arcAngle;\n\nout vec3 v_color;\n\nout vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 modelViewMatrix = viewMatrix * modelMatrix;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = 
mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float leftUpScale = a_radius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n // We pack radii as w-components of v_centerB\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n float radius = length((modelToTransformOffset * vec4(a_localXAxis * a_radius, 0.0)).xyz);\n\n centerB = centerB - dir;\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radius;\n\n vec4 planeA = a_planeA;\n planeA.w = length((modelToTransformOffset * vec4(planeA.xyz * planeA.w, 0.0)).xyz);\n\n vec4 planeB = a_planeB;\n planeB.w = 
length((modelToTransformOffset * vec4(planeB.xyz * planeB.w, 0.0)).xyz);\n\n v_planeA = planeA;\n v_planeB = planeB;\n v_surfacePointY = surfacePoint.y;\n v_centerB.w = radius;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="precision highp float;\n#define GLSLIFY 1\n\n#define texture2D texture\n#define gl_FragDepthEXT gl_FragDepth \n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool 
renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int 
RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int 
RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == 
RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nin float v_oneMinusThicknessSqr;\nin vec2 v_xy;\nin float v_angle;\nin float v_arcAngle;\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nin vec3 
vViewPosition;\n\nvoid main() {\n NodeAppearance appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance);\n float dist = dot(v_xy, v_xy);\n float theta = atan(v_xy.y, v_xy.x);\n vec3 normal = normalize( v_normal );\n if (theta < v_angle) {\n theta += 2.0 * PI;\n }\n if (dist > 0.25 || dist < 0.25 * v_oneMinusThicknessSqr || theta >= v_angle + v_arcAngle) {\n discard;\n }\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n#define texture2D texture\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float 
xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin vec3 position;\n\nuniform mat4 modelMatrix;\nuniform mat4 viewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat3 normalMatrix;\n\nin mat4 a_instanceMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\nin float a_angle;\nin float a_arcAngle;\nin float a_thickness;\nin vec3 a_normal;\n\nout float v_treeIndex;\nout float 
v_oneMinusThicknessSqr;\nout vec2 v_xy;\nout float v_angle;\nout float v_arcAngle;\n\nout vec3 v_color;\nout vec3 v_normal;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_treeIndex = a_treeIndex;\n v_oneMinusThicknessSqr = (1.0 - a_thickness) * (1.0 - a_thickness);\n v_xy = vec2(position.x, position.y);\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (a_instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 
255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = 
vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n 
return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nin vec3 vViewPosition;\n\nvoid main() {\n NodeAppearance 
appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance); \n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = 
texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin mat4 a_instanceMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\nin float a_arcAngle;\nin float a_radius;\nin float a_tubeRadius;\n\nout float v_treeIndex;\nout vec3 v_color;\nout vec3 v_normal;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n // normalized theta and phi are packed into positions\n float theta = position.x * a_arcAngle;\n float phi = position.y;\n float cosTheta = cos(theta);\n float sinTheta = 
sin(theta);\n vec3 pos3 = vec3(0);\n\n pos3.x = (a_radius + a_tubeRadius*cos(phi)) * cosTheta;\n pos3.y = (a_radius + a_tubeRadius*cos(phi)) * sinTheta;\n pos3.z = a_tubeRadius*sin(phi);\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n vec3 transformed = (a_instanceMatrix * vec4(pos3, 1.0)).xyz;\n\n // Calculate normal vectors if we're not picking\n vec3 center = (a_instanceMatrix * vec4(a_radius * cosTheta, a_radius * sinTheta, 0.0, 1.0)).xyz;\n vec3 objectNormal = normalize(transformed.xyz - center);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n\n vViewPosition = modelViewPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int 
Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nout vec4 outputColor;\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n outputColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n float s = 0.4 + 0.6 * amplitude;\n outputColor = vec4(vec3(s), 0.3);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n outputColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n outputColor = 
vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n outputColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n outputColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n outputColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n outputColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n outputColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nNodeAppearance determineNodeAppearance(sampler2D nodeAppearanceTexture, vec2 textureSize, float treeIndex) {\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight;\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n \n vec4 texel = texture2D(nodeAppearanceTexture, treeIndexUv);\n float alphaUnwrapped = floor((texel.a * 255.0) + 0.5);\n\n bool isVisible = floatBitsSubset(alphaUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(alphaUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(alphaUnwrapped, 2, 3) == 1.0;\n\n 
return NodeAppearance(texel, isVisible, renderInFront, renderGhosted);\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(NodeAppearance nodeAppearance, int renderMode) {\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && nodeAppearance.isVisible && nodeAppearance.renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !nodeAppearance.renderGhosted && nodeAppearance.isVisible && (nodeAppearance.renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, NodeAppearance nodeAppearance) {\n vec4 overrideColor = nodeAppearance.colorTexel;\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nin float v_treeIndex;\nin vec3 v_color;\nin vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nin vec3 vViewPosition;\n\nvoid main() {\n NodeAppearance 
appearance = determineNodeAppearance(colorDataTexture, treeIndexTextureSize, v_treeIndex);\n if (!determineVisibility(appearance, renderMode)) {\n discard;\n }\n if (isClipped(appearance, vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, appearance); \n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = 
texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nin float a_treeIndex;\nin vec3 a_color;\nin vec3 a_vertex1;\nin vec3 a_vertex2;\nin vec3 a_vertex3;\nin vec3 a_vertex4;\n\nout float v_treeIndex;\nout vec3 v_color;\nout vec3 v_normal;\n\nout vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n vec3 transformed;\n // reduce the avarage branchings\n if (position.x < 1.5) {\n transformed = position.x == 0.0 ? a_vertex1 : a_vertex2;\n } else {\n transformed = position.x == 2.0 ? 
a_vertex3 : a_vertex4;\n }\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 objectNormal = cross(a_vertex1 - a_vertex2, a_vertex1 - a_vertex3);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \nmat3 G[9];\n// hard coded matrix values!!!! 
as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45\nconst mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );\nconst mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );\nconst mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );\nconst mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );\nconst mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );\nconst mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );\nconst mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );\nconst mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );\nconst mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408 );\n\nfloat edgeDetectionFilter(sampler2D baseTexture, vec2 uv, vec2 resolution) {\n\n vec2 texel = vec2(1.0 / resolution.x, 1.0 / resolution.y);\n\n\tG[0] = g0,\n\tG[1] = g1,\n\tG[2] = g2,\n\tG[3] = g3,\n\tG[4] = g4,\n\tG[5] = g5,\n\tG[6] = g6,\n\tG[7] = g7,\n\tG[8] = g8;\n\n\tmat3 I;\n\tfloat cnv[9];\n\tvec3 neighbour;\n\n\t/* fetch the 3x3 neighbourhood and use the RGB vector's length as intensity value */\n\tfor (int i=0; i<3; i++) {\n\t\tfor (int j=0; j<3; j++) {\n\t\t\tneighbour = texture2D(baseTexture, uv + texel * vec2(float(i)-1.0,float(j)-1.0) ).rgb;\n\t\t\tI[i][j] = length(neighbour);\n\t\t}\n\t}\n\n\t/* calculate the convolution 
values for all the masks */\n\tfor (int i=0; i<9; i++) {\n\t\tfloat dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);\n\t\tcnv[i] = dp3 * dp3;\n\t}\n\n\tfloat M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);\n\tfloat S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);\n\n float edgeStrength = sqrt(M/S);\n\n return edgeStrength;\n}\n\n#include <packing>\n\nin vec2 vUv;\n\nin vec2 vUv0;\nin vec2 vUv1;\nin vec2 vUv2;\nin vec2 vUv3;\n\nuniform sampler2D tFront;\nuniform sampler2D tFrontDepth;\n\nuniform sampler2D tBack;\nuniform sampler2D tBackDepth;\n\nuniform sampler2D tCustom;\nuniform sampler2D tCustomDepth;\n\nuniform sampler2D tGhost;\nuniform sampler2D tGhostDepth;\n\nuniform sampler2D tOutlineColors;\n\nuniform float cameraNear;\nuniform float cameraFar;\n\nuniform vec2 resolution;\n\nuniform float edgeStrengthMultiplier;\nuniform float edgeGrayScaleIntensity;\n\nout vec4 outputColor;\n\nconst float infinity = 1e20;\n\nfloat computeFloatEncodedOutlineIndex(float bitEncodedFloat){\n return floatBitsSubset(floor((bitEncodedFloat * 255.0) + 0.5), 5, 8);\n}\n\nvec4 computeNeighborOutlineIndices(sampler2D colorTexture){\n float outlineIndex0 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv0).a);\n float outlineIndex1 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv1).a);\n float outlineIndex2 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv2).a);\n float outlineIndex3 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv3).a);\n\n return vec4(outlineIndex0, outlineIndex1, outlineIndex2, outlineIndex3);\n}\n\nfloat toViewZ(float depth, float near, float far){\n float normalizedDepth = depth * 2.0 - 1.0;\n return 2.0 * near * far / (far + near - normalizedDepth * (far - near)); \n}\n\nvec4 computeNeighborAlphas(sampler2D colorTexture){\n float alpha0 = texture2D(colorTexture, vUv0).a;\n float alpha1 = texture2D(colorTexture, vUv1).a;\n float alpha2 = texture2D(colorTexture, vUv2).a;\n float alpha3 = 
texture2D(colorTexture, vUv3).a;\n\n return vec4(alpha0, alpha1, alpha2, alpha3);\n}\n\nvoid main() {\n vec4 frontAlbedo = texture2D(tFront, vUv);\n vec4 backAlbedo = texture2D(tBack, vUv);\n vec4 customAlbedo = texture2D(tCustom, vUv);\n vec4 ghostAlbedo = texture2D(tGhost, vUv);\n\n float frontDepth = texture2D(tFrontDepth, vUv).r;\n float backDepth = texture2D(tBackDepth, vUv).r; \n float customDepth = texture2D(tCustomDepth, vUv).r;\n float ghostDepth = texture2D(tGhostDepth, vUv).r;\n\n // This is a hack to make sure that all textures are initialized\n // If a texture is unused, it will have a clear value of 0.0.\n // Without this we've seen issues with MSAA where resizing render targets\n // causes depth to cleared to either 1 or 0 depending on the device/browser\n customDepth = customDepth > 0.0 ? customDepth : 1.0; \n backDepth = backDepth > 0.0 ? backDepth : 1.0;\n ghostDepth = ghostDepth > 0.0 ? ghostDepth : 1.0;\n frontDepth = frontDepth > 0.0 ? frontDepth : 1.0; \n\n if(all(greaterThanEqual(vec4(backDepth, customDepth, ghostDepth, frontDepth), vec4(1.0)))){\n discard;\n }\n \n // Decompose and clamp \"ghost\" color\n vec4 clampedGhostAlbedo = vec4(max(ghostAlbedo.rgb, 0.5), min(ghostAlbedo.a, 0.8));\n\n float frontOutlineIndex = computeFloatEncodedOutlineIndex(frontAlbedo.a);\n vec4 frontNeighborIndices = computeNeighborOutlineIndices(tFront);\n\n // There exsists fragments of rendered objects within the edge width that should have border\n if(any(equal(frontNeighborIndices, vec4(0.0))) && frontOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(frontNeighborIndices.x, frontNeighborIndices.y), max(frontNeighborIndices.z, frontNeighborIndices.w));\n outputColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = frontDepth;\n#endif\n return;\n }\n\n // texture has drawn fragment\n if(frontDepth < 1.0){\n float customDepthTest = 
step(customDepth, backDepth); // zero if back is in front\n\n float a = customDepthTest > 0.0 ? ceil(customAlbedo.a) * 0.5 : ceil(backAlbedo.a) * 0.5;\n\n outputColor = vec4(frontAlbedo.rgb, 1.0) * (1.0 - a) + (vec4(backAlbedo.rgb, 1.0) * (1.0 - customDepthTest) + vec4(customAlbedo.rgb, 1.0) * customDepthTest) * a;\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = texture2D(tFrontDepth, vUv).r;\n#endif\n return;\n }\n\n if (customDepth >= backDepth) {\n float backOutlineIndex = computeFloatEncodedOutlineIndex(backAlbedo.a);\n vec4 backNeighborIndices = computeNeighborOutlineIndices(tBack);\n\n if( any(equal(backNeighborIndices, vec4(0.0))) && backOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(backNeighborIndices.x, backNeighborIndices.y), max(backNeighborIndices.z, backNeighborIndices.w));\n outputColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tBackDepth, vUv).r;\n#endif\n return;\n }\n }\n \n float edgeStrength = 0.0;\n#if defined(EDGES)\n if (!any(equal(computeNeighborAlphas(tBack), vec4(0.0)))) {\n float depthEdge = toViewZ(backDepth, cameraNear, cameraFar);\n edgeStrength = (1.0 - smoothstep(10.0, 40.0, depthEdge)) * edgeDetectionFilter(tBack, vUv, resolution) * edgeStrengthMultiplier;\n }\n#endif\n\n // Combine color from ghost, back and custom object\n vec4 color = backAlbedo;\n float depth = backDepth;\n if (customDepth < backDepth && ghostDepth == 1.0) {\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * backAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else if (customDepth < backDepth && ghostDepth < 1.0) {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n vec3 modelAlbedo = mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s);\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * modelAlbedo.rgb, 
1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n color = vec4(mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s), backAlbedo.a);\n depth = mix(backDepth, ghostDepth, s);\n }\n \n outputColor = color * (1.0 - edgeStrength) + vec4(vec3(edgeGrayScaleIntensity) * edgeStrength, 1.0);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = depth;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nout vec2 vUv;\n\n// selection outline\nuniform vec2 texelSize;\nout vec2 vUv0;\nout vec2 vUv1;\nout vec2 vUv2;\nout vec2 vUv3;\n\nvoid main() {\n vUv = uv;\n\n // selection outline\n vUv0 = vec2(uv.x + texelSize.x, uv.y);\n vUv1 = vec2(uv.x - texelSize.x, uv.y);\n vUv2 = vec2(uv.x, uv.y + texelSize.y);\n vUv3 = vec2(uv.x, uv.y - texelSize.y);\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nin vec2 v_uv;\nin vec2 v_fragCoord;\nin vec2 v_rgbNW;\nin vec2 v_rgbNE;\nin vec2 v_rgbSW;\nin vec2 v_rgbSE;\nin vec2 v_rgbM;\n\nuniform vec2 inverseResolution;\nuniform vec2 resolution;\nuniform sampler2D tDiffuse;\nuniform sampler2D tDepth;\n\nout vec4 outputColor;\n\n#ifndef FXAA_REDUCE_MIN\n #define FXAA_REDUCE_MIN (1.0/ 128.0)\n#endif\n#ifndef FXAA_REDUCE_MUL\n #define FXAA_REDUCE_MUL (1.0 / 8.0)\n#endif\n#ifndef FXAA_SPAN_MAX\n #define FXAA_SPAN_MAX 8.0\n#endif\n\nvec4 fxaa(sampler2D tex, vec2 fragCoord,\n vec2 resolution, vec2 inverseResolution,\n vec2 v_rgbNW, vec2 v_rgbNE,\n vec2 v_rgbSW, vec2 v_rgbSE,\n vec2 v_rgbM) {\n vec4 color;\n\n vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;\n vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;\n vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;\n vec3 rgbSE = texture2D(tex, v_rgbSE).xyz;\n 
vec4 texColor = texture2D(tex, v_rgbM);\n vec3 rgbM = texColor.xyz;\n\n vec3 luma = vec3(0.299, 0.587, 0.114);\n float lumaNW = dot(rgbNW, luma);\n float lumaNE = dot(rgbNE, luma);\n float lumaSW = dot(rgbSW, luma);\n float lumaSE = dot(rgbSE, luma);\n float lumaM = dot(rgbM, luma);\n float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));\n float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));\n\n mediump vec2 dir;\n dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));\n dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));\n\n float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *\n (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);\n\n float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);\n dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),\n max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),\n dir * rcpDirMin));\n\n vec4 rgbA = 0.5 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (1.0 / 3.0 - 0.5))) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (2.0 / 3.0 - 0.5))));\n vec4 rgbB = rgbA * 0.5 + 0.25 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * -0.5)) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * 0.5)));\n\n float lumaB = dot(rgbB.rgb, luma);\n if ((lumaB < lumaMin) || (lumaB > lumaMax)) {\n color = rgbA;\n } else {\n color = rgbB;\n }\n return color;\n}\n\nvoid main() {\n outputColor = fxaa(tDiffuse, v_fragCoord, \n resolution, inverseResolution, \n v_rgbNW, v_rgbNE, v_rgbSW, v_rgbSE, v_rgbM);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tDepth, v_uv).r;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nuniform vec2 resolution;\nuniform vec2 inverseResolution;\n\nout vec2 v_uv;\nout vec2 v_fragCoord;\nout vec2 v_rgbNW;\nout vec2 v_rgbNE;\nout vec2 
v_rgbSW;\nout vec2 v_rgbSE;\nout vec2 v_rgbM;\n\nvoid main() {\n v_fragCoord = uv * resolution;\n v_rgbNW = (v_fragCoord + vec2(-1.0, -1.0)) * inverseResolution;\n v_rgbNE = (v_fragCoord + vec2(1.0, -1.0)) * inverseResolution;\n v_rgbSW = (v_fragCoord + vec2(-1.0, 1.0)) * inverseResolution;\n v_rgbSE = (v_fragCoord + vec2(1.0, 1.0)) * inverseResolution;\n v_rgbM = vec2(v_fragCoord * inverseResolution);\n v_uv = uv;\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\nin vec2 vUv;\n\nuniform mat4 projMatrix;\nuniform mat4 inverseProjectionMatrix;\n\nuniform vec3 kernel[MAX_KERNEL_SIZE];\n\nuniform sampler2D tDepth;\n\nuniform vec2 resolution;\n\nuniform float sampleRadius;\nuniform float bias;\n\nout vec4 outputColor;\n\nvec3 viewPosFromDepth(float depth, vec2 uv) {\n // Depth to clip space: [0, 1] -> [-1, 1]\n float z = depth * 2.0 - 1.0;\n\n // Fragment in clip space\n vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = inverseProjectionMatrix * clipSpacePosition;\n\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n\n return viewSpacePosition.xyz;\n}\n\nvec3 computeWorldNormalFromDepth(sampler2D depthTexture, vec2 resolution, vec2 uv, float sampleDepth){\n float dx = 1.0 / resolution.x;\n float dy = 1.0 / resolution.y;\n\n vec2 uv1 = uv + vec2(dx, 0.0); // right\n float d1 = texture2D(depthTexture, uv1).r; \n\n vec2 uv2 = uv + vec2(0.0, dy); // up\n float d2 = texture2D(depthTexture, uv2).r;\n\n vec2 uv3 = uv + vec2(-dx, 0.0); // left\n float d3 = texture2D(depthTexture, uv3).r;\n\n vec2 uv4 = uv + vec2(0.0, -dy); // down\n float d4 = texture2D(depthTexture, uv4).r;\n\n bool horizontalSampleCondition = abs(d1 - sampleDepth) < abs(d3 - sampleDepth);\n\n float 
horizontalSampleDepth = horizontalSampleCondition ? d1 : d3;\n vec2 horizontalSampleUv = horizontalSampleCondition ? uv1 : uv3;\n\n bool verticalSampleCondition = abs(d2 - sampleDepth) < abs(d4 - sampleDepth);\n\n float verticalSampleDepth = verticalSampleCondition ? d2 : d4;\n vec2 verticalSampleUv = verticalSampleCondition ? uv2 : uv4;\n\n vec3 viewPos = viewPosFromDepth(sampleDepth, vUv);\n \n vec3 viewPos1 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv) : viewPosFromDepth(verticalSampleDepth, verticalSampleUv);\n vec3 viewPos2 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(verticalSampleDepth, verticalSampleUv) : viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv);\n\n return normalize(cross(viewPos1 - viewPos, viewPos2 - viewPos));\n}\n\nvoid main(){\n float d = texture2D(tDepth, vUv).r;\n\n vec3 viewNormal = computeWorldNormalFromDepth(tDepth, resolution, vUv, d);\n\n vec3 viewPosition = viewPosFromDepth(d, vUv);\n\n vec3 randomVec = normalize(vec3(rand2d(vUv), rand2d(vUv * 3.0), rand2d(vUv * 5.0)));\n\n vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));\n\n vec3 bitangent = cross(viewNormal, tangent);\n\n mat3 TBN = mat3(tangent, bitangent, viewNormal);\n\n float occlusion = 0.0;\n\n for (int i = 0; i < MAX_KERNEL_SIZE; i++){\n \n vec3 sampleVector = TBN * kernel[i];\n sampleVector = viewPosition + sampleVector * sampleRadius;\n\n vec4 offset = projMatrix * vec4(sampleVector, 1.0);\n offset.xyz /= offset.w;\n offset.xyz = offset.xyz * 0.5 + 0.5;\n\n float realDepth = texture2D(tDepth, offset.xy).r;\n vec3 realPos = viewPosFromDepth(realDepth, offset.xy);\n\n float rangeCheck = smoothstep(0.0, 1.0, sampleRadius / length(viewPosition - realPos));\n\n occlusion += (realPos.z >= sampleVector.z + bias ? 
1.0 : 0.0) * rangeCheck;\n }\n\n float occlusionFactor = 1.0 - clamp(occlusion / float(MAX_KERNEL_SIZE), 0.0, 1.0);\n\n outputColor = vec4(vec3(occlusionFactor), 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Copyright Cognite (C) 2021 Cognite\n//\n// Efficient Gaussian blur based on technique described by Daniel Rákos in\n// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n// generalized for two dimensions\n\nin vec2 vUv;\n\nuniform sampler2D tDiffuse;\nuniform sampler2D tAmbientOcclusion;\n\nuniform vec2 resolution;\n\nout vec4 outputColor;\n\nvoid main() {\n float blurredAO =\n texture2D(tAmbientOcclusion, vUv + vec2(-3.1111111, -3.1111111) / resolution).r * 0.0012360 +\n texture2D(tAmbientOcclusion, vUv + vec2(-1.3333333, -3.1111111) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(-3.1111111, -1.3333333) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(-1.3333333, -1.3333333) / resolution).r * 0.1076660 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0000000, -3.1111111) / resolution).r * 0.0096130 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0000000, -1.3333333) / resolution).r * 0.0897217 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3333333, -3.1111111) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.1111111, -3.1111111) / resolution).r * 0.0012360 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3333333, -1.3333333) / resolution).r * 0.1076660 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.1111111, -1.3333333) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(-3.1111111, 0.0000000) / resolution).r * 0.0096130 +\n texture2D(tAmbientOcclusion, vUv + vec2(-1.3333333, 0.0000000) / resolution).r * 0.0897217 +\n texture2D(tAmbientOcclusion, vUv + vec2(-3.1111111, 1.3333333) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(-1.3333333, 1.3333333) / resolution).r * 0.1076660 +\n 
texture2D(tAmbientOcclusion, vUv + vec2(-3.1111111, 3.1111111) / resolution).r * 0.0012360 +\n texture2D(tAmbientOcclusion, vUv + vec2(-1.3333333, 3.1111111) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0000000, 1.3333333) / resolution).r * 0.0897217 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0000000, 3.1111111) / resolution).r * 0.0096130 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3333333, 1.3333333) / resolution).r * 0.1076660 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.1111111, 1.3333333) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3333333, 3.1111111) / resolution).r * 0.0115356 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.1111111, 3.1111111) / resolution).r * 0.0012360 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3333333, 0.0000000) / resolution).r * 0.0897217 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.1111111, 0.0000000) / resolution).r * 0.0096130 +\n texture2D(tAmbientOcclusion, vUv).r * 0.0747681;\n\n outputColor = vec4(texture2D(tDiffuse, vUv).rgb * blurredAO, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\nstruct NodeAppearance {\n vec4 colorTexel;\n bool isVisible;\n bool renderInFront;\n bool renderGhosted;\n};\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isClipped(NodeAppearance nodeAppearance, vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) 
> plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nin mediump vec3 v_color;\nin lowp float v_coverageFactor;\nin lowp float v_visible;\nin lowp vec2 v_seed;\n\nin vec3 v_viewPosition;\n\nout vec4 outputColor;\n \nconst NodeAppearance dummyNodeAppearance = NodeAppearance(vec4(0.0), false, false, false);\n\nvoid main() {\n if(v_visible != 1.0 || isClipped(dummyNodeAppearance, v_viewPosition)){\n discard;\n }\n\n float v = rand2d(gl_FragCoord.xy + v_seed);\n if (v >= v_coverageFactor) {\n discard;\n }\n\n outputColor = vec4(v_color, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nin mediump float a_sectorId;\nin lowp vec3 a_coverageFactor;\nin lowp float a_visible;\n\nout mediump vec3 v_color;\nout lowp float v_coverageFactor;\nout lowp vec2 v_seed;\nout lowp float v_visible;\n\nout vec3 v_viewPosition;\n\nvoid main()\n{\n v_visible = a_visible;\n v_color = packIntToColor(a_sectorId);\n v_coverageFactor = abs(dot(a_coverageFactor, normal));\n // A seed to ensure that two overlapping sectors A and B \n // doesn't produce the same noise pattern\n v_seed = vec2(a_sectorId / 255.0, a_sectorId / 65025.0);\n\n vec4 mvPosition = modelViewMatrix * instanceMatrix * vec4( position, 1.0 );\n\n v_viewPosition = mvPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 1.0);\n}\n"},,function(e,t,n){"use strict";n.r(t),n.d(t,"NodeAppearanceProvider",(function(){return _})),n.d(t,"NodeOutlineColor",(function(){return d})),n.d(t,"NodeCollectionBase",(function(){return a})),n.d(t,"TreeIndexNodeCollection",(function(){return f})),n.d(t,"IntersectionNodeCollection",(function(){return 
h})),n.d(t,"UnionNodeCollection",(function(){return p})),n.d(t,"revealEnv",(function(){return i.t})),n.d(t,"IndexSet",(function(){return i.e})),n.d(t,"NumericRange",(function(){return i.i})),n.d(t,"BoundingBoxClipper",(function(){return Gn.a})),n.d(t,"Cognite3DModel",(function(){return q})),n.d(t,"Cognite3DViewer",(function(){return rr})),n.d(t,"CognitePointCloudModel",(function(){return H})),n.d(t,"PotreePointShape",(function(){return tn})),n.d(t,"PotreePointColorType",(function(){return rn})),n.d(t,"PotreePointSizeType",(function(){return an})),n.d(t,"WellKnownAsprsPointClassCodes",(function(){return dn})),n.d(t,"NotSupportedInMigrationWrapperError",(function(){return k})),n.d(t,"PropertyFilterNodeCollection",(function(){return oe})),n.d(t,"SinglePropertyFilterNodeCollection",(function(){return ae})),n.d(t,"AssetNodeCollection",(function(){return te})),n.d(t,"InvertedNodeCollection",(function(){return ie})),n.d(t,"registerCustomNodeCollectionType",(function(){return ce})),n.d(t,"THREE",(function(){return l})),n.d(t,"DefaultNodeAppearance",(function(){return g})),n.d(t,"ClusteredAreaCollection",(function(){return u}));var r=n(3),o=n.n(r),i=n(1);
|
|
179
188
|
/*!
|
|
180
189
|
* Copyright 2021 Cognite AS
|
|
181
190
|
*/
|
|
182
|
-
class
|
|
191
|
+
class a{constructor(e){this._changedEvent=new i.d,this._classToken=e}get classToken(){return this._classToken}on(e,t){o()("changed"===e),this._changedEvent.subscribe(t)}off(e,t){o()("changed"===e),this._changedEvent.unsubscribe(t)}notifyChanged(){this._changedEvent.fire()}}class s extends a{constructor(e,t){super(e),this._cachedCombinedIndexSet=void 0,this._nodeCollections=[],this._changedUnderlyingNodeCollectionHandler=this.makeDirty.bind(this),t&&t.forEach(e=>this.add(e))}add(e){e.on("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.push(e),this.makeDirty()}remove(e){const t=this._nodeCollections.indexOf(e);if(t<0)throw new Error("Could not find set");e.off("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.splice(t,1),this.makeDirty()}clear(){this._nodeCollections.forEach(e=>e.clear())}makeDirty(){void 0!==this._cachedCombinedIndexSet&&(this._cachedCombinedIndexSet=void 0,this.notifyChanged())}getIndexSet(){var e;return this._cachedCombinedIndexSet=null!==(e=this._cachedCombinedIndexSet)&&void 0!==e?e:this.createCombinedIndexSet(),this._cachedCombinedIndexSet}get isLoading(){return this._nodeCollections.some(e=>e.isLoading)}}var d,l=n(0);
|
|
192
|
+
/*!
|
|
193
|
+
* Copyright 2021 Cognite AS
|
|
194
|
+
*/class c{constructor(e){this.addedSinceSquash=0,this._surfaceAreaVars={size:new l.Vector3},this._shouldMergeBoxesVars={inputBox:new l.Box3},this.resultBoxes=null!=e?e:[]}mergeBoxesAtIndices(e,t){this.resultBoxes[e].union(this.resultBoxes[t]),this.resultBoxes[t]=this.resultBoxes[this.resultBoxes.length-1],this.resultBoxes.pop()}surfaceArea(e){const{size:t}=this._surfaceAreaVars;return e.getSize(t),2*(t.x*t.y+t.y*t.z+t.z*t.x)}shouldMergeBoxes(e,t){const{inputBox:n}=this._shouldMergeBoxesVars;n.copy(e);const r=n.union(t);return this.surfaceArea(r)<1*(this.surfaceArea(e)+this.surfaceArea(t))+8}addBox(e){let t=!1;for(let n=0;n<this.resultBoxes.length;n++)if(this.shouldMergeBoxes(e,this.resultBoxes[n])){this.resultBoxes[n].union(e),t=!0;break}t||this.resultBoxes.push(e.clone()),this.addedSinceSquash+=1}get boxCount(){return this.resultBoxes.length}addBoxes(e){for(const t of e)this.addBox(t)}squashBoxes(){for(let e=0;e<this.resultBoxes.length;e++)for(let t=e+1;t<this.resultBoxes.length;t++){this.shouldMergeBoxes(this.resultBoxes[e],this.resultBoxes[t])&&(this.mergeBoxesAtIndices(e,t),t--)}}*getBoxes(){this.addedSinceSquash>.3*this.resultBoxes.length&&(this.squashBoxes(),this.addedSinceSquash=0);for(const e of this.resultBoxes)yield e.clone()}intersectsBox(e){for(const t of this.resultBoxes)if(e.intersectsBox(t))return!0;return!1}union(e){const t=[];for(const e of this.resultBoxes)t.push(e.clone());const n=new c(t);return n.addBoxes(e),n}addIntersectionIfNonempty(e,t,n){const r=e.clone().intersect(t);r.isEmpty()||n.push(r)}intersection(e){const t=[...e],n=this.resultBoxes,r=[];for(const e of n)for(const n of t)this.addIntersectionIfNonempty(e,n,r);const o=new c(r);return o.squashBoxes(),o}}
|
|
183
195
|
/*!
|
|
184
196
|
* Copyright 2021 Cognite AS
|
|
185
|
-
*/
|
|
197
|
+
*/class u{constructor(){this._clusterer=new c}get isEmpty(){return 0==this._clusterer.boxCount}*areas(){yield*this._clusterer.getBoxes()}intersectsBox(e){for(const t of this._clusterer.getBoxes())if(e.intersectsBox(t))return!0;return!1}addAreas(e){this._clusterer.addBoxes(e)}intersectWith(e){this._clusterer.intersection(e)}}class p extends s{constructor(e){super(p.classToken,e),this._cachedNodeAreas=void 0}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new i.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let t=1;t<this._nodeCollections.length;++t)e.unionWith(this._nodeCollections[t].getIndexSet());return e}getAreas(){if(this._cachedNodeAreas)return this._cachedNodeAreas;const e=new u;for(let t=0;t<this._nodeCollections.length;++t)e.addAreas(this._nodeCollections[t].getAreas().areas());return this._cachedNodeAreas=e,e}}p.classToken="UnionNodeCollection";
|
|
186
198
|
/*!
|
|
187
199
|
* Copyright 2021 Cognite AS
|
|
188
200
|
*/
|
|
189
|
-
|
|
201
|
+
class m{constructor(){}static instance(){return m._instance=m._instance||new m,m._instance}*areas(){}intersectsBox(e){return!1}addAreas(e){throw new Error("addAreas() not defined for EmptyAreaCollection")}intersectWith(e){}get isEmpty(){return!0}}class h extends s{constructor(e){super(p.classToken,e),this._cachedNodeAreas=void 0}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new i.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let t=1;t<this._nodeCollections.length;++t)e.intersectWith(this._nodeCollections[t].getIndexSet());return e}makeDirty(){super.makeDirty(),this._cachedNodeAreas=void 0}getAreas(){if(this._cachedNodeAreas)return this._cachedNodeAreas;if(0===this._nodeCollections.length)return this._cachedNodeAreas=m.instance(),this._cachedNodeAreas;const e=new u;e.addAreas(this._nodeCollections[0].getAreas().areas());for(let t=1;t<this._nodeCollections.length;++t)e.intersectWith(this._nodeCollections[t].getAreas().areas());return this._cachedNodeAreas=e,e}}h.classToken="IntersectionNodeCollection";
|
|
190
202
|
/*!
|
|
191
203
|
* Copyright 2021 Cognite AS
|
|
192
204
|
*/
|
|
193
|
-
class
|
|
205
|
+
class f extends a{constructor(e){super(f.classToken),e instanceof i.e?this._treeIndices=e:(i.i.isNumericRange(e),this._treeIndices=new i.e(e))}updateSet(e){this._treeIndices=e,this.notifyChanged()}clear(){this._treeIndices=new i.e,this.notifyChanged()}getIndexSet(){return this._treeIndices}getAreas(){if(!this._areaCollection){if(0===this._treeIndices.count)return m.instance();throw new Error("The AreaCollection returned by getAreas() for TreeIndexNodeCollection must be constructed manually using addAreas() and addAreaPoints()")}return this._areaCollection}addAreas(e){this._areaCollection||(this._areaCollection=new u),this._areaCollection.addAreas(e)}addAreaPoints(e){this._areaCollection||(this._areaCollection=new u);const t=e.map(e=>(new l.Box3).setFromCenterAndSize(e,new l.Vector3(1,1,1)));this._areaCollection.addAreas(t)}clearAreas(){this._areaCollection=void 0}get isLoading(){return!1}serialize(){return{token:this.classToken,state:this._treeIndices.toRangeArray(),options:{areas:this._areaCollection?[...this._areaCollection.areas()]:void 0}}}}f.classToken="TreeIndexNodeCollection",function(e){e[e.NoOutline=0]="NoOutline",e[e.White=1]="White",e[e.Black=2]="Black",e[e.Cyan=3]="Cyan",e[e.Blue=4]="Blue",e[e.Green=5]="Green",e[e.Red=6]="Red",e[e.Orange=7]="Orange"}(d||(d={}));const v={visible:!0,outlineColor:d.White},x={visible:!0,renderInFront:!0},g={Default:{visible:!0,renderGhosted:!1,renderInFront:!1,outlineColor:d.NoOutline,color:[0,0,0],prioritizedForLoadingHint:0},Outlined:v,Hidden:{visible:!1},InFront:x,Ghosted:{visible:!0,renderGhosted:!0},Highlighted:{...x,visible:!0,color:[100,100,255],...v}};var b=n(16),y=n.n(b);
|
|
194
206
|
/*!
|
|
195
207
|
* Copyright 2021 Cognite AS
|
|
196
|
-
*/
|
|
208
|
+
*/
|
|
209
|
+
class _{constructor(){this._styledCollections=new Array,this._cachedPrioritizedAreas=void 0,this._events={changed:new i.d,loadingStateChanged:new i.d},this.scheduleNotifyChanged=y()(()=>this.notifyChanged(),0)}on(e,t){switch(e){case"changed":this._events.changed.subscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.subscribe(t);break;default:Object(i.l)(e,`Unsupported event: '${e}'`)}}off(e,t){switch(e){case"changed":this._events.changed.unsubscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.unsubscribe(t);break;default:Object(i.l)(e,`Unsupported event: '${e}'`)}}assignStyledNodeCollection(e,t){const n=this._styledCollections.find(t=>t.nodeCollection===e);if(void 0!==n)n.appearance=t,this.handleNodeCollectionChanged(n);else{const n={nodeCollection:e,appearance:t,handleNodeCollectionChangedListener:()=>{this.handleNodeCollectionChanged(n)}};this._styledCollections.push(n),e.on("changed",n.handleNodeCollectionChangedListener),this.scheduleNotifyChanged()}}unassignStyledNodeCollection(e){const t=this._styledCollections.findIndex(t=>t.nodeCollection===e);if(-1===t)throw new Error("NodeCollection not added");const n=this._styledCollections[t];this._styledCollections.splice(t,1),e.off("changed",n.handleNodeCollectionChangedListener),this.scheduleNotifyChanged()}applyStyles(e){this._styledCollections.forEach(t=>{const n=t.nodeCollection.getIndexSet();e(n,t.appearance)})}getPrioritizedAreas(){if(this._cachedPrioritizedAreas)return this._cachedPrioritizedAreas;const e=this._styledCollections.filter(e=>e.appearance.prioritizedForLoadingHint).flatMap(e=>{const t=[];for(const n of e.nodeCollection.getAreas().areas())t.push({area:n,extraPriority:e.appearance.prioritizedForLoadingHint});return t});return this._cachedPrioritizedAreas=e,this._cachedPrioritizedAreas}clear(){for(const e of 
this._styledCollections){e.nodeCollection.off("changed",e.handleNodeCollectionChangedListener)}this._styledCollections.splice(0),this.scheduleNotifyChanged()}get isLoading(){return this._styledCollections.some(e=>e.nodeCollection.isLoading)}notifyChanged(){this._cachedPrioritizedAreas=void 0,this._events.changed.fire()}notifyLoadingStateChanged(){this._lastFiredLoadingState!==this.isLoading&&(this._lastFiredLoadingState=this.isLoading,this._events.loadingStateChanged.fire(this.isLoading))}handleNodeCollectionChanged(e){this.scheduleNotifyChanged(),this.notifyLoadingStateChanged()}}
|
|
197
210
|
/*!
|
|
198
211
|
* Copyright 2021 Cognite AS
|
|
199
|
-
*/
|
|
212
|
+
*/var T,C,w=n(12),M=n.n(w),I=n(31),S=n.n(I),P=n(7);!function(e){e[e.NoAA=0]="NoAA",e[e.FXAA=1]="FXAA"}(T||(T={})),function(e){e[e.Medium=32]="Medium",e[e.High=64]="High",e[e.VeryHigh=128]="VeryHigh",e[e.None=1]="None",e[e.Default=32]="Default"}(C||(C={}));const N={antiAliasing:T.FXAA,multiSampleCountHint:1,ssaoRenderParameters:{sampleSize:C.Default,sampleRadius:1,depthCheckBias:.0125},edgeDetectionParameters:{enabled:!0}};var A,R=n(8),D=n(15);!function(e){e[e.Color=1]="Color",e[e.Normal=2]="Normal",e[e.TreeIndex=3]="TreeIndex",e[e.PackColorAndNormal=4]="PackColorAndNormal",e[e.Depth=5]="Depth",e[e.Effects=6]="Effects",e[e.Ghost=7]="Ghost",e[e.LOD=8]="LOD",e[e.DepthBufferOnly=9]="DepthBufferOnly",e[e.GeometryType=10]="GeometryType"}(A||(A={}));
|
|
200
213
|
/*!
|
|
201
214
|
* Copyright 2021 Cognite AS
|
|
202
|
-
*/
|
|
215
|
+
*/
|
|
216
|
+
const E=new l.Color("black");function B(e,t){const{camera:n,normalizedCoords:r,renderer:o,domElement:i}=t,a=new l.Scene,s=e.parent;a.add(e);try{const t={normalizedCoords:r,camera:n,renderer:o,domElement:i,scene:a,cadNode:e},p=function(e){const{cadNode:t}=e,n=t.renderMode;let r;t.renderMode=A.TreeIndex;try{r=L(e,E,0)}finally{t.renderMode=n}if(0===r[3])return;return 255*r[0]*255+255*r[1]+r[2]}(t);if(void 0===p)return;const m=function(e){const{cadNode:t}=e,n=t.renderMode;t.renderMode=A.Depth;const r=L(e,E,0);t.renderMode=n;return o=r,O.fromArray(o).multiplyScalar(1/255).dot(z);var o}(t),h=function(e,t){const{camera:n,normalizedCoords:r}=e,o=new l.Vector3;return o.set(r.x,r.y,.5).applyMatrix4(n.projectionMatrixInverse),o.multiplyScalar(t/o.z),o.applyMatrix4(n.matrixWorld),o}(t,(d=m,c=n.near,u=n.far,c*u/((u-c)*d-u)));return{distance:(new l.Vector3).subVectors(h,n.position).length(),point:h,treeIndex:p,object:e,cadNode:e}}finally{s&&s.add(e)}var d,c,u}const O=new l.Vector4,z=new l.Vector4(255/256/16777216,255/256/65536,255/256/256,255/256);const F={renderTarget:new l.WebGLRenderTarget(1,1),pixelBuffer:new Uint8Array(4)};function L(e,t,n){const{renderTarget:r,pixelBuffer:o}=F,{scene:a,camera:s,normalizedCoords:d,renderer:c,domElement:u}=e,p=s.clone(),m={x:(d.x+1)/2*u.clientWidth,y:(1-d.y)/2*u.clientHeight};p.setViewOffset(u.clientWidth,u.clientHeight,m.x,m.y,1,1);const h=new i.j(c);try{const{width:e,height:i}=c.getSize(new l.Vector2);r.setSize(e,i),h.setRenderTarget(r),h.setClearColor(t,n),c.clearColor(),c.render(a,p),c.readRenderTargetPixels(r,0,0,1,1,o)}finally{h.resetState()}return o}
|
|
203
217
|
/*!
|
|
204
218
|
* Copyright 2021 Cognite AS
|
|
205
|
-
*/
|
|
219
|
+
*/class k extends Error{constructor(e){super(e)}}
|
|
206
220
|
/*!
|
|
207
221
|
* Copyright 2021 Cognite AS
|
|
208
|
-
*/
|
|
222
|
+
*/class G{constructor(e){this._camera=e,this._needsRedraw=!0,this._lastCameraPosition=new l.Vector3,this._lastCameraRotation=new l.Euler,this._lastCameraZoom=0,window.addEventListener("focus",()=>{this.redraw()})}get needsRedraw(){return this._needsRedraw}update(){const{_camera:e,_lastCameraPosition:t,_lastCameraRotation:n,_lastCameraZoom:r}=this,{position:o,rotation:i,zoom:a}=e,s=!o.equals(t)||!i.equals(n)||a!==r;t.copy(o),n.copy(i),this._lastCameraZoom=a,s&&(this._needsRedraw=!0)}clearNeedsRedraw(){this._needsRedraw=!1}redraw(){this._needsRedraw=!0}}var V=n(4);
|
|
209
223
|
/*!
|
|
210
224
|
* Copyright 2021 Cognite AS
|
|
211
|
-
*/
|
|
212
|
-
class ot{constructor(e,t){this.sectors=[],this._totalCost={downloadSize:0,drawCalls:0,renderCost:0},this.determineSectorCost=t,Object(s.y)(e,e=>(this.sectors.length=Math.max(this.sectors.length,e.id),this.sectors[e.id]={sector:e,parentIndex:-1,priority:-1,cost:{downloadSize:0,drawCalls:0,renderCost:0},lod:o.a.Discarded},!0));for(let e=0;e<this.sectors.length;++e){const t=this.sectors[e];if(void 0!==t){const n=t.sector.children.map(e=>e.id);for(const t of n)this.sectors[t].parentIndex=e}}e.facesFile.fileName&&this.setSectorLod(e.id,o.a.Simple)}get totalCost(){return this._totalCost}determineWantedSectorCount(){return this.sectors.reduce((e,t)=>e=t.lod!==o.a.Discarded?e+1:e,0)}toWantedSectors(e,t,n){return this.sectors.filter(e=>void 0!==e).map(r=>({modelIdentifier:e,modelBaseUrl:t,levelOfDetail:r.lod,metadata:r.sector,priority:r.priority,geometryClipBox:n})).sort((e,t)=>t.priority-e.priority)}markSectorDetailed(e,t){if(this.setSectorPriority(e,t),this.sectors[e].lod===o.a.Detailed)return;let n=this.sectors[e];for(;;){switch(n.lod){case o.a.Simple:this.replaceSimpleWithDetailed(n.sector.id);break;case o.a.Discarded:this.setSectorLod(n.sector.id,o.a.Detailed)}if(-1===n.parentIndex)break;n=this.sectors[n.parentIndex]}this.markAllDiscardedChildrenAsSimple(e)}replaceSimpleWithDetailed(e){it(this.sectors[e].lod===o.a.Simple,`Sector ${e} must be a Simple-sector, but got ${this.sectors[e].lod}`),this.setSectorLod(e,o.a.Detailed),this.markAllDiscardedChildrenAsSimple(e)}markAllDiscardedChildrenAsSimple(e){for(const t of this.sectors[e].sector.children)this.getSectorLod(t.id)===o.a.Discarded&&null!==t.facesFile.fileName&&this.setSectorLod(t.id,o.a.Simple)}setSectorLod(e,t){var 
n,r;it(t!==o.a.Simple||null!==this.sectors[e].sector.facesFile.fileName),this.sectors[e].lod=t,n=this._totalCost,r=this.sectors[e].cost,n.downloadSize-=r.downloadSize,n.drawCalls-=r.drawCalls,n.renderCost-=r.renderCost,this.sectors[e].cost=this.determineSectorCost(this.sectors[e].sector,t),rt(this._totalCost,this.sectors[e].cost)}setSectorPriority(e,t){this.sectors[e].priority=t}getSectorLod(e){return this.sectors[e].lod}}function it(e,t="assertion hit"){e||je.a.error("[ASSERT]",t)}
|
|
225
|
+
*/class U{constructor(e,t,n){this.modelId=e,this.revisionId=t,this.nodesApiClient=n,this.nodeIdToTreeIndexMap=new Map,this.treeIndexToNodeIdMap=new Map,this.treeIndexSubTreeSizeMap=new Map,this.nodeIdRequestObservable=new P.Subject,this.nodeIdResponse=this.nodeIdRequestObservable.pipe(Object(V.bufferTime)(50),Object(V.filter)(e=>e.length>0),Object(V.mergeMap)(async e=>{const t=await this.nodesApiClient.mapNodeIdsToTreeIndices(this.modelId,this.revisionId,e);return e.map((e,n)=>({nodeId:e,treeIndex:t[n]}))}),Object(V.mergeAll)(),Object(V.tap)(e=>{this.nodeIdToTreeIndexMap.set(e.nodeId,e.treeIndex),this.treeIndexToNodeIdMap.set(e.treeIndex,e.nodeId)}),Object(V.share)()),this.treeIndexRequestObservable=new P.Subject,this.treeIndexResponse=this.treeIndexRequestObservable.pipe(Object(V.bufferTime)(50),Object(V.filter)(e=>e.length>0),Object(V.mergeMap)(async e=>{const t=await this.nodesApiClient.mapTreeIndicesToNodeIds(this.modelId,this.revisionId,e);return e.map((e,n)=>({nodeId:t[n],treeIndex:e}))}),Object(V.mergeAll)(),Object(V.tap)(e=>{this.nodeIdToTreeIndexMap.set(e.nodeId,e.treeIndex),this.treeIndexToNodeIdMap.set(e.treeIndex,e.nodeId)}),Object(V.share)()),this.subtreeSizeObservable=new P.Subject,this.subtreeSizeResponse=this.subtreeSizeObservable.pipe(Object(V.bufferTime)(50),Object(V.filter)(e=>e.length>0),Object(V.mergeMap)(async e=>this.nodesApiClient.determineTreeIndexAndSubtreeSizesByNodeIds(this.modelId,this.revisionId,e)),Object(V.mergeAll)(),Object(V.tap)(e=>{this.treeIndexSubTreeSizeMap.set(e.treeIndex,e.subtreeSize)}),Object(V.share)())}async getTreeIndex(e){const t=this.nodeIdToTreeIndexMap.get(e);if(void 0!==t)return t;const n=this.nodeIdResponse.pipe(Object(V.first)(t=>t.nodeId===e),Object(V.map)(e=>e.treeIndex)).toPromise();return this.nodeIdRequestObservable.next(e),await n}async getNodeId(e){const t=this.treeIndexToNodeIdMap.get(e);if(void 0!==t)return t;const 
n=this.treeIndexResponse.pipe(Object(V.first)(t=>t.treeIndex===e),Object(V.map)(e=>e.nodeId)).toPromise();return this.treeIndexRequestObservable.next(e),await n}async getSubtreeSize(e){const t=this.treeIndexSubTreeSizeMap.get(e);if(t)return t;const n=await this.getNodeId(e),r=this.subtreeSizeResponse.pipe(Object(V.first)(t=>t.treeIndex===e),Object(V.map)(e=>e.subtreeSize)).toPromise();return this.subtreeSizeObservable.next(n),await r}async getTreeIndices(e){const t=e.map(e=>this.nodeIdToTreeIndexMap.get(e)||-1),n=e.filter((e,n)=>-1===t[n]);if(0===n.length)return t;const r=new Map(e.map((e,t)=>[e,t])),o=await this.nodesApiClient.mapNodeIdsToTreeIndices(this.modelId,this.revisionId,n);console.assert(n.length===o.length);for(let e=0;e<o.length;e++){const i=n[e],a=o[e],s=r.get(i);t[s]=a,this.add(i,a)}return t}async getNodeIds(e){const t=e.map(e=>this.treeIndexToNodeIdMap.get(e)||-1),n=e.filter((e,n)=>-1===t[n]);if(0===n.length)return t;const r=new Map(e.map((e,t)=>[e,t])),o=await this.nodesApiClient.mapTreeIndicesToNodeIds(this.modelId,this.revisionId,n);console.assert(o.length===n.length);for(let e=0;e<o.length;e++){const i=o[e],a=n[e],s=r.get(a);t[s]=i,this.add(i,a)}return t}add(e,t){this.nodeIdToTreeIndexMap.set(e,t),this.treeIndexToNodeIdMap.set(t,e)}}
|
|
213
226
|
/*!
|
|
214
227
|
* Copyright 2021 Cognite AS
|
|
215
|
-
*/
|
|
228
|
+
*/async function j(e,t,n,r=15e3){let o=e;return new Promise(e=>{!function i(){for(let e=0;e<r&&o<=t;e++)n(o++);o<=t?setTimeout(i):e()}()})}var W=n(2);
|
|
216
229
|
/*!
|
|
217
230
|
* Copyright 2021 Cognite AS
|
|
218
|
-
*/}
|
|
231
|
+
*/class q extends l.Object3D{constructor(e,t,n,r){super(),this.type="cad",this._styledNodeCollections=[],this.modelId=e,this.revisionId=t,this.cadModel=n.cadModelMetadata,this.nodesApiClient=r,this.nodeIdAndTreeIndexMaps=new U(e,t,this.nodesApiClient),this.cadNode=n,this.add(this.cadNode),Object.defineProperty(this,"visible",{get:()=>this.cadNode.visible,set:e=>{this.cadNode.visible=e}})}get nodeTransformProvider(){return this.cadNode.nodeTransformProvider}get modelUnit(){return this.cadNode.cadModelMetadata.scene.unit}get modelUnitToMetersFactor(){return W.g.get(this.modelUnit)}setDefaultNodeAppearance(e){this.cadNode.defaultNodeAppearance=e}getDefaultNodeAppearance(){return this.cadNode.defaultNodeAppearance}get styledNodeCollections(){return[...this._styledNodeCollections]}assignStyledNodeCollection(e,t){R.a.trackCadModelStyled(e.classToken,t);const n=this._styledNodeCollections.findIndex(t=>t.nodeCollection===e);-1!==n?this._styledNodeCollections[n].appearance=t:this._styledNodeCollections.push({nodeCollection:e,appearance:t}),this.cadNode.nodeAppearanceProvider.assignStyledNodeCollection(e,t)}unassignStyledNodeCollection(e){const t=this._styledNodeCollections.findIndex(t=>t.nodeCollection===e);if(-1===t)throw new Error("Node collection has not been assigned to model");this._styledNodeCollections.splice(t,1),this.cadNode.nodeAppearanceProvider.unassignStyledNodeCollection(e)}removeAllStyledNodeCollections(){this._styledNodeCollections.splice(0),this.cadNode.nodeAppearanceProvider.clear()}setNodeTransform(e,t){this.nodeTransformProvider.setNodeTransform(e,t)}resetNodeTransform(e){this.nodeTransformProvider.resetNodeTransform(e)}mapFromCdfToModelCoordinates(e,t){return(t=void 0!==t?t:new l.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.modelMatrix),t}mapPositionFromModelToCdfCoordinates(e,t){return(t=void 0!==t?t:new l.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}mapBoxFromModelToCdfCoordinates(e,t){return(t=null!=t?t:new 
l.Box3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}mapBoxFromCdfToModelCoordinates(e,t){return(t=null!=t?t:new l.Box3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.modelMatrix),t}dispose(){this.children=[]}async getSubtreeTreeIndices(e){return this.determineTreeIndices(e,!0)}async getAncestorTreeIndices(e,t){const n=await this.mapTreeIndexToNodeId(e),r=await this.nodesApiClient.determineNodeAncestorsByNodeId(this.modelId,this.revisionId,n,t);return new i.i(r.treeIndex,r.subtreeSize)}getModelBoundingBox(e,t){const n=t?this.cadModel.scene.getBoundsOfMostGeometry():this.cadModel.scene.root.bounds;return(e=e||new l.Box3).copy(n),e.applyMatrix4(this.cadModel.modelMatrix),e}getCameraConfiguration(){return this.cadModel.cameraConfiguration}setModelTransformation(e){this.cadNode.setModelTransformation(e)}getModelTransformation(e){return this.cadNode.getModelTransformation(e)}async getBoundingBoxByNodeId(e,t){try{return(t=(await this.nodesApiClient.getBoundingBoxesByNodeIds(this.modelId,this.revisionId,[e]))[0]).applyMatrix4(this.cadModel.modelMatrix),t}catch(e){throw R.a.trackError(e,{moduleName:"Cognite3DModel",methodName:"getBoundingBoxByNodeId"}),e}}async getBoundingBoxByTreeIndex(e,t){const n=await this.nodeIdAndTreeIndexMaps.getNodeId(e);return this.getBoundingBoxByNodeId(n,t)}iterateNodesByTreeIndex(e){return j(0,this.cadModel.scene.maxTreeIndex,e)}get nodeCount(){return this.cadModel.scene.maxTreeIndex+1}async iterateSubtreeByTreeIndex(e,t){const n=await this.determineTreeIndices(e,!0);return j(n.from,n.toInclusive,t)}async setNodeTransformByTreeIndex(e,t,n=!0){const r=await this.determineTreeIndices(e,n);return this.nodeTransformProvider.setNodeTransform(r,t),r.count}async resetNodeTransformByTreeIndex(e,t=!0){const n=await this.determineTreeIndices(e,t);return this.nodeTransformProvider.resetNodeTransform(n),n.count}async mapNodeIdsToTreeIndices(e){return this.nodeIdAndTreeIndexMaps.getTreeIndices(e)}async mapNodeIdToTreeIndex(e){return 
this.nodeIdAndTreeIndexMaps.getTreeIndex(e)}async mapTreeIndicesToNodeIds(e){return this.nodeIdAndTreeIndexMaps.getNodeIds(e)}async mapTreeIndexToNodeId(e){return this.nodeIdAndTreeIndexMaps.getNodeId(e)}async determineTreeIndices(e,t){let n=1;if(t){const t=await this.nodeIdAndTreeIndexMaps.getSubtreeSize(e);n=t||1}return new i.i(e,n)}}
|
|
219
232
|
/*!
|
|
220
233
|
* Copyright 2021 Cognite AS
|
|
221
|
-
*/
|
|
234
|
+
*/class H extends l.Object3D{constructor(e,t,n){super(),this.type="pointcloud",this.modelId=e,this.revisionId=t,this.pointCloudNode=n,this.add(n)}dispose(){this.children=[]}getModelBoundingBox(e){return this.pointCloudNode.getBoundingBox(e)}getCameraConfiguration(){return this.pointCloudNode.cameraConfiguration}setModelTransformation(e){this.pointCloudNode.setModelTransformation(e)}getModelTransformation(e){return this.pointCloudNode.getModelTransformation(e)}setClassVisible(e,t){this.pointCloudNode.setClassVisible(e,t)}isClassVisible(e){return this.pointCloudNode.isClassVisible(e)}hasClass(e){return this.pointCloudNode.hasClass(e)}getClasses(){return this.pointCloudNode.getClasses()}get visiblePointCount(){return this.pointCloudNode.visiblePointCount}get pointColorType(){return this.pointCloudNode.pointColorType}set pointColorType(e){this.pointCloudNode.pointColorType=e}get pointSize(){return this.pointCloudNode.pointSize}set pointSize(e){this.pointCloudNode.pointSize=e}get pointShape(){return this.pointCloudNode.pointShape}set pointShape(e){this.pointCloudNode.pointShape=e}}
|
|
222
235
|
/*!
|
|
223
236
|
* Copyright 2021 Cognite AS
|
|
224
237
|
*/
|
|
225
|
-
// NOTE(review): tail of a webpack module whose factory header lies before this
// chunk. Units visible in this span:
//  - `o`: module-scope scratch object (Vector2 + Vector3) reused across calls
//    to avoid per-call allocations.
//  - `i(renderer, camera, worldPos, out)`: projects a world-space position to
//    normalized viewport coordinates, scaling by render-size vs. canvas
//    client-rect ratio; returns (x, y in [0..1]-ish, projected z) in `out`.
//  - `a(renderer, camera, worldPos, out)`: builds on `i` to produce rounded
//    pixel coordinates, dividing by the renderer's pixel ratio.
//  - a CommonJS shim exporting require("lodash/cloneDeep").
//  - a webpack module whose default export is a GLSL fragment shader string:
//    rgb2hsv/hsv2rgb helpers, tree-index→RGB packing, a render-mode dispatch
//    in updateFragmentColor (color/ghost/depth/LOD/geometry-type debug modes),
//    data-texture-driven per-tree-index visibility and color overrides, and
//    clip-plane slicing (isSliced).
//  - a webpack module whose default export is a GLSL vertex shader string for
//    instanced meshes, applying per-tree-index matrix overrides unpacked from
//    float-packed RGBA textures (determineMatrixOverride).
// Minified bundle output with renderer line-wrapping inside string literals —
// code below is intentionally left byte-identical.
const o={renderSize:new r.Vector2,position:new r.Vector3};function i(e,t,n,i=new r.Vector3){const{renderSize:a,position:s}=o,d=e.domElement;e.getSize(a),s.copy(n),s.project(t);const{width:l,height:c}=d.getBoundingClientRect(),u=a.width/l,m=a.height/c,h=(s.x+1)/(1*u*2),p=(1-s.y)/(1*m*2);return i.set(h,p,s.z)}function a(e,t,n,a=new r.Vector3){i(e,t,n,a);const{renderSize:s}=o;e.getSize(s);const d=e.getPixelRatio();return a.x=Math.round(a.x*s.width/d),a.y=Math.round(a.y*s.height/d),a}},function(e,t){e.exports=require("lodash/cloneDeep")},,function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include 
<packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * instanceMatrix * vec4(normalize(normal), 0.0)).xyz;\n //v_normal = normal;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n vViewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}"},function(e,t,n){"use strict";n.d(t,"a",(function(){return o}));var r=n(0);
|
|
238
|
+
// NOTE(review): `K` is the loading-spinner overlay (core/src/utilities/Spinner
// in the unminified sources). The constructor injects an SVG logo <div> into
// the given parent element (forcing the parent to position:relative) and
// lazily installs a shared <style> tag via the static loadStyles(), guarded by
// a fixed element id so it is only added once. Accessors:
//  - `placement` repositions the overlay to one of four corners (throws via
//    the exhaustiveness helper on an unknown value);
//  - `opacity` forwards to the element's CSS opacity;
//  - `loading` toggles the animated CSS class and the tooltip text;
//  - `updateBackgroundColor(color)` switches to the dark variant when the
//    background's HSL lightness exceeds 0.5;
//  - `dispose()` removes both the element and the injected <style> tag.
// The static `titles` strings embed the package version ("3.0.0-alpha.0").
// Minified output with SVG/CSS string literals wrapped across rendered lines —
// code left byte-identical.
class K{constructor(e){this._loading=!1,K.loadStyles(),this.el=document.createElement("div"),this.el.title=K.titles.idle,this.el.className=K.classnames.base,this.el.classList.add(K.classnames.topLeft),this.el.innerHTML='<svg fill="none" xmlns="http://www.w3.org/2000/svg" width="64" height="38" viewBox="0 0 64 38">\n <path id="reveal-spinner-top-1" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-28.2275 5.9674-28.7378 6.0661-29.2355 6.1624h-.0001c-1.3944.2698-2.6891.5203-3.9772.7036v-6.3583c-.0652-1.475-1.2799-2.6372-2.7563-2.6372s-2.6911 1.1622-2.7563 2.6372v7.1072z"/>\n <path id="reveal-spinner-top-2" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-16.8452 3.375-17.0189 3.4183-17.1711 3.506-1.7276.3694-3.3435.7389-4.9594 1.1083l-.0571.013V7.6106c-.0465-1.48876-1.2668-2.67155-2.7563-2.67155S22.8005 6.12184 22.754 7.6106v13.5866z"/>\n <path id="reveal-spinner-top-3" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-7.274 1.2593-9.1537 1.6344-11.0347 2.0095V2.74337c-.0338-1.54541-1.3116-2.7721267-2.8571-2.74285692h-.0134C35.0327.00556379 33.8015 1.2272 33.7846 2.74337V18.7098z"/>\n <path id="reveal-spinner-top-4" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288v-3.2274c0-1.5594-1.2642-2.82358-2.8236-2.82358s-2.8235 1.26418-2.8235 2.82358v4.4z"/>\n <path id="reveal-spinner-center" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288C5.46324 23.6926 1.83164 23.4439 1.45248 22.3172c-.773329-1.6548 3.63593-3.8942 5.79662-4.9915.28489-.1447.53068-.2695.72036-.3706v-.1237h-.12505c-1.62823.6226-8.646718 3.4958-7.768735 5.9832.875294 2.7456 11.653125 3.4958 34.086725-1.9939 18.7926-4.6145 27.4393-5.4925 28.3173-3.7446.6266 1.4952-4.0121 4.3644-8.5217 6.3597-.0966.0984-.1427.2357-.125.3724l.002.002c.1243.1244.2474.2475.3745.1231 1.6268-.6266 10.1499-4.238 9.7721-7.2323-.3738-1.9935-4.8842-2.6175-13.5274-1.3722z"/>\n <path 
id="reveal-spinner-bot-1" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M11.7557 30.5551v-2.0894c1.8291-.1048 3.6511-.3108 5.4575-.6171v2.7065c-.033 1.4835-1.245 2.6689-2.7288 2.6689s-2.6958-1.1854-2.7287-2.6689z"/>\n <path id="reveal-spinner-bot-2" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M22.7715 26.9222v7.602c-.0132 1.496 1.1804 2.7238 2.6762 2.7529 1.4957.0291 2.7362-1.1513 2.7813-2.6467v-8.8309c-1.7953.4821-3.6177.857-5.4575 1.1227z"/>\n <path id="reveal-spinner-bot-3" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M33.87 24.7193h-.3764v5.3419c.066 1.5151 1.3136 2.7095 2.8302 2.7095s2.7642-1.1944 2.8303-2.7095v-6.5883c-.3876.0886-.7821.1771-1.1838.2671l-.0007.0002c-1.2926.2897-2.6585.5959-4.0996.9764v.0027z"/>\n <path id="reveal-spinner-bot-4" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M44.5094 22.2377c2.1383-.3726 4.0242-.7438 5.6578-1.1137v4.3375c-.0249 1.5094-1.2534 2.7212-2.763 2.7254h-.0162c-.7432.0206-1.4641-.255-2.004-.7662-.54-.5112-.8546-1.216-.8746-1.9592v-3.2238z"/>\n</svg>\n',e.style.position="relative",e.appendChild(this.el)}static loadStyles(){if(document.getElementById(K.stylesId))return;const e=document.createElement("style");e.id=K.stylesId,e.appendChild(document.createTextNode(".reveal-viewer-spinner-top-left {\n top: 10px;\n left: 10px;\n}\n\n.reveal-viewer-spinner-top-right {\n top: 10px;\n right: 10px;\n}\n\n.reveal-viewer-spinner-bottom-left {\n bottom: 10px;\n left: 10px;\n}\n\n.reveal-viewer-spinner-bottom-right {\n bottom: 10px;\n right: 10px;\n}\n\n.reveal-viewer-spinner {\n position: absolute;\n color: white;\n}\n.reveal-viewer-spinner--dark {\n color: black;\n}\n\n.reveal-viewer-spinner--loading #reveal-spinner-bot-1,\n.reveal-viewer-spinner--loading #reveal-spinner-top-1 {\n animation: reveal-loading-opacity 0.8s ease-out infinite alternate;\n}\n.reveal-viewer-spinner--loading #reveal-spinner-bot-2,\n.reveal-viewer-spinner--loading #reveal-spinner-top-2 {\n 
animation: reveal-loading-opacity 0.8s ease-out 0.2s infinite alternate;\n}\n.reveal-viewer-spinner--loading #reveal-spinner-bot-3,\n.reveal-viewer-spinner--loading #reveal-spinner-top-3 {\n animation: reveal-loading-opacity 0.8s ease-out 0.4s infinite alternate;\n}\n.reveal-viewer-spinner--loading #reveal-spinner-bot-4,\n.reveal-viewer-spinner--loading #reveal-spinner-top-4 {\n animation: reveal-loading-opacity 0.8s ease-out 0.6s infinite alternate;\n}\n\n.reveal-viewer-spinner--loading #center {\n transform-origin: center;\n animation: reveal-loading-scale 1.6s ease-out infinite 0.8s alternate,\n reveal-loading-opacity 1.6s ease-out infinite alternate;\n}\n\n@keyframes reveal-loading-opacity {\n 0% {\n opacity: 1;\n }\n 100% {\n opacity: 0.2;\n }\n}\n\n@keyframes reveal-loading-scale {\n 100% {\n transform: scaleY(1.2) translateY(0.6px);\n }\n}\n")),document.head.appendChild(e)}set placement(e){switch(this.el.classList.remove(K.classnames.bottomLeft,K.classnames.bottomRight,K.classnames.topLeft,K.classnames.topRight),e){case"topLeft":this.el.classList.add(K.classnames.topLeft);break;case"topRight":this.el.classList.add(K.classnames.topRight);break;case"bottomLeft":this.el.classList.add(K.classnames.bottomLeft);break;case"bottomRight":this.el.classList.add(K.classnames.bottomRight);break;default:Object(i.l)(e,"Invalid placement: "+e)}}set opacity(e){this.el.style.opacity=""+e}get loading(){return this._loading}set loading(e){this._loading=e,e?(this.el.classList.add(K.classnames.loading),this.el.title=K.titles.loading):(this.el.classList.remove(K.classnames.loading),this.el.title=K.titles.idle)}updateBackgroundColor(e){const{l:t}=e.getHSL({h:0,s:0,l:0});t>.5?this.el.classList.add(K.classnames.dark):this.el.classList.remove(K.classnames.dark)}dispose(){this.el.remove();const 
e=document.getElementById(K.stylesId);e&&e.remove()}}K.stylesId="reveal-viewer-spinner-styles",K.classnames={base:"reveal-viewer-spinner",topLeft:"reveal-viewer-spinner-top-left",topRight:"reveal-viewer-spinner-top-right",bottomLeft:"reveal-viewer-spinner-bottom-left",bottomRight:"reveal-viewer-spinner-bottom-right",loading:"reveal-viewer-spinner--loading",dark:"reveal-viewer-spinner--dark"},K.titles={idle:"3.0.0-alpha.0",loading:"3.0.0-alpha.0 Loading..."};var X=n(9);
|
|
239
|
+
/*!
|
|
240
|
+
* Copyright 2021 Cognite AS
|
|
241
|
+
*/
// NOTE(review): point-cloud picking helpers (core/src/datamodels/pointcloud
// picking in the unminified sources). `Y` (Vector2) and `Z` (Raycaster) are
// module-scope scratch objects reused across calls. `Q(objects, input,
// threshold = 0.05)` raycasts from input.normalizedCoords/input.camera against
// the given objects (recursively), sets the Points intersection threshold,
// drops hits whose point lies behind any of input.clippingPlanes
// (distanceToPoint < 0), and maps each remaining hit to
// { distance, point, pointIndex, pointCloudNode, object }. The node lookup
// walks up from the intersected Points object to its PointCloudOctree and
// matches it against the supplied models; a miss throws. A /*! license banner
// sits mid-expression below, exactly as in the published bundle; the diff
// gutter lines around it are reproduced untouched.
const Y=new l.Vector2,Z=new l.Raycaster;function Q(e,t,n=.05){const{normalizedCoords:r,camera:o}=t;Y.set(r.x,r.y),Z.setFromCamera(r,o),Z.params.Points={threshold:n};return Z.intersectObjects(e,!0).filter(e=>function(e,t){let n=!0;for(let r=0;n&&r<t.length;++r)n=t[r].distanceToPoint(e)>=0;return n}
|
|
242
|
+
/*!
|
|
243
|
+
* Copyright 2021 Cognite AS
|
|
244
|
+
*/(e.point,t.clippingPlanes)).map(t=>{const n=function(e,t){for(;"Points"===e.type&&null!==e.parent;)e=e.parent;if(e instanceof X.PointCloudOctree){const n=e.root;return t.find(e=>n.pointcloud===e.potreeNode.octtree)||null}return null}(t.object,e);if(null===n)throw new Error("Could not find PointCloudNode for intersected point");return{distance:t.distance,point:t.point,pointIndex:t.index,pointCloudNode:n,object:t.object}})}
// NOTE(review): `J` incrementally populates an IndexSet and an area collection
// from a paged API response (PopulateIndexSetFromPagedResponseHelper).
// pageResults() consumes pages until exhausted or interrupt() is called,
// converting each page's items to tree-index ranges and areas via the injected
// callbacks and firing the notify callback per page; it returns false when
// interrupted. isLoading reflects in-flight pagination via a counter, so
// concurrent pageResults() calls are tracked correctly.
class J{constructor(e,t,n){this._ongoingOperations=0,this._interrupted=!1,this._indexSet=new i.e,this._areas=new u,this._itemsToTreeIndexRangesCallback=e,this._itemsToAreasCallback=t,this._notifyChangedCallback=n}interrupt(){this._interrupted=!0}get isLoading(){return!this._interrupted&&this._ongoingOperations>0}get indexSet(){return this._indexSet}get areas(){return this._areas}async pageResults(e){const t=this._indexSet;this._ongoingOperations++;try{let n=await e;for(;!this._interrupted;){const e=n.next?n.next():void 0;this._itemsToTreeIndexRangesCallback(n.items).forEach(e=>t.addRange(e));const r=await this._itemsToAreasCallback(n.items);if(this._areas.addAreas(r),this._notifyChangedCallback(),!e)break;n=await e}return!this._interrupted}finally{this._ongoingOperations--}}}var $=n(17),ee=n.n($);
|
|
226
245
|
/*!
|
|
227
246
|
* Copyright 2021 Cognite AS
|
|
228
247
|
*/
|
|
229
|
-
class o{constructor(e){this._clippingPlanes=[new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane],this._box=e||new r.Box3,this.updatePlanes()}set minX(e){this._box.min.x=e,this.updatePlanes()}get minX(){return this._box.min.x}set minY(e){this._box.min.y=e,this.updatePlanes()}get minY(){return this._box.min.y}set minZ(e){this._box.min.z=e,this.updatePlanes()}get minZ(){return this._box.min.z}set maxX(e){this._box.max.x=e,this.updatePlanes()}get maxX(){return this._box.max.x}set maxY(e){this._box.max.y=e,this.updatePlanes()}get maxY(){return this._box.max.y}set maxZ(e){this._box.max.z=e,this.updatePlanes()}get maxZ(){return this._box.max.z}updatePlanes(){this._clippingPlanes[0].setFromNormalAndCoplanarPoint(new r.Vector3(1,0,0),new r.Vector3(this.minX,0,0)),this._clippingPlanes[1].setFromNormalAndCoplanarPoint(new r.Vector3(-1,0,0),new r.Vector3(this.maxX,0,0)),this._clippingPlanes[2].setFromNormalAndCoplanarPoint(new r.Vector3(0,1,0),new r.Vector3(0,this.minY,0)),this._clippingPlanes[3].setFromNormalAndCoplanarPoint(new r.Vector3(0,-1,0),new r.Vector3(0,this.maxY,0)),this._clippingPlanes[4].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,1),new r.Vector3(0,0,this.minZ)),this._clippingPlanes[5].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,-1),new r.Vector3(0,0,this.maxZ))}get clippingPlanes(){return this._clippingPlanes}}},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use strict";var r=n(13);n.d(t,"d",(function(){return r.b}));var o=n(18);n.d(t,"a",(function(){return o.a}));var i=n(1);n.d(t,"c",(function(){return i.k})),n.d(t,"b",(function(){return 
i.d}))},function(e,t){e.exports=require("comlink")},function(e,t){e.exports=require("loglevel")},function(e,t){e.exports=require("lodash/throttle")},function(e,t){e.exports=require("lodash/chunk")},function(e,t){e.exports=require("lodash/range")},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || 
renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n updateFragmentColor(renderMode, color, v_treeIndex, v_normal, gl_FragCoord.z, matCapTexture, GeometryType.Quad);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = 
floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n 
matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec3 color;\nattribute float treeIndex;\nattribute vec4 matrix0;\nattribute vec4 matrix1;\nattribute vec4 matrix2;\nattribute vec4 matrix3;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n \n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = treeIndex;\n v_color = color;\n v_normal = normalize(normalMatrix * (inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(normal), 0.0)).xyz);\n mat4 instanceMatrix = mat4(matrix0, matrix1, matrix2, matrix3);\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d 
+ e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n 
} else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int 
RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform 
sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main()\n{\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.TriangleMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / 
dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec3 color;\nattribute float treeIndex; \n\nvarying vec3 v_color;\nvarying float v_treeIndex;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_color = color;\n v_treeIndex = treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n 
treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(position, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int 
RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor 
= isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.InstancedMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 
256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; 
i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_color = a_color;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * modelMatrix * vec4(transformed, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n v_treeIndex = a_treeIndex;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + 
e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if 
(renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, 
outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform 
sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n vec3 normal = normalize( v_normal );\n if (dist > 0.25)\n discard;\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D 
transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 
a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_normal;\n\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\nvarying float v_treeIndex;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_xy = vec2(position.x, position.y);\n v_treeIndex = a_treeIndex;\n\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec3 normal = normalize( v_normal );\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = v_V;\n float height = length(v_centerA.xyz - v_centerB.xyz);\n float R2 = v_centerA.w;\n float dR = R2 - R1;\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n #if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n #else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n #endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR / height;\n float dRdRL2Inv = dRLInv * dRLInv;\n a -= D.z * D.z * dRdRL2Inv;\n b -= dRLInv * (E.z * D.z * dRLInv + R1 * D.z);\n c -= dRLInv * (E.z * E.z * dRLInv + 2.0 * R1 * E.z);\n }\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0) {\n discard;\n }\n\n float sqrtd = 
sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n bool isInner = false;\n\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n #if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n if (R1 != R2)\n {\n // Find normal vector\n vec3 n = -normalize(W.xyz);\n vec3 P1 = v_centerB.xyz;\n vec3 P2 = v_centerA.xyz;\n vec3 A = cross(P1 - p, P2 - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = P1 + R1 * t;\n vec3 o2 = P2 + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n }\n else\n {\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n }\n #endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// 
https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n 
float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_color;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 
modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float maxRadius = max(a_radiusA, a_radiusB);\n float leftUpScale = maxRadius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (maxRadius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0.0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n float radiusB = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusB, 0.0)).xyz);\n float radiusA = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusA, 0.0)).xyz);\n\n // We pack radii as w-components of v_centerB\n v_centerB.xyz = 
mul3(modelViewMatrix, centerB);\n v_centerB.w = radiusB;\n\n v_V.xyz = -cross(v_U.xyz, v_W.xyz);\n v_V.w = surfacePoint.y;\n\n v_centerA.xyz = mul3(modelViewMatrix, centerA);\n v_centerA.w = radiusA;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float 
projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 
= 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number 
/ 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 
0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\n\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\nvarying float height;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, 
v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n mat3 basis = mat3(U.xyz, V.xyz, axis.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, axis.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerA.xyz;\n vec3 E = diff * basis;\n float L = height;\n vec3 D = rayDirection * basis;\n\n float R1 = v_centerA.w;\n float R2 = v_centerB.w;\n float dR = R2 - R1;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy)-R1*R1;\n float L2Inv = 1.0/(L*L);\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR/L;\n float dRdRL2Inv = dRLInv*dRLInv;\n a -= D.z*D.z*dRdRL2Inv;\n b -= dRLInv*(E.z*D.z*dRLInv + R1*D.z);\n c -= dRLInv*(E.z*E.z*dRLInv + 2.0*R1*E.z);\n }\n\n // Additional terms when one of the center points is displaced orthogonal to normal vector\n vec2 displacement = ((v_centerB.xyz-v_centerA.xyz)*basis).xy; // In the basis where displacement is in XY only\n float displacementLengthSquared = dot(displacement, displacement);\n a += D.z*(D.z*displacementLengthSquared - 2.0*L*dot(D.xy, displacement))*L2Inv;\n b += (D.z*E.z*displacementLengthSquared - L*(D.x*E.z*displacement.x + D.y*E.z*displacement.y + D.z*E.x*displacement.x + D.z*E.y*displacement.y))*L2Inv;\n c += E.z*(E.z*displacementLengthSquared - 2.*L*dot(E.xy, displacement))*L2Inv;\n\n // Calculate a dicriminant of the above quadratic equation (factor 2 removed from all b-terms above)\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long eccentric cone\n if (d < 0.0) {\n discard;\n }\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the 
smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n // Check the smallest root, it is closest camera. Only test if the z-component is outside the truncated eccentric cone\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n bool isInner = false;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Either intersection point is behind starting point (happens inside the cone),\n // or the intersection point is outside the end caps. This is not a valid solution.\n isInner = true;\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector\n vec3 n = normalize(-axis.xyz);\n vec3 v_centerA = v_centerA.xyz;\n vec3 v_centerB = v_centerB.xyz;\n vec3 A = cross(v_centerA - p, v_centerB - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = v_centerA + R1 * t;\n vec3 o2 = v_centerB + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = 
mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n 
float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_normal;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\nvarying float height;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 normalWithOffset = normalize((modelTransformOffset * vec4(a_normal, 0)).xyz);\n\n float uniformScaleFactor = length(mul3(modelMatrix, normalize(vec3(1.0))));\n\n height = dot(centerA - centerB, 
normalWithOffset) * uniformScaleFactor;\n\n vec3 lDir;\n vec3 center = 0.5 * (centerA + centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n // Find the coordinates of centerA and centerB projected down to the end cap plane\n vec3 maxCenterProjected = centerA - dot(centerA, normalWithOffset) * normalWithOffset;\n vec3 minCenterProjected = centerB - dot(centerB, normalWithOffset) * normalWithOffset;\n float distanceBetweenProjectedCenters = length(maxCenterProjected - minCenterProjected);\n\n lDir = normalWithOffset;\n float dirSign = 1.0;\n if (dot(objectToCameraModelSpace, lDir) < 0.0) { // direction vector looks away, flip it\n dirSign = -1.0;\n lDir *= -1.;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n // compute basis for cone\n axis.xyz = -normalWithOffset;\n U.xyz = cross(objectToCameraModelSpace, axis.xyz);\n V.xyz = cross(U.xyz, axis.xyz);\n // Transform to camera space\n axis.xyz = normalize(normalMatrix * axis.xyz);\n U.xyz = normalize(normalMatrix * U.xyz);\n V.xyz = normalize(normalMatrix * V.xyz);\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radiusA * (position.x + 1.0) * 0.0025 / height);\n#endif\n\n v_centerA.xyz = mul3(viewMatrix, mul3(modelMatrix, centerA));\n v_centerB.xyz = mul3(viewMatrix, mul3(modelMatrix, centerB));\n\n float radiusA = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusA, 0.0)).xyz);\n float radiusB = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusB, 0.0)).xyz);\n\n // Pack radii as w components of v_centerA and v_centerB\n v_centerA.w = radiusA;\n v_centerB.w = radiusB;\n\n float radiusIncludedDisplacement = 0.5*(2.0*max(a_radiusA, a_radiusB) + 
distanceBetweenProjectedCenters);\n vec3 surfacePoint = center + mat3(0.5 * height * lDir * (1.0 / uniformScaleFactor), radiusIncludedDisplacement*left, radiusIncludedDisplacement*up) * newPosition;\n vec3 transformed = surfacePoint;\n\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n axis.w = surfacePoint.z;\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // 
Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, 
renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include 
<packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(sphereNormal.xyz);\n\n float vRadius = center.w;\n float ratio = vRadius / hRadius;\n mat3 basis = mat3(U.xyz, V.xyz, sphereNormal.xyz);\n mat3 scaledBasis = mat3(ratio * U.xyz, ratio * V.xyz, sphereNormal.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, sphereNormal.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - center.xyz;\n vec3 E = diff * scaledBasis;\n vec3 D = rayDirection * scaledBasis;\n\n float a = dot(D, D);\n float b = dot(E, D);\n float c = dot(E, E) - vRadius*vRadius;\n\n // discriminant of sphere equation (factor 2 removed from b above)\n float d = b*b - a*c;\n if(d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the first 
point, check the other point\n\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector in local space\n normal = vec3(p - center.xyz) * basis;\n normal.z = normal.z * (hRadius / vRadius) * (hRadius / vRadius);\n // Transform into camera space\n normal = normalize(basis * normal);\n if (dot(normal, rayDirection) > 0.) {\n normal = -normal;\n }\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b 
* 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\nuniform mat4 inverseNormalMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_center;\nattribute vec3 a_normal;\nattribute float a_horizontalRadius;\nattribute float a_verticalRadius;\nattribute float a_height;\n\nvarying float v_treeIndex;\n// We pack vRadius as w-component of center\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\n// U, V, axis represent the 3x3 sphere basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 
transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerWithOffset = mul3(modelTransformOffset, a_center).xyz;\n\n vec3 normalWithOffset = (modelTransformOffset * vec4(a_normal, 0)).xyz;\n\n vec3 lDir;\n float distanceToCenterOfSegment = a_verticalRadius - a_height * 0.5;\n vec3 centerOfSegment = centerWithOffset + normalWithOffset * distanceToCenterOfSegment;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 objectToCameraModelSpace = inverseNormalMatrix * vec3(0.0, 0.0, 1.0);\n#else\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - centerOfSegment;\n#endif\n\n vec3 newPosition = position;\n\n float bb = dot(objectToCameraModelSpace, normalWithOffset);\n if (bb < 0.0) { // direction vector looks away, flip it\n lDir = -normalWithOffset;\n } else { // direction vector already looks in my direction\n lDir = normalWithOffset;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_verticalRadius * (position.x + 1.0) * 0.0025 / a_height);\n#endif\n\n // Negative angle means height larger than radius,\n // so we should have full size so we can render the largest part of the ellipsoid segment\n float ratio = max(0.0, 1.0 - a_height / a_verticalRadius);\n // maxRadiusOfSegment is the radius of the circle (projected ellipsoid) when ellipsoid segment is seen from above\n float maxRadiusOfSegment = a_horizontalRadius * sqrt(1.0 - 
ratio * ratio);\n\n vec3 displacement = vec3(newPosition.x*a_height*0.5, maxRadiusOfSegment*newPosition.y, maxRadiusOfSegment*newPosition.z);\n vec3 surfacePoint = centerOfSegment + mat3(lDir, left, up) * displacement;\n vec3 transformed = surfacePoint;\n\n v_treeIndex = a_treeIndex;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n center.xyz = mul3(modelViewMatrix, centerWithOffset);\n center.w = a_verticalRadius; // Pack radius into w-component\n hRadius = a_horizontalRadius;\n height = a_height;\n v_color = a_color;\n\n // compute basis\n sphereNormal.xyz = normalMatrix * normalWithOffset;\n U.xyz = normalMatrix * up;\n V.xyz = normalMatrix * left;\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n sphereNormal.w = surfacePoint.z;\n\n // TODO should perhaps be a different normal?\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n'},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\n// TODO general cylinder and cone are very similar and used\n// the same shader in the old code. Consider de-duplicating\n// parts of this code\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform float dataTextureWidth;\nuniform float dataTextureHeight;\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_surfacePointY;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = vec4(normalize(cross(W.xyz, U.xyz)), v_surfacePointY);\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make 
sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n vec3 planeACenter = vec3(0.0, 0.0, v_planeA.w);\n vec3 planeANormal = v_planeA.xyz;\n vec3 planeBCenter = vec3(0.0, 0.0, v_planeB.w);\n vec3 planeBNormal = v_planeB.xyz;\n bool isInner = false;\n\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle || isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n 
float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / 
overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radius;\nattribute vec3 a_color;\n// slicing plane attributes\nattribute vec4 a_planeA;\nattribute vec4 a_planeB;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_surfacePointY;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * 
modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float leftUpScale = a_radius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n // We pack radii as w-components of v_centerB\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n float radius = length((modelToTransformOffset * vec4(a_localXAxis * a_radius, 0.0)).xyz);\n\n centerB = centerB - dir;\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radius;\n\n vec4 planeA = a_planeA;\n planeA.w = length((modelToTransformOffset * vec4(planeA.xyz * planeA.w, 0.0)).xyz);\n\n vec4 planeB = 
a_planeB;\n planeB.w = length((modelToTransformOffset * vec4(planeB.xyz * planeB.w, 0.0)).xyz);\n\n v_planeA = planeA;\n v_planeB = planeB;\n v_surfacePointY = surfacePoint.y;\n v_centerB.w = radius;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = 
float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = 
floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int 
geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if 
(any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n float theta = atan(v_xy.y, v_xy.x);\n vec3 normal = normalize( v_normal );\n if (theta < v_angle) {\n theta += 2.0 * PI;\n }\n if (dist > 0.25 || dist < 0.25 * v_oneMinusThicknessSqr || theta >= v_angle + v_arcAngle) {\n discard;\n }\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign 
* exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = 
unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_angle;\nattribute float a_arcAngle;\nattribute float a_thickness;\nattribute vec3 a_normal;\n\nvarying float v_treeIndex;\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n v_treeIndex = a_treeIndex;\n v_oneMinusThicknessSqr = (1.0 - a_thickness) * (1.0 - a_thickness);\n v_xy = vec2(position.x, position.y);\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * 
treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * 
hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_arcAngle;\nattribute float a_radius;\nattribute float a_tubeRadius;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n // normalized theta and phi are packed into positions\n float theta = position.x * a_arcAngle;\n float phi = position.y;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n vec3 pos3 = vec3(0);\n\n pos3.x = (a_radius + a_tubeRadius*cos(phi)) * cosTheta;\n pos3.y = (a_radius + a_tubeRadius*cos(phi)) * sinTheta;\n pos3.z = a_tubeRadius*sin(phi);\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n vec3 transformed = (instanceMatrix * vec4(pos3, 1.0)).xyz;\n\n // Calculate normal vectors if we're not picking\n vec3 center = (instanceMatrix * vec4(a_radius * cosTheta, 
a_radius * sinTheta, 0.0, 1.0)).xyz;\n vec3 objectNormal = normalize(transformed.xyz - center);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n\n vViewPosition = modelViewPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n 
return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 
255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], 
matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_vertex1;\nattribute vec3 a_vertex2;\nattribute vec3 a_vertex3;\nattribute vec3 a_vertex4;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n vec3 transformed;\n // reduce the avarage branchings\n if (position.x < 1.5) {\n transformed = position.x == 0.0 ? a_vertex1 : a_vertex2;\n } else {\n transformed = position.x == 2.0 ? a_vertex3 : a_vertex4;\n }\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 objectNormal = cross(a_vertex1 - a_vertex2, a_vertex1 - a_vertex3);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 
173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \nmat3 G[9];\n// hard coded matrix values!!!! as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45\nconst mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );\nconst mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );\nconst mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );\nconst mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );\nconst mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );\nconst mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );\nconst mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );\nconst mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );\nconst mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 
0.3333333432674408, 0.3333333432674408 );\n\nfloat edgeDetectionFilter(sampler2D baseTexture, vec2 uv, vec2 resolution) {\n\n vec2 texel = vec2(1.0 / resolution.x, 1.0 / resolution.y);\n\n\tG[0] = g0,\n\tG[1] = g1,\n\tG[2] = g2,\n\tG[3] = g3,\n\tG[4] = g4,\n\tG[5] = g5,\n\tG[6] = g6,\n\tG[7] = g7,\n\tG[8] = g8;\n\n\tmat3 I;\n\tfloat cnv[9];\n\tvec3 neighbour;\n\n\t/* fetch the 3x3 neighbourhood and use the RGB vector's length as intensity value */\n\tfor (int i=0; i<3; i++) {\n\t\tfor (int j=0; j<3; j++) {\n\t\t\tneighbour = texture2D(baseTexture, uv + texel * vec2(float(i)-1.0,float(j)-1.0) ).rgb;\n\t\t\tI[i][j] = length(neighbour);\n\t\t}\n\t}\n\n\t/* calculate the convolution values for all the masks */\n\tfor (int i=0; i<9; i++) {\n\t\tfloat dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);\n\t\tcnv[i] = dp3 * dp3;\n\t}\n\n\tfloat M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);\n\tfloat S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);\n\n float edgeStrength = sqrt(M/S);\n\n return edgeStrength;\n}\n\n#include <packing>\n\nvarying vec2 vUv;\n\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nuniform sampler2D tFront;\nuniform sampler2D tFrontDepth;\n\nuniform sampler2D tBack;\nuniform sampler2D tBackDepth;\n\nuniform sampler2D tCustom;\nuniform sampler2D tCustomDepth;\n\nuniform sampler2D tGhost;\nuniform sampler2D tGhostDepth;\n\nuniform sampler2D tOutlineColors;\n\nuniform float cameraNear;\nuniform float cameraFar;\n\nuniform vec2 resolution;\n\nuniform float edgeStrengthMultiplier;\nuniform float edgeGrayScaleIntensity;\n\nconst float infinity = 1e20;\n\nfloat computeFloatEncodedOutlineIndex(float bitEncodedFloat){\n return floatBitsSubset(floor((bitEncodedFloat * 255.0) + 0.5), 3, 6);\n}\n\nvec4 computeNeighborOutlineIndices(sampler2D colorTexture){\n float outlineIndex0 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv0).a);\n float outlineIndex1 = 
computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv1).a);\n float outlineIndex2 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv2).a);\n float outlineIndex3 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv3).a);\n\n return vec4(outlineIndex0, outlineIndex1, outlineIndex2, outlineIndex3);\n}\n\nfloat toViewZ(float depth, float near, float far){\n float normalizedDepth = depth * 2.0 - 1.0;\n return 2.0 * near * far / (far + near - normalizedDepth * (far - near)); \n}\n\nvec4 computeNeighborAlphas(sampler2D colorTexture){\n float alpha0 = texture2D(colorTexture, vUv0).a;\n float alpha1 = texture2D(colorTexture, vUv1).a;\n float alpha2 = texture2D(colorTexture, vUv2).a;\n float alpha3 = texture2D(colorTexture, vUv3).a;\n\n return vec4(alpha0, alpha1, alpha2, alpha3);\n}\n\nvoid main() {\n vec4 frontAlbedo = texture2D(tFront, vUv);\n vec4 backAlbedo = texture2D(tBack, vUv);\n vec4 customAlbedo = texture2D(tCustom, vUv);\n vec4 ghostAlbedo = texture2D(tGhost, vUv);\n\n float frontDepth = texture2D(tFrontDepth, vUv).r;\n float backDepth = texture2D(tBackDepth, vUv).r; \n float customDepth = texture2D(tCustomDepth, vUv).r;\n float ghostDepth = texture2D(tGhostDepth, vUv).r;\n\n // This is a hack to make sure that all textures are initialized\n // If a texture is unused, it will have a clear value of 0.0.\n // Without this we've seen issues with MSAA where resizing render targets\n // causes depth to cleared to either 1 or 0 depending on the device/browser\n customDepth = customDepth > 0.0 ? customDepth : 1.0; \n backDepth = backDepth > 0.0 ? backDepth : 1.0;\n ghostDepth = ghostDepth > 0.0 ? ghostDepth : 1.0;\n frontDepth = frontDepth > 0.0 ? 
frontDepth : 1.0; \n\n if(all(greaterThanEqual(vec4(backDepth, customDepth, ghostDepth, frontDepth), vec4(1.0)))){\n discard;\n }\n \n // Decompose and clamp \"ghost\" color\n vec4 clampedGhostAlbedo = vec4(max(ghostAlbedo.rgb, 0.5), min(ghostAlbedo.a, 0.8));\n\n float frontOutlineIndex = computeFloatEncodedOutlineIndex(frontAlbedo.a);\n vec4 frontNeighborIndices = computeNeighborOutlineIndices(tFront);\n\n // There exsists fragments of rendered objects within the edge width that should have border\n if(any(equal(frontNeighborIndices, vec4(0.0))) && frontOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(frontNeighborIndices.x, frontNeighborIndices.y), max(frontNeighborIndices.z, frontNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = frontDepth;\n#endif\n return;\n }\n\n // texture has drawn fragment\n if(frontDepth < 1.0){\n float customDepthTest = step(customDepth, backDepth); // zero if back is in front\n\n float a = customDepthTest > 0.0 ? 
ceil(customAlbedo.a) * 0.5 : ceil(backAlbedo.a) * 0.5;\n\n gl_FragColor = vec4(frontAlbedo.rgb, 1.0) * (1.0 - a) + (vec4(backAlbedo.rgb, 1.0) * (1.0 - customDepthTest) + vec4(customAlbedo.rgb, 1.0) * customDepthTest) * a;\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = texture2D(tFrontDepth, vUv).r;\n#endif\n return;\n }\n\n if (customDepth >= backDepth) {\n float backOutlineIndex = computeFloatEncodedOutlineIndex(backAlbedo.a);\n vec4 backNeighborIndices = computeNeighborOutlineIndices(tBack);\n\n if( any(equal(backNeighborIndices, vec4(0.0))) && backOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(backNeighborIndices.x, backNeighborIndices.y), max(backNeighborIndices.z, backNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tBackDepth, vUv).r;\n#endif\n return;\n }\n }\n \n float edgeStrength = 0.0;\n#if defined(EDGES)\n if (!any(equal(computeNeighborAlphas(tBack), vec4(0.0)))) {\n float depthEdge = toViewZ(backDepth, cameraNear, cameraFar);\n edgeStrength = (1.0 - smoothstep(10.0, 40.0, depthEdge)) * edgeDetectionFilter(tBack, vUv, resolution) * edgeStrengthMultiplier;\n }\n#endif\n\n // Combine color from ghost, back and custom object\n vec4 color = backAlbedo;\n float depth = backDepth;\n if (customDepth < backDepth && ghostDepth == 1.0) {\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * backAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else if (customDepth < backDepth && ghostDepth < 1.0) {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n vec3 modelAlbedo = mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s);\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * modelAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else {\n float s = (1.0 - 
step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n color = vec4(mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s), backAlbedo.a);\n depth = mix(backDepth, ghostDepth, s);\n }\n \n gl_FragColor = color * (1.0 - edgeStrength) + vec4(vec3(edgeGrayScaleIntensity) * edgeStrength, 1.0);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = depth;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\n// selection outline\nuniform vec2 texelSize;\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nvoid main() {\n vUv = uv;\n\n // selection outline\n vUv0 = vec2(uv.x + texelSize.x, uv.y);\n vUv1 = vec2(uv.x - texelSize.x, uv.y);\n vUv2 = vec2(uv.x, uv.y + texelSize.y);\n vUv3 = vec2(uv.x, uv.y - texelSize.y);\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 v_rgbSE;\nvarying vec2 v_rgbM;\n\nuniform vec2 inverseResolution;\nuniform vec2 resolution;\nuniform sampler2D tDiffuse;\nuniform sampler2D tDepth;\n\n#ifndef FXAA_REDUCE_MIN\n #define FXAA_REDUCE_MIN (1.0/ 128.0)\n#endif\n#ifndef FXAA_REDUCE_MUL\n #define FXAA_REDUCE_MUL (1.0 / 8.0)\n#endif\n#ifndef FXAA_SPAN_MAX\n #define FXAA_SPAN_MAX 8.0\n#endif\n\nvec4 fxaa(sampler2D tex, vec2 fragCoord,\n vec2 resolution, vec2 inverseResolution,\n vec2 v_rgbNW, vec2 v_rgbNE,\n vec2 v_rgbSW, vec2 v_rgbSE,\n vec2 v_rgbM) {\n vec4 color;\n\n vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;\n vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;\n vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;\n vec3 rgbSE = texture2D(tex, v_rgbSE).xyz;\n vec4 texColor = texture2D(tex, v_rgbM);\n vec3 
rgbM = texColor.xyz;\n\n vec3 luma = vec3(0.299, 0.587, 0.114);\n float lumaNW = dot(rgbNW, luma);\n float lumaNE = dot(rgbNE, luma);\n float lumaSW = dot(rgbSW, luma);\n float lumaSE = dot(rgbSE, luma);\n float lumaM = dot(rgbM, luma);\n float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));\n float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));\n\n mediump vec2 dir;\n dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));\n dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));\n\n float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *\n (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);\n\n float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);\n dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),\n max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),\n dir * rcpDirMin));\n\n vec4 rgbA = 0.5 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (1.0 / 3.0 - 0.5))) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (2.0 / 3.0 - 0.5))));\n vec4 rgbB = rgbA * 0.5 + 0.25 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * -0.5)) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * 0.5)));\n\n float lumaB = dot(rgbB.rgb, luma);\n if ((lumaB < lumaMin) || (lumaB > lumaMax)) {\n color = rgbA;\n } else {\n color = rgbB;\n }\n return color;\n}\n\nvoid main() {\n gl_FragColor = fxaa(tDiffuse, v_fragCoord, \n resolution, inverseResolution, \n v_rgbNW, v_rgbNE, v_rgbSW, v_rgbSE, v_rgbM);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tDepth, v_uv).r;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nuniform vec2 resolution;\nuniform vec2 inverseResolution;\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 
v_rgbSE;\nvarying vec2 v_rgbM;\n\nvoid main() {\n v_fragCoord = uv * resolution;\n v_rgbNW = (v_fragCoord + vec2(-1.0, -1.0)) * inverseResolution;\n v_rgbNE = (v_fragCoord + vec2(1.0, -1.0)) * inverseResolution;\n v_rgbSW = (v_fragCoord + vec2(-1.0, 1.0)) * inverseResolution;\n v_rgbSE = (v_fragCoord + vec2(1.0, 1.0)) * inverseResolution;\n v_rgbM = vec2(v_fragCoord * inverseResolution);\n v_uv = uv;\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nuniform mat4 projMatrix;\nuniform mat4 inverseProjectionMatrix;\n\nuniform vec3 kernel[MAX_KERNEL_SIZE];\n\nuniform sampler2D tDepth;\nuniform sampler2D tNoise;\n\nuniform vec2 resolution;\n\nuniform float sampleRadius;\nuniform float bias;\n\nvec3 viewPosFromDepth(float depth, vec2 uv) {\n // Depth to clip space: [0, 1] -> [-1, 1]\n float z = depth * 2.0 - 1.0;\n\n // Fragment in clip space\n vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = inverseProjectionMatrix * clipSpacePosition;\n\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n\n return viewSpacePosition.xyz;\n}\n\nvec3 computeWorldNormalFromDepth(sampler2D depthTexture, vec2 resolution, vec2 uv, float sampleDepth){\n float dx = 1.0 / resolution.x;\n float dy = 1.0 / resolution.y;\n\n vec2 uv1 = uv + vec2(dx, 0.0); // right\n float d1 = texture2D(depthTexture, uv1).r; \n\n vec2 uv2 = uv + vec2(0.0, dy); // up\n float d2 = texture2D(depthTexture, uv2).r;\n\n vec2 uv3 = uv + vec2(-dx, 0.0); // left\n float d3 = texture2D(depthTexture, uv3).r;\n\n vec2 uv4 = uv + vec2(0.0, -dy); // down\n float d4 = texture2D(depthTexture, uv4).r;\n\n bool horizontalSampleCondition = abs(d1 - sampleDepth) < abs(d3 - sampleDepth);\n\n float horizontalSampleDepth = horizontalSampleCondition ? d1 : d3;\n vec2 horizontalSampleUv = horizontalSampleCondition ? 
uv1 : uv3;\n\n bool verticalSampleCondition = abs(d2 - sampleDepth) < abs(d4 - sampleDepth);\n\n float verticalSampleDepth = verticalSampleCondition ? d2 : d4;\n vec2 verticalSampleUv = verticalSampleCondition ? uv2 : uv4;\n\n vec3 viewPos = viewPosFromDepth(sampleDepth, vUv);\n \n vec3 viewPos1 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv) : viewPosFromDepth(verticalSampleDepth, verticalSampleUv);\n vec3 viewPos2 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(verticalSampleDepth, verticalSampleUv) : viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv);\n\n return normalize(cross(viewPos1 - viewPos, viewPos2 - viewPos));\n}\n\nvoid main(){\n float d = texture2D(tDepth, vUv).r;\n\n vec3 viewNormal = computeWorldNormalFromDepth(tDepth, resolution, vUv, d);\n\n vec3 viewPosition = viewPosFromDepth(d, vUv);\n\n vec2 noiseScale = vec2( resolution.x / 128.0, resolution.y / 128.0 );\n vec3 randomVec = normalize(texture2D(tNoise, vUv * noiseScale).xyz);\n\n vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));\n\n vec3 bitangent = cross(viewNormal, tangent);\n\n mat3 TBN = mat3(tangent, bitangent, viewNormal);\n\n float occlusion = 0.0;\n\n for (int i = 0; i < MAX_KERNEL_SIZE; i++){\n \n vec3 sampleVector = TBN * kernel[i];\n sampleVector = viewPosition + sampleVector * sampleRadius;\n\n vec4 offset = projMatrix * vec4(sampleVector, 1.0);\n offset.xyz /= offset.w;\n offset.xyz = offset.xyz * 0.5 + 0.5;\n\n float realDepth = texture2D(tDepth, offset.xy).r;\n vec3 realPos = viewPosFromDepth(realDepth, offset.xy);\n\n float rangeCheck = smoothstep(0.0, 1.0, sampleRadius / length(viewPosition - realPos));\n\n occlusion += (realPos.z >= sampleVector.z + bias ? 
1.0 : 0.0) * rangeCheck;\n }\n\n float occlusionFactor = 1.0 - clamp(occlusion / float(MAX_KERNEL_SIZE), 0.0, 1.0);\n\n gl_FragColor = vec4(vec3(occlusionFactor), 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Copyright Cognite (C) 2019 Cognite\n//\n// Efficient Gaussian blur based on technique described by Daniel Rákos in\n// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n//\n\nvarying vec2 vUv;\n\nuniform sampler2D tDiffuse;\nuniform sampler2D tAmbientOcclusion;\n\nuniform vec2 resolution;\n\nvoid main() {\n float blurredAO = 0.5 * (\n 2.0 * texture2D(tAmbientOcclusion, vUv).r * 0.2270270270 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703\n );\n\n gl_FragColor = vec4(texture2D(tDiffuse, vUv).rgb * blurredAO, 1.0);\n}\n\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) 
{\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp float v_visible;\nvarying lowp vec2 v_seed;\n\nvarying vec3 v_viewPosition;\n\nvoid main() {\n \n if(v_visible != 1.0 || isSliced(v_viewPosition)){\n discard;\n }\n\n float v = rand2d(gl_FragCoord.xy + v_seed);\n if (v >= v_coverageFactor) {\n discard;\n }\n\n gl_FragColor = vec4(v_color, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nattribute mediump float a_sectorId;\nattribute lowp vec3 a_coverageFactor;\nattribute lowp float a_visible;\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp vec2 v_seed;\nvarying lowp float v_visible;\n\nvarying vec3 v_viewPosition;\n\nvoid main()\n{\n v_visible = a_visible;\n v_color = packIntToColor(a_sectorId);\n v_coverageFactor = abs(dot(a_coverageFactor, normal));\n // A seed to ensure that two overlapping sectors A and B \n // doesn't produce the same noise pattern\n v_seed = vec2(a_sectorId / 255.0, a_sectorId / 65025.0);\n\n vec4 mvPosition = modelViewMatrix * instanceMatrix * vec4( position, 1.0 );\n\n v_viewPosition = mvPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 
1.0);\n}\n"},function(e,t){e.exports=require("@cognite/sdk")},function(e,t){e.exports=require("lodash/debounce")},function(e,t){e.exports=require("lodash/omit")},function(e,t){e.exports=require("skmeans")},function(e,t){e.exports=require("@cognite/sdk-core")},,function(e,t,n){"use strict";
|
|
248
|
+
class te extends a{constructor(e,t){super(te.classToken),this._indexSet=new i.e,this._areas=m.instance(),this._client=e,this._model=t,this._fetchResultHelper=void 0}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=this._model;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const n=new J(e=>e.map(e=>new i.i(e.treeIndex,e.subtreeSize)),e=>this.fetchBoundingBoxesForAssetMappings(e),()=>this.notifyChanged());this._fetchResultHelper=n;const r={assetId:e.assetId,intersectsBoundingBox:function(e){if(void 0===e)return;const n=(new l.Box3).copy(e);return t.mapBoxFromModelToCdfCoordinates(n,n),{min:[n.min.x,n.min.y,n.min.z],max:[n.max.x,n.max.y,n.max.z]}}(e.boundingBox),limit:1e3};this._indexSet=n.indexSet,this._areas=n.areas,this._filter=e;const o=this._client.assetMappings3D.list(t.modelId,t.revisionId,r);await n.pageResults(o)&&(this._fetchResultHelper=void 0)}async fetchBoundingBoxesForAssetMappings(e){return(await this._client.revisions3D.retrieve3DNodes(this._model.modelId,this._model.revisionId,e.map(e=>({id:e.nodeId})))).filter(e=>e.boundingBox).map(e=>{const t=e.boundingBox.min,n=e.boundingBox.max;return(new l.Box3).setFromArray([t[0],t[1],t[2],n[0],n[1],n[2]])})}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear()}getIndexSet(){return this._indexSet}getAreas(){return this._areas}serialize(){return{token:this.classToken,state:ee()(this._filter)}}}te.classToken="AssetNodeCollection";var ne=n(18),re=n.n(ne);
|
|
230
249
|
/*!
|
|
231
250
|
* Copyright 2021 Cognite AS
|
|
232
251
|
*/
|
|
233
|
-
|
|
252
|
+
class oe extends a{constructor(e,t,n={}){super(oe.classToken),this._indexSet=new i.e,this._areas=m.instance(),this._filter={},this._client=e,this._model=t,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const{requestPartitions:t}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const n=new J(e=>e.map(e=>new i.i(e.treeIndex,e.subtreeSize)),async e=>e.map(e=>{const t=new l.Box3;return void 0!==e.boundingBox&&(Object(i.u)(e.boundingBox,t),this._model.mapBoxFromCdfToModelCoordinates(t,t)),t}),()=>this.notifyChanged());this._fetchResultHelper=n,this._indexSet=n.indexSet,this._areas=n.areas;const{modelId:r,revisionId:o}=this._model,a=re()(1,t+1).map(async i=>{const a=this._client.revisions3D.list3DNodes(r,o,{properties:e,limit:1e3,sortByNodeId:!0,partition:`${i}/${t}`});return n.pageResults(a)});this._filter=e,this.notifyChanged(),await Promise.all(a)}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}getAreas(){return this._areas}serialize(){return{token:this.classToken,state:ee()(this._filter),options:{...this._options}}}}oe.classToken="PropertyFilterNodeCollection";class ie extends a{constructor(e,t){super(ie.classToken),this._innerCollection=t,this._innerCollection.on("changed",()=>{this._cachedIndexSet=void 0,this.notifyChanged()}),this._allTreeIndicesRange=new i.i(0,e.nodeCount)}get isLoading(){return this._innerCollection.isLoading}getIndexSet(){if(void 0===this._cachedIndexSet){const e=this._innerCollection.getIndexSet(),t=new i.e;t.addRange(this._allTreeIndicesRange),t.differenceWith(e),this._cachedIndexSet=t}return this._cachedIndexSet}getAreas(){throw new Error(`${this.getAreas.name} is not supported for 
${ie.name}`)}serialize(){return{token:this.classToken,state:{innerCollection:this._innerCollection.serialize()}}}clear(){throw new Error("clear() is not supported")}}ie.classToken="InvertedNodeCollection";
|
|
234
253
|
/*!
|
|
235
254
|
* Copyright 2021 Cognite AS
|
|
236
255
|
*/
|
|
237
|
-
|
|
256
|
+
class ae extends a{constructor(e,t,n={}){super(ae.classToken),this._indexSet=new i.e,this._areas=m.instance(),this._filter={propertyCategory:"",propertyKey:"",propertyValues:new Array},this._client=e,this._model=t,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e,t,n){const{requestPartitions:r}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const o=new J(e=>e.map(e=>new i.i(e.treeIndex,e.subtreeSize)),async e=>e.map(e=>{const t=new l.Box3;return void 0!==e.boundingBox&&(Object(i.u)(e.boundingBox,t),this._model.mapBoxFromCdfToModelCoordinates(t,t)),t}),()=>this.notifyChanged());this._fetchResultHelper=o,this._indexSet=o.indexSet,this._areas=o.areas;const a=this.buildUrl(),s=Array.from(function*(e){for(let t=0;t<e.length;t+=1e3){const n=e.slice(t,Math.min(e.length,t+1e3));yield n}}(n)).flatMap(n=>{const i={properties:{[""+e]:{[""+t]:n}}};return re()(1,r+1).map(async e=>{const t=async function(e,t,n){const r=await de(e,t,n);return new se(e,t,n,r)}(this._client,a,{data:{filter:i,limit:1e3,partition:`${e}/${r}`}});return o.pageResults(t)})});this.notifyChanged(),await Promise.all(s)}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}getAreas(){return this._areas}buildUrl(){return`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${this._model.modelId}/revisions/${this._model.revisionId}/nodes/list`}serialize(){return{token:this.classToken,state:ee()(this._filter),options:{...this._options}}}}ae.classToken="SinglePropertyNodeCollection";class se{constructor(e,t,n,r){this.items=r.items,this.nextCursor=r.nextCursor,void 0!==this.nextCursor&&(this.next=async()=>{const r={...n,data:{...n.data,cursor:this.nextCursor}},o=await de(e,t,r);return new se(e,t,n,o)})}}async function de(e,t,n){const r=await 
e.post(t,n);if(200===r.status)return r.data;throw new Error(`Unexpected status from server while POST ${t}: ${r.status} (body: ${r.data})`)}
|
|
257
|
+
/*!
|
|
258
|
+
* Copyright 2021 Cognite AS
|
|
259
|
+
*/class le{constructor(){this._types=new Map,this._types=new Map,this.registerWellKnownNodeCollectionTypes()}registerNodeCollectionType(e,t){this._types.set(e,{deserializer:(e,n)=>t(e,n)})}async deserialize(e,t,n){const r={client:e,model:t};return this.getDeserializer(n.token)(n,r)}getDeserializer(e){const t=this._types.get(e);return o()(void 0!==t),t.deserializer}registerWellKnownNodeCollectionTypes(){this.registerNodeCollectionType(te.classToken,async(e,t)=>{const n=new te(t.client,t.model);return await n.executeFilter(e.state),n}),this.registerNodeCollectionType(oe.classToken,async(e,t)=>{const n=new oe(t.client,t.model,e.options);return await n.executeFilter(e.state),n}),this.registerNodeCollectionType(ae.classToken,async(e,t)=>{const n=new ae(t.client,t.model,e.options),{propertyCategory:r,propertyKey:o,propertyValues:i}=e.state;return await n.executeFilter(r,o,i),n}),this.registerNodeCollectionType(f.classToken,e=>{var t;const n=new i.e;e.state.forEach(e=>n.addRange(new i.i(e.from,e.count)));const r=new f(n);return void 0!==(null===(t=e.options)||void 0===t?void 0:t.areas)&&r.addAreas(e.options.areas),Promise.resolve(r)}),this.registerNodeCollectionType(h.classToken,async(e,t)=>{const n=await Promise.all(e.state.subCollections.map(e=>this.deserialize(t.client,t.model,e)));return new h(n)}),this.registerNodeCollectionType(p.classToken,async(e,t)=>{const n=await Promise.all(e.state.subCollections.map(e=>this.deserialize(t.client,t.model,e)));return new p(n)}),this.registerNodeCollectionType(ie.classToken,async(e,t)=>{const n=await this.deserialize(t.client,t.model,e.state.innerSet);return new ie(t.model,n)})}}function ce(e,t){le.Instance.registerNodeCollectionType(e,t)}
|
|
260
|
+
/*!
|
|
261
|
+
* Copyright 2021 Cognite AS
|
|
262
|
+
*/le.Instance=new le;class ue{constructor(e,t){this._viewer=e,this._cdfClient=t,this._cameraControls=e.cameraControls}getCurrentState(){const e=this._cameraControls.getState(),t=this._viewer.models.filter(e=>e instanceof q).map(e=>e).map(e=>({defaultNodeAppearance:e.getDefaultNodeAppearance(),modelId:e.modelId,revisionId:e.revisionId,styledSets:e.styledNodeCollections.map(e=>{const{nodeCollection:t,appearance:n}=e;return{...t.serialize(),appearance:n}})}));return{camera:{position:e.position,target:e.target},models:t}}async setState(e){const t=e.camera.position,n=e.camera.target;this._cameraControls.setState(new l.Vector3(t.x,t.y,t.z),new l.Vector3(n.x,n.y,n.z));const r=this._viewer.models.filter(e=>e instanceof q).map(e=>e);await Promise.all(e.models.map(e=>{const t=r.find(t=>t.modelId==e.modelId&&t.revisionId==e.revisionId);if(void 0===t)throw new Error(`Cannot apply model state. Model (modelId: ${e.modelId}, revisionId: ${e.revisionId}) has not been added to viewer.`);return{model:t,state:e}}).map(async e=>{const{model:t,state:n}=e;t.setDefaultNodeAppearance(n.defaultNodeAppearance),await Promise.all(n.styledSets.map(async e=>{const n=await le.Instance.deserialize(this._cdfClient,t,{token:e.token,state:e.state,options:e.options});t.assignStyledNodeCollection(n,e.appearance)}))}))}}
|
|
263
|
+
/*!
|
|
264
|
+
* Copyright 2021 Cognite AS
|
|
265
|
+
*/class pe{constructor(e,t,n){this._cadModelMap=new Map,this._subscription=new P.Subscription,this._needsRedraw=!1,this._markNeedsRedrawBound=this.markNeedsRedraw.bind(this),this._materialsChangedListener=this.handleMaterialsChanged.bind(this),this._materialManager=e,this._cadModelFactory=t,this._cadModelUpdateHandler=n,this._materialManager.on("materialsChanged",this._materialsChangedListener);this._subscription.add(this._cadModelUpdateHandler.consumedSectorObservable().subscribe({next:e=>{const t=this._cadModelMap.get(e.modelIdentifier);if(!t)return;e.instancedMeshes&&e.levelOfDetail===W.c.Detailed?t.updateInstancedMeshes(e.instancedMeshes,e.modelIdentifier,e.metadata.id):e.levelOfDetail!==W.c.Simple&&e.levelOfDetail!==W.c.Discarded||(t.discardInstancedMeshes(e.metadata.id),t.removeBatchedSectorGeometries(e.metadata.id)),e.geometryBatchingQueue&&e.geometryBatchingQueue.length>0&&e.levelOfDetail===W.c.Detailed&&t.batchGeometry(e.geometryBatchingQueue,e.metadata.id);const n=t.rootSector.sectorNodeMap.get(e.metadata.id);if(!n)throw new Error(`Could not find 3D node for sector ${e.metadata.id} - invalid id?`);e.group&&n.add(e.group),n.updateGeometry(e.group,e.levelOfDetail),this.markNeedsRedraw()},error:e=>{R.a.trackError(e,{moduleName:"CadManager",methodName:"constructor"})}}))}get materialManager(){return this._materialManager}get budget(){return this._cadModelUpdateHandler.budget}set budget(e){this._cadModelUpdateHandler.budget=e}get loadedStatistics(){return this._cadModelUpdateHandler.lastBudgetSpendage}dispose(){this._cadModelUpdateHandler.dispose(),this._cadModelFactory.dispose(),this._subscription.unsubscribe(),this._materialManager.off("materialsChanged",this._materialsChangedListener)}requestRedraw(){this._needsRedraw=!0}resetRedraw(){this._needsRedraw=!1}get needsRedraw(){return this._needsRedraw}updateCamera(e){this._cadModelUpdateHandler.updateCamera(e),this._needsRedraw=!0}get clippingPlanes(){return this._materialManager.clippingPlanes}set 
clippingPlanes(e){this._materialManager.clippingPlanes=e,this._cadModelUpdateHandler.clippingPlanes=e,this._needsRedraw=!0}get renderMode(){return this._materialManager.getRenderMode()}set renderMode(e){this._materialManager.setRenderMode(e)}async addModel(e,t){const n=await this._cadModelFactory.createModel(e,t);return n.addEventListener("update",this._markNeedsRedrawBound),this._cadModelMap.set(n.cadModelIdentifier,n),this._cadModelUpdateHandler.addModel(n),n}removeModel(e){if(!this._cadModelMap.delete(e.cadModelIdentifier))throw new Error(`Could not remove model ${e.cadModelIdentifier} because it's not added`);e.removeEventListener("update",this._markNeedsRedrawBound),e.clearCache(),this._cadModelUpdateHandler.removeModel(e)}getLoadingStateObserver(){return this._cadModelUpdateHandler.getLoadingStateObserver()}markNeedsRedraw(){this._needsRedraw=!0}handleMaterialsChanged(){this.requestRedraw()}}var me=n(22);
|
|
266
|
+
/*!
|
|
267
|
+
* Copyright 2021 Cognite AS
|
|
268
|
+
*/class he{constructor(e){this._geometryClipBox=e}createClippedModel(e){const t=function e(t,n){const r=t.bounds,o=t.bounds.clone();if(o.intersect(n),o.isEmpty())return;{const i=[];for(let r=0;r<t.children.length;r++){const o=e(t.children[r],n);void 0!==o&&i.push(o)}const a=ve(o)/ve(r),s=Math.min(1,1-1/(1+10*a));return{...t,children:i,estimatedDrawCallCount:Math.ceil(s*t.estimatedDrawCallCount),estimatedRenderCost:Math.ceil(s*t.estimatedRenderCost),bounds:o}}}(e.scene.root,this._geometryClipBox);if(void 0===t)throw new Error("No sectors inside provided geometry clip box");const n=new Map;Object(i.w)(t,e=>(n.set(e.id,e),!0));const r=(new W.f).createSectorScene(e.scene.version,e.scene.maxTreeIndex,e.scene.unit,t);return{...e,scene:r,geometryClipBox:this._geometryClipBox.clone()}}}const fe={size:new l.Vector3};function ve(e){const{size:t}=fe;return e.getSize(t),t.x*t.y*t.z}
|
|
269
|
+
/*!
|
|
270
|
+
* Copyright 2021 Cognite AS
|
|
271
|
+
*/const xe=new Float32Array([-.5,-.5,0,.5,-.5,0,.5,.5,0,.5,.5,0,-.5,.5,0,-.5,-.5,0]),ge=new l.Float32BufferAttribute(xe.buffer,3),be=(new l.Box3).setFromArray(xe),ye=new Map([["color",{offset:0,size:12}],["treeIndex",{offset:12,size:4}],["normal",{offset:16,size:12}],["instanceMatrix",{offset:28,size:64}]]);
|
|
238
272
|
/*!
|
|
239
273
|
* Copyright 2021 Cognite AS
|
|
240
274
|
*/
|
|
241
|
-
const
|
|
275
|
+
function*_e(e,t,n,r=null){const a=e.primitives;if(Te(a.boxCollection)&&(yield function(e,t,n,r){const o=Object(W.m)(e,t,W.i,r),i=new l.InstancedBufferGeometry,a=new l.Mesh(i,n);return i.setIndex(W.h.index),i.setAttribute("position",W.h.position),i.setAttribute("normal",W.h.normal),Ce(i,o,t,a),Ae(i),a.onBeforeRender=()=>Me(n,a.matrixWorld),a.name="Primitives (Boxes)",a}(a.boxCollection,a.boxAttributes,t.box,r)),Te(a.circleCollection)&&(yield function(e,t,n,r){const o=Object(W.m)(e,t,W.t,r),i=new l.InstancedBufferGeometry,a=new l.Mesh(i,n);return i.setIndex(W.s.index),i.setAttribute("position",W.s.position),i.setAttribute("normal",W.s.position),Ce(i,o,t,a),Ae(i),a.onBeforeRender=()=>Me(n,a.matrixWorld),a.name="Primitives (Circles)",a}(a.circleCollection,a.circleAttributes,t.circle,r)),Te(a.coneCollection)&&(yield function(e,t,n,r,o){const i=Object(W.n)(e,t,o),a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.j.index),a.setAttribute("position",W.j.position),Ce(a,i,t,s),Se(a,r),s.onBeforeRender=()=>Me(n,s.matrixWorld),s.name="Primitives (Cones)",s}(a.coneCollection,a.coneAttributes,t.cone,n,r)),Te(a.eccentricConeCollection)&&(yield function(e,t,n,r,o){const i=Object(W.n)(e,t,o),a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.j.index),a.setAttribute("position",W.j.position),Ce(a,i,t,s),Se(a,r),s.onBeforeRender=()=>Me(n,s.matrixWorld),s.name="Primitives (EccentricCones)",s}(a.eccentricConeCollection,a.eccentricConeAttributes,t.eccentricCone,n,r)),Te(a.ellipsoidSegmentCollection)&&(yield function(e,t,n,r,o){const i=Object(W.o)(e,t,o),a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.j.index),a.setAttribute("position",W.j.position),Ce(a,i,t,s),Se(a,r),s.onBeforeRender=()=>Me(n,s.matrixWorld),s.name="Primitives (EllipsoidSegments)",s}(a.ellipsoidSegmentCollection,a.ellipsoidSegmentAttributes,t.ellipsoidSegment,n,r)),Te(a.generalCylinderCollection)){const e=function(e,t,n,r,o){const 
i=Object(W.n)(e,t,o,"radius","radius");if(0===i.length)return null;const a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.j.index),a.setAttribute("position",W.j.position),Ce(a,i,t,s),Se(a,r),s.onBeforeRender=(e,t,r)=>{Me(n,s.matrixWorld),Ie(n,s,r)},s.name="Primitives (GeneralCylinders)",s}(a.generalCylinderCollection,a.generalCylinderAttributes,t.generalCylinder,n,r);e&&(yield e)}Te(a.generalRingCollection)&&(yield function(e,t,n,r){const o=Object(W.m)(e,t,W.t,r),i=new l.InstancedBufferGeometry,a=new l.Mesh(i,n);return i.setIndex(W.s.index),i.setAttribute("position",W.s.position),Ce(i,o,t,a),Ae(i),a.onBeforeRender=(e,t,r)=>{Me(n,a.matrixWorld),Ie(n,a,r)},a.name="Primitives (GeneralRings)",a}(a.generalRingCollection,a.generalRingAttributes,t.generalRing,r)),Te(a.quadCollection)&&(yield function(e,t,n,r){const o=Object(W.m)(e,t,W.t,r),i=new l.InstancedBufferGeometry,a=new l.Mesh(i,n);return i.setIndex(W.s.index),i.setAttribute("position",W.s.position),i.setAttribute("normal",W.s.normal),Ce(i,o,t,a),Ae(i),a.name="Primitives (Quads)",a}(a.quadCollection,a.quadAttributes,t.quad,r)),Te(a.sphericalSegmentCollection)&&(yield function(e,t,n,r,o){const i=Object(W.o)(e,t,o,"radius","radius"),a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.j.index),a.setAttribute("position",W.j.position),Ce(a,i,t,s),Se(a,r),a.setAttribute("a_horizontalRadius",a.getAttribute("a_radius")),a.setAttribute("a_verticalRadius",a.getAttribute("a_radius")),s.onBeforeRender=()=>Me(n,s.matrixWorld),s.name="Primitives (EllipsoidSegments)",s}(a.sphericalSegmentCollection,a.sphericalSegmentAttributes,t.sphericalSegment,n,r)),Te(a.torusSegmentCollection)&&(yield function(e,t,n,r){const o=function(e,t){const n=Array.from(t.values()).reduce((e,t)=>e+t.size,0),r=e.length/n;let o=0;const i=new DataView(e.buffer),a=t.get("size").offset;for(let e=0;e<r;e++)o=Math.max(o,i.getFloat32(e*n+a,!0));return o}(e,t),a=new i.b(new l.Box3);a.name="Primitives (TorusSegments)";let 
s=null,d=null;for(const[r,i]of W.u.entries()){const c=new l.InstancedBufferGeometry,u=new l.Mesh(c,n);if(c.setIndex(i.index),c.setAttribute("position",i.position),Ce(c,e,t,u),null===s){const e=Ne(c);s=e.boundingBox,d=e.boundingSphere,a.setBoundingBox(s)}c.boundingBox=s,c.boundingSphere=d,u.name="Primitives (TorusSegments) - LOD "+r,a.addLevel(u,we(o,r)),u.onBeforeRender=()=>Me(n,u.matrixWorld)}return a}(a.torusSegmentCollection,a.torusSegmentAttributes,t.torusSegment)),Te(a.trapeziumCollection)&&(yield function(e,t,n,r){const i=Object(W.p)(e,t,r),a=new l.InstancedBufferGeometry,s=new l.Mesh(a,n);return a.setIndex(W.v.index),a.setAttribute("position",W.v.position),Ce(a,i,t,s),function(e){const{bbox:t,p:n}=Re;t.makeEmpty();const r=e.getAttribute("a_vertex1"),i=e.getAttribute("a_vertex2"),a=e.getAttribute("a_vertex3"),s=e.getAttribute("a_vertex4");o()(void 0!==r&&void 0!==i&&void 0!==a&&void 0!==s);for(let e=0;e<r.count;++e)n.set(r.getX(e),r.getY(e),r.getZ(e)),t.expandByPoint(n),n.set(i.getX(e),i.getY(e),i.getZ(e)),t.expandByPoint(n),n.set(a.getX(e),a.getY(e),a.getZ(e)),t.expandByPoint(n),n.set(s.getX(e),s.getY(e),s.getZ(e)),t.expandByPoint(n);e.boundingBox=t,e.boundingSphere=e.boundingSphere||new l.Sphere,e.boundingBox.getBoundingSphere(e.boundingSphere)}
|
|
242
276
|
/*!
|
|
243
277
|
* Copyright 2021 Cognite AS
|
|
244
|
-
*/
|
|
278
|
+
*/(a),s.onBeforeRender=()=>Me(n,s.matrixWorld),s.name="Primitives (Trapeziums)",s}(a.trapeziumCollection,a.trapeziumAttributes,t.trapezium,r)),Te(a.nutCollection)&&(yield function(e,t,n,r){const o=Object(W.m)(e,t,W.r,r),i=new l.InstancedBufferGeometry,a=new l.Mesh(i,n);return i.setIndex(W.q.index),i.setAttribute("position",W.q.position),i.setAttribute("normal",W.q.normal),Ce(i,o,t,a),Ae(i),a.name="Primitives (Nuts)",a}(a.nutCollection,a.nutAttributes,t.nut,r))}function Te(e){return e.length>0}function Ce(e,t,n,r){const o=Array.from(n.values()).reduce((e,t)=>e+t.size,0),a=new l.InstancedInterleavedBuffer(t,o),s=new l.InstancedInterleavedBuffer(new Float32Array(t.buffer),o/4);for(const[t,r]of n){const n="color"===t,o=n?a:s,i=n?r.size:r.size/4,d=n?r.offset:r.offset/4;e.setAttribute("a_"+t,new l.InterleavedBufferAttribute(o,i,d,n))}r.onAfterRender=()=>{i.o.bind(s)(),i.o.bind(a)(),r.onAfterRender=()=>{}},e.instanceCount=t.length/o,function(){const a=new DataView(t.buffer),s=n.get("treeIndex").offset,d=new Map;for(let t=0;t<e.instanceCount;t++)Object(i.p)(d,a.getFloat32(t*o+s,!0));r.userData.treeIndices=d}()}function we(e,t){if(0===t)return 0;return e*5**t}function Me(e,t){e.uniforms.inverseModelMatrix.value.copy(t).invert()}function Ie(e,t,n){var r,o,i,a,s,d,l,c,u,p;null===(o=null===(r=e.uniforms.modelMatrix)||void 0===r?void 0:r.value)||void 0===o||o.copy(t.matrixWorld),null===(a=null===(i=e.uniforms.viewMatrix)||void 0===i?void 0:i.value)||void 0===a||a.copy(n.matrixWorld).invert(),null===(d=null===(s=e.uniforms.projectionMatrix)||void 0===s?void 0:s.value)||void 0===d||d.copy(n.projectionMatrix),null===(c=null===(l=e.uniforms.normalMatrix)||void 0===l?void 0:l.value)||void 0===c||c.copy(t.normalMatrix),null===(p=null===(u=e.uniforms.cameraPosition)||void 0===u?void 0:u.value)||void 0===p||p.copy(n.position)}function Se(e,t){e.boundingSphere=e.boundingSphere||new l.Sphere,t.getBoundingSphere(e.boundingSphere)}const Pe={baseBoundingBox:new 
l.Box3,instanceBoundingBox:new l.Box3,instanceMatrix:new l.Matrix4,p:new l.Vector3};function Ne(e){const{baseBoundingBox:t,instanceBoundingBox:n,instanceMatrix:r,p:i}=Pe;t.makeEmpty();const a=new l.Box3,s=e.getAttribute("position");for(let e=0;e<s.count;++e)i.set(s.getX(e),s.getY(e),s.getZ(e)),t.expandByPoint(i);const d=e.getAttribute("a_instanceMatrix");o()(void 0!==d);const c=d.offset,u=d.data.count,p=d.data.stride,m=new Float32Array(d.array);for(let e=0;e<u;++e){const o=c+e*p;r.set(m[o],m[o+4],m[o+8],m[o+12],m[o+1],m[o+5],m[o+9],m[o+13],m[o+2],m[o+6],m[o+10],m[o+14],m[o+3],m[o+7],m[o+11],m[o+15]),n.copy(t).applyMatrix4(r),a.expandByPoint(n.min),a.expandByPoint(n.max)}return{boundingBox:a,boundingSphere:a.getBoundingSphere(new l.Sphere)}}function Ae(e){const{boundingBox:t,boundingSphere:n}=Ne(e);e.boundingBox=t,e.boundingSphere=n}const Re={bbox:new l.Box3,p:new l.Vector3};function De(e,t,n,r){const o=new i.a;if(o.name="Quads",0===e.buffer.byteLength)return{sectorMeshes:new i.a,instancedMeshes:[]};const a=function(e,t,n,r){const o=new Uint8Array(e.buffer),a=Object(W.m)(o,ye,be,r);if(0===a.byteLength)return;const s=new Float32Array(a.buffer);if(a.byteLength%23!=0)throw new Error("Expected buffer size to be multiple of 23, but got "+s.byteLength);const d=new l.InstancedBufferGeometry,c=new l.InstancedInterleavedBuffer(s,23),u=new l.InterleavedBufferAttribute(c,3,0,!0),p=new l.InterleavedBufferAttribute(c,1,3,!1),m=new l.InterleavedBufferAttribute(c,3,4,!0),h=new l.InterleavedBufferAttribute(c,4,7,!1),f=new l.InterleavedBufferAttribute(c,4,11,!1),v=new l.InterleavedBufferAttribute(c,4,15,!1),x=new l.InterleavedBufferAttribute(c,4,19,!1);d.setAttribute("position",ge),d.setAttribute("color",u),d.setAttribute("treeIndex",p),d.setAttribute("normal",m),d.setAttribute("matrix0",h),d.setAttribute("matrix1",f),d.setAttribute("matrix2",v),d.setAttribute("matrix3",x);const g=new l.Mesh(d,t.simple);return g.name="Low detail 
geometry",g.onAfterRender=()=>{i.o.bind(c)(),g.onAfterRender=()=>{}},g.onBeforeRender=()=>{t.simple.uniforms.inverseModelMatrix.value.copy(g.matrixWorld).invert()},function(){const t=new Map;for(let n=0;n<e.length/23;n++)Object(i.p)(t,e[23*n+3]);g.userData.treeIndices=t}(),g.geometry.boundingSphere=new l.Sphere,n.getBoundingSphere(g.geometry.boundingSphere),g}(e.buffer,n,t,r);return void 0!==a&&o.add(a),{sectorMeshes:o,instancedMeshes:[]}}
|
|
245
279
|
/*!
|
|
246
280
|
* Copyright 2021 Cognite AS
|
|
247
|
-
*/
|
|
281
|
+
*/
|
|
282
|
+
class Ee{constructor(e){this.materialManager=e}transformSimpleSector(e,t,n,r){const i=this.materialManager.getModelMaterials(e);return o()(void 0!==i,"Could not find materials for model '"+e),Promise.resolve(De(n,t.bounds,i,r))}transformDetailedSector(e,t,n,r){const a=this.materialManager.getModelMaterials(e);return o()(void 0!==a,"Could not find materials for model '"+e),Promise.resolve(function(e,t,n,r){const o=t.bounds;null!==r&&function(e,t,n=1e-4){const r=e,o=t;return r.min.x-o.min.x>=n&&o.max.x-r.max.x>=n&&r.min.y-o.min.y>=n&&o.max.y-r.max.y>=n&&r.min.z-o.min.z>=n&&o.max.z-r.max.z>=n}(r,o)&&(r=null);const a=new i.a;for(const t of _e(e,n,o,r))a.add(t);const s=Object(W.k)(e.triangleMeshes,o,n.triangleMesh,r);for(const e of s)a.add(e);return{sectorMeshes:a,instancedMeshes:e.instanceMeshes.map(e=>{const t=e.instances.map(t=>Object(W.l)(e.vertices,e.indices,t,r)).filter(e=>e.treeIndices.length>0);return{fileId:e.fileId,vertices:e.vertices,indices:e.indices,instances:t}}).filter(e=>e.instances.length>0)}}(n,t,a,r))}}
|
|
248
283
|
/*!
|
|
249
284
|
* Copyright 2021 Cognite AS
|
|
250
|
-
*/
|
|
285
|
+
*/function*Be(e){const t=new Array(e.length);for(let n=0;n<e.length;++n)t[n]={fileId:e[n],index:n};t.sort((e,t)=>e.fileId-t.fileId);let n=0;for(;n<t.length;){const e=t[n].fileId,r=Oe(t,e,n,e=>e.fileId),o=new Array(r+1-n);for(let e=n;e<r+1;e++)o[e-n]=t[e].index;yield{id:e,meshIndices:o},n=r+1}}function Oe(e,t,n,r){let o=n,i=e.length-1,a=e.length;for(;o<=i;){const n=Math.floor((o+i)/2),s=r(e[n]);s>t?i=n-1:(s<t||(a=n),o=n+1)}return a}
|
|
251
286
|
/*!
|
|
252
287
|
* Copyright 2021 Cognite AS
|
|
253
|
-
*/
|
|
288
|
+
*/function ze(e){const t=new Array(e.length);return e.forEach((n,r)=>{t[r]=r>0?t[r-1]+e[r-1]:0}),t}
|
|
254
289
|
/*!
|
|
255
290
|
* Copyright 2021 Cognite AS
|
|
256
|
-
*/const
|
|
291
|
+
*/class Fe{constructor(e,t){this._modelSectorProvider=e,this._modelDataParser=new W.b,this._modelDataTransformer=new Ee(t),this._consumedSectorCache=new i.g(50,e=>{void 0!==e.group&&e.group.dereference()}),this._ctmFileCache=new i.h(10)}clear(){this._consumedSectorCache.clear(),this._ctmFileCache.clear()}async loadSector(e){var t,n;const r=this.wantedSectorCacheKey(e);try{if(this._consumedSectorCache.has(r))return this._consumedSectorCache.get(r);switch(e.levelOfDetail){case W.c.Detailed:{const n=await this.loadDetailedSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,n),null===(t=null==n?void 0:n.group)||void 0===t||t.reference(),n}case W.c.Simple:{const t=await this.loadSimpleSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,t),null===(n=null==t?void 0:t.group)||void 0===n||n.reference(),t}case W.c.Discarded:return{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:e.levelOfDetail,instancedMeshes:[],group:void 0};default:Object(i.l)(e.levelOfDetail)}}catch(e){throw this._consumedSectorCache.remove(r),R.a.trackError(e,{methodName:"loadSector",moduleName:"CachedRepository"}),e}}async loadSimpleSectorFromNetwork(e){const t=e.metadata,n=await this._modelSectorProvider.getBinaryFile(e.modelBaseUrl,t.facesFile.fileName),r=await this._modelDataParser.parseF3D(new Uint8Array(n)),o=await this._modelDataTransformer.transformSimpleSector(e.modelIdentifier,t,r,e.geometryClipBox);return{...e,group:o.sectorMeshes,instancedMeshes:o.instancedMeshes}}async loadI3DFromNetwork(e,t){const n=await this._modelSectorProvider.getBinaryFile(e,t);return this._modelDataParser.parseI3D(new Uint8Array(n))}async loadCtmsFromNetwork(e,t){const n=await Promise.all(t.map(t=>this.loadCtmFileFromNetwork(e,t)));return t.reduce((e,t,r)=>e.set(t,n[r]),new Map)}async loadDetailedSectorFromNetwork(e){const 
t=e.metadata,n=t.indexFile,r=this.loadI3DFromNetwork(e.modelBaseUrl,n.fileName),o=this.loadCtmsFromNetwork(e.modelBaseUrl,n.peripheralFiles),i=await r,a=await o,s=this.finalizeDetailed(i,a),d=await this._modelDataTransformer.transformDetailedSector(e.modelIdentifier,t,s,e.geometryClipBox);return{...e,group:d.sectorMeshes,instancedMeshes:d.instancedMeshes}}async loadCtmFileFromNetwork(e,t){const n=this.ctmFileCacheKey(e,t),r=this._ctmFileCache.get(n);if(void 0!==r)return r;const o=this._modelSectorProvider.getBinaryFile(e,t).then(e=>this._modelDataParser.parseCTM(new Uint8Array(e)));return this._ctmFileCache.set(n,o),o}finalizeDetailed(e,t){const{instanceMeshes:n,triangleMeshes:r}=e,o=(()=>{const{fileIds:e,colors:n,triangleCounts:o,treeIndices:i}=r,a=[];for(const{id:r,meshIndices:s}of Be(e)){const e=s.map(e=>o[e]),d=ze(e),l=`mesh_${r}.ctm`,{indices:c,vertices:u,normals:p}=t.get(l),m=new Uint8Array(3*c.length),h=new Float32Array(c.length);for(let t=0;t<s.length;t++){const r=s[t],o=i[r],a=d[t],l=e[t],[u,p,f]=[n[4*r+0],n[4*r+1],n[4*r+2]];for(let e=a;e<a+l;e++)for(let t=0;t<3;t++){const n=c[3*e+t];h[n]=o,m[3*n]=u,m[3*n+1]=p,m[3*n+2]=f}}const f={colors:m,fileId:r,treeIndices:h,indices:c,vertices:u,normals:p};a.push(f)}return a})(),i=(()=>{const{fileIds:e,colors:r,treeIndices:o,triangleCounts:i,triangleOffsets:a,instanceMatrices:s}=n,d=[];for(const{id:n,meshIndices:l}of Be(e)){const e=`mesh_${n}.ctm`,c=t.get(e),u=c.indices,p=c.vertices,m=[],h=new Float64Array(l.map(e=>a[e])),f=new Float64Array(l.map(e=>i[e]));for(const{id:e,meshIndices:t}of Be(h)){const n=f[t[0]],i=new Float32Array(16*t.length),a=new Float32Array(t.length),d=new Uint8Array(4*t.length);for(let e=0;e<t.length;e++){const n=l[t[e]],c=o[n],u=s.subarray(16*n,16*n+16);i.set(u,16*e),a[e]=c;const p=r.subarray(4*n,4*n+4);d.set(p,4*e)}m.push({triangleCount:n,triangleOffset:e,instanceMatrices:i,colors:d,treeIndices:a})}const v={fileId:n,indices:u,vertices:p,instances:m};d.push(v)}return 
d})();return{primitives:e.primitives,instanceMeshes:i,triangleMeshes:o}}wantedSectorCacheKey(e){return e.modelIdentifier+"."+e.metadata.id+"."+e.levelOfDetail}ctmFileCacheKey(e,t){return e+"."+t}}var Le;function ke(e,t=!0){const n=new l.PlaneBufferGeometry(1,1,1,1);return e.setIndex(n.getIndex()),e.setAttribute("position",n.getAttribute("position")),t&&e.setAttribute("normal",n.getAttribute("normal")),e.computeBoundingBox(),e.boundingBox}function Ge(e){const t=[];t.push(-1,1,-1),t.push(-1,-1,-1),t.push(1,1,-1),t.push(1,-1,-1),t.push(1,1,1),t.push(1,-1,1);const n=new Uint16Array([1,2,0,1,3,2,3,4,2,3,5,4]);return e.setIndex(new l.BufferAttribute(n,1)),e.setAttribute("position",new l.BufferAttribute(new Float32Array(t),3)),(new l.Box3).setFromArray(t)}function Ve(e){const t=[{tubularSegments:9,radialSegments:18},{tubularSegments:5,radialSegments:12},{tubularSegments:4,radialSegments:5}],n=
|
|
257
292
|
/*!
|
|
258
293
|
* Copyright 2021 Cognite AS
|
|
259
|
-
*/
|
|
294
|
+
*/
|
|
295
|
+
function(e,t,n=((e,t)=>[e,t,0])){const r=[],o=[],i=1/e,a=1/t;for(let o=0;o<=t;o++)for(let t=0;t<=e;t++){const[e,s,d]=n(t*i,o*a);r.push(e||0,s||0,d||0)}for(let n=1;n<=t;n++)for(let t=1;t<=e;t++){const r=(e+1)*n+t-1,i=(e+1)*(n-1)+t-1,a=(e+1)*(n-1)+t,s=(e+1)*n+t;o.push(r,i,s),o.push(i,a,s)}return{index:new l.Uint16BufferAttribute(o,1),position:new l.Float32BufferAttribute(r,3)}}(t[0].radialSegments,t[0].tubularSegments,(e,t)=>[e,2*t*Math.PI]);return e.setIndex(n.index),e.setAttribute("position",n.position),(new l.Box3).setFromArray(n.position.array)}function Ue(e,t){switch(e){case Le.BoxCollection:!function(e){const t=new l.BoxBufferGeometry(1,1,1,1,1,1);e.setIndex(t.getIndex()),e.setAttribute("position",t.getAttribute("position")),e.setAttribute("normal",t.getAttribute("normal")),e.computeBoundingBox(),e.boundingBox}(t);break;case Le.CircleCollection:ke(t);break;case Le.ConeCollection:case Le.EccentricConeCollection:case Le.EllipsoidSegmentCollection:case Le.GeneralCylinderCollection:Ge(t);break;case Le.GeneralRingCollection:ke(t,!1);break;case Le.NutCollection:!function(e){const t=new l.CylinderBufferGeometry(.5,.5,1,6);t.applyMatrix4((new l.Matrix4).makeRotationX(-Math.PI/2)),e.setIndex(t.getIndex()),e.setAttribute("position",t.getAttribute("position")),e.setAttribute("normal",t.getAttribute("normal")),(new l.Box3).setFromArray(t.getAttribute("position").array)}(t);break;case Le.QuadCollection:ke(t);break;case Le.TrapeziumCollection:!function(e){const t=[0,0,0,1,1,1,2,2,2,3,3,3];e.setIndex(new l.BufferAttribute(new Uint16Array([0,1,3,0,3,2]),1)),e.setAttribute("position",new l.BufferAttribute(new Float32Array(t),3)),(new l.Box3).setFromArray(t)}(t);break;case Le.TorusSegmentCollection:Ve(t);break;case Le.InstanceMesh:case Le.TriangleMesh:break;default:Object(i.l)(e)}}
|
|
260
296
|
/*!
|
|
261
297
|
* Copyright 2021 Cognite AS
|
|
262
|
-
|
|
298
|
+
*/!function(e){e[e.BoxCollection=0]="BoxCollection",e[e.CircleCollection=1]="CircleCollection",e[e.ConeCollection=2]="ConeCollection",e[e.EccentricConeCollection=3]="EccentricConeCollection",e[e.EllipsoidSegmentCollection=4]="EllipsoidSegmentCollection",e[e.GeneralCylinderCollection=5]="GeneralCylinderCollection",e[e.GeneralRingCollection=6]="GeneralRingCollection",e[e.QuadCollection=7]="QuadCollection",e[e.TorusSegmentCollection=8]="TorusSegmentCollection",e[e.TrapeziumCollection=9]="TrapeziumCollection",e[e.NutCollection=10]="NutCollection",e[e.TriangleMesh=11]="TriangleMesh",e[e.InstanceMesh=12]="InstanceMesh"}(Le||(Le={}));class je{constructor(){this._textDecoder=new TextDecoder}parseGlbMetadata(e){this.verifyGlbHeaders(e);const{length:t,json:n}=this.parseJson(e),{length:r,byteOffsetToBinContent:o}=this.parseBinHeaders(e,je.CHUNK_HEADER_BYTE_SIZE+t);return{json:n,byteOffsetToBinContent:o,binContentLength:r}}verifyGlbHeaders(e){const t=new DataView(e,0,je.GLB_HEADER_BYTE_SIZE),n=this._textDecoder.decode(new Uint8Array(e,0,4)),r=t.getUint32(4,!0);o()("glTF"===n,"Unknown file format"),o()(2===r,`Unsupported glTF version{${r}}`)}parseJson(e){const t=new DataView(e,je.GLB_HEADER_BYTE_SIZE,je.CHUNK_HEADER_BYTE_SIZE).getUint32(0,!0),n=this._textDecoder.decode(new Uint8Array(e,je.GLB_HEADER_BYTE_SIZE+4,4));o()("JSON"===n);const r=new Uint8Array(e,je.GLB_HEADER_BYTE_SIZE+je.CHUNK_HEADER_BYTE_SIZE,t),i=JSON.parse(this._textDecoder.decode(r));return o()(void 0!==i,"Failed to assign types to gltf json"),{type:n,length:t,json:i}}parseBinHeaders(e,t){const n=new DataView(e,je.GLB_HEADER_BYTE_SIZE+t,je.CHUNK_HEADER_BYTE_SIZE).getUint32(0,!0),r=this._textDecoder.decode(new Uint8Array(e,je.GLB_HEADER_BYTE_SIZE+t+4,4));return o()(r.includes("BIN")),{type:r,byteOffsetToBinContent:je.GLB_HEADER_BYTE_SIZE+t+je.CHUNK_HEADER_BYTE_SIZE,length:n}}}je.GLB_HEADER_BYTE_SIZE=12,je.CHUNK_HEADER_BYTE_SIZE=8;
|
|
263
299
|
/*!
|
|
264
300
|
* Copyright 2021 Cognite AS
|
|
265
301
|
*/
|
|
266
|
-
class
|
|
302
|
+
class We{constructor(){this._glbMetadataParser=new je}parseSector(e){const t=this._glbMetadataParser.parseGlbMetadata(e),n=t.json;return this.traverseDefaultSceneNodes(n,t,e)}traverseDefaultSceneNodes(e,t,n){const r=[];return e.scenes[e.scene].nodes.map(t=>e.nodes[t]).forEach(e=>{const o=this.processNode(e,t,n);void 0!==o&&r.push(o)}),r}processNode(e,t,n){var r;const i=null===(r=e.extensions)||void 0===r?void 0:r.EXT_mesh_gpu_instancing,a=e.mesh;if(void 0===i&&void 0===a)return;const s=i?new l.InstancedBufferGeometry:new l.BufferGeometry,d=Le[e.name],c={bufferGeometry:s,geometryType:d,glbHeaderData:t,instancingExtension:i,meshId:a,data:n};switch(d){case Le.InstanceMesh:return o()(void 0!==c.instancingExtension),this.processInstancedTriangleMesh(c);case Le.TriangleMesh:o()(void 0===c.instancingExtension),this.processTriangleMesh(c);break;default:o()(void 0!==c.instancingExtension),this.processPrimitiveCollection(c)}return{type:d,geometryBuffer:s}}processInstancedTriangleMesh(e){var t;const{bufferGeometry:n,glbHeaderData:r,meshId:i,data:a}=e,s=r.json;o()(void 0!==i);const d=s.meshes[i];o()(1===d.primitives.length),o()(void 0!==(null===(t=d.extras)||void 0===t?void 0:t.InstanceId));const c=d.primitives[0];this.setIndexBuffer(e,c,a,n),this.setPositionBuffer(e,c,a,n);return this.setInterleavedBufferAttributes(e.glbHeaderData,e.instancingExtension.attributes,e.data,e=>"a"+e,e.bufferGeometry,l.InstancedInterleavedBuffer),{type:Le.InstanceMesh,geometryBuffer:e.bufferGeometry,instanceId:d.extras.InstanceId.toString()}}processPrimitiveCollection(e){o()(null!==e.instancingExtension,"Primitive does not contain the instanced gltf extension"),Ue(e.geometryType,e.bufferGeometry);this.setInterleavedBufferAttributes(e.glbHeaderData,e.instancingExtension.attributes,e.data,e=>"a"+e,e.bufferGeometry,l.InstancedInterleavedBuffer)}processTriangleMesh(e){const{bufferGeometry:t,glbHeaderData:n,meshId:r,data:i}=e,a=n.json;o()(void 0!==r);const 
s=a.meshes[r];o()(1===s.primitives.length);const d=s.primitives[0];this.setIndexBuffer(e,d,i,t),this.setInterleavedBufferAttributes(e.glbHeaderData,d.attributes,e.data,(function(e){switch(e){case"COLOR_0":return"color";case"POSITION":return"position";case"_treeIndex":return"treeIndex";default:throw new Error}}),e.bufferGeometry,l.InterleavedBuffer)}setIndexBuffer(e,t,n,r){var o;const i=e.glbHeaderData.json,a=e.glbHeaderData.byteOffsetToBinContent,s=i.accessors[t.indices],d=i.bufferViews[s.bufferView];d.byteOffset=null!==(o=d.byteOffset)&&void 0!==o?o:0;const c=We.DATA_TYPE_BYTE_SIZES.get(s.componentType),u=new c(n,a+d.byteOffset,d.byteLength/c.BYTES_PER_ELEMENT),p=We.COLLECTION_TYPE_SIZES.get(s.type);r.setIndex(new l.BufferAttribute(u,p))}setPositionBuffer(e,t,n,r){var o;const i=e.glbHeaderData.json,a=e.glbHeaderData.byteOffsetToBinContent,s=i.accessors[t.attributes.POSITION],d=i.bufferViews[s.bufferView];d.byteOffset=null!==(o=d.byteOffset)&&void 0!==o?o:0;const c=We.DATA_TYPE_BYTE_SIZES.get(s.componentType),u=new c(n,a+d.byteOffset,d.byteLength/c.BYTES_PER_ELEMENT),p=We.COLLECTION_TYPE_SIZES.get(s.type);r.setAttribute("position",new l.BufferAttribute(u,p))}setInterleavedBufferAttributes(e,t,n,r,i,a){var s;const d=e.json,l=Object.values(t).map(e=>d.accessors[e].bufferView);o()(l.length>0);const c=l[0];for(let e=1;e<l.length;e++)o()(l[e]===c,"Unexpected number of unique buffer views");const u=d.bufferViews[c];u.byteOffset=null!==(s=u.byteOffset)&&void 0!==s?s:0;const p=e.byteOffsetToBinContent,m=Object.values(t).map(e=>d.accessors[e].componentType),h=this.getUniqueComponentViews(m,n,p,u,a);this.setAttributes(t,d,h,r,i)}setAttributes(e,t,n,r,i){Object.keys(e).forEach(a=>{var s;const d=t.accessors[e[a]],c=null!==(s=d.byteOffset)&&void 0!==s?s:0,u=n[d.componentType],p=We.COLLECTION_TYPE_SIZES.get(d.type);o()(void 0!==p);const m=We.DATA_TYPE_BYTE_SIZES.get(d.componentType);o()(void 0!==m);const h=m.BYTES_PER_ELEMENT;o()(void 0!==h);const f=new 
l.InterleavedBufferAttribute(u,p,c/h),v=r(a);i.setAttribute(v,f)})}getUniqueComponentViews(e,t,n,r,o){var i;const a=null!==(i=r.byteOffset)&&void 0!==i?i:0,s=[...new Set(e)].map(e=>{const i=We.DATA_TYPE_BYTE_SIZES.get(e),s=new i(t,n+a,r.byteLength/i.BYTES_PER_ELEMENT);return{componentType:e,interleavedBuffer:new o(s,r.byteStride/i.BYTES_PER_ELEMENT)}});return Object.assign({},...s.map(e=>({[e.componentType]:e.interleavedBuffer})))}}We.COLLECTION_TYPE_SIZES=new Map([["SCALAR",1],["VEC2",2],["VEC3",3],["VEC4",4],["MAT2",4],["MAT3",9],["MAT4",16]]),We.DATA_TYPE_BYTE_SIZES=new Map([[5120,Int8Array],[5121,Uint8Array],[5122,Int16Array],[5123,Uint16Array],[5125,Uint32Array],[5126,Float32Array]]);
|
|
267
303
|
/*!
|
|
268
304
|
* Copyright 2021 Cognite AS
|
|
269
|
-
*/
|
|
305
|
+
*/
|
|
306
|
+
var qe=n(11),He=n(13);
|
|
270
307
|
/*!
|
|
271
308
|
* Copyright 2021 Cognite AS
|
|
272
309
|
*/
|
|
273
|
-
|
|
310
|
+
function Ke(e,t,n){if(!n)return e;if(t===Le.InstanceMesh||t===Le.TriangleMesh){const t=e.boundingBox;return!t||t.intersectsBox(n)?e:void 0}const r=Qe(e,l.InterleavedBufferAttribute);let o;switch(t){case Le.BoxCollection:o=function(e,t){return $e(e,Ze,t)}(r,n);break;case Le.CircleCollection:o=function(e,t){return $e(e,Ye,t)}(r,n);break;case Le.ConeCollection:case Le.EccentricConeCollection:o=function(e,t){return et(e,t,"a_radiusA","a_radiusB")}(r,n);break;case Le.EllipsoidSegmentCollection:o=function(e,t){return function(e,t){const{center:n}=nt;return Je(e,(t,r,o,i)=>{const a=e.get("a_horizontalRadius").getX(t),s=e.get("a_verticalRadius").getX(t),d=e.get("a_height").getX(t),l=e.get("a_center");return n.set(l.getX(t),l.getY(t),l.getZ(t)),Object(qe.c)(a,s,d,n,i)},t)}(e,t)}(r,n);break;case Le.GeneralCylinderCollection:o=function(e,t){return et(e,t,"a_radius","a_radius")}(r,n);break;case Le.GeneralRingCollection:case Le.QuadCollection:o=function(e,t){return $e(e,Ye,t)}(r,n);break;case Le.TorusSegmentCollection:o=function(e,t){return function(e,t){const{boundingBox:n}=tt;return Je(e,(t,r,o,i)=>{const a=e.get("a_radius").getX(t),s=e.get("a_tubeRadius").getX(t);n.min.set(-a-s,-a-s,-s),n.max.set(a+s,a+s,s);const d=e.get("a_instanceMatrix"),l=d.offset*d.array.BYTES_PER_ELEMENT;return Object(qe.d)(o,l,r,t,n,i)},t)}(e,t)}(r,n);break;case Le.TrapeziumCollection:o=function(e,t){return function(e,t){return Je(e,(t,n,r,o)=>{const i=e.get("a_vertex1"),a=e.get("a_vertex2"),s=e.get("a_vertex3"),d=e.get("a_vertex4"),l=i.array.BYTES_PER_ELEMENT,c=i.offset*l,u=a.offset*l,p=s.offset*l,m=d.offset*l;return Object(qe.e)(c,u,p,m,r,n,t,o)},t)}(e,t)}(r,n);break;case Le.NutCollection:o=function(e,t){return $e(e,Ze,t)}
|
|
311
|
+
/*!
|
|
312
|
+
* Copyright 2021 Cognite AS
|
|
313
|
+
*/(r,n);break;default:Object(i.l)(t)}return 0!==o.length?function(e,t,n){const r=new l.BufferGeometry;return Qe(t,l["BufferAttribute"]).forEach((e,t)=>{r.setAttribute(t,e)}),r.setIndex(t.getIndex()),n.forEach((t,n)=>{const o=t.data.stride,i=t.array.BYTES_PER_ELEMENT,a=Xe.get(i),s=new l.InstancedInterleavedBuffer(new a(e.buffer),o);r.setAttribute(n,new l.InterleavedBufferAttribute(s,t.itemSize,t.offset,t.normalized))}),r}(o,e,r):void 0}const Xe=new Map([[1,Uint8Array],[4,Float32Array]]),Ye=new l.Box3(new l.Vector3(-.5,-.5,-1e-4),new l.Vector3(.5,.5,1e-4)),Ze=new l.Box3(new l.Vector3(-.5,-.5,-.5),new l.Vector3(.5,.5,.5));function Qe(e,t){return new Map(Object.entries(e.attributes).filter(e=>e[1]instanceof t).map(e=>[e[0],e[1]]))}function Je(e,t,n){const r=e.values().next().value,o=r.array,i=new Uint8Array(o.buffer,o.byteOffset,o.byteLength);return Object(He.a)(i,r.data.stride*o.BYTES_PER_ELEMENT,n,t)}function $e(e,t,n){return Je(e,(n,r,o,i)=>{const a=e.get("a_instanceMatrix"),s=a.offset*a.array.BYTES_PER_ELEMENT;return Object(qe.d)(o,s,r,n,t,i)},n)}function et(e,t,n,r){return Je(e,(t,o,i,a)=>{const s=e.get("a_centerA"),d=e.get("a_centerB"),l=e.get(n),c=e.get(r),u=s.array.BYTES_PER_ELEMENT;return Object(qe.a)(i,s.offset*u,d.offset*u,l.offset*u,c.offset*u,o,t,a)},t)}const tt={boundingBox:new l.Box3};const nt={center:new l.Vector3};class rt{constructor(e,t){this._gltfSectorParser=new We,this._sectorFileProvider=e,this._materialManager=t}async loadSector(e){const t=e.metadata;if(void 0===t.sectorFileName||0===t.downloadSize)return Promise.resolve({modelIdentifier:e.modelIdentifier,metadata:t,levelOfDetail:W.c.Detailed,group:void 0,instancedMeshes:[]});if(e.levelOfDetail===W.c.Discarded)return Promise.resolve({modelIdentifier:e.modelIdentifier,metadata:t,levelOfDetail:W.c.Discarded,instancedMeshes:[],group:void 0});const n=await this._sectorFileProvider.getBinaryFile(e.modelBaseUrl,t.sectorFileName),r=new 
i.a,o=this._gltfSectorParser.parseSector(n),a=this._materialManager.getModelMaterials(e.modelIdentifier),s=[];return o.forEach(t=>{var n;const o=t.type,d=Ke(t.geometryBuffer,o,null!==(n=e.geometryClipBox)&&void 0!==n?n:void 0);if(d)switch(o){case Le.BoxCollection:case Le.CircleCollection:case Le.ConeCollection:case Le.EccentricConeCollection:case Le.EllipsoidSegmentCollection:case Le.GeneralCylinderCollection:case Le.GeneralRingCollection:case Le.QuadCollection:case Le.TorusSegmentCollection:case Le.TrapeziumCollection:case Le.NutCollection:s.push({type:o,geometryBuffer:d,instanceId:o.toString()});break;case Le.InstanceMesh:s.push({type:o,geometryBuffer:d,instanceId:t.instanceId});break;case Le.TriangleMesh:this.createMesh(r,t.geometryBuffer,a.triangleMesh);break;default:Object(i.l)(o)}}),{levelOfDetail:e.levelOfDetail,group:r,instancedMeshes:[],metadata:t,modelIdentifier:e.modelIdentifier,geometryBatchingQueue:s}}createTreeIndexSet(e){const t=e.attributes.treeIndex;o()(void 0!==t);const n=new Map;for(let e=0;e<t.count;e++)Object(i.p)(n,t.getX(e));return n}createMesh(e,t,n){const r=new l.Mesh(t,n);e.add(r),r.frustumCulled=!1,r.userData.treeIndices=this.createTreeIndexSet(t),void 0!==n.uniforms.inverseModelMatrix&&(r.onBeforeRender=()=>{n.uniforms.inverseModelMatrix.value.copy(r.matrixWorld).invert()})}clear(){}}
|
|
274
314
|
/*!
|
|
275
315
|
* Copyright 2021 Cognite AS
|
|
276
316
|
*/
|
|
277
|
-
class G extends r.d{constructor(e,t){super(G.classToken),this._indexSet=new o.e,this._client=e,this._model=t,this._fetchResultHelper=void 0}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=this._model;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const n=new B(e=>new o.h(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=n;const r={assetId:e.assetId,intersectsBoundingBox:function(e){if(void 0===e)return;const n=(new i.Box3).copy(e);return t.mapBoxFromModelToCdfCoordinates(n,n),{min:[n.min.x,n.min.y,n.min.z],max:[n.max.x,n.max.y,n.max.z]}}(e.boundingBox),limit:1e3},a=new o.e;this._indexSet=a,this._filter=e;const s=this._client.assetMappings3D.list(t.modelId,t.revisionId,r);await n.pageResults(a,s)&&(this._fetchResultHelper=void 0)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:L()(this._filter)}}}G.classToken="AssetNodeCollection";var U=n(25),V=n.n(U);
|
|
278
317
|
/*!
|
|
279
318
|
* Copyright 2021 Cognite AS
|
|
280
319
|
*/
|
|
281
|
-
class j extends r.d{constructor(e,t,n={}){super(j.classToken),this._indexSet=new o.e,this._filter={},this._client=e,this._modelId=t.modelId,this._revisionId=t.revisionId,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=new o.e,{requestPartitions:n}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const r=new B(e=>new o.h(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=r,this._indexSet=t;const i=V()(1,n+1).map(async o=>{const i=this._client.revisions3D.list3DNodes(this._modelId,this._revisionId,{properties:e,limit:1e3,sortByNodeId:!0,partition:`${o}/${n}`});return r.pageResults(t,i)});this._filter=e,this.notifyChanged(),await Promise.all(i)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:L()(this._filter),options:{...this._options}}}}j.classToken="PropertyFilterNodeCollection";class W extends r.d{constructor(e,t){super(W.classToken),this._innerCollection=t,this._innerCollection.on("changed",()=>{this._cachedIndexSet=void 0,this.notifyChanged()}),this._allTreeIndicesRange=new o.h(0,e.nodeCount)}get isLoading(){return this._innerCollection.isLoading}getIndexSet(){if(void 0===this._cachedIndexSet){const e=this._innerCollection.getIndexSet(),t=new o.e;t.addRange(this._allTreeIndicesRange),t.differenceWith(e),this._cachedIndexSet=t}return this._cachedIndexSet}serialize(){return{token:this.classToken,state:{innerCollection:this._innerCollection.serialize()}}}clear(){throw new Error("clear() is not supported")}}W.classToken="InvertedNodeCollection";
|
|
282
320
|
/*!
|
|
283
321
|
* Copyright 2021 Cognite AS
|
|
284
322
|
*/
|
|
285
|
-
class
|
|
323
|
+
class ot{constructor(e,t){this._materialManager=t,this._instancedGeometryMap=new Map,this._instancedAttributeMap=new Map,this._processedSectorMap=new Map,this._instancedMeshGroup=e}addInstanceMeshes(e,t,n){if(this._processedSectorMap.has(n))return;this._instancedGeometryMap.has(e.fileId)||this._instancedGeometryMap.set(e.fileId,{vertices:new l.Float32BufferAttribute(e.vertices.buffer,3),indices:new l.Uint32BufferAttribute(e.indices.buffer,1)});const r=this._instancedGeometryMap.get(e.fileId),o=this._materialManager.getModelMaterials(t).instancedMesh;for(const t of e.instances){const i=JSON.stringify([e.fileId,t.triangleOffset]);if(this._instancedAttributeMap.has(i)){const e=this._instancedAttributeMap.get(i),o=e.treeIndexBuffer.add(t.treeIndices),a=e.colorBuffer.add(t.colors),s=e.instanceMatrixBuffer.add(t.instanceMatrices);e.updateAttributes(),this.addBatchDescriptor(i,o.batchId,a.batchId,s.batchId,n),e.mesh.count=e.treeIndexBuffer.length,(o.bufferIsReallocated||a.bufferIsReallocated||s.bufferIsReallocated)&&this.recreateBufferGeometry(r,e,t,i)}else this.createInstance(t,r,o,i,n)}}removeSectorInstancedMeshes(e){const t=this._processedSectorMap.get(e);if(t){for(const n of t){const t=this._instancedAttributeMap.get(n.instanceIdentifier);if(void 0===t)throw new Error("Cannot resolve instance identifier for sector "+e);t.treeIndexBuffer.remove(n.treeIndicesbatchId),t.colorBuffer.remove(n.colorsBatchId),t.instanceMatrixBuffer.remove(n.instanceMatricesBatchId),t.updateAttributes(),t.mesh.count=t.treeIndexBuffer.length}this._processedSectorMap.delete(e)}}createInstancedBufferGeometry(e,t,n,r,o){const i=new l.InstancedBufferGeometry;i.setIndex(t),i.setAttribute("position",e);const a=new l.InstancedBufferAttribute(n.bufferView,1);i.setAttribute("a_treeIndex",a);const s=new l.InstancedBufferAttribute(r.bufferView,4,!0);i.setAttribute("a_color",s);const d=new 
l.InstancedBufferAttribute(o.bufferView,16);i.setAttribute("a_instanceMatrix",d);return[i,()=>{a.needsUpdate=!0,s.needsUpdate=!0,d.needsUpdate=!0}]}recreateBufferGeometry(e,t,n,r){const[o,i]=this.createInstancedBufferGeometry(e.vertices,e.indices,t.treeIndexBuffer,t.colorBuffer,t.instanceMatrixBuffer);o.setDrawRange(3*n.triangleOffset,3*n.triangleCount),t.mesh.geometry.dispose(),t.mesh.geometry=o,t.mesh.count=t.treeIndexBuffer.length,this._instancedAttributeMap.set(r,{mesh:t.mesh,treeIndexBuffer:t.treeIndexBuffer,colorBuffer:t.colorBuffer,instanceMatrixBuffer:t.instanceMatrixBuffer,updateAttributes:i})}createInstance(e,t,n,r,o){const a=new i.c(e.treeIndices.length,Float32Array),s=a.add(e.treeIndices),d=new i.c(e.colors.length,Uint8Array),c=d.add(e.colors),u=new i.c(e.instanceMatrices.length,Float32Array),p=u.add(e.instanceMatrices);this.addBatchDescriptor(r,s.batchId,c.batchId,p.batchId,o);const[m,h]=this.createInstancedBufferGeometry(t.vertices,t.indices,a,d,u);m.setDrawRange(3*e.triangleOffset,3*e.triangleCount);const f=new l.InstancedMesh(m,n,a.length);f.frustumCulled=!1,this._instancedAttributeMap.set(r,{mesh:f,treeIndexBuffer:a,colorBuffer:d,instanceMatrixBuffer:u,updateAttributes:h}),this._instancedMeshGroup.add(f),f.updateMatrixWorld(!0)}addBatchDescriptor(e,t,n,r,o){const i={instanceIdentifier:e,treeIndicesbatchId:t,colorsBatchId:n,instanceMatricesBatchId:r},a=this._processedSectorMap.get(o);a?a.push(i):this._processedSectorMap.set(o,[i])}}
|
|
324
|
+
/*!
|
|
325
|
+
* Copyright 2021 Cognite AS
|
|
326
|
+
*/class it{constructor(e,t){this._batchedGeometriesGroup=e,this._materials=t,this._instancedTypeMap=new Map,this._sectorMap=new Map}batchGeometries(e,t){void 0===this._sectorMap.get(t)&&e.forEach(e=>{const{type:n,geometryBuffer:r,instanceId:o}=e;this.processGeometries(r,o,n,t)})}removeSectorBatches(e){const t=this._sectorMap.get(e);void 0!==t&&(t.forEach(e=>{const[t,n,r]=e,o=this._instancedTypeMap.get(t);if(void 0===o)return;const{defragBuffer:i,mesh:a}=o,s=i.getRangeForBatchId(n);this.removeTreeIndicesFromMeshUserData(a,s),i.remove(n),this.updateInstanceAttributes(a,s),a.count-=r}),this._sectorMap.delete(e))}processGeometries(e,t,n,r){var a,s;const d=this.getAttributes(e,l.InterleavedBufferAttribute),c=this.getInstanceAttributesSharedView(d),u=this.getTreeIndexAttribute(d);o()(void 0!==t);const p=null!==(a=this._instancedTypeMap.get(t))&&void 0!==a?a:this.createInstanceMeshGeometry(e,n,t),m=c.buffer,{defragBuffer:h,mesh:f}=p,v=c.byteLength,x=c.byteOffset,g=new Uint8Array(m,x,v),{batchId:b,bufferIsReallocated:y,updateRange:_}=h.add(g);for(let e=0;e<u.count;e++)Object(i.p)(f.userData.treeIndices,u.getX(e));const T=null!==(s=this._sectorMap.get(r))&&void 0!==s?s:this.createSectorBatch(r);o()(d.length>0);const C=d[0].attribute.count;T.push([t,b,C]),y?this.reallocateBufferGeometry(f,h):this.updateInstanceAttributes(f,_),f.count+=C}updateInstanceAttributes(e,t){this.getAttributes(e.geometry,l.InterleavedBufferAttribute).forEach(e=>{const n=e.attribute;this.extendUpdateRange(n,t),n.data.needsUpdate=!0})}extendUpdateRange(e,t){const n=e.data.array.BYTES_PER_ELEMENT,r=t.byteOffset/n,o=t.byteCount/n,{offset:i,count:a}=e.data.updateRange;if(-1===a)return e.data.updateRange={offset:r,count:o},void(e.data.needsUpdate=!0);const s=Math.min(i,r),d=Math.max(i+a,r+o)-s;e.data.updateRange={offset:s,count:d}}reallocateBufferGeometry(e,t){const n=this.createDefragmentedBufferGeometry(e.geometry,t);e.geometry.dispose(),e.geometry=n}createSectorBatch(e){const t=[];return 
this._sectorMap.set(e,t),t}createInstanceMeshGeometry(e,t,n){const r=new i.c(64,Uint8Array),o=this.createDefragmentedBufferGeometry(e,r),a=this.getShaderMaterial(t,this._materials),s={defragBuffer:r,mesh:this.createInstanceMesh(this._batchedGeometriesGroup,o,a,n)};return this._instancedTypeMap.set(n,s),s}getInstanceAttributesSharedView(e){o()(e.length>0);const t=e[0].attribute.array;for(let n=1;n<e.length;n++){const r=e[n].attribute.array;o()(t.buffer.byteLength===r.buffer.byteLength)}return t}getTreeIndexAttribute(e){const t=e.filter(e=>"a_treeIndex"===e.name);return o()(t.length>0),t[0].attribute}removeTreeIndicesFromMeshUserData(e,t){const{byteOffset:n,byteCount:r}=t,a=this.getAttributes(e.geometry,l.InterleavedBufferAttribute),s=this.getTreeIndexAttribute(a),d=s.data.array,c=s.data.stride*d.BYTES_PER_ELEMENT;o()(n%c==0),o()(r%c==0);const u=(n+r)/c;for(let t=n/c;t<u;t++){const n=s.getX(t);o()(e.userData.treeIndices.has(n)),Object(i.m)(e.userData.treeIndices,n)}}createDefragmentedBufferGeometry(e,t){const n=this.getAttributes(e,l.InterleavedBufferAttribute),r=this.copyGeometryWithBufferAttributes(e);return n.forEach(e=>{const{name:n,attribute:o}=e,i=o.data.stride,a=o.array.BYTES_PER_ELEMENT,s=t.bufferView.buffer,d=it.TypedArrayViews.get(a),c=new l.InstancedInterleavedBuffer(new d(s),i);r.setAttribute(n,new l.InterleavedBufferAttribute(c,o.itemSize,o.offset,o.normalized))}),r}createInstanceMesh(e,t,n,r){const o=new l.InstancedMesh(t,n,0);return e.add(o),o.frustumCulled=!1,o.name=r,o.onBeforeRender=(e,t,r)=>{var i,a,s,d,l,c,u,p,m,h,f,v;null===(a=null===(i=n.uniforms.inverseModelMatrix)||void 0===i?void 0:i.value)||void 0===a||a.copy(o.matrixWorld).invert(),null===(d=null===(s=n.uniforms.modelMatrix)||void 0===s?void 0:s.value)||void 0===d||d.copy(o.matrixWorld),null===(c=null===(l=n.uniforms.viewMatrix)||void 0===l?void 0:l.value)||void 0===c||c.copy(r.matrixWorld).invert(),null===(p=null===(u=n.uniforms.projectionMatrix)||void 0===u?void 0:u.value)||void 
0===p||p.copy(r.projectionMatrix),null===(h=null===(m=n.uniforms.normalMatrix)||void 0===m?void 0:m.value)||void 0===h||h.copy(o.normalMatrix),null===(v=null===(f=n.uniforms.cameraPosition)||void 0===f?void 0:f.value)||void 0===v||v.copy(r.position)},o.userData.treeIndices=new Map,o.updateMatrixWorld(!0),o}getAttributes(e,t){return Object.entries(e.attributes).filter(e=>e[1]instanceof t).map(e=>({name:e[0],attribute:e[1]}))}copyGeometryWithBufferAttributes(e){const t=new l.BufferGeometry;return this.getAttributes(e,l.BufferAttribute).forEach(e=>{t.setAttribute(e.name,e.attribute)}),t.setIndex(e.getIndex()),t}getShaderMaterial(e,t){switch(e){case Le.BoxCollection:return t.box;case Le.CircleCollection:return t.circle;case Le.ConeCollection:return t.cone;case Le.EccentricConeCollection:return t.eccentricCone;case Le.EllipsoidSegmentCollection:return t.ellipsoidSegment;case Le.GeneralCylinderCollection:return t.generalCylinder;case Le.GeneralRingCollection:return t.generalRing;case Le.QuadCollection:return t.quad;case Le.TorusSegmentCollection:return t.torusSegment;case Le.TrapeziumCollection:return t.trapezium;case Le.NutCollection:return t.nut;case Le.TriangleMesh:return t.triangleMesh;case Le.InstanceMesh:return t.instancedMesh;default:Object(i.l)(e)}}}it.TypedArrayViews=new Map([[1,Uint8Array],[4,Float32Array]]);class at extends l.Object3D{constructor(e,t,n){super(),this.type="CadNode",this.name="Sector model",this._materialManager=t,this._sectorRepository=n;const r=new l.Group;r.name="InstancedMeshes";const o=new l.Group;o.name="Batched Geometry",this._instancedMeshManager=new ot(r,t);const i=t.getModelMaterials(e.modelIdentifier);this._geometryBatchingManager=new it(o,i);const a=new W.d(e);a.add(r),a.add(o),this._cadModelMetadata=e;const{scene:s}=e;this._sectorScene=s,this._rootSector=a,this.add(a),this.matrixAutoUpdate=!1,this.updateMatrixWorld(),this.setModelTransformation(e.modelMatrix)}get nodeTransformProvider(){return 
this._materialManager.getModelNodeTransformProvider(this._cadModelMetadata.modelIdentifier)}get nodeAppearanceProvider(){return this._materialManager.getModelNodeAppearanceProvider(this._cadModelMetadata.modelIdentifier)}get defaultNodeAppearance(){return this._materialManager.getModelDefaultNodeAppearance(this._cadModelMetadata.modelIdentifier)}set defaultNodeAppearance(e){this._materialManager.setModelDefaultNodeAppearance(this._cadModelMetadata.modelIdentifier,e)}get clippingPlanes(){return this._materialManager.clippingPlanes}set clippingPlanes(e){this._materialManager.clippingPlanes=e}get cadModelMetadata(){return this._cadModelMetadata}get cadModelIdentifier(){return this._cadModelMetadata.modelIdentifier}get sectorScene(){return this._sectorScene}get rootSector(){return this._rootSector}get materialManager(){return this._materialManager}set renderMode(e){this._materialManager.setRenderMode(e)}get renderMode(){return this._materialManager.getRenderMode()}loadSector(e){return this._sectorRepository.loadSector(e)}setModelTransformation(e){this._rootSector.setModelTransformation(e),this._cadModelMetadata.modelMatrix.copy(e)}getModelTransformation(e){return this._rootSector.getModelTransformation(e)}get prioritizedAreas(){return this.nodeAppearanceProvider.getPrioritizedAreas()}suggestCameraConfig(){const{position:e,target:t,near:n,far:r}=function(e){const t=new l.Vector3,n=new l.Vector3;let r=0;Object(i.w)(e,e=>(t.add(e.bounds.min),n.add(e.bounds.max),r+=1,!0)),t.divideScalar(r),n.divideScalar(r);const o=new l.Box3(t,n),a=o.getCenter(new l.Vector3),s=o.getSize(new l.Vector3);s.x*=-2,s.y*=-2,s.z*=2;const d=(new l.Vector3).addVectors(a,s);return{position:d,target:a,near:.1,far:12*d.distanceTo(a)}}(this._sectorScene.root),o=this.getModelTransformation(),a=e.clone(),s=t.clone();return a.applyMatrix4(o),s.applyMatrix4(o),{position:a,target:s,near:n,far:r}}updateInstancedMeshes(e,t,n){for(const r of 
e)this._instancedMeshManager.addInstanceMeshes(r,t,n)}discardInstancedMeshes(e){this._instancedMeshManager.removeSectorInstancedMeshes(e)}batchGeometry(e,t){this._geometryBatchingManager.batchGeometries(e,t)}removeBatchedSectorGeometries(e){this._geometryBatchingManager.removeSectorBatches(e)}clearCache(){this._sectorRepository.clear()}}var st=n(5);
|
|
286
327
|
/*!
|
|
287
328
|
* Copyright 2021 Cognite AS
|
|
288
|
-
*/class
|
|
329
|
+
*/class dt{constructor(e,t,n){this._materialManager=e,this._cadModelMetadataRepository=new W.a(t,n),this._v8SectorRepository=new Fe(n,e),this._gltfSectorRepository=new rt(n,e)}async createModel(e,t){const n=await this._cadModelMetadataRepository.loadData(e),r=function(e,t){if(null===t)return e;return new he(t).createClippedModel(e)}
|
|
289
330
|
/*!
|
|
290
331
|
* Copyright 2021 Cognite AS
|
|
291
|
-
*/
|
|
332
|
+
*/(n,function(e,t){if(void 0===e||void 0===e.boundingBox)return null;if(!e.isBoundingBoxInModelCoordinates)return e.boundingBox;const n=e.boundingBox.clone();return n.applyMatrix4(t.inverseModelMatrix),n}(t,n)),{modelIdentifier:o,scene:i,format:a,formatVersion:s}=r,d=this.getSectorRepository(a,s);this._materialManager.addModelMaterials(o,i.maxTreeIndex);const c=new at(r,this._materialManager,d);if(null!==r.geometryClipBox){const e=function(e,t){const n=e.min.clone().applyMatrix4(t.modelMatrix),r=e.max.clone().applyMatrix4(t.modelMatrix);return(new l.Box3).setFromPoints([n,r])}(r.geometryClipBox,r),t=new me.a(e).clippingPlanes;this._materialManager.setModelClippingPlanes(r.modelIdentifier,t)}return c}getSectorRepository(e,t){if(e===st.d.RevealCadModel&&8===t)return this._v8SectorRepository;if(e===st.d.GltfCadModel&&9===t)return this._gltfSectorRepository;throw new Error(`Model format [${e} v${t}] is not supported (only version 8 and 9 is supported)`)}dispose(){this._v8SectorRepository.clear(),this._gltfSectorRepository.clear()}}function lt(e){return Object(P.pipe)(Object(V.switchMap)(t=>function(e){return Object(P.of)(!1).pipe(Object(V.delay)(e),Object(V.startWith)(!0))}
|
|
292
333
|
/*!
|
|
293
334
|
* Copyright 2021 Cognite AS
|
|
294
|
-
*/
|
|
335
|
+
*/(e)),Object(V.distinctUntilChanged)())}function ct({models:e,loadingHints:t}){return e.length>0&&!0!==t.suspendLoading}var ut=n(32),pt=n.n(ut);
|
|
295
336
|
/*!
|
|
296
337
|
* Copyright 2021 Cognite AS
|
|
297
|
-
*/
|
|
338
|
+
*/
|
|
339
|
+
class mt{static async*raceUntilAllCompleted(e){const t=new Map(e.map(e=>[e,e.then(()=>[e])]));for(;t.size>0;){const[e]=await Promise.race(t.values());t.delete(e),yield e}}}
|
|
298
340
|
/*!
|
|
299
341
|
* Copyright 2021 Cognite AS
|
|
300
342
|
*/
|
|
301
|
-
class oe{constructor(e,t,n,r,o){this.version=e,this.maxTreeIndex=t,this.root=r,this.sectors=o,this.unit=n}get sectorCount(){return this.sectors.size}getSectorById(e){return this.sectors.get(e)}getAllSectors(){return[...this.sectors.values()]}getSectorsContainingPoint(e){const t=[];return Object(o.y)(this.root,n=>!!n.bounds.containsPoint(e)&&(t.push(n),!0)),t}getSectorsIntersectingBox(e){const t=[];return Object(o.y)(this.root,n=>!!n.bounds.intersectsBox(e)&&(t.push(n),!0)),t}getBoundsOfMostGeometry(){if(0===this.root.children.length)return this.root.bounds;const e=[],t=[];Object(o.y)(this.root,n=>(0===n.children.length&&(t.push(n.bounds.min.toArray(),n.bounds.max.toArray()),e.push(n.bounds,n.bounds)),!0));const n=Math.min(t.length,4),r=re()(t,n,"kmpp",10),a=new Array(r.idxs.length).fill(0),s=a.map(e=>new i.Box3);r.idxs.map(e=>a[e]++);const d=a.reduce((e,t,n)=>(t>e.count&&(e.count=t,e.idx=n),e),{count:0,idx:-1}).idx;r.idxs.forEach((t,n)=>{a[t]++,s[t].expandByPoint(e[n].min),s[t].expandByPoint(e[n].max)});const l=s.filter((e,t)=>!(t===d||!e.intersectsBox(s[d])));if(l.length>0){const e=s[d].clone();return l.forEach(t=>{e.expandByPoint(t.min),e.expandByPoint(t.max)}),e}return s[d]}getSectorsIntersectingFrustum(e,t){const n=(new i.Matrix4).multiplyMatrices(e,t),r=(new i.Frustum).setFromProjectionMatrix(n),a=[];return Object(o.y)(this.root,e=>!!r.intersectsBox(e.bounds)&&(a.push(e),!0)),a}}
|
|
302
343
|
/*!
|
|
303
344
|
* Copyright 2021 Cognite AS
|
|
304
|
-
*/
|
|
345
|
+
*/
|
|
346
|
+
const ht=new l.Box3(new l.Vector3(-1,-1,-1),new l.Vector3(1,1,1)),ft=new l.Box3;function vt(e,t){return function(e,t,n){const r=void 0!==n?n:new l.Box3;r.makeEmpty(),Object(i.x)(e,e=>{e.project(t),r.expandByPoint(e)})}(t,e,ft),ft.intersect(ht),ft.isEmpty()?0:.25*(ft.max.x-ft.min.x)*(ft.max.y-ft.min.y)}
|
|
305
347
|
/*!
|
|
306
348
|
* Copyright 2021 Cognite AS
|
|
307
|
-
*/
|
|
349
|
+
*/const xt={transformedBounds:new l.Box3};class gt{constructor(e){this._minSectorDistance=1/0,this._maxSectorDistance=-1/0,this._camera=e;const{near:t,far:n}=e,r=n-t;this._modifiedFrustums=[{near:t,far:t+.05*r,weight:.1},{near:t+.05*r,far:t+.4*r,weight:.7},{near:t+.4*r,far:t+1*r,weight:.2}].map(t=>{const n=function(e,t,n){const r=e.clone();return r.near=t,r.far=n,r.updateProjectionMatrix(),r.projectionMatrix}(e,t.near,t.far),r=(new l.Matrix4).multiplyMatrices(n,this._camera.matrixWorldInverse),o=(new l.Frustum).setFromProjectionMatrix(r);return{...t,frustum:o}})}addCandidateSectors(e,t){const{minDistance:n,maxDistance:r}=e.reduce((e,n)=>{const r=this.distanceToCamera(n,t);return e.maxDistance=Math.max(e.maxDistance,r),e.minDistance=Math.min(e.minDistance,r),e},{minDistance:1/0,maxDistance:-1/0});this._minSectorDistance=n,this._maxSectorDistance=r}computeTransformedSectorBounds(e,t,n){n.copy(e),n.applyMatrix4(t)}computeDistanceToCameraWeight(e){const t=this._minSectorDistance,n=this._maxSectorDistance;return 1-(e.distanceToPoint(this._camera.position)-t)/(n-t)}computeScreenAreaWeight(e){return e.distanceToPoint(this._camera.position)>0?vt(this._camera,e):1}computeFrustumDepthWeight(e){return this._modifiedFrustums.reduce((t,n)=>{const{frustum:r,weight:o}=n;return t+(r.intersectsBox(e)?o:0)},0)}computeSectorTreePlacementWeight(e){return 1===e.depth?1:1/3}computeMaximumNodeScreenSizeWeight(e,t){const n=e.distanceToPoint(this._camera.position);if(0===n)return 1;const r=this._camera.getWorldDirection(new l.Vector3).multiplyScalar(n).add(this._camera.position),o=r.clone().addScaledVector(this._camera.up,t);r.project(this._camera),o.project(this._camera);const i=o.distanceToSquared(r)/4;return Math.min(1,i/.05)}computePrioritizedAreaWeight(e,t){return t.reduce((t,n)=>e.intersectsBox(n.area)?Math.max(n.extraPriority,t):t,0)}distanceToCamera(e,t){const{transformedBounds:n}=xt;return n.copy(e.bounds),n.applyMatrix4(t),n.distanceToPoint(this._camera.position)}}function 
bt(e,t){const n=e;switch(t){case W.c.Detailed:return{downloadSize:n.downloadSize,drawCalls:n.estimatedDrawCallCount,renderCost:n.estimatedRenderCost};case W.c.Simple:throw new Error("Not supported");default:throw new Error("Can't compute cost for lod "+t)}}function yt(e,t){e.downloadSize+=t.downloadSize,e.drawCalls+=t.drawCalls,e.renderCost+=t.renderCost}class _t{computeSpentBudget(){const e=this.modelsMetadata,t=this.collectWantedSectors().filter(e=>e.levelOfDetail!==W.c.Discarded),n=e.reduce((e,t)=>e+t.scene.sectorCount,0),r=t.length,o=t.filter(e=>e.levelOfDetail===W.c.Simple).length,i=t.filter(e=>!Number.isFinite(e.priority)).length,a=t.filter(e=>Number.isFinite(e.priority)&&e.priority>0).reduce((e,t)=>e+t.priority,0);return{drawCalls:this.totalCost.drawCalls,downloadSize:this.totalCost.downloadSize,renderCost:this.totalCost.renderCost,totalSectorCount:n,forcedDetailedSectorCount:i,loadedSectorCount:r,simpleSectorCount:o,detailedSectorCount:r-o,accumulatedPriority:a}}}
|
|
350
|
+
/*!
|
|
351
|
+
* Copyright 2021 Cognite AS
|
|
352
|
+
*/class Tt extends _t{constructor(e){super(),this._totalCost={downloadSize:0,drawCalls:0,renderCost:0},this._models=new Map,this.determineSectorCost=e}get totalCost(){return{...this._totalCost}}get modelsMetadata(){return Array.from(this._models.values()).map(e=>e.modelMetadata)}initializeScene(e){o()(9===e.scene.version,"Only sector version 9 is supported, but got "+e.scene.version),this._models.set(e.modelIdentifier,{modelMetadata:e,sectorIds:new Map})}markSectorDetailed(e,t,n){const r=this._models.get(e.modelIdentifier);o()(!!r,"Could not find sector tree for "+e.modelIdentifier);const{sectorIds:i}=r,a=i.get(t);if(void 0!==a)i.set(t,Math.max(n,a));else{const r=e.scene.getSectorById(t);o()(void 0!==r);const a=this.determineSectorCost(r,W.c.Detailed);yt(this._totalCost,a),i.set(t,n)}}isWithinBudget(e){return this._totalCost.renderCost<e.maximumRenderCost}collectWantedSectors(){const e=new Array;for(const[t,n]of this._models){const{modelMetadata:r,sectorIds:o}=n,a=new Map;Object(i.w)(r.scene.root,e=>(a.set(e.id,Ct(t,r,e,W.c.Discarded,-1,r.geometryClipBox)),!0));for(const[e,n]of o){const o=r.scene.getSectorById(e),i=Ct(t,r,o,W.c.Detailed,n,r.geometryClipBox);a.set(e,i)}a.forEach(t=>e.push(t))}return e.sort((e,t)=>e.levelOfDetail===W.c.Discarded?-1:t.levelOfDetail===W.c.Discarded?1:t.priority-e.priority),e}clear(){this._models.clear()}}function Ct(e,t,n,r,o,i){return{modelIdentifier:e,modelBaseUrl:t.modelBaseUrl,geometryClipBox:i,levelOfDetail:r,metadata:n,priority:o}}var wt=n(10);
|
|
353
|
+
/*!
|
|
354
|
+
* Copyright 2021 Cognite AS
|
|
355
|
+
*/class Mt{constructor(e,t){this.sectors=[],this._totalCost={downloadSize:0,drawCalls:0,renderCost:0},this.determineSectorCost=t,Object(i.w)(e,e=>(this.sectors.length=Math.max(this.sectors.length,e.id),this.sectors[e.id]={sector:e,parentIndex:-1,priority:-1,cost:{downloadSize:0,drawCalls:0,renderCost:0},lod:W.c.Discarded},!0));for(let e=0;e<this.sectors.length;++e){const t=this.sectors[e];if(void 0!==t){const n=t.sector.children.map(e=>e.id);for(const t of n)this.sectors[t].parentIndex=e}}e.facesFile.fileName&&this.setSectorLod(e.id,W.c.Simple)}get totalCost(){return this._totalCost}getWantedSectorCount(){return this.sectors.reduce((e,t)=>e=t.lod!==W.c.Discarded?e+1:e,0)}toWantedSectors(e,t,n){return this.sectors.filter(e=>void 0!==e).map(r=>({modelIdentifier:e,modelBaseUrl:t,levelOfDetail:r.lod,metadata:r.sector,priority:r.priority,geometryClipBox:n})).sort((e,t)=>t.priority-e.priority)}markSectorDetailed(e,t){if(this.setSectorPriority(e,t),this.sectors[e].lod===W.c.Detailed)return;let n=this.sectors[e];for(;;){switch(n.lod){case W.c.Simple:this.replaceSimpleWithDetailed(n.sector.id);break;case W.c.Discarded:this.setSectorLod(n.sector.id,W.c.Detailed)}if(-1===n.parentIndex)break;n=this.sectors[n.parentIndex]}this.markAllDiscardedChildrenAsSimple(e)}replaceSimpleWithDetailed(e){It(this.sectors[e].lod===W.c.Simple,`Sector ${e} must be a Simple-sector, but got ${this.sectors[e].lod}`),this.setSectorLod(e,W.c.Detailed),this.markAllDiscardedChildrenAsSimple(e)}markAllDiscardedChildrenAsSimple(e){for(const t of this.sectors[e].sector.children)this.getSectorLod(t.id)===W.c.Discarded&&null!==t.facesFile.fileName&&this.setSectorLod(t.id,W.c.Simple)}setSectorLod(e,t){var 
n,r;It(t!==W.c.Simple||null!==this.sectors[e].sector.facesFile.fileName),this.sectors[e].lod=t,n=this._totalCost,r=this.sectors[e].cost,n.downloadSize-=r.downloadSize,n.drawCalls-=r.drawCalls,n.renderCost-=r.renderCost,this.sectors[e].cost=this.determineSectorCost(this.sectors[e].sector,t),yt(this._totalCost,this.sectors[e].cost)}setSectorPriority(e,t){this.sectors[e].priority=t}getSectorLod(e){return this.sectors[e].lod}}function It(e,t="assertion hit"){e||wt.a.error("[ASSERT]",t)}
|
|
356
|
+
/*!
|
|
357
|
+
* Copyright 2021 Cognite AS
|
|
358
|
+
*/class St extends _t{constructor(e){super(),this._takenSectorTrees=new Map,this.determineSectorCost=e}get totalCost(){const e={downloadSize:0,drawCalls:0,renderCost:0};return this._takenSectorTrees.forEach(({sectorTree:t})=>{yt(e,t.totalCost)}),e}get modelsMetadata(){return Array.from(this._takenSectorTrees.values()).map(e=>e.modelMetadata)}initializeScene(e){o()(8===e.scene.version,"Only sector version 8 is supported, but got "+e.scene.version);const t=e.scene.root;this._takenSectorTrees.set(e.modelIdentifier,{sectorTree:new Mt(t,this.determineSectorCost),modelMetadata:e})}getWantedSectorCount(){let e=0;return this._takenSectorTrees.forEach(({sectorTree:t})=>{e+=t.getWantedSectorCount()}),e}markSectorDetailed(e,t,n){const r=this._takenSectorTrees.get(e.modelIdentifier);o()(!!r,`Could not find sector tree for ${e.modelIdentifier} (have trees ${Array.from(this._takenSectorTrees.keys()).join(", ")})`);const{sectorTree:i}=r;i.markSectorDetailed(t,n)}isWithinBudget(e){return this.totalCost.renderCost<e.maximumRenderCost}collectWantedSectors(){const e=new Array;for(const[t,{sectorTree:n,modelMetadata:r}]of this._takenSectorTrees)e.push(...n.toWantedSectors(t,r.modelBaseUrl,r.geometryClipBox));return e.sort((e,t)=>t.priority-e.priority),e}clear(){this._takenSectorTrees.clear()}}
|
|
308
359
|
/*!
|
|
309
360
|
* Copyright 2021 Cognite AS
|
|
310
361
|
*/
|
|
311
|
-
const de=(new i.Matrix4).set(1,0,0,0,0,0,1,0,0,-1,0,0,0,0,0,1);function le(e,t){switch(t){case te.RevealCadModel:e.premultiply(de);break;case te.EptPointCloud:break;default:throw new Error("Unknown model format '"+t)}}
|
|
312
362
|
/*!
|
|
313
363
|
* Copyright 2021 Cognite AS
|
|
314
|
-
*/class
|
|
364
|
+
*/class Pt{constructor(e){this._determineSectorCost=(null==e?void 0:e.determineSectorCost)||bt}determineSectors(e){const t=new Tt(this._determineSectorCost),{cadModelsMetadata:n,camera:r}=e,o=r.matrixWorldInverse,a=r.projectionMatrix,s=new gt(r),d=new l.Box3,c=function(e,t,n,r){return e.reduce((e,o)=>{const a=function(e,t,n,r,o){if(9!==r.version)throw new Error("Expected model version 9, but got "+r.version);const a=new l.Matrix4;a.multiplyMatrices(e,n);const s=r.getSectorsIntersectingFrustum(t,a).map(e=>e);if(o.length<=0)return s;const d=new l.Box3;return s.filter(e=>{d.copy(e.bounds),d.applyMatrix4(n);return o.every(e=>Object(i.q)(d,e))})}(t,n,o.modelMatrix,o.scene,r.clippingPlanes);return e.set(o,a),e},new Map)}(n,o,a,e);!function(e,t,n){for(const[r,o]of e)t.initializeScene(r),n.addCandidateSectors(o,r.modelMatrix)}(c,t,s);const u=function(e,t,n,r){const o=new Array;for(const[i,a]of e)a.forEach(e=>{const a=e;t.computeTransformedSectorBounds(a.bounds,i.modelMatrix,n);const s=Nt(t,a,n,r);o.push({model:i,sectorId:a.id,priority:s})});return o.sort((e,t)=>t.priority-e.priority),o}(c,s,d,e.prioritizedAreas),p=function(e,t,n){let r=0;for(let o=0;e.isWithinBudget(t.budget)&&o<n.length;++o){const{model:t,sectorId:i,priority:a}=n[o];e.markSectorDetailed(t,i,a),r=o}return r}(t,e,u);wt.a.debug("Scheduled",p,"of",u.length,"candidates");const m=t.collectWantedSectors(),h=t.computeSpentBudget();return wt.a.debug("Budget:",{...e.budget}),wt.a.debug("Spent:",{...h}),{spentBudget:h,wantedSectors:m}}filterSectorsToLoad(e,t){return Promise.resolve(t)}dispose(){}}function Nt(e,t,n,r){return 2*e.computeSectorTreePlacementWeight(t)+1*e.computeDistanceToCameraWeight(n)+.3*e.computeScreenAreaWeight(n)+.2*e.computeFrustumDepthWeight(n)+1*(void 0!==t.maxDiagonalLength?e.computeMaximumNodeScreenSizeWeight(n,t.maxDiagonalLength):1)+1*e.computePrioritizedAreaWeight(n,r)}
|
|
365
|
+
/*!
|
|
366
|
+
* Copyright 2021 Cognite AS
|
|
367
|
+
*/class At{constructor(e,t,n,r,o){this._v8SectorCuller=e,this._gltfSectorCuller=new Pt,this._modelStateHandler=t,this._collectStatisticsCallback=n,this._progressCallback=r,this._continuousModelStreaming=o}async*loadSectors(e){if(!this._continuousModelStreaming&&e.cameraInMotion)return[];const t=e.models,n={...e,cadModelsMetadata:t.filter(e=>e.visible).map(e=>e.cadModelMetadata)};if(n.cadModelsMetadata.length<=0)return[];const r=this.getSectorCuller(n),o=r.determineSectors(n);this._collectStatisticsCallback(o.spentBudget);const i=this._modelStateHandler.hasStateChanged.bind(this._modelStateHandler),a=o.wantedSectors.filter(i),s=new Rt(this._progressCallback);s.start(a.length);for(const e of pt()(a,20)){const o=await this.filterSectors(n,e,r,s),i=this.startLoadingBatch(o,s,t);for await(const e of mt.raceUntilAllCompleted(i))this._modelStateHandler.updateState(e),yield e}}getSectorCuller(e){if((t=e.cadModelsMetadata[0]).format===st.d.RevealCadModel&&8===t.formatVersion)return this._v8SectorCuller;if(function(e){return e.format===st.d.GltfCadModel&&9===e.formatVersion}
|
|
315
368
|
/*!
|
|
316
369
|
* Copyright 2021 Cognite AS
|
|
317
|
-
*/
|
|
370
|
+
*/(e.cadModelsMetadata[0]))return this._gltfSectorCuller;throw new Error("No supported sector culler for format "+e.cadModelsMetadata[0].format);var t}async filterSectors(e,t,n,r){const o=await n.filterSectorsToLoad(e,t);return r.reportNewSectorsCulled(t.length-o.length),o}startLoadingBatch(e,t,n){return e.map(async e=>{try{return n.filter(t=>t.cadModelMetadata.modelIdentifier===e.modelIdentifier)[0].loadSector(e)}catch(t){return wt.a.error("Failed to load sector",e,"error:",t),{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:W.c.Discarded,group:void 0,instancedMeshes:void 0}}finally{t.reportNewSectorsLoaded(1)}})}}class Rt{constructor(e){this._sectorsScheduled=0,this._sectorsLoaded=0,this._sectorsCulled=0,this._progressCallback=e}start(e){this._sectorsScheduled=e,this._sectorsLoaded=0,this._sectorsCulled=0,this.triggerCallback()}reportNewSectorsLoaded(e){this._sectorsLoaded+=e,this.triggerCallback()}reportNewSectorsCulled(e){this._sectorsCulled+=e,this._sectorsLoaded+=e,this.triggerCallback()}triggerCallback(){this._progressCallback(this._sectorsLoaded,this._sectorsScheduled,this._sectorsCulled)}}const Dt=Object(i.r)()?{highDetailProximityThreshold:5,maximumRenderCost:7e6}:{highDetailProximityThreshold:10,maximumRenderCost:15e6};
|
|
318
371
|
/*!
|
|
319
372
|
* Copyright 2021 Cognite AS
|
|
320
|
-
*/
|
|
373
|
+
*/class Et{constructor(){this._sceneModelState={}}hasStateChanged(e){const t=this._sceneModelState[e.modelIdentifier];o()(void 0!==t,`Model ${e.modelIdentifier} has not been added`);const n=t[e.metadata.id];return void 0!==n?n!==e.levelOfDetail:e.levelOfDetail!==W.c.Discarded}addModel(e){o()(void 0===this._sceneModelState[e],`Model ${e} is already added`),this._sceneModelState[e]={}}removeModel(e){o()(void 0!==this._sceneModelState[e],`Model ${e} is not added`),delete this._sceneModelState[e]}updateState(e){if(void 0===this._sceneModelState[e.modelIdentifier])return;const t=this._sceneModelState[e.modelIdentifier];e.levelOfDetail===W.c.Discarded?delete t[e.metadata.id]:t[e.metadata.id]=e.levelOfDetail}}
|
|
321
374
|
/*!
|
|
322
375
|
* Copyright 2021 Cognite AS
|
|
323
|
-
*/class
|
|
376
|
+
*/const Bt={isLoading:!1,itemsLoaded:0,itemsRequested:0,itemsCulled:0};class Ot{constructor(e,t=!1){this._cameraSubject=new P.Subject,this._clippingPlaneSubject=new P.Subject,this._loadingHintsSubject=new P.Subject,this._modelSubject=new P.Subject,this._budgetSubject=new P.Subject,this._progressSubject=new P.BehaviorSubject(Bt),this._sectorCuller=e,this._modelStateHandler=new Et,this._budget=Dt,this._lastSpent={downloadSize:0,drawCalls:0,renderCost:0,loadedSectorCount:0,simpleSectorCount:0,detailedSectorCount:0,forcedDetailedSectorCount:0,totalSectorCount:0,accumulatedPriority:0};const n=Object(P.combineLatest)([Object(P.combineLatest)([this._loadingHintsSubject.pipe(Object(V.startWith)({})),this._budgetSubject.pipe(Object(V.startWith)(this._budget))]).pipe(Object(V.map)(zt)),Object(P.combineLatest)([this._cameraSubject.pipe(Object(V.auditTime)(500)),this._cameraSubject.pipe(Object(V.auditTime)(250),lt(600))]).pipe(Object(V.map)(Ft)),Object(P.combineLatest)([this._clippingPlaneSubject.pipe(Object(V.startWith)([]))]).pipe(Object(V.map)(Lt)),this.loadingModelObservable()]),r=new At(e,this._modelStateHandler,e=>{this._lastSpent=e},(e,t,n)=>{const r={isLoading:t>e,itemsRequested:t,itemsLoaded:e,itemsCulled:n};this._progressSubject.next(r)},t);this._updateObservable=n.pipe(Object(V.observeOn)(P.asyncScheduler),Object(V.auditTime)(250),Object(V.map)(kt),Object(V.filter)(ct),Object(V.mergeMap)(async e=>async function*(e){for await(const t of r.loadSectors(e))yield t}(e)),Object(V.mergeMap)(e=>e))}dispose(){this._sectorCuller.dispose()}updateCamera(e){this._cameraSubject.next(e),this._progressSubject.next(Bt)}set clippingPlanes(e){this._clippingPlaneSubject.next(e)}get budget(){return this._budget}set budget(e){this._budget=e,this._budgetSubject.next(e)}get lastBudgetSpendage(){return 
this._lastSpent}addModel(e){this._modelStateHandler.addModel(e.cadModelMetadata.modelIdentifier),this._modelSubject.next({model:e,operation:"add"})}removeModel(e){this._modelStateHandler.removeModel(e.cadModelMetadata.modelIdentifier),this._modelSubject.next({model:e,operation:"remove"})}updateLoadingHints(e){this._loadingHintsSubject.next(e)}consumedSectorObservable(){return this._updateObservable.pipe(Object(V.share)())}getLoadingStateObserver(){return this._progressSubject}loadingModelObservable(){return this._modelSubject.pipe(Object(V.scan)((e,t)=>{const{model:n,operation:r}=t;switch(r){case"add":return e.push(n),e;case"remove":return e.filter(e=>e.cadModelMetadata.modelIdentifier!==n.cadModelMetadata.modelIdentifier);default:Object(i.l)(r)}},[]))}}function zt([e,t]){return{loadingHints:e,budget:t}}function Ft([e,t]){return{camera:e,cameraInMotion:t}}function Lt([e]){return{clippingPlanes:e}}function kt([e,t,n,r]){return{...t,...e,...n,prioritizedAreas:r.flatMap(e=>e.prioritizedAreas),models:r}}
|
|
324
377
|
/*!
|
|
325
378
|
* Copyright 2021 Cognite AS
|
|
326
|
-
*/class
|
|
379
|
+
*/class Gt{constructor(e){this.options={renderer:e.renderer,determineSectorCost:e&&e.determineSectorCost?e.determineSectorCost:Vt,logCallback:e&&e.logCallback?e.logCallback:()=>{},coverageUtil:e.coverageUtil},this.takenSectors=new St(this.options.determineSectorCost)}dispose(){this.options.coverageUtil.dispose()}determineSectors(e){const t=this.update(e.camera,e.cadModelsMetadata,e.clippingPlanes,e.budget),n=t.collectWantedSectors(),r=t.computeSpentBudget(),o=(100*(r.loadedSectorCount-r.loadedSectorCount)/r.totalSectorCount).toPrecision(3),i=(100*r.loadedSectorCount/r.totalSectorCount).toPrecision(3);return this.log(`Scene: ${r.loadedSectorCount} (${r.forcedDetailedSectorCount} required, ${r.totalSectorCount} sectors, ${i}% of all sectors - ${o}% detailed)`),{spentBudget:r,wantedSectors:n}}filterSectorsToLoad(e,t){const n=this.options.coverageUtil.cullOccludedSectors(e.camera,t);return Promise.resolve(n)}update(e,t,n,r){const{coverageUtil:o}=this.options,i=this.takenSectors;i.clear(),t.forEach(e=>i.initializeScene(e)),o.setModels(t),o.setClipping(n);const a=o.orderSectorsByVisibility(e);this.addHighDetailsForNearSectors(e,t,r,i,n);let s=0;const d=a.length;let l=0;for(l=0;l<d&&i.isWithinBudget(r);l++){const e=a[l];i.markSectorDetailed(e.model,e.sectorId,e.priority),s+=e.priority}return this.log(`Retrieving ${l} of ${d} (last: ${a.length>0?a[l-1]:null})`),this.log(`Total scheduled: ${i.getWantedSectorCount()} of ${d} (cost: ${i.totalCost.renderCost}/${r.maximumRenderCost}, priority: ${s})`),i}addHighDetailsForNearSectors(e,t,n,r,o){const i=e.clone(!0);i.far=n.highDetailProximityThreshold,i.updateProjectionMatrix();const a=i.matrixWorldInverse,s=i.projectionMatrix,d=new l.Matrix4;t.forEach(e=>{d.multiplyMatrices(a,e.modelMatrix);let t=e.scene.getSectorsIntersectingFrustum(s,d).map(e=>e);null!=o&&o.length>0&&(t=this.testForClippingOcclusion(t,o,e.modelMatrix)),this.markSectorsAsDetailed(t,r,e)})}testForClippingOcclusion(e,t,n){const r=[],o=new l.Box3;for(let 
a=0;a<e.length;a++){o.copy(e[a].bounds),o.applyMatrix4(n);t.every(e=>Object(i.q)(o,e))&&r.push(e[a])}return r}markSectorsAsDetailed(e,t,n){for(let r=0;r<e.length;r++)t.markSectorDetailed(n,e[r].id,1/0)}log(e,...t){this.options.logCallback(e,...t)}}function Vt(e,t){switch(t){case W.c.Detailed:return{downloadSize:e.indexFile.downloadSize,drawCalls:e.estimatedDrawCallCount,renderCost:e.estimatedRenderCost};case W.c.Simple:return{downloadSize:e.facesFile.downloadSize,drawCalls:1,renderCost:Math.ceil(e.facesFile.downloadSize/100)};default:throw new Error("Can't compute cost for lod "+t)}}var Ut=n(6),jt=n.n(Ut);
|
|
327
380
|
/*!
|
|
328
381
|
* Copyright 2021 Cognite AS
|
|
329
382
|
*/
|
|
383
|
+
const Wt={defines:{COGNITE_COLOR_BY_TREE_INDEX:!1}},qt={simpleMesh:{fragment:jt()(n(35).default),vertex:jt()(n(36).default)},detailedMesh:{fragment:jt()(n(37).default),vertex:jt()(n(38).default)},instancedMesh:{fragment:jt()(n(39).default),vertex:jt()(n(40).default)},boxPrimitive:{fragment:jt()(n(27).default),vertex:jt()(n(28).default)},circlePrimitive:{fragment:jt()(n(41).default),vertex:jt()(n(42).default)},conePrimitive:{fragment:jt()(n(43).default),vertex:jt()(n(44).default)},eccentricConePrimitive:{fragment:jt()(n(45).default),vertex:jt()(n(46).default)},ellipsoidSegmentPrimitive:{fragment:jt()(n(47).default),vertex:jt()(n(48).default)},generalCylinderPrimitive:{fragment:jt()(n(49).default),vertex:jt()(n(50).default)},generalRingPrimitive:{fragment:jt()(n(51).default),vertex:jt()(n(52).default)},nutPrimitive:{fragment:jt()(n(27).default),vertex:jt()(n(28).default)},quadPrimitive:{fragment:jt()(n(27).default),vertex:jt()(n(28).default)},torusSegmentPrimitive:{fragment:jt()(n(53).default),vertex:jt()(n(54).default)},trapeziumPrimitive:{fragment:jt()(n(55).default),vertex:jt()(n(56).default)}},Ht={fragment:jt()(n(57).default),vertex:jt()(n(58).default)},Kt={fragment:jt()(n(59).default),vertex:jt()(n(60).default)},Xt={fragment:jt()(n(61).default),vertex:jt()(n(30).default)},Yt={fragment:jt()(n(62).default),vertex:jt()(n(30).default)},Zt={fragment:jt()(n(63).default),vertex:jt()(n(64).default)};const Qt=new l.Quaternion;class Jt{constructor(e){this.sectorIdOffset=0,this.scene=new l.Scene,this.containers=new Map,this.buffers={size:new l.Vector2,rtBuffer:new Uint8Array,sectorVisibilityBuffer:[]},this.coverageMaterial=new l.ShaderMaterial({vertexShader:Zt.vertex,fragmentShader:Zt.fragment,clipping:!0,side:l.DoubleSide,glslVersion:l.GLSL3}),this._ensureBuffersCorrectSizeVars={size:new l.Vector2},this._renderer=e.renderer,this._alreadyLoadedProvider=e.occludingGeometryProvider,this.renderTarget=new 
l.WebGLRenderTarget(1,1,{generateMipmaps:!1,type:l.UnsignedByteType,format:l.RGBAFormat,stencilBuffer:!1})}dispose(){this._renderer.dispose()}get renderer(){return this._renderer}createDebugCanvas(e){if(this._debugImageElement)throw new Error("createDebugCanvas() can only be called once");const t=e?e.width:this.renderTarget.width,n=e?e.height:this.renderTarget.height;return this._debugImageElement=document.createElement("img"),this._debugImageElement.style.width=t+"px",this._debugImageElement.style.height=n+"px",this._debugImageElement}setModels(e){const t=new Set;for(const n of e){const e=n.modelIdentifier;t.add(e);const r=this.containers.get(e);r?this.updateModel(r,n):this.addModel(n)}const n=new Set(this.containers.keys()),r=new Set([...n].filter(e=>!t.has(e)));for(const e of r)this.removeModel(e)}setClipping(e){this.coverageMaterial.clippingPlanes=e}cullOccludedSectors(e,t){try{this.setAllSectorsVisible(!1),this.setSectorsVisibility(t,!0);const n=this.orderSectorsByVisibility(e);return t.filter(t=>{const r=this.containers.get(t.modelIdentifier);if(void 0===r)throw new Error(`Model ${t.modelIdentifier} is not registered`);const i=t.metadata;o()(void 0!==i,i+" is not of type supported type V8SectorMetadata");const a=function(e,t,n){const{sectorBounds:r}=$t;return r.copy(t.bounds),r.applyMatrix4(e.modelMatrix),r.containsPoint(n)}(r.model,i,e.position);return n.some(e=>e.model.modelIdentifier===t.modelIdentifier&&e.sectorId===t.metadata.id)||a})}finally{this.setAllSectorsVisible(!0)}}orderSectorsByVisibility(e){this._debugImageElement&&(this.renderSectors(null,e),this._debugImageElement.src=this._renderer.domElement.toDataURL()),this.ensureBuffersCorrectSize(),this.renderSectors(this.renderTarget,e),this._renderer.readRenderTargetPixels(this.renderTarget,0,0,this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer);const 
t=this.unpackSectorVisibility(this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer),n=t.reduce((e,t)=>t.weight+e,0);return t.filter(e=>e.weight>0).sort((e,t)=>e&&t?t.weight-e.weight:e?-1:t?1:0).map(e=>{const t=this.findSectorContainer(e.sectorIdWithOffset),r=e.sectorIdWithOffset-t.sectorIdOffset;return{model:t.model,sectorId:r,priority:e.weight/n,depth:e.distance}})}ensureBuffersCorrectSize(){const{size:e}=this._ensureBuffersCorrectSizeVars;if(this._renderer.getSize(e),!this.buffers.size.equals(e)){const t=Math.max(Math.floor(e.width*Jt.CoverageRenderTargetScalingFactor),64),n=Math.max(Math.floor(e.height*Jt.CoverageRenderTargetScalingFactor),64);this.renderTarget.setSize(t,n),this.buffers.rtBuffer.length<4*t*n&&(this.buffers.rtBuffer=new Uint8Array(4*t*n)),this.buffers.size.copy(e)}}renderSectors(e,t){const n=new i.j(this._renderer);try{n.localClippingEnabled=!0,n.setRenderTarget(e),n.setClearColor("#FFFFFF",1),n.autoClear=!1,n.setSize(this.buffers.size.width,this.buffers.size.height),this._renderer.clear(!0,!0),this._alreadyLoadedProvider.renderOccludingGeometry(e,t),this._renderer.render(this.scene,t)}finally{n.resetState()}}setAllSectorsVisible(e){const t=e?1:0;this.containers.forEach(e=>{for(let n=0;n<e.sectors.length;++n){const r=e.sectors[n].id,o=e.sectorIndexById[r];e.attributesValues[5*o+4]=t}e.attributesBuffer.needsUpdate=!0})}setSectorsVisibility(e,t){const n=t?1:0;e.forEach(e=>{const t=e.metadata.id,r=this.containers.get(e.modelIdentifier);if(void 0===r)throw new Error(`Sector ${e} is from a model not added`);const o=r.sectorIndexById[t];r.attributesValues[5*o+4]=n,r.attributesBuffer.needsUpdate=!0})}removeModel(e){const t=this.containers.get(e);if(!t)throw new Error(`Could not find model '${e}'`);t.mesh.geometry.dispose(),this.deleteSectorsFromBuffers(t.sectorIdOffset,t.lastSectorIdWithOffset),this.scene.remove(t.renderable),this.containers.delete(e)}deleteSectorsFromBuffers(e,t){const n=this.buffers.sectorVisibilityBuffer;for(let 
r=e;r<=t;++r)n[r]={sectorIdWithOffset:-1,weight:-1,distance:1/0}}addModel(e){const t=e.scene.getAllSectors().map(e=>e),[n,r,o]=this.createSectorTreeGeometry(this.sectorIdOffset,t),i=new l.Group;i.matrixAutoUpdate=!1,i.applyMatrix4(e.modelMatrix),i.updateMatrixWorld(),i.add(n);const a=t.reduce((e,t)=>Math.max(t.id,e),0),s=new Array(a);t.forEach((e,t)=>s[e.id]=t),this.containers.set(e.modelIdentifier,{model:e,sectors:t,sectorIndexById:s,sectorIdOffset:this.sectorIdOffset,lastSectorIdWithOffset:this.sectorIdOffset+a,renderable:i,mesh:n,attributesBuffer:r,attributesValues:o}),this.sectorIdOffset+=a+1,this.scene.add(i)}updateModel(e,t){e.renderable.matrix.copy(t.modelMatrix),e.renderable.updateMatrixWorld(!0)}findSectorContainer(e){for(const t of this.containers.values())if(e>=t.sectorIdOffset&&e<=t.lastSectorIdWithOffset)return t;throw new Error(`Sector ID ${e} is out of range`)}unpackSectorVisibility(e,t,n){function r(e,t){const n=e*e+t*t;return.5*(2.5-n)+Math.exp(-Math.sqrt(n))}const o=this.buffers.sectorVisibilityBuffer;!function(e){for(let t=0;t<e.length;t++){const n=e[t];n&&(n.weight=0)}}(o);const i=t/2,a=e/2;for(let s=0;s<t;s++){const d=(s-t/2)/i;for(let t=0;t<e;t++){const i=t+e*s,l=n[4*i+0],c=n[4*i+1],u=n[4*i+2],p=n[4*i+3];if(255!==l||255!==c||255!==u){const e=(s-a)/a,t=u+255*c+255*l*255,n=o[t]||{sectorIdWithOffset:t,weight:0,distance:p};n.weight+=r(e,d),n.distance=Math.min(n.distance,p),o[t]=n}}}return o}createSectorTreeGeometry(e,t){const n=new l.Vector3,r=new l.Vector3,o=t.length,i=new Float32Array(5*o),a=new l.BoxBufferGeometry,s=new l.InstancedMesh(a,this.coverageMaterial,o),d=new l.Vector3;t.forEach((t,o)=>{const{xy:a,xz:c,yz:u}=t.facesFile.coverageFactors;d.set(u,c,a),((t,o,a,d)=>{t.getCenter(n),t.getSize(r);const c=(new l.Matrix4).compose(n,Qt,r);s.setMatrixAt(o,c),i[5*o+0]=e+a,i[5*o+1]=d.x,i[5*o+2]=d.y,i[5*o+3]=d.z,i[5*o+4]=1})(t.bounds,o,t.id,d)});const c=new l.InstancedInterleavedBuffer(i,5);return a.setAttribute("a_sectorId",new 
l.InterleavedBufferAttribute(c,1,0)),a.setAttribute("a_coverageFactor",new l.InterleavedBufferAttribute(c,3,1)),a.setAttribute("a_visible",new l.InterleavedBufferAttribute(c,1,4)),[s,c,i]}}Jt.CoverageRenderTargetScalingFactor=.5;const $t={sectorBounds:new l.Box3};
|
|
330
384
|
/*!
|
|
331
385
|
* Copyright 2021 Cognite AS
|
|
332
|
-
*/
|
|
386
|
+
*/
|
|
333
387
|
/*!
|
|
334
388
|
* Copyright 2021 Cognite AS
|
|
335
|
-
*/
|
|
389
|
+
*/
|
|
390
|
+
function en(e,t,n,r,o,i){const a=new dt(r,e,t),{internal:s}=i,d=s&&s.sectorCuller?s.sectorCuller:function(e,t){const n=new Jt({renderer:e,occludingGeometryProvider:t});return new Gt({renderer:e,coverageUtil:n})}(n,o),l=new Ot(d,i.continuousModelStreaming);return new pe(r,a,l)}
|
|
336
391
|
/*!
|
|
337
392
|
* Copyright 2021 Cognite AS
|
|
338
|
-
*/class
|
|
393
|
+
*/var tn,nn,rn,on,an,sn,dn;(nn=tn||(tn={}))[nn.Circle=X.PointShape.CIRCLE]="Circle",nn[nn.Square=X.PointShape.SQUARE]="Square",(on=rn||(rn={}))[on.Rgb=X.PointColorType.RGB]="Rgb",on[on.Depth=X.PointColorType.DEPTH]="Depth",on[on.Height=X.PointColorType.HEIGHT]="Height",on[on.PointIndex=X.PointColorType.POINT_INDEX]="PointIndex",on[on.LevelOfDetail=X.PointColorType.LOD]="LevelOfDetail",on[on.Classification=X.PointColorType.CLASSIFICATION]="Classification",on[on.Intensity=X.PointColorType.INTENSITY]="Intensity",(sn=an||(an={}))[sn.Adaptive=X.PointSizeType.ADAPTIVE]="Adaptive",sn[sn.Fixed=X.PointSizeType.FIXED]="Fixed",function(e){e[e.Default=-1]="Default",e[e.Created=0]="Created",e[e.Unclassified=1]="Unclassified",e[e.Ground=2]="Ground",e[e.LowVegetation=3]="LowVegetation",e[e.MedVegetation=4]="MedVegetation",e[e.HighVegetation=5]="HighVegetation",e[e.Building=6]="Building",e[e.LowPoint=7]="LowPoint",e[e.ReservedOrHighPoint=8]="ReservedOrHighPoint",e[e.Water=9]="Water",e[e.Rail=10]="Rail",e[e.RoadSurface=11]="RoadSurface",e[e.ReservedOrBridgeDeck=12]="ReservedOrBridgeDeck",e[e.WireGuard=13]="WireGuard",e[e.WireConductor=14]="WireConductor",e[e.TransmissionTower=15]="TransmissionTower",e[e.WireStructureConnector=16]="WireStructureConnector",e[e.BridgeDeck=17]="BridgeDeck",e[e.HighNoise=18]="HighNoise",e[e.OverheadStructure=19]="OverheadStructure",e[e.IgnoredGround=20]="IgnoredGround",e[e.Snow=21]="Snow",e[e.TemporalExclusion=22]="TemporalExclusion",e[e.UserDefinableOffset=64]="UserDefinableOffset"}(dn||(dn={}));class ln extends l.Group{constructor(e,t,n){super(),this.name="PointCloudNode",this._potreeGroup=e,this._potreeNode=t,this._cameraConfiguration=n,this.add(this._potreeGroup),this.matrixAutoUpdate=!1}get potreeGroup(){return this._potreeGroup}get potreeNode(){return this._potreeNode}get hasCameraConfiguration(){return void 0!==this._cameraConfiguration}get cameraConfiguration(){return this._cameraConfiguration}get needsRedraw(){return 
this._potreeGroup.needsRedraw}requestRedraw(){this._potreeGroup.requestRedraw()}get pointSize(){return this._potreeNode.pointSize}set pointSize(e){this._potreeNode.pointSize=e}get pointSizeType(){return this._potreeNode.pointSizeType}set pointSizeType(e){this._potreeNode.pointSizeType=e}get pointBudget(){return this._potreeNode.pointBudget}set pointBudget(e){this._potreeNode.pointBudget=e}get visiblePointCount(){return this._potreeNode.visiblePointCount}get pointColorType(){return this._potreeNode.pointColorType}set pointColorType(e){this._potreeNode.pointColorType=e}get pointShape(){return this._potreeNode.pointShape}set pointShape(e){this._potreeNode.pointShape=e}setClassVisible(e,t){if(!this.hasClass(e))throw new Error("Point cloud model doesn't have class "+e);const n=cn(e);this._potreeNode.classification[n].w=t?1:0,this._potreeNode.recomputeClassification()}isClassVisible(e){if(!this.hasClass(e))throw new Error("Point cloud model doesn't have class "+e);const t=cn(e);return 0!==this._potreeNode.classification[t].w}hasClass(e){const t=cn(e);return void 0!==this._potreeNode.classification[t]}getClasses(){return Object.keys(this._potreeNode.classification).map(e=>"DEFAULT"===e?-1:parseInt(e,10)).sort((e,t)=>e-t)}getBoundingBox(e){return(e=e||new l.Box3).copy(this._potreeNode.boundingBox),e.applyMatrix4(this.matrixWorld),e}setModelTransformation(e){this.matrix.copy(e),this.updateMatrixWorld(!0)}getModelTransformation(e=new l.Matrix4){return e.copy(this.matrix)}}function cn(e){return e===dn.Default?"DEFAULT":e}
|
|
339
394
|
/*!
|
|
340
395
|
* Copyright 2021 Cognite AS
|
|
341
|
-
*/class
|
|
396
|
+
*/class un extends l.Object3D{constructor(e=200){super(),this._needsRedraw=!1,this._lastDrawPointBuffersHash=0,this._forceLoadingSubject=new P.Subject,this.nodes=[],this.numNodesLoadingAfterLastRedraw=0,this.numChildrenAfterLastRedraw=0,this.potreeGroup=new X.Group,this.potreeGroup.name="Potree.Group",this.name="Potree point cloud wrapper",this.add(this.potreeGroup);const t=new l.Mesh(new l.BufferGeometry);t.name="onAfterRender trigger (no geometry)",t.frustumCulled=!1,t.onAfterRender=()=>{this.resetRedraw(),this._lastDrawPointBuffersHash=this.pointBuffersHash},this.add(t),this._loadingObservable=this.createLoadingStateObservable(e),this._lastDrawPointBuffersHash=this.pointBuffersHash,this.pointBudget=2e6}get needsRedraw(){return this._needsRedraw||this._lastDrawPointBuffersHash!==this.pointBuffersHash||X.Global.numNodesLoading!==this.numNodesLoadingAfterLastRedraw||this.numChildrenAfterLastRedraw!==this.potreeGroup.children.length||this.nodes.some(e=>e.needsRedraw)}get pointBudget(){return this.potreeGroup.pointBudget}set pointBudget(e){this.potreeGroup.pointBudget=e}getLoadingStateObserver(){return this._loadingObservable}addPointCloud(e){this.potreeGroup.add(e.octtree),this.nodes.push(e),this._forceLoadingSubject.next(),this.requestRedraw()}removePointCloud(e){const t=this.nodes.indexOf(e);if(-1===t)throw new Error("Point cloud is not added - cannot remove it");this.potreeGroup.remove(e.octtree),this.nodes.splice(t,1)}requestRedraw(){this._needsRedraw=!0}resetRedraw(){this._needsRedraw=!1,this.numNodesLoadingAfterLastRedraw=X.Global.numNodesLoading,this.numChildrenAfterLastRedraw=this.potreeGroup.children.length,this.nodes.forEach(e=>e.resetRedraw())}createLoadingStateObservable(e){const t=this._forceLoadingSubject.pipe((n=5*e,Object(P.pipe)(Object(V.switchMap)(()=>Object(P.of)(!1).pipe(Object(V.delay)(n),Object(V.startWith)(!0))),Object(V.distinctUntilChanged)())));var n;return 
Object(P.combineLatest)([Object(P.interval)(e).pipe(Object(V.map)(pn),Object(V.distinctUntilChanged)((e,t)=>e.isLoading===t.isLoading&&e.itemsLoaded===t.itemsLoaded&&e.itemsRequested===t.itemsRequested)),t]).pipe(Object(V.map)(e=>{const[t,n]=e;return n&&!t.isLoading?{isLoading:!0,itemsLoaded:0,itemsRequested:1,itemsCulled:0}:t}),Object(V.startWith)({isLoading:!1,itemsLoaded:0,itemsRequested:0,itemsCulled:0}),Object(V.distinctUntilChanged)(),Object(V.share)())}get pointBuffersHash(){const e=this.potreeGroup.pointclouds;let t=3131961357;for(const n of e)n.traverseVisible(e=>{if(e.isPoints){const n=e.geometry;t^=n.getAttribute("position").count}}),t^=n.id;return t}}function pn(){return{isLoading:X.Global.numNodesLoading>0,itemsLoaded:0,itemsRequested:X.Global.numNodesLoading,itemsCulled:0}}
|
|
342
397
|
/*!
|
|
343
398
|
* Copyright 2021 Cognite AS
|
|
344
|
-
*/class
|
|
399
|
+
*/class mn{constructor(e,t){this._pointCloudMetadataRepository=e,this._pointCloudFactory=t,this._pointCloudGroupWrapper=new un}requestRedraw(){this._pointCloudGroupWrapper.requestRedraw()}resetRedraw(){this._pointCloudGroupWrapper.resetRedraw()}get pointBudget(){return this._pointCloudGroupWrapper.pointBudget}set pointBudget(e){this._pointCloudGroupWrapper.pointBudget=e}get needsRedraw(){return this._pointCloudGroupWrapper.needsRedraw}set clippingPlanes(e){this._pointCloudGroupWrapper.traverse(t=>{if(t.material){const n=t.material;n.clipping=!0,n.clipIntersection=!1,n.clippingPlanes=e}})}getLoadingStateObserver(){return this._pointCloudGroupWrapper.getLoadingStateObserver()}updateCamera(e){}async addModel(e){const t=await this._pointCloudMetadataRepository.loadData(e),n=this._pointCloudFactory.createModel(t);this._pointCloudGroupWrapper.addPointCloud(n);const r=new ln(this._pointCloudGroupWrapper,n,t.cameraConfiguration);return r.setModelTransformation(t.modelMatrix),r}removeModel(e){this._pointCloudGroupWrapper.removePointCloud(e.potreeNode)}}
|
|
345
400
|
/*!
|
|
346
401
|
* Copyright 2021 Cognite AS
|
|
347
|
-
*/class
|
|
402
|
+
*/const hn=(new l.Matrix4).identity();class fn{constructor(e,t,n="ept.json"){this._modelMetadataProvider=e,this._modelDataProvider=t,this._blobFileName=n}async loadData(e){const t=await this.getSupportedOutput(e),n=this._modelMetadataProvider.getModelUri(e,t),r=this._modelMetadataProvider.getModelMatrix(e,t.format),o=this._modelMetadataProvider.getModelCamera(e),a=await n,s=await r,d=await this._modelDataProvider.getJsonFile(a,this._blobFileName),l=await o;return{modelBaseUrl:a,modelMatrix:s,cameraConfiguration:Object(i.v)(l,hn),scene:d}}async getSupportedOutput(e){const t=(await this._modelMetadataProvider.getModelOutputs(e)).find(e=>e.format===st.d.EptPointCloud);if(!t)throw new Error(`Model does not contain supported point cloud output [${st.d.EptPointCloud}]`);return t}}
|
|
348
403
|
/*!
|
|
349
404
|
* Copyright 2021 Cognite AS
|
|
350
|
-
*/(e.
|
|
405
|
+
*/class vn{constructor(e){this._needsRedraw=!1,this.octtree=e,this.pointSize=2,this.pointSizeType=an.Adaptive,this.pointColorType=rn.Rgb,this.pointShape=tn.Circle,this.pointBudget=1/0}get needsRedraw(){return this._needsRedraw}get pointSize(){return this.octtree.material.size}set pointSize(e){this.octtree.material.size=e,this._needsRedraw=!0}get pointSizeType(){return this.octtree.material.pointSizeType}set pointSizeType(e){this.octtree.material.pointSizeType=e,this._needsRedraw=!0}get pointBudget(){return this.octtree.pointBudget}set pointBudget(e){this.octtree.pointBudget=e,this._needsRedraw=!0}get visiblePointCount(){return this.octtree.numVisiblePoints||0}get boundingBox(){const e=this.octtree.pcoGeometry.tightBoundingBox||this.octtree.pcoGeometry.boundingBox||this.octtree.boundingBox,t=new l.Vector3(e.min.x,e.min.z,-e.min.y),n=new l.Vector3(e.max.x,e.max.z,-e.max.y);return(new l.Box3).setFromPoints([t,n])}get pointColorType(){return this.octtree.material.pointColorType}set pointColorType(e){this.octtree.material.pointColorType=e,this._needsRedraw=!0}get pointShape(){return this.octtree.material.shape}set pointShape(e){this.octtree.material.shape=e,this._needsRedraw=!0}get classification(){return this.octtree.material.classification}recomputeClassification(){this.octtree.material.recomputeClassification()}resetRedraw(){this._needsRedraw=!1}}
|
|
351
406
|
/*!
|
|
352
407
|
* Copyright 2021 Cognite AS
|
|
353
|
-
*/class
|
|
408
|
+
*/class xn{constructor(e){this._httpHeadersProvider=e}createModel(e){this.initializePointCloudXhrRequestHeaders();const{modelBaseUrl:t,scene:n}=e,r=new X.PointCloudEptGeometry(t+"/",n),o=r.offset.x,i=r.offset.y,a=r.offset.z,s=new X.PointCloudEptGeometryNode(r,r.boundingBox,0,o,i,a);r.root=s,r.root.load();const d=new X.PointCloudOctree(r);d.name="PointCloudOctree: "+t;return new vn(d)}initializePointCloudXhrRequestHeaders(){const e=this._httpHeadersProvider.headers;let t=X.XHRFactory.config.customHeaders;for(const[n,r]of Object.entries(e))t=t.filter(e=>e.header!==n),t.push({header:n,value:r});X.XHRFactory.config.customHeaders=t.filter(e=>e.header)}}
|
|
354
409
|
/*!
|
|
355
410
|
* Copyright 2021 Cognite AS
|
|
356
|
-
*/
|
|
411
|
+
*/
|
|
357
412
|
/*!
|
|
358
413
|
* Copyright 2021 Cognite AS
|
|
359
|
-
*/
|
|
414
|
+
*/
|
|
415
|
+
class gn{constructor(e,t,n){this._lastCamera={position:new l.Vector3(NaN,NaN,NaN),quaternion:new l.Quaternion(NaN,NaN,NaN,NaN),zoom:NaN},this._isDisposed=!1,this._subscriptions=new P.Subscription,this._events={loadingStateChanged:new i.d},this._effectRenderManager=t,this._cadManager=e,this._pointCloudManager=n,this.initLoadingStateObserver(this._cadManager,this._pointCloudManager),this._updateSubject=new P.Subject,this._updateSubject.pipe(Object(V.auditTime)(5e3),Object(V.tap)(()=>{R.a.trackCameraNavigation({moduleName:"RevealManager",methodName:"update"})})).subscribe()}dispose(){this._isDisposed||(this._cadManager.dispose(),this._subscriptions.unsubscribe(),this._isDisposed=!0)}requestRedraw(){this._cadManager.requestRedraw(),this._pointCloudManager.requestRedraw()}resetRedraw(){this._cadManager.resetRedraw(),this._pointCloudManager.resetRedraw()}get debugRenderTiming(){return this._effectRenderManager.debugRenderTimings}set debugRenderTiming(e){this._effectRenderManager.debugRenderTimings=e}get renderOptions(){return this._effectRenderManager.renderOptions}set renderOptions(e){this._effectRenderManager.renderOptions=null!=e?e:N}get needsRedraw(){return this._cadManager.needsRedraw||this._pointCloudManager.needsRedraw}update(e){(this._lastCamera.zoom!==e.zoom||!this._lastCamera.position.equals(e.position)||!this._lastCamera.quaternion.equals(e.quaternion))&&(this._lastCamera.position.copy(e.position),this._lastCamera.quaternion.copy(e.quaternion),this._lastCamera.zoom=e.zoom,this._cadManager.updateCamera(e),this._updateSubject.next())}get cadBudget(){return this._cadManager.budget}set cadBudget(e){this._cadManager.budget=e}get cadLoadedStatistics(){return this._cadManager.loadedStatistics}get cadRenderMode(){return this._cadManager.renderMode}set cadRenderMode(e){this._cadManager.renderMode=e}get pointCloudBudget(){return{numberOfPoints:this._pointCloudManager.pointBudget}}set pointCloudBudget(e){this._pointCloudManager.pointBudget=e.numberOfPoints}set 
clippingPlanes(e){this._cadManager.clippingPlanes=e,this._pointCloudManager.clippingPlanes=e}get clippingPlanes(){return this._cadManager.clippingPlanes}on(e,t){switch(e){case"loadingStateChanged":this._events.loadingStateChanged.subscribe(t);break;default:throw new Error(`Unsupported event '${e}'`)}}off(e,t){switch(e){case"loadingStateChanged":this._events.loadingStateChanged.unsubscribe(t);break;default:throw new Error(`Unsupported event '${e}'`)}}render(e){this._effectRenderManager.render(e),this.resetRedraw()}setRenderTarget(e,t=!0){this._effectRenderManager.setRenderTarget(e),this._effectRenderManager.setRenderTargetAutoSize(t)}async addModel(e,t,n){switch(R.a.trackLoadModel({type:e},t),e){case"cad":return this._cadManager.addModel(t,null==n?void 0:n.geometryFilter);case"pointcloud":return this._pointCloudManager.addModel(t);default:throw new Error(`Model type '${e}' is not supported`)}}removeModel(e,t){switch(e){case"cad":this._cadManager.removeModel(t);break;case"pointcloud":this._pointCloudManager.removeModel(t);break;default:Object(i.l)(e)}}addUiObject(e,t,n){this._effectRenderManager.addUiObject(e,t,n),this.requestRedraw()}removeUiObject(e){this._effectRenderManager.removeUiObject(e),this.requestRedraw()}notifyLoadingStateChanged(e){this._events.loadingStateChanged.fire(e)}initLoadingStateObserver(e,t){this._subscriptions.add(Object(P.combineLatest)([e.getLoadingStateObserver(),t.getLoadingStateObserver()]).pipe(Object(V.observeOn)(P.asyncScheduler),Object(V.subscribeOn)(P.asyncScheduler),Object(V.map)(([e,t])=>({isLoading:e.isLoading||t.isLoading,itemsLoaded:e.itemsLoaded+t.itemsLoaded,itemsRequested:e.itemsRequested+t.itemsRequested,itemsCulled:e.itemsCulled+t.itemsCulled})),Object(V.distinctUntilChanged)((e,t)=>e.itemsLoaded===t.itemsLoaded&&e.itemsRequested===t.itemsRequested)).subscribe(this.notifyLoadingStateChanged.bind(this),e=>R.a.trackError(e,{moduleName:"RevealManager",methodName:"constructor"})))}}
|
|
360
416
|
/*!
|
|
361
417
|
* Copyright 2021 Cognite AS
|
|
362
|
-
*/
|
|
418
|
+
*/class bn{constructor(e){this._renderManager=e}renderOccludingGeometry(e,t){const n={renderTarget:this._renderManager.getRenderTarget(),autoSize:this._renderManager.getRenderTargetAutoSize()};try{this._renderManager.setRenderTarget(e),this._renderManager.renderDetailedToDepthOnly(t)}finally{this._renderManager.setRenderTarget(n.renderTarget),this._renderManager.setRenderTargetAutoSize(n.autoSize)}}}
|
|
363
419
|
/*!
|
|
364
420
|
* Copyright 2021 Cognite AS
|
|
365
|
-
*/class
|
|
421
|
+
*/class yn{constructor(e,t){this._defaultAppearance={},this._handleStylesChangedListener=this.handleStylesChanged.bind(this),this._needsUpdate=!0,this._allTreeIndices=new i.e,this._allTreeIndices.addRange(new i.i(0,e)),this._styleProvider=t,this._styleProvider.on("changed",this._handleStylesChangedListener);const n=function(e){const{width:t,height:n}=Object(i.n)(e),r=t*n,o=new l.DataTexture(new Uint8ClampedArray(4*r),t,n),a=new Uint8ClampedArray(3*r),s=new l.DataTexture(a,t,n,l.RGBFormat);return{overrideColorPerTreeIndexTexture:o,transformOverrideIndexTexture:s}}(e);this._overrideColorPerTreeIndexTexture=n.overrideColorPerTreeIndexTexture,this._overrideColorDefaultAppearanceRgba=new Uint8ClampedArray(this._overrideColorPerTreeIndexTexture.image.data.length),this._regularNodesTreeIndices=new i.e,this._ghostedNodesTreeIndices=new i.e,this._infrontNodesTreeIndices=new i.e,this.setDefaultAppearance(g.Default)}getDefaultAppearance(){return this._defaultAppearance}setDefaultAppearance(e){var t,n;
|
|
366
422
|
/*!
|
|
367
423
|
* Copyright 2021 Cognite AS
|
|
368
|
-
*/
|
|
424
|
+
*/(t=e,n=this._defaultAppearance,JSON.stringify(t)!==JSON.stringify(n))&&(this._defaultAppearance=e,function(e,t){const[n,r,o,i]=_n(t);for(let t=0;t<e.length;++t)e[4*t+0]=n,e[4*t+1]=r,e[4*t+2]=o,e[4*t+3]=i}(this._overrideColorDefaultAppearanceRgba,e),this._needsUpdate=!0)}get regularNodeTreeIndices(){return this._regularNodesTreeIndices}get ghostedNodeTreeIndices(){return this._ghostedNodesTreeIndices}get infrontNodeTreeIndices(){return this._infrontNodesTreeIndices}get needsUpdate(){return this._needsUpdate}get overrideColorPerTreeIndexTexture(){return this._overrideColorPerTreeIndexTexture}dispose(){this._styleProvider.off("changed",this._handleStylesChangedListener),this._overrideColorPerTreeIndexTexture.dispose()}build(){if(!this._needsUpdate)return;const e=this._overrideColorPerTreeIndexTexture.image.data;this.populateTexture(e),this.populateNodeSets(e),this._needsUpdate=!1}populateTexture(e){e.set(this._overrideColorDefaultAppearanceRgba),this._styleProvider.applyStyles((e,t)=>{const n={...this._defaultAppearance,...t};this.applyStyleToNodes(e,n)}),this._overrideColorPerTreeIndexTexture.needsUpdate=!0}populateNodeSets(e){this._regularNodesTreeIndices.clear(),this._infrontNodesTreeIndices.clear(),this._ghostedNodesTreeIndices.clear();const t={rangeStart:-1,inFront:!1,ghosted:!1},n=e=>{const n=i.i.createFromInterval(t.rangeStart,e-1);t.inFront?this._infrontNodesTreeIndices.addRange(n):t.ghosted?this._ghostedNodesTreeIndices.addRange(n):this._regularNodesTreeIndices.addRange(n)};for(let r=0;r<this._allTreeIndices.count;++r){const o=0!=(2&e[4*r+3]),i=0!=(4&e[4*r+3]);-1===t.rangeStart?(t.rangeStart=r,t.inFront=o,t.ghosted=i):t.inFront===o&&t.ghosted===i||(n(r),t.rangeStart=r,t.inFront=o,t.ghosted=i)}-1!==t.rangeStart&&n(this._allTreeIndices.count)}applyStyleToNodes(e,t){0!==e.count&&function(e,t,n){const[r,o,i,a]=_n(n),s=void 0!==n.color?255:0,d=~s,l=r&s,c=o&s,u=i&s,p=(void 0!==n.visible?1:0)|(void 0!==n.renderInFront?2:0)|(void 0!==n.renderGhosted?4:0)|(void 
0!==n.outlineColor?224:0),m=~p,h=a&p;t.forEachRange(t=>{for(let n=t.from;n<=t.toInclusive;++n)e[4*n+0]=r&d|l,e[4*n+1]=o&d|c,e[4*n+2]=i&d|u,e[4*n+3]=e[4*n+3]&m|h})}(this._overrideColorPerTreeIndexTexture.image.data,e,t)}handleStylesChanged(){this._needsUpdate=!0}}function _n(e){const[t,n,r]=e.color||[0,0,0];return[t,n,r,(void 0===e.visible||!!e.visible?1:0)+(!!e.renderInFront?2:0)+(!!e.renderGhosted?4:0)+((e.outlineColor?Number(e.outlineColor):0)<<5)]}class Tn{constructor(e){this._textureBuffer=new Uint8Array(Tn.MIN_NUMBER_OF_TREE_INDICES*Tn.NUMBER_OF_ELEMENTS_PER_MATRIX*Tn.BYTES_PER_FLOAT),this._dataTexture=new l.DataTexture(this._textureBuffer,Tn.NUMBER_OF_ELEMENTS_PER_MATRIX,Tn.MIN_NUMBER_OF_TREE_INDICES),this._onGenerateNewDataTextureCallback=e,this._unusedIndices=[...Array(Tn.MIN_NUMBER_OF_TREE_INDICES).keys()].map((e,t)=>t),this._treeIndexToOverrideIndex=new Map}get dataTexture(){return this._dataTexture}get overrideIndices(){return this._treeIndexToOverrideIndex}dispose(){this._dataTexture.dispose()}addOverrideTransform(e,t){const n=t.toArray();let r;this._treeIndexToOverrideIndex.has(e)?r=this._treeIndexToOverrideIndex.get(e):(r=this._unusedIndices.pop(),void 0===r&&(this.recomputeDataTexture(),r=this._unusedIndices.pop()),this._treeIndexToOverrideIndex.set(e,r));for(let e=0;e<Tn.NUMBER_OF_ELEMENTS_PER_MATRIX;e++){const t=(r*Tn.NUMBER_OF_ELEMENTS_PER_MATRIX+e)*Tn.BYTES_PER_FLOAT,o=n[e%4*4+Math.floor(e/4)];Object(i.s)(o,this._dataTexture.image.data,t)}return this._dataTexture.needsUpdate=!0,r}removeOverrideTransform(e){if(!this._treeIndexToOverrideIndex.has(e))return;const t=this._treeIndexToOverrideIndex.get(e);this._unusedIndices.push(t),this._treeIndexToOverrideIndex.delete(e)}recomputeDataTexture(){const e=this._textureBuffer.length,t=new Uint8Array(2*e);t.set(this._textureBuffer);const n=Object(i.n)(2*e/Tn.BYTES_PER_FLOAT),r=new l.DataTexture(t,n.width,n.height),o=e/(Tn.BYTES_PER_FLOAT*Tn.NUMBER_OF_ELEMENTS_PER_MATRIX);for(let 
e=o;e<2*o;e++)this._unusedIndices.push(e);this._textureBuffer=t,this._dataTexture=r,this._onGenerateNewDataTextureCallback(r)}}Tn.MIN_NUMBER_OF_TREE_INDICES=16,Tn.NUMBER_OF_ELEMENTS_PER_MATRIX=16,Tn.BYTES_PER_FLOAT=4;
|
|
369
425
|
/*!
|
|
370
426
|
* Copyright 2021 Cognite AS
|
|
371
|
-
*/
|
|
427
|
+
*/
|
|
428
|
+
class Cn{constructor(e,t){this._needsUpdate=!1,this._handleTransformChangedBound=this.handleTransformChanged.bind(this);const n=function(e){const{width:t,height:n}=Object(i.n)(e),r=new Uint8ClampedArray(t*n*3);return{transformOverrideIndexTexture:new l.DataTexture(r,t,n,l.RGBFormat)}}
|
|
372
429
|
/*!
|
|
373
430
|
* Copyright 2021 Cognite AS
|
|
374
|
-
*/
|
|
431
|
+
*/(e);this._transformOverrideIndexTexture=n.transformOverrideIndexTexture,this._transformOverrideBuffer=new Tn(this.handleNewTransformTexture.bind(this)),this._transformProvider=t,this._transformProvider.on("changed",this._handleTransformChangedBound)}dispose(){this._transformOverrideBuffer.dispose(),this._transformOverrideIndexTexture.dispose(),this._transformProvider.off("changed",this._handleTransformChangedBound)}get needsUpdate(){return this._needsUpdate}get overrideTransformIndexTexture(){return this._transformOverrideIndexTexture}get transformLookupTexture(){return this._transformOverrideBuffer.dataTexture}build(){this._needsUpdate=!1}setNodeTransform(e,t){const n=this._transformOverrideBuffer.addOverrideTransform(e.from,t);e.forEach(e=>this.setOverrideIndex(e,n)),this._needsUpdate=!0}resetNodeTransform(e){this._transformOverrideBuffer.removeOverrideTransform(e.from),e.forEach(e=>this.setOverrideIndex(e,-1)),this._needsUpdate=!0}setOverrideIndex(e,t){const n=this._transformOverrideIndexTexture.image.data;n[3*e+0]=t+1>>16,n[3*e+1]=t+1>>8,n[3*e+2]=t+1>>0,this._transformOverrideIndexTexture.needsUpdate=!0}handleNewTransformTexture(){this._needsUpdate=!0}handleTransformChanged(e,t,n){switch(e){case"set":this.setNodeTransform(t,n);break;case"reset":this.resetNodeTransform(t);break;default:Object(i.l)(e,`Unexpected change type '${e}'`)}}}const wn=(new l.Matrix4).identity();class Mn{constructor(){this._events={changed:new i.d}}on(e,t){switch(e){case"changed":this._events.changed.subscribe(t);break;default:Object(i.l)(e,`Unsupported event: '${e}'`)}}off(e,t){switch(e){case"changed":this._events.changed.unsubscribe(t);break;default:Object(i.l)(e,`Unsupported event: '${e}'`)}}setNodeTransform(e,t){this._events.changed.fire("set",e,t)}resetNodeTransform(e){this._events.changed.fire("reset",e,wn)}}
|
|
375
432
|
/*!
|
|
376
433
|
* Copyright 2021 Cognite AS
|
|
377
|
-
*/class qe{constructor(e){this._renderManager=e}renderOccludingGeometry(e,t){const n={renderTarget:this._renderManager.getRenderTarget(),autoSize:this._renderManager.getRenderTargetAutoSize()};try{this._renderManager.setRenderTarget(e),this._renderManager.renderDetailedToDepthOnly(t)}finally{this._renderManager.setRenderTarget(n.renderTarget),this._renderManager.setRenderTargetAutoSize(n.autoSize)}}}function He(e,t,n,r,i,a={}){const s=n.getApplicationIdentifier();Object(o.p)(!1!==a.logMetrics,e,s,{moduleName:"createRevealManager",methodName:"createRevealManager",constructorOptions:a});const d=a.renderOptions||{},l=new h.c,c=new h.f(r,i,l,d),u=Ee(t,n,r,l,new qe(c),a),m=je(t,n);return new We(u,c,m)}class Ke{constructor(e,t){switch(e){case"cdf":{const e=t;this.addCadModel=t=>Ke.addCdfCadModel(t,e),this.addPointCloudModel=t=>Ke.addCdfPointCloudModel(t,e),this._revealManager=e}break;case"local":{const e=t;this.addCadModel=t=>Ke.addLocalCadModel(t,e),this.addPointCloudModel=()=>{throw new Error("Local point cloud models are not supported")},this._revealManager=e}break;default:Object(o.k)(e)}}static createLocalHelper(e,t,n){const r=function(e,t,n={}){return He("local",new fe,new pe,e,t,n)}(e,t,n);return new Ke("local",r)}static createCdfHelper(e,t,n,r){const o=function(e,t,n,r={}){const o=new ue(e),i=new se(e);return He(e.project,o,i,t,n,r)}(r,e,t,n);return new Ke("cdf",o)}get revealManager(){return this._revealManager}static addLocalCadModel(e,t){if(void 0===e.localPath)throw new Error("addLocalCadModel only works with local models");return t.addModel("cad",{fileName:e.localPath},{geometryFilter:e.geometryFilter})}static addCdfCadModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfCadModel only works with local models");return t.addModel("cad",{modelId:e.modelId,revisionId:e.revisionId},{geometryFilter:e.geometryFilter})}static addCdfPointCloudModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfPointCloudModel only works with local 
models");return t.addModel("pointcloud",{modelId:e.modelId,revisionId:e.revisionId})}}
|
|
434
|
+
*/const In=new Image;In.src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAABgGlDQ1BJQ0MgcHJvZmlsZQAAKJFjYGCqSCwoyGFhYGDIzSspCnJ3UoiIjFJgv8PAzcDDIMRgxSCemFxc4BgQ4MOAE3y7xsAIoi/rgsxqOqd2d+pGwehjat+yq+1cc3DrAwPulNTiZAYGRg4gOyWlODkXyAbp0UsuKCoBsucA2brlJQUg9hkgW6QI6EAg+wGInQ5hfwGxk8BsJg6wmpAgZyBbBsgWSIKwdUDsdAjbBsROzkhMAbJB/tKBuAEMuIJdFAzNDXx1HQk4nFSQm1MKswMUWjypeaHBQFoIiGUYghlcGBQYDBnMGQwYfBl0GYCWl6RWlIAUO+cXVBZlpmeUKDgCQzdVwTk/t6C0JLVIR8EzL1lPR8HIwNAApA4UbxDjPweBbWAUO48Qy5rMwGDxhoGBuQohlrKcgWGLPQODeDBCTH020EnvGRh2hBckFiXCHc/4jYUQvzjN2AjC5nFiYGC99///ZzUGBvZJDAx/J/7//3vR//9/FwPtv8PAcCAHALbUa33lfYEHAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5AobCyAEEhU0UQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAACAASURBVHjalV3bkiLXESzoC9cZdle7lixF7IMj7Ad/qn/RVliybMmyZhkGGKCBBj+s6ig7yTqNJmJjbzPN6XOpysrKqjP429/+djUzu16vNhgMbDAY2HA4tLIsbTKZWFmW1rat7fd7O5/P1ratXS4Xw5+5Xq82HA47v1+vVzOz9Hcz6/zc5XKxtm07z/Ev/34elz9vOBzaYDCwtm3tfD7b6XSy6/Vq5/M5fU9RFFbXtZVlaXVdm5mlZ1wuFyvLMn2GP78oCjMzO51Odjqd0rNxrP5VFEXnl7+nf4bP4+VySe/jn+Hj97nzMU+nUyuKwo7HY3onfGZRFDabzWyxWNjpdLLVamVN06Tvw3nHL5xb/LfL5WIlDsJ/90Vu29bKsrTr9Wpt23YWHxfocrl0Jtf/HQeE/68GpDaBP5tfwsdzPp/tcrnYcDi0qqpsNptZVVVW17UVRZEWwZ91PB7TJuHF8gnxBR0MBukQ+Lh9kx2Px84iFUVhZVlaWZbpM3Hjq7nCeTezNObL5WLn81keoLqubTqd2vl8ts1mY03TpHGphY/mFte69P/gTaBOLf7CBR0Oh9a2rQ2Hw5tF8pfljYaf65/hz8Hv8/8/nU5p0cuytNFoZKPRyKqqSiceFxInECfRTzlaFPydLZSP1Z8zHo9tNBqlzXA6naxpGjsej3Y4HNJmqOu680z/bJ7H6/WaNu3lcrHD4ZDmDdfBF9/MbL1e236/l3OFm1ptAt4MJU4Un27frfhwPvG+0GaWTheamMgMqQ2AL+QL0DSNnc/nNPnT6dTG47FVVWWXy+XmM/CXLxr/P1shXxh2P/w8f0devNFolMbcNI01TWObzSZtVLcqfDh83t1FuYXiuSrL0sbjsRVFYa+vr2lO+DRHi8//jgei4wLYLKEFwIfipsAHKlPEbsD/7j/DE3q9Xu10OtnxeEy+ej6fp5PuC+obEn0qnlb/HceMC41+GBfbD4JbFDTZuFkQE7jFGQwGaZO6RXh9fU2bt67rjoVyrDUYDOxwOEi/79akqiprmsb2+33CJYy5/J0iF8DWouMCcKH5FLA7wMlmP+0P9wnCF1bPRdxxOp3scDiYmSVTy2bUzb0vBO9uXEj8N5wsPgkM3HDh8eTi+7CP5w3sG3c6nVrTNHY4HGy321ld1+k0O25wPKHG65vEN5SDPv8+drO5Lwad1+vVSpw0BSR48ZWJ50lEk46m3jcLY4jz+Wz7/d7MzCaTiY3H405UgSi9L
EuJotmPIwD033E8yiziBLKlUT4Ux6Asqf8aj8c2Ho/tfD6nk+6Wwt0cR09u2aqqSt9zPB5vcJjCWJEVUAel7DMXKkrAk4MbQ/0M/r/7bNwg+/3ertdr8u24iH7a/d9wlzv6xs9UoSJvCPb/yl/ySeF/ZzPt74KuyDcbbtCyLG02m9nxeLTlcmnr9boTauM40d0cDoe0+BGuUqFzzhI44C5VWKJMmzrx/H3RTvOXc2DXtq3tdrt0OqbT6c3pcx/M5h9NI5rhnAnk08LjRovC40Bcge+IGwMxA4NqPiyOazzeX61WVlVVQvg4FneLDo755OM40MKyJYxcQbIA/NDIfCCCxYHyKcdFcfLIB7jf761pGiuKwubzeQJG7mv9xLMlYB/NII5fjMOo6KSrU65cCm9qhSXYwuAYGF9UVWXv3r2zpmlsuVzadrtNmGE0Gtn5fLamaTqRUUTm8EG455397yWHfAzsGDUiaFOxpm8QRpwO8Nq2tel0mtCvLzqSKMpcqZBMvVhkFpU7UBtMPVtZumizoFtSB8ldl5/4wWCQNsJ6vbbL5WJffPGFtW1rx+Mx+5loYdW7RiAZfy/ZxOMH9G0ChejdAvjv/iL7/d6KorDFYiERvZ98NGEK6KD5V0RPNFF4Chn0KbPPoFfNQy7WZvCLOMCt3uFw6LiFN2/e2MvLi/3jH/+wh4eHRA5dr1cry7IzN7zROWTNYRt0ryX7cwYZiNpVjM+cAVoBZ7aOx2MCOz7JfuJ9UdD0c6zOuABPm0LCjD944ftOewRsmRPhRVbg0RfFf7aqKptMJtY0TWfDOU56//697XY7Wy6XKWR0V6rcNLu6vq8bkolPj4rpmRLOoU8ndy6Xi+12O2vb1h4fH7t+B0I5PNFMaiDVrHw/x+/3+EF2MfckTnI4QJ0wB4Uq9h6Px50owefVreBkMkkU99PTk202G5vP5535UGPhOWGgGkUGQ4WIOeTLgUIflLNTnkXbbDZmZvbw8NAhNHzx0fw7y4exPsb+vFkQATMw5BOuACQyg8oN8IRGG94XmRlK3NA4Vmf0PBJifsPNvLOg7969s7qubbPZdDKnmLBjfKBctTqk/ueSzRxSnNGu4ZPmg3FSZ7PZWF3XNp/POz4nonHVCceQBicqOtF8Mpmq7ov1owhIAVLlShStnOjWX13faDRKC8zP8twGxvxuPXe7na3X60SJq3dCfKA4EA7fE/ZQ8TsnaCIziMSOZ8VeX19TbO8vzwvINGeExvHvjq5V7K8yYYrQ4mRQxI71bYK+UJFdElq/3W53Q/e6yXdSrCiK5PNPp1PSCaxWK5tOpwkcKmJKhZ6RBTCz3yxAn+lQdDCe+uPxaNvt1kajkU0mk87i4knHiVfiCPydQV4U4qgdz36Pn63Qfk5U0cccKs4BN3pVVck94pdbShfc8OINh0M7nU42Go3s7du39unTJ5vNZglLME7jw9PHGg45XufJZ3yA3+chXtM0tt1uE4/vQM99Gy40U7ycuUOqGK0RAy4kXSLTG0UtDB7v/cq5iQhLIFbwhA/O8WKxsMFgkEgfprI9RHRxzvv37ztJIU6JMwvIhwA35WAwsKECCYzWVU7AxRCHw8HW63VKeLgUC+N7NufI/CkWUqmJWJiCoCjiANRpxZdX38efo/QRymVE6Nx/d5CM3/P4+GiTycS2220KC3EhfQ18jtq2taIo7O3bt0kXwGAwEogo63S9Xm0YERbKr+BEOFW52WxSrtuFCxzjq8VnQQbG9Zg34J3tCFqZP/+ZSCSCAhTMp0fRzT0hJIet/Hmoe8D/H4/H9vDwYOv12g6Hww2NzVwJsozD4dDevXtnu92uIx9Dy+hzhO+KVH7CJzn5EJ9G1sVtt1ur69pGo1FSv6DpR9dRVVVnctACKHGo2tEMSO/RwanwMQJFvFlzz+bf2TqwtcS/V1Vlj4+PdjgcUrgcRV58iHy8ZVnaYrGw19fXThTmohJXL+XY3Ov1+hkDRKecdw/G+a+vr
4m4iDRwatAe9+MmUdIuJoFyC6iUOQr98gJFGUx10nPkl+JReBPg5vcIabVadUx4n7ADuRNUS202Gzufz8nKsAVQhziNF3eof7CLL9mMuOnfbrdmZimp4+ZfWQ5W1Mznc/v48aN98cUXHTm1ynqxG1J4RAFAxQJGGkUlO+9zAyqrxtR0pGr2uVqv1wkXMKGj8hN+WPCzLpdLcr/r9TqtmzrpYRiLA2X/iYvvO8w1aXjy0VfxwB3I+NdoNLIPHz7YH/7wh5scO6pwGRiyzDuaZLWwyqzzpEesopKOq39ncKkApH/PbrfroPhc3p6BNOZO6rq26/VqDw8PNhgMkjtQqqUIGJd9oQ9ugP1+b/v9PpERTO0qMoTB0Xa7tR9//DHt4FyYxSpkxA/olljVq15YkUIKKKq6BUVQRZOMIR4mgvDnnOqNSKvISiFuSlRuWdrpdLLFYmHPz89Ja8GgV2kkzew3RVBfbYCreKqqsqqqOmlNJngY6WOat21b+/nnn+/i4BlguctAMKjSooq34CKRKNcfbQLFlnKaWlkdfj4DwkjHwGOPimNQiPL4+Jjk6FhowuPthPosjMBkA7oGlzdPp9MbAsd3Ji4UZsMGg4FVVSUFlszIKbUNF1cwsFHWR20Qj6Mj/95HG7MUS/ECnEhjidY9GUac25zcnj/XaxT8oPr7qkKT5AI4z81MHypS5/N5h+hRIQqKOB28RJOAAFBtRD5VeLr4pRTZhAQNppvVZKiSN1VIweOLijAUd8CJNrXhlb7BP8utB6eGHWedz2ebTCZJQj4ejyXNjfNTIkGhTpGHfF6Nw3G+CtkYbEVaOvWiKoOnJNwKP/wenj4KD9Umjfw8vw9L3vkU42HDwlGmw/F70b36IiMmYJbwfD7bbDZLHA1GYgpwllEs7IN+fX1NzBXLtPGlMQRyfX9RFNY0Ta+eD/16tBH59KF/yxE3OZDFG6Ev0xidzD4pNi42RywqJa3EppEa2zcBbgbXHbgMzzcHR0+yLgB9//F4tLZtUx4aK2AVJ+6boCgK+/rrr62ua/v3v/+deO5cJQ/69hz1yieFcUWfNPwegicifaJows2zqoD2PzuvojaHUjnl1Fio2/C/Y2n85XKx+Xxuz8/PxhaesVfJOx+R6uFwSFW4zuDxALlmzwd0OBxukkkKAEbmkpU8+L2cM2DSKYrFGZmzrDqXSYwKY1URLZ9YP4G4cLl6hByHgGNgsanPjeOj6XRq2+3WHh4e0uexrqLkl/IvL2FySZcvPgM+9QKXy8U+ffpkq9WqUzShkK7K0eOCKgEkAz6up1eIPIoM1AZVglgGyQpIKq0CLzjTvlEdZq5Gg78HRbIOqN0K7HY7Ox6PnSYZiKFKPAFu/p308eJM1tPhLlLNFdCSMBOYyzxGNGu0qBiKRdnLKAZWYZwSkOQKZHJ6AVUHGdG9rEvgRY4KeCMXgfPpGUcPw7mQZOgL5pkjL832ej1H/FzCzaDPQz6UfXHlkKrdizQA+GL4ghxloOBEFZDk6GEUdXJFUo4+jYpJ8d9yMi0Oadn8I7B2F9u38dSzXE42HA5vCkvTBmDT7QWbTiQoAQW/HCZ1ol2uMm7RIvVJuHmTOTuZE2gqmRZPsHJlKr/PHUmQ/FK4Rsm1ImuXqnZ/xU+oc2D8owplMNLwn314eEjtZLzeMM0xL5JnqNxkqDx8TjSCp52zd7nYnXsLsNWI2D/k2ZUKKccPRMkfLjyJ0txK4II1D4qLjwpsWEDqgLFv4VVvJnw39P++CXBDDdn0+A9wvR738olKoZSIFF0BLti9ihs+wVztg1qDvuyXeo/xeJxCXdxAqstIJJ5ly5UTpkaAlCloLiqN8hNctcz5icvlYrPZrNOEwjfBkJFq27Y2m81SzI9FnKr4k09nTnuPANF9HLdZi4Af73LvG6QYOHXKIzc2mUzsm2++sY8fP9r79+9vElzRxnTMpLALS+Aj8osl8vyeyrQr/V+fwPV8PicrwBT6EHUA3toEN
fuo7OU0YyS85EVQfLyLF1QuXrkS1NBj6OoRBkqhFL+vNoNXLj0+PtpoNLLFYtHBEpjb4OTX27dv7e3bt1KDF9U25MApWzelYWSQGPH8OF6k7Ou67qiGLpfL5w2AqNETCKpok30sPihqRKTy2MhmqdJmBk94ir755hv78ssvE17Bz0Okq1yIcguo1nVtvppkNO2TycQ+fvxoHz9+tIeHh45Qlq1AZL6VGIWbS/KpVwkrnD+2eniYL5eLjUajToOptm2tdPPvyNBjf25r1hcWqTCNY2EVEqk+A5z08e9tmsZWq5X99a9/tePxaE9PTx2dgT+PGT8mlHDCttut/fTTTzYcDlPnTbeInA7m/LoTZOwOo3K1HL5BjMRKZ5U5RGV0LoLCVjWj0cg2m42dTqfkEko8PQ6ClJ9SG4CTFUxCqMRJLn8fsX44hv/85z82Ho/t3bt39vT0ZOfzuZPt8g2Ai5ir9vGNxCdOjccnc7vd2g8//GBFUdh6vb5h5WazmRVFkbSTPJaorgC/T4leGOzhZlel6Vym3ratjUajBPRTcaj/53Q6vYnvlc9X1Tw5LNCnsuWBcg6ew6h//vOfKdvogBA3gdI15j6L2TvF6jHj6d08eB6qqrJvvvnG6rq2H3/80ZbL5Q0oVJlXThMrMQlrJllWx+6DtY9t26YsoYPoIZoJ7JEbMVY5ti0nwuTkiWK2nNRBkz2ZTGyxWNycYG+Vys/B2FmVl+XUOWhyVeZSpWN5wh2bIJnDTa9U+KiqozgyQFwRFb6oSiZ059gAMymCfGe4egcnU1GcESUZDYw7kESly6rJw/l8tj/96U+22+3su+++k/42om+jvn8Rr99X94d/5kaYPkeHw8H+9a9/2XQ6TU0vPV1clmVi5fb7fW/TB6VC4p5AUSfXKA/ifQp8zVNdgNenI9qPkjLKX7IuPurioWrxVaoVZU7ffvutDQYDm8/nlqtl5LHhaYmaODKxpXL9GAarDcWfsd/vbblcpnJvfLfRaJTK55yKVhY3IqIi6xm5VWy84e9eVVWKdkrfTd54IOpEGZUtMUGEZicqcmBLgKbVuQh/OW+Z9t1333W0hgrxRp/HYaoytRhD89hUY0iuWeBwTG3Q0+lkr6+v6fmLxcI+fPhgm83Gfv755xSJMf7hpJhqapWzIPxOvtZt237WBHJm7J48uiIrlComCoEUIsZCUew+ggWWub44qh4hkorj5Kl4H8uz2TJhroK7lUZ0uP/ZS+q8NNwrhI/Ho/3yyy/SKubo9ahcTrXDZxDZtq0N3Tep7FfUCJonCZUmyryyKe5rxcKgyReeFcecgeOxMIHD1icaC5pNVSfJERFHC6qFjurA4l1APW/Pc8RiD6y8Vhb4HlzDVmvojQc6JcPEnSsNXh9B5C/uL6nSxrmmCqpejtO5yhqoTKBqOq1MJfcGwA2FixlpFKOmknyC/Tnr9bqjl1SSfC7OURgod2VMpIN0fFWi2eXYkxdN9aTJJWFUzV8uWuC4FjckM4ZRkiWXjkZlLH6WWlgUuJ5Op5s5wGiAMcY9m2AwGNhms7GffvrJ5vO5LZdLKaHHknyl+mG2U9HFqtWvg/6SgQTGieoyBv4QlexQdWgsBlVsHy+a6nkTpWijsA+tgCJSfOH9RKCF8EnCfAEjcUzecASB4FBxCOfz2Z6enmy5XIbKKAdseEVM1KJXsYaROAf0C+VNOOSnA82OYth4QfvKnnIl3oo8Yl5A8d3qciYOz9j/4+8fPnywtm3t06dPYbNJb9TEuQDFGCr3qe4pYHenzDluOi/66NMWqjnCMfszXPMxVHIuJaDgU88Lyrl9lk0pWravSZOipZX1iEIh1WeIy8j++Mc/2rt37zob5i9/+Yu9efMmxfGYUWM6XJVzRxFErhZBgUzPVXg1MfdaUA0gmAuJtJZwZU0pmaNIY8YxMwMl3iSqzInDtIjUQD+NUYYCVaoalyMb9e9///vfb3ICP/74ox0OBxsOh/bnP//ZvvvuO
1utVimdiu3yFcmk4vGcDEyFocj6cTpYpYdz8jAFpr3DSMlgSBVH8I5V3b8UMkZRI4d47ttQXaNEoz4ZKGTEujj+xWxjdLWN/9mrnvH7PIEzHA7t+++/t6ZprK7rRON++eWX9vT0ZKvV6qZzuYPG6OJIRSQp8KzawOKBVA20ouqoTjEo0P2DweC3W8OiFit9SQYEWHxrBloJVMx4MYdvvijs5CSN6tatiis5KuCESFQMwwLO6/Vqz8/PKWxaLpf21Vdf2ddff935P3w/DN0QtEUWLFfYij6c8QBGNFHSTV31w1xI6R0mMPnDk6PoUMwaMv8eTSbHuXhd6n6/T63Trtdr6nRRlmXKq/PNWhENnLsoMkoOcRcPlGe7fM2tw7fffmur1aqzkeu67txxyFp+VgZxqjfXtj833qiLisIFLCsbDoefK4PQz6LWTlXiRMSL6mad6zjuz/nw4UNqjb5YLOzt27epIcVkMrGqquz5+TnRpNgSjYWbDNhU+baiVVWXMt/MHP7t93v74YcfOnODdyqrOgB2P6p1m7pfKMfwKc0gWjoFNPH5/n2lDz7qzRu1M3HFMJssjKXV9XH4fGfDvIs2K1mc+mT1S9QyTlX9qMZXbNX4NCnwxBskYt0U6IpOqNoErDFQtRSKEIoSUMql4zNKVI0yv85EB1fBqElFLgE3QVQfuFwuOy3QvQml3yh2PB5tt9vdtJFXyZwPHz4kda/n5r///nvJYURavQitRz2Vo43FFoIvlc718EdXzCrhnKYhCuf5Kh/8zBIJBqXaiQonUQnLDCBe8Ij6PA5z/FQ4BtlsNqkhRV3X9t///je1qOGaObYsZpawgn+/X7qUaxAZ9SpWrKfCRlFDZgav6FZyIDC3WH3C0qgRVC4LW+IV7JiocGsQ5feRo+b+OyigZI2Ain85MnAFMHbQVoWYLBnf7Xa23++TyDOq2GVwmkue5Pj9SHmj7jFWlkOF3SpiUalsdb08W6Xo4quOK+eXUehU7XjuUKGUQmx6o4nHGN7DySipoZA6jo1Lzxj5Rr4wEoIq/35P7X5UnMJzGgFCdVpz4bmy3HiAWQ/gn1MyEMKya9XESN0BHHX1iLpx5nwZRwxsrlk+heCQQafKOjIzqQQUTMsqnkQdHLWJIqAYbRw0/4zq++4GRveIa4J9gnAtzufz5zCQrxyNBAfRtaROK7JQgweh2DA+obw4yuRyiKbAmbe25WtquPBC1R2quFkBPEVDR907VPUuz4OqnVCp+JzVUdfVMW2P710q/R/XnDMAyl1IpGJgpdRljkC1heer3DHJxI2YFOePqV60ZNyePmqujIvIUvMINHI4misHj274VMmbeyMOjBpUcgnnpm3b35pFs89TDQ4RJGKygs04mxp1kll5rFgsjCjw55TPYyHlzYvSFfKRikaNQ+kf7uldFNVERh1B1FgiU88/E3USVXON71Fyu1a+sJHboirqFHco07C4ee5pdcIhFPtADFm9OJSRME8IF22qhheRwkhpJVU4qWRyUcZPAVyOTriXT1RxHc1hdKkF/l8qDkV5U0Qm5O74UbsZJx19cZ9ciSMNJn4Y8CgaV4E0Vvuo+r9IucMKKRaV8viQ94hYuYgLUPqLaIMqljJXycQ5m+v1+pkH8ELBXK99VQShysQU2ENAGCUqolIybHTI2r2ojDsq4+IcRVSmpWTZWLeg5GdcFNt3e6dqO8/AOXcJBrfr442jxoAZRD8Upep8He20qMqHVbjYPFLRsDnyRWUhWX+HJ00JKRR6jpizHMOmQF6OGIquuFPm+vemhu9RPXEndbd6qgQ/9QfwH4yuU8stvAJxDIByeepIIBFZiqjMC8eGiS2+kCqaVMVrRGPmLGOkQFI0sYpU+m4qVcomfAa/nyLGVFrYN0ip2r2xsvXmssGgxRuqilUVCw5Y8QlKIJmbVL+N0wkirgdkd6FSshwb9/nsXDvZvqIM1UcwKrfrE3qqht2RNVRVRe4KS6xcjV5E9eBVS
RHuAaTQOde+K8CV09Mz2VEUhX311Ve22+3s5eUl1DKiKeT7CCIAmMv65ZIykc+/B7UrwWxf63ylKFYVUS4yLYoiXWBdeqjG3bE4BMtJxHjCOe2p+HgGJBGAi7qH+vdVVZVu3PYaO7QyKu7nEDcqCv09JjqSzvWJXxXax0qg6OLN6Fq4SCugoqK6rq3EzlH4gGjxlXiRlSxR2ZJqO6N6/uViVwasl8vFnp6eUvrZXYK6twdZRSVAUQg9KtDM5RlyTF/OiiiNAja84LsVVBeQHPDFNUobwGvF8Yvr4SNRJWfcUMjQNwGK31d3BEcnxuldNO+uK4hy9pxhU1fZRMUhCh/0RS8qNOy70DraCEx5K7wRqaT4GS5tL8vSht4XL+nEodWp6qun/FtEVXJcmruOViUz7hVEKCTvZg41hMqn8wmLFiPXJY0roFV003dbiapm5iwq9xlSBFhUiIMXgHhziF/HPEwdI1AIijtG3UjJiD2K9bnIUXHYSrBxT8UrdtbKNV7Kxd65f0MxSw6kjkajpG7OJWByBTAqP8CAMAoh1cbmZ/lt706hpwsjfACOCvF+Geb4FVDDU6dSyhHTpvxylAlD369YQdWbUJnqKOLIMYcMYplg8hYy3jDS9YxKvxhFBSqncE+95b38AY+j0ybOY2inObFYg5tF5VDy7yE41G5lV6C6hUfAUNXvRT0M2R+r5JdyWYqS9XE0TZNKyTysvsfUR+xkFMfnXGFOBIpaSrdq3uByiD1lPUbMceqqrRmb+SjH3vcrwheKo1cNqxEdqz7B3JQ6QtC5tiyqZOt8Ptt6vU79f9R7KCwUldRFSap7Fj4ShvqzveAmrTveTOFuICpsiDaDMrNRWzmVD1d3DnCLWbUYSpiRk3vnJOARWo+IHbZOrkC+p01LBDKjjmdRoqyvJ5HKRzRNk3I1g8Hg840hTqhgtkvJm3Pp29yt2arFuco55HrxR9agj77NTaA6QUgcKUsX3R8YgVfl76PkUK4WIbpmPqdm4s3tEY/7/4QBMJTZ7Xb28PDQAYNcsKAaL0R1A4rDz5n7KL2r0tT3mMSofCxqdhHJp3MW5B5iJ9rYUeY191m5cJjdMeZo/E5h0gj+NkF1XSc3wFe4YciF+YEoXle7mJMYkUmMmlUpsibCGSqppNLXCjhFobA6ibnCE/WluPvIGuQkYQyGcyGv/7lpms4NsKk83L/cDex2O5tMJp2SJoxVc02fVLMGJpSiiWYqNgeaIq18X46dpdNqQXONnbiqKZJ5Ky5enWKlpVTRj5KeM15T7tijG7/C9+Zyb5zwtm1tPB6nDaAWgAs91WlU1Tw8aWweuQeuyoJFDFvuPh3+d59kLC9jNTKGhHi6WFiBCTPmKaJ6h5wkLHJBCnQyDxM9A83/aDS6vZ+YpVIOEA6Hw02Ha97BUf/AiJDgMrH5fJ4aJHJ+PufjVReNnFtRVCu+W45lU3oFZAlzl0UpU50Doip05HSuqrrKVWYNh0M7HA6J6r/BReyf/V6e3W4X1qlH9fY5lk0t0HQ6tTdv3nQuprynK7mimqN2qY58lSI2pwdkqlbpDFi4mqNpVYlYdFCiSuZc7iWnMt7tdlbX9U29x2Aw+CwI4YUejUbWNE26RURl9iKdWa6aGF+wbVt7eXnpXFDJNQiRHu6epBTToEhVq7Erqll1GlWtc90VcVCcmwAACrxJREFUcL2EqkTKZRBzQFCRT9GB8Dkuy9Kapkl3QWFpXcIBKtvlzRk2m83NzlftYLGtKcf8zA7iRluv1/by8pImq67rTtuZnNQqyjoq/psxB/t9ZX1UTWOuKIOtjGoBn5PC33PrKiuBVM9F7u+42WzS/UBSpqZiXu9rb/a5QyXX1qnMH19+hKlYXBQmSBCFs8o36uARsWtoPXCRlannTF/fla7chk3VCCpuoS+TmOt6nsMG7Io4GVZVVSqv9w2AUYY/o1SXGPnkTSYT2263aTPwSWFTGpV95+hf1P+rCc6FjSpe9uew9
IuRvVqAqL8P9zJWdDdrHVUxS870MyBFF8J4Symv2PWWZWmfPn36LesHFqkTskdKF9T3bzab1C2rz4z1UZJ9SFhlBXMKl6jxgqKwOcyMmLk+di66mSPi4/siDH+uK7F4kRWNzkQdvmNVVbZerztrGLahy+n9zMym06nt9/tOKzmFUrmnPi+OuiRSmWVG50p8krs9O6pEVmxlpFKK8EYkic/5dNVeNqoSqus69fCNGL7IdWC3Fr8BHq+miSKPYU7m5AObTCa2Wq06N4pHcW7OOkSMW8TIqYxWdJ2NUteoNHJkbfqijOhEq8ZPqr5PYQR8Vl3XNp1OO4DNDxsKXyJq29+nLEtbrVZWlmVqvhX1D7xer583gKqwwZZxzgp6Sze+4p2JHvbj2EOPTZfKl0eFFLk7DJX5VUxkBEpzXEaEG/pqGThSyfVVwK5gLr9TuYic7tD5G2d08eq6CD+VuZOGoGg+n9vz87ONRqMU80aFnkwv50qjWHUU+SpFJed4dSX3UpEEbiZ1KUVfZjJK5UZ8Apd3I/D2Cx2jqMK1fCyju1wuKXez3W5tNptJt6jmpezrlIUuYjKZ2HK5tPfv33dMlCpcVNfAcZ4g0ujxhsmlaCPhQy4yyYlVuNlUbnHVpsuVdedqIvF6HRVq8vO5OqooCvvll19sOp3enPpcTeJQyb7xA/Dk+HWt7mOwQTKaV9XiPIcbchp8Zaa5aVVOcRRtLKWZU9fh3KO94xI0/gzuzqVOIp78XHdTzgU4abdcLq0sS5tOp507oHNjTyAwusJN9brx8isPDVkurnLUEUDknZm701eRK4rDiKIZhQ8Uq5kDsrmMmxqzN6pi8Mr1+pEWIddxDAmf5+dnO5/PNp1ObyKjHLk1GAw+9wiKZMiqD79fcbbdbq1pmrQJItZNycmia1lytQB48nM5/z7tnGqIpU5ZjuPIJXUUJ6HqF3jTYKgcye/w5Ltia7PZJL8fSe1yX8Mo/FKCD5y42Wxmq9UqScndEmB/2wikReRNn8lVdGjU0JHj9Rxaxw2YUzf16e+jcJjzFZFiKgqVOc+CiZ7VamXT6TTUL0YbIR1s1vb1vTzKyCeTiT0/P3d680Vl11wZpPLbOc38PVU2fRKzqJomB/b6yuD6tP+sqWQ3k0uW4S88YF7csVqtbDKZhAdC3aF4g/VyA4/MOJIX4/HYPn36FE6kMl8RSufIQW08NUG/pwVLpHO8J5uYk55xJBRtXGY7VSWVqkvgQtjn52cbj8dJ5BlhJqar+d6mYdSaVeW+Fdjy27D9/pwoFOpD9VGGL7rWJaex6xOM5iYqp8LN5e9VQUjUExA3nUr2cGYUw1JefGYEo45hkcCljKpflDhCoW7v6z8YDOz5+dnevHmTyqO4x4ACWmoDclu5HE6IFiXCNUq4mdMf5OoK1bWuOYzAvRhVG1hOqfv/1XVt+/3eXl5ebDabpZOv7nlmJXbOIpWR7+U+/1wQya3NncN+eXmxh4cHq+s6AURWDLPPUyxd1FiSFwzVRMpMcx1AdIVaX/dwRZnnupcoSZjqHRxlD/HZTvFuNpvO4vdZS9WxhX+mvAeJRxODcWxRFDYajWw4HNp6vbaHh4d0zx76H54wXxyWROcqd1XpuUoCRQ2ZFXGkZNX3CDgjyTdrG6LOnbzo3PK2qirbbDa22Wzs8fGxo0Jm85+7H5FTyGkDRL6PW8FGBQ58Unx3bjYbO5/PNpvNkjqIawNY0x6d4qhrR7Qhojau0QbpSwblqn4iV6O6nEXEjkqQ+aZ+eXmxpmns8fGxk91TafHcGDEqkMkg7sHTp2Pnxccd5pLv3W5n5/PZFovFjQZfgSN1+yX7RwVMWcmjJkmZXmVdVDzdF3bmLABzIuqwqXa9g8HAVquVtW1r8/m8c/Lv6Q+g6juVdH2owj9uL9ZHKnCrWD/ps9ksXQzl5ozJEG51rkIgJELUBY2KZ8iVb0Wtb6IoQOUT+qxErjsp5wkwDewxvl9mvVgskqRbnXh+h
2itIkay7CNbOGd9j1XAD59Op3Y8Hu35+dlms5nNZrMOYo40dgzwVA6BrQbr6iNFM+sDI+4i14uP9ZN9rWf58xURVJalrddra5rGZrNZquML1TwBfZ/bqNgsItUF9PEAaNpzzYgVZ+9hYlVVtt/vrWkaWywWhjeWRrdps2nnhApLtSMBqQJeEXmUGwsveqT46esn7OJV7HjWtq09PT3ZYDCw+Xze6besOpfcE+Jx5MWHqCiKWz2A6pLdRyxwLaAqw/YXOxwOtlwubTqd2mw2kyVOqmBD5erZp6F5QyyTYxoVgRLJp7CiKCKeGIfkxJ0+9u12mzR8k8kkdWtTauWoiimX9IqIrMvl8tkF5KRPLFmKwoyopy6bYb8O1q2BcwaqVQuesoho6Us65W4fizaYAlJ9esIICPL1tNjy7XA4pBK8+XzeEXDyHCh53D2ZyQivpeqhCLhwL/uIo88lYlRxhZNGdV3b4XCw9XptVVUlgiMifBSC5jHeZLqCzmbMeEY99nJJHQaz7KZUHsRPddM09vr6mrR7KAePxDmRVcoplCOLgPihZFKGOeiI5eoTSOQ2B1LIzhhuNhsrisKm02mnjJk7fkeiFS5Nd1PN16hFt33mJGTKKkUdRqIWMd6Ea7fb2eVySQvPsvFIxaT+LZd3ENKvGzY3WYC+Xvi5+3RyjOE9HILr2cbjsZ1OJ3t9fbXdbmfT6TS5C9fKqVtKWCTCpEeuVWzU01+BwL7iFnUZtYtlmqZJal1vKslhncp9qL7DvQKP4F4mNP2dBhG5erbcxN3LDCr0GsXW4/HYRqORnc9ne319tfV6bePxONHKHisr3MGXQTGZpe7tU5MaXegUsYUM8HxDe/Vzasv+q16Pr7/Dkxm1z4k4GLbWag1yjGGHCu7jAiJFLg+QO4iwv1b4gusFPGw8nU623+/tf//7n5VlabPZrAOUONpgppEznNgGv09MosSuSrGMFul4PHbuPB4Oh+m0R00ocxRuX0kZ1yoii6i6qysLkiWC+pJDSqacawKVa7nGhEsiKn7lCxwnOIj0AhVPQPmFF03TdBZQET54jWr0/qq6B822T/7pdLLD4dBxU2VZdnw8/pzqtJbLQEYUL46Hay9YNa2ypZ0NoNQsfRyzMj0KZavFjSwJ3+zpP1NVVZpUpFAxjPJNgTeg4Wd7Qkq5O743gMeHoahvxuPxmGRaZVmmghnVERSf7xs10laouxiiu5zQEqhEUt9NJWbWZQL7Up4KKEZ3/+QAS3TTJpeFMRPpk435hsvlkjbCer3u+NeyLFPTiaIoUl0DXlLBp7pt29RV43g82vF4TKebGUH06RGhxuKPXIUx8wf3MH2clbw3UeSf+X/9B04mXw6cfAAAAABJRU5ErkJggg==";var Sn=In;
|
|
378
435
|
/*!
|
|
379
436
|
* Copyright 2021 Cognite AS
|
|
380
|
-
*/
|
|
437
|
+
*/function Pn(e,t,n,r,o,i){const a=new l.Vector2(t.image.width,t.image.height),s=new l.Vector2(r.image.width,r.image.height),d=e.uniforms;e.setValues({...Wt,uniforms:{...d,renderMode:{value:i},treeIndexTextureSize:{value:a},transformOverrideTextureSize:{value:s},colorDataTexture:{value:t},transformOverrideIndexTexture:{value:n},transformOverrideTexture:{value:r},matCapTexture:{value:o}}}),e.uniformsNeedUpdate=!0}var Nn=n(33),An=n.n(Nn);
|
|
381
438
|
/*!
|
|
382
439
|
* Copyright 2021 Cognite AS
|
|
383
|
-
*/const Ze=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");function Qe(e,t,n){return new i.Vector2(t-e.offsetLeft,n-e.offsetTop)}function Je(e,t){if(2!==t.length)throw new Error("getPinchInfo only works if touches.length === 2");const n=[t[0],t[1]].map(({clientX:t,clientY:n})=>Qe(e,t,n));return{center:n[0].clone().add(n[1]).multiplyScalar(.5),distance:n[0].distanceTo(n[1]),offsets:n}}const $e=Math.PI/360,et=10*$e;class tt extends i.EventDispatcher{constructor(e,t){super(),this.enabled=!0,this.enableDamping=!0,this.dampingFactor=.2,this.dynamicTarget=!0,this.minDistance=1,this.maxDistance=1/0,this.dollyFactor=.98,this.minPolarAngle=0,this.maxPolarAngle=Math.PI,this.minAzimuthAngle=-1/0,this.maxAzimuthAngle=1/0,this.panDollyMinDistanceFactor=10,this.firstPersonRotationFactor=.4,this.pointerRotationSpeedAzimuth=$e,this.pointerRotationSpeedPolar=$e,this.enableKeyboardNavigation=!0,this.keyboardRotationSpeedAzimuth=et,this.keyboardRotationSpeedPolar=et,this.mouseFirstPersonRotationSpeed=2*$e,this.keyboardDollySpeed=2,this.keyboardPanSpeed=10,this.keyboardSpeedFactor=3,this.pinchEpsilon=2,this.pinchPanSpeed=1,this.EPSILON=.001,this.minZoom=0,this.maxZoom=1/0,this.orthographicCameraDollyFactor=.3,this.temporarilyDisableDamping=!1,this.firstPersonMode=!1,this.reusableVector3=new i.Vector3,this._accumulatedMouseMove=new i.Vector2,this.target=new i.Vector3,this.targetEnd=new i.Vector3,this.spherical=new i.Spherical,this.sphericalEnd=new i.Spherical,this.deltaTarget=new i.Vector3,this.keyboard=new Ye,this.offsetVector=new i.Vector3,this.panVector=new i.Vector3,this.raycaster=new i.Raycaster,this.targetFPS=30,this.targetFPSOverActualFPS=1,this.isFocused=!1,this.update=e=>{const{camera:t,target:n,targetEnd:r,spherical:o,sphericalEnd:i,deltaTarget:a,handleKeyboard:s,enableDamping:d,dampingFactor:l,EPSILON:c,targetFPS:u,enabled:m}=this;if(!m)return!1;const 
h=Math.min(1/e,u);this.targetFPSOverActualFPS=u/h,s(),this._accumulatedMouseMove.lengthSq()>0&&(this.rotate(this._accumulatedMouseMove.x,this._accumulatedMouseMove.y),this._accumulatedMouseMove.set(0,0)),i.theta=Math.sign(i.theta)*Math.min(Math.abs(i.theta),2*Math.PI);let p=i.theta-o.theta;Math.abs(p)>Math.PI&&(p-=2*Math.PI*Math.sign(p));const f=i.phi-o.phi,v=i.radius-o.radius;a.subVectors(r,n);let x=!1;const g=d&&!this.temporarilyDisableDamping?Math.min(l*this.targetFPSOverActualFPS,1):1;return this.temporarilyDisableDamping=!1,Math.abs(p)>c||Math.abs(f)>c||Math.abs(v)>c||Math.abs(a.x)>c||Math.abs(a.y)>c||Math.abs(a.z)>c?(o.set(o.radius+v*g,o.phi+f*g,o.theta+p*g),o.theta=o.theta%(2*Math.PI),n.add(a.multiplyScalar(g)),x=!0):(o.copy(i),n.copy(r)),o.makeSafe(),t.position.setFromSpherical(o).add(n),t.lookAt(n),x&&this.triggerCameraChangeEvent(),x},this.getState=()=>{const{target:e,camera:t}=this;return{target:e.clone(),position:t.position.clone()}},this.setState=(e,t)=>{const n=e.clone().sub(t);this.targetEnd.copy(t),this.sphericalEnd.setFromVector3(n),this.target.copy(this.targetEnd),this.spherical.copy(this.sphericalEnd),this.update(1e3/this.targetFPS),this.triggerCameraChangeEvent()},this.triggerCameraChangeEvent=()=>{const{camera:e,target:t}=this;this.dispatchEvent({type:"cameraChange",camera:{position:e.position,target:t}})},this.onMouseDown=e=>{if(this.enabled)switch(e.button){case i.MOUSE.LEFT:this.startMouseRotation(e);break;case i.MOUSE.RIGHT:e.preventDefault(),this.startMousePan(e)}},this.onMouseUp=e=>{this._accumulatedMouseMove.set(0,0)},this.onMouseWheel=e=>{if(!this.enabled)return;e.preventDefault();let t=0;if(e.wheelDelta)t=-e.wheelDelta/40;else if(e.detail)t=e.detail;else if(e.deltaY){const n=Ze?1:40;t=e.deltaY/n}const{domElement:n}=this;let r=e.offsetX,o=e.offsetY;r=r/n.clientWidth*2-1,o=o/n.clientHeight*-2+1;const 
i=t<0,a=this.camera.isPerspectiveCamera?this.getDollyDeltaDistance(i,Math.abs(t)):Math.sign(t)*this.orthographicCameraDollyFactor;this.dolly(r,o,a)},this.onTouchStart=e=>{if(this.enabled)switch(e.preventDefault(),e.touches.length){case 1:this.startTouchRotation(e);break;case 2:this.startTouchPinch(e)}},this.onFocusChanged=e=>{this.isFocused="blur"!==e.type&&(e.target===this.domElement||document.activeElement===this.domElement),this.keyboard.disabled=!this.isFocused},this.onContextMenu=e=>{this.enabled&&e.preventDefault()},this.rotate=(e,t)=>{if(0===e&&0===t)return;const n=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedAzimuth)*e,r=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedPolar)*t;this.firstPersonMode?(this.temporarilyDisableDamping=!0,this.rotateFirstPersonMode(n,r)):this.rotateSpherical(n,r)},this.startMouseRotation=e=>{let t=Qe(this.domElement,e.clientX,e.clientY);const n=e=>{const n=Qe(this.domElement,e.clientX,e.clientY),r=t.clone().sub(n);this._accumulatedMouseMove.add(r),t=n},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startMousePan=e=>{let t=Qe(this.domElement,e.clientX,e.clientY);const n=e=>{const n=Qe(this.domElement,e.clientX,e.clientY),r=n.x-t.x,o=n.y-t.y;t=n,this.pan(r,o)},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startTouchRotation=e=>{const{domElement:t}=this;let n=Qe(t,e.touches[0].clientX,e.touches[0].clientY);const r=e=>{if(1!==e.touches.length)return;const 
r=Qe(t,e.touches[0].clientX,e.touches[0].clientY);this.rotate(n.x-r.x,n.y-r.y),n=r},o=e=>{1!==e.touches.length&&a()},i=()=>{a()},a=()=>{document.removeEventListener("touchstart",o),document.removeEventListener("touchmove",r),document.removeEventListener("touchend",i)};document.addEventListener("touchstart",o),document.addEventListener("touchmove",r,{passive:!1}),document.addEventListener("touchend",i,{passive:!1})},this.startTouchPinch=e=>{const{domElement:t}=this;let n=Je(t,e.touches);const r=Je(t,e.touches),o=this.spherical.radius,i=e=>{if(2!==e.touches.length)return;const i=Je(t,e.touches),a=r.distance/i.distance;this.sphericalEnd.radius=Math.max(a*o,this.minDistance/5);const s=i.center.clone().sub(n.center);s.length()>this.pinchEpsilon&&(s.multiplyScalar(this.pinchPanSpeed),this.pan(s.x,s.y)),n=i},a=e=>{2!==e.touches.length&&d()},s=()=>{d()},d=()=>{document.removeEventListener("touchstart",a),document.removeEventListener("touchmove",i),document.removeEventListener("touchend",s)};document.addEventListener("touchstart",a),document.addEventListener("touchmove",i),document.addEventListener("touchend",s)},this.handleKeyboard=()=>{if(!this.enabled||!this.enableKeyboardNavigation||!this.isFocused)return;const{keyboard:e,keyboardDollySpeed:t,keyboardPanSpeed:n,keyboardSpeedFactor:r}=this,o=this.keyboardRotationSpeedAzimuth*(Number(e.isPressed("left"))-Number(e.isPressed("right")));let i=this.keyboardRotationSpeedPolar*(Number(e.isPressed("up"))-Number(e.isPressed("down")));if(0!==o||0!==i){const{sphericalEnd:e}=this,t=e.phi;e.phi+=i,e.makeSafe(),i=e.phi-t,e.phi=t,this.rotateFirstPersonMode(o,i)}this.firstPersonMode=!1;const a=e.isPressed("shift")?r:1,s=!!e.isPressed("w")||!e.isPressed("s")&&void 0;void 0!==s&&(this.dolly(0,0,this.getDollyDeltaDistance(s,t*a)),this.firstPersonMode=!0);const 
d=Number(e.isPressed("a"))-Number(e.isPressed("d")),l=Number(e.isPressed("e"))-Number(e.isPressed("q"));0===d&&0===l||(this.pan(a*n*d,a*n*l),this.firstPersonMode=!0)},this.rotateSpherical=(e,t)=>{const{sphericalEnd:n}=this,r=i.MathUtils.clamp(n.theta+e,this.minAzimuthAngle,this.maxAzimuthAngle),o=i.MathUtils.clamp(n.phi+t,this.minPolarAngle,this.maxPolarAngle);n.theta=r,n.phi=o,n.makeSafe()},this.rotateFirstPersonMode=(e,t)=>{const{firstPersonRotationFactor:n,reusableCamera:r,reusableVector3:o,sphericalEnd:i,targetEnd:a}=this;r.position.setFromSpherical(i).add(a),r.lookAt(a),r.rotateX(n*t),r.rotateY(n*e);const s=a.distanceTo(r.position);r.getWorldDirection(o),a.addVectors(r.position,o.multiplyScalar(s)),i.setFromVector3(o.subVectors(r.position,a)),i.makeSafe()},this.pan=(e,t)=>{const{domElement:n,camera:r,offsetVector:o,target:i}=this;o.copy(r.position).sub(i);let a=o.length();a=Math.max(a,this.panDollyMinDistanceFactor*this.minDistance),r.isPerspectiveCamera&&(a*=Math.tan(r.fov/2*Math.PI/180)),this.panLeft(2*e*a/n.clientHeight),this.panUp(2*t*a/n.clientHeight)},this.dollyOrthographicCamera=(e,t,n)=>{const r=this.camera;r.zoom*=1-n,r.zoom=i.MathUtils.clamp(r.zoom,this.minZoom,this.maxZoom),r.updateProjectionMatrix()},this.dollyPerspectiveCamera=(e,t,n)=>{const{dynamicTarget:r,minDistance:o,raycaster:a,reusableVector3:s,sphericalEnd:d,targetEnd:l,camera:c,reusableCamera:u}=this,m=Math.tan(i.MathUtils.degToRad(90-.5*c.fov)),h=Math.sqrt(m*m+e*e+t*t)/m,p=s.setFromSpherical(d).length();u.copy(c),u.position.setFromSpherical(d).add(l),u.lookAt(l),a.setFromCamera({x:e,y:t},u);const f=s;let v=p+n;v<o&&(v=o,r?(u.getWorldDirection(f),l.add(f.normalize().multiplyScalar(Math.abs(n)))):n=p-v);const x=-n*h;d.radius=v,u.getWorldDirection(f),f.normalize().multiplyScalar(n);const 
g=a.ray.direction.normalize().multiplyScalar(x).add(f);l.add(g)},this.dolly=(e,t,n)=>{const{camera:r}=this;r.isOrthographicCamera?this.dollyOrthographicCamera(e,t,n):r.isPerspectiveCamera&&this.dollyPerspectiveCamera(e,t,n)},this.getDollyDeltaDistance=(e,t=1)=>{const{sphericalEnd:n,dollyFactor:r}=this,o=r**t,i=e?o:1/o;return Math.max(n.radius,this.panDollyMinDistanceFactor*this.minDistance)*(i-1)},this.panLeft=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,0),r.multiplyScalar(-e),n.add(r)},this.panUp=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,1),r.multiplyScalar(e),n.add(r)},this.camera=e,this.reusableCamera=e.clone(),this.domElement=t,this.spherical.setFromVector3(e.position),this.sphericalEnd.copy(this.spherical),t.addEventListener("mousedown",this.onMouseDown),t.addEventListener("touchstart",this.onTouchStart),t.addEventListener("wheel",this.onMouseWheel),t.addEventListener("contextmenu",this.onContextMenu),t.addEventListener("focus",this.onFocusChanged),t.addEventListener("blur",this.onFocusChanged),window.addEventListener("mouseup",this.onMouseUp),window.addEventListener("mousedown",this.onFocusChanged),window.addEventListener("touchstart",this.onFocusChanged),this.dispose=()=>{t.removeEventListener("mousedown",this.onMouseDown),t.removeEventListener("wheel",this.onMouseWheel),t.removeEventListener("touchstart",this.onTouchStart),t.removeEventListener("contextmenu",this.onContextMenu),t.removeEventListener("focus",this.onFocusChanged),t.removeEventListener("blur",this.onFocusChanged),window.removeEventListener("mouseup",this.onMouseUp),window.removeEventListener("mousedown",this.onFocusChanged),window.removeEventListener("touchstart",this.onFocusChanged)}}}
|
|
440
|
+
*/
|
|
441
|
+
class Rn{constructor(){this._events={materialsChanged:new i.d},this._renderMode=A.Color,this.materialsMap=new Map,this._clippingPlanes=[]}get clippingPlanes(){return this._clippingPlanes}set clippingPlanes(e){this._clippingPlanes=e;for(const e of this.materialsMap.keys())this.updateClippingPlanesForModel(e);this.triggerMaterialsChanged()}on(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.subscribe(t);break;default:Object(i.l)(e,"Unexpected event '"+e)}}off(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.unsubscribe(t);break;default:Object(i.l)(e,"Unexpected event '"+e)}}addModelMaterials(e,t){const n=new _,r=new yn(t+1,n);r.build();const o=new Mn,i=new Cn(t+1,o);i.build();const a=An()(()=>this.updateMaterials(e),75,{leading:!0,trailing:!0}),s=()=>this.updateTransforms(e);n.on("changed",a),o.on("changed",s);const d=function(e,t,n,r,o){const i=new l.Texture(Sn);i.needsUpdate=!0;const a=new l.ShaderMaterial({name:"Primitives (Box)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:qt.boxPrimitive.vertex,fragmentShader:qt.boxPrimitive.fragment,side:l.DoubleSide,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},glslVersion:l.GLSL3}),s=new l.ShaderMaterial({name:"Primitives (Circle)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:qt.circlePrimitive.vertex,fragmentShader:qt.circlePrimitive.fragment,side:l.DoubleSide,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},glslVersion:l.GLSL3}),d=new l.ShaderMaterial({name:"Primitives (Nuts)",clipping:!0,clippingPlanes:t,vertexShader:qt.nutPrimitive.vertex,fragmentShader:qt.nutPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),c=new l.ShaderMaterial({name:"Primitives (Quads)",clipping:!0,clippingPlanes:t,vertexShader:qt.quadPrimitive.vertex,fragmentShader:qt.quadPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),u=new l.RawShaderMaterial({name:"Primitives (General 
rings)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4},modelMatrix:{value:new l.Matrix4},viewMatrix:{value:new l.Matrix4},projectionMatrix:{value:new l.Matrix4},normalMatrix:{value:new l.Matrix3}},extensions:{fragDepth:!0},vertexShader:qt.generalRingPrimitive.vertex,fragmentShader:qt.generalRingPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),p=new l.ShaderMaterial({name:"Primitives (Cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},extensions:{fragDepth:!0},vertexShader:qt.conePrimitive.vertex,fragmentShader:qt.conePrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),m=new l.ShaderMaterial({name:"Primitives (Eccentric cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},extensions:{fragDepth:!0},vertexShader:qt.eccentricConePrimitive.vertex,fragmentShader:qt.eccentricConePrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),h=new l.ShaderMaterial({name:"Primitives (Ellipsoid segments)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},extensions:{fragDepth:!0},vertexShader:qt.ellipsoidSegmentPrimitive.vertex,fragmentShader:qt.ellipsoidSegmentPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),f=new l.RawShaderMaterial({name:"Primitives (General cylinder)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4},modelMatrix:{value:new l.Matrix4},modelViewMatrix:{value:new l.Matrix4},projectionMatrix:{value:new l.Matrix4},normalMatrix:{value:new l.Matrix3},cameraPosition:{value:new l.Vector3}},extensions:{fragDepth:!0},vertexShader:qt.generalCylinderPrimitive.vertex,fragmentShader:qt.generalCylinderPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),v=new l.ShaderMaterial({name:"Primitives (Trapezium)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new 
l.Matrix4}},extensions:{fragDepth:!0},vertexShader:qt.trapeziumPrimitive.vertex,fragmentShader:qt.trapeziumPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),x=new l.ShaderMaterial({name:"Primitives (Torus segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},extensions:{fragDepth:!0,derivatives:!0},vertexShader:qt.torusSegmentPrimitive.vertex,fragmentShader:qt.torusSegmentPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),g=new l.ShaderMaterial({name:"Primitives (Spherical segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},extensions:{fragDepth:!0},vertexShader:qt.ellipsoidSegmentPrimitive.vertex,fragmentShader:qt.ellipsoidSegmentPrimitive.fragment,side:l.DoubleSide,glslVersion:l.GLSL3}),b=new l.ShaderMaterial({name:"Triangle meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:l.DoubleSide,fragmentShader:qt.detailedMesh.fragment,vertexShader:qt.detailedMesh.vertex,glslVersion:l.GLSL3}),y={box:a,circle:s,nut:d,generalRing:u,quad:c,cone:p,eccentricCone:m,sphericalSegment:g,torusSegment:x,generalCylinder:f,trapezium:v,ellipsoidSegment:h,instancedMesh:new l.ShaderMaterial({name:"Instanced meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:l.DoubleSide,fragmentShader:qt.instancedMesh.fragment,vertexShader:qt.instancedMesh.vertex,glslVersion:l.GLSL3}),triangleMesh:b,simple:new l.ShaderMaterial({name:"Low detail material",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new l.Matrix4}},side:l.FrontSide,fragmentShader:qt.simpleMesh.fragment,vertexShader:qt.simpleMesh.vertex,glslVersion:l.GLSL3})};for(const t of 
Object.values(y))Pn(t,n,r,o,i,e);return{...y}}(this._renderMode,this._clippingPlanes,r.overrideColorPerTreeIndexTexture,i.overrideTransformIndexTexture,i.transformLookupTexture);this.materialsMap.set(e,{materials:d,perModelClippingPlanes:[],nodeAppearanceProvider:n,nodeTransformProvider:o,nodeAppearanceTextureBuilder:r,nodeTransformTextureBuilder:i,updateMaterialsCallback:a,updateTransformsCallback:s}),this.updateClippingPlanesForModel(e)}getModelMaterials(e){return this.getModelMaterialsWrapper(e).materials}getModelNodeAppearanceProvider(e){return this.getModelMaterialsWrapper(e).nodeAppearanceProvider}getModelNodeTransformProvider(e){return this.getModelMaterialsWrapper(e).nodeTransformProvider}getModelDefaultNodeAppearance(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.getDefaultAppearance()}setModelClippingPlanes(e,t){const n=this.materialsMap.get(e);if(void 0===n)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);n.perModelClippingPlanes=t,this.updateClippingPlanesForModel(e),this.triggerMaterialsChanged()}updateClippingPlanesForModel(e){const t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);const n=[...t.perModelClippingPlanes,...this.clippingPlanes],r=n.map(e=>new l.Vector4(e.normal.x,e.normal.y,e.normal.z,-e.constant));Dn(t.materials,e=>{e.clipping=n.length>0,e.clipIntersection=!1,e.clippingPlanes=n,!0===e.isRawShaderMaterial&&function(e,t){e.defines={...e.defines,NUM_CLIPPING_PLANES:t.length,UNION_CLIPPING_PLANES:0},t.length>0?e.uniforms.clippingPlanes={value:t}:delete e.uniforms.clippingPlanes}
|
|
442
|
+
/*!
|
|
443
|
+
* Copyright 2021 Cognite AS
|
|
444
|
+
*/(e,r)})}setModelDefaultNodeAppearance(e,t){this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.setDefaultAppearance(t),this.updateMaterials(e)}getModelBackTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.regularNodeTreeIndices}getModelInFrontTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.infrontNodeTreeIndices}getModelGhostedTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.ghostedNodeTreeIndices}setRenderMode(e){this._renderMode=e;const t=e!==A.DepthBufferOnly;this.applyToAllMaterials(n=>{n.uniforms.renderMode.value=e,n.colorWrite=t})}getRenderMode(){return this._renderMode}updateMaterials(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeAppearanceTextureBuilder.needsUpdate){const{nodeAppearanceTextureBuilder:e}=t;e.build()}this.triggerMaterialsChanged()}updateTransforms(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeTransformTextureBuilder.needsUpdate){const{nodeTransformTextureBuilder:e,materials:n}=t;e.build();const r=e.transformLookupTexture,o=new l.Vector2(r.image.width,r.image.height);Dn(n,e=>{e.uniforms.transformOverrideTexture.value=r,e.uniforms.transformOverrideTextureSize.value=o})}this.triggerMaterialsChanged()}getModelMaterialsWrapper(e){const t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Model ${e} has not been added to MaterialManager`);return t}applyToAllMaterials(e){for(const t of this.materialsMap.values()){Dn(t.materials,e)}}triggerMaterialsChanged(){this._events.materialsChanged.fire()}}function Dn(e,t){t(e.box),t(e.circle),t(e.generalRing),t(e.nut),t(e.quad),t(e.cone),t(e.eccentricCone),t(e.sphericalSegment),t(e.torusSegment),t(e.generalCylinder),t(e.trapezium),t(e.ellipsoidSegment),t(e.instancedMesh),t(e.triangleMesh),t(e.simple)}class En{}En.Black=new l.Color("rgb(0, 0, 0)"),En.White=new l.Color("rgb(255, 255, 255)"),En.Cyan=new l.Color("rgb(102, 213, 234)"),En.Blue=new l.Color("rgb(77, 106, 
242)"),En.Purple=new l.Color("rgb(186, 82, 212)"),En.Pink=new l.Color("rgb(232, 64, 117)"),En.Orange=new l.Color("rgb(238, 113, 53)"),En.Yellow=new l.Color("rgb(246, 189, 65)"),En.VeryLightGray=new l.Color("rgb(247, 246, 245)"),En.LightGray=new l.Color("rgb(242, 241, 240)");class Bn{}Bn.Red=new l.Color("rgb(235,0,4)"),Bn.Green=new l.Color("rgb(46,164,79)");
|
|
445
|
+
/*!
|
|
446
|
+
* Copyright 2021 Cognite AS
|
|
447
|
+
*/
|
|
448
|
+
class On{constructor(e,t,n,r){var o,i;this._lastFrameSceneState={hasBackElements:!0,hasInFrontElements:!0,hasGhostElements:!0,hasCustomObjects:!0},this._rootSectorNodeBuffer=new Set,this._outlineTexelSize=2,this._autoSetTargetSize=!1,this._debugRenderTimings=!1,this._uiObjects=[],this._deepFlushRendererArgs={buffer:new ArrayBuffer(4)},this._renderer=e,this._renderOptions=r,this._materialManager=n,this._orthographicCamera=new l.OrthographicCamera(-1,1,1,-1,-1,1),this._renderTarget=null,this._originalScene=t,this._cadScene=new l.Scene,this._cadScene.autoUpdate=!1,this._normalScene=new l.Scene,this._normalScene.autoUpdate=!1,this._inFrontScene=new l.Scene,this._inFrontScene.autoUpdate=!1,this._compositionScene=new l.Scene,this._compositionScene.autoUpdate=!1,this._fxaaScene=new l.Scene,this._fxaaScene.autoUpdate=!1,this._ssaoScene=new l.Scene,this._ssaoScene.autoUpdate=!1,this._ssaoBlurCombineScene=new l.Scene,this._ssaoBlurCombineScene.autoUpdate=!1,this._emptyScene=new l.Scene,this._emptyScene.autoUpdate=!1;const a=e.capabilities.isWebGL2,s=this.createOutlineColorTexture();this._inFrontRenderedCadModelTarget=zn(a,this.multiSampleCountHint,{stencilBuffer:!1}),this._inFrontRenderedCadModelTarget.depthTexture=new l.DepthTexture(0,0),this._inFrontRenderedCadModelTarget.depthTexture.format=l.DepthFormat,this._inFrontRenderedCadModelTarget.depthTexture.type=l.UnsignedIntType,this._normalRenderedCadModelTarget=zn(a,this.multiSampleCountHint,{stencilBuffer:!1}),this._normalRenderedCadModelTarget.depthTexture=new l.DepthTexture(0,0),this._normalRenderedCadModelTarget.depthTexture.format=l.DepthFormat,this._normalRenderedCadModelTarget.depthTexture.type=l.UnsignedIntType,this._ghostObjectRenderTarget=zn(a,this.multiSampleCountHint,{stencilBuffer:!1}),this._ghostObjectRenderTarget.depthTexture=new 
l.DepthTexture(0,0),this._ghostObjectRenderTarget.depthTexture.format=l.DepthFormat,this._ghostObjectRenderTarget.depthTexture.type=l.UnsignedIntType,this._customObjectRenderTarget=zn(a,this.multiSampleCountHint,{stencilBuffer:!1}),this._customObjectRenderTarget.depthTexture=new l.DepthTexture(0,0),this._customObjectRenderTarget.depthTexture.format=l.DepthFormat,this._customObjectRenderTarget.depthTexture.type=l.UnsignedIntType,this._compositionTarget=new l.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._compositionTarget.depthTexture=new l.DepthTexture(0,0),this._compositionTarget.depthTexture.format=l.DepthFormat,this._compositionTarget.depthTexture.type=l.UnsignedIntType,this._ssaoTarget=new l.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoTarget.depthTexture=new l.DepthTexture(0,0),this._ssaoTarget.depthTexture.format=l.DepthFormat,this._ssaoTarget.depthTexture.type=l.UnsignedIntType,this._ssaoBlurCombineTarget=new l.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoBlurCombineTarget.depthTexture=new l.DepthTexture(0,0),this._ssaoBlurCombineTarget.depthTexture.format=l.DepthFormat,this._ssaoBlurCombineTarget.depthTexture.type=l.UnsignedIntType,this._combineOutlineDetectionMaterial=new l.ShaderMaterial({vertexShader:Ht.vertex,fragmentShader:Ht.fragment,uniforms:{tFront:{value:this._inFrontRenderedCadModelTarget.texture},tFrontDepth:{value:this._inFrontRenderedCadModelTarget.depthTexture},tBack:{value:this._normalRenderedCadModelTarget.texture},tBackDepth:{value:this._normalRenderedCadModelTarget.depthTexture},tCustom:{value:this._customObjectRenderTarget.texture},tCustomDepth:{value:this._customObjectRenderTarget.depthTexture},tGhost:{value:this._ghostObjectRenderTarget.texture},tGhostDepth:{value:this._ghostObjectRenderTarget.depthTexture},tOutlineColors:{value:s},resolution:{value:new l.Vector2(0,0)},texelSize:{value:new 
l.Vector2(0,0)},cameraNear:{value:.1},cameraFar:{value:1e4},edgeStrengthMultiplier:{value:2.5},edgeGrayScaleIntensity:{value:.1}},extensions:{fragDepth:!0},defines:{EDGES:null!==(i=null===(o=this._renderOptions.edgeDetectionParameters)||void 0===o?void 0:o.enabled)&&void 0!==i?i:N.edgeDetectionParameters.enabled},glslVersion:l.GLSL3});const d=this.ssaoParameters(this._renderOptions),c=d.sampleSize,u=this.createKernel(c),p=d.sampleRadius,m=d.depthCheckBias;this._ssaoMaterial=new l.ShaderMaterial({uniforms:{tDepth:{value:this._compositionTarget.depthTexture},kernel:{value:u},sampleRadius:{value:p},bias:{value:m},projMatrix:{value:new l.Matrix4},inverseProjectionMatrix:{value:new l.Matrix4},resolution:{value:new l.Vector2}},defines:{MAX_KERNEL_SIZE:c},vertexShader:Xt.vertex,fragmentShader:Xt.fragment,glslVersion:l.GLSL3}),this._ssaoBlurCombineMaterial=new l.ShaderMaterial({uniforms:{tDiffuse:{value:this._compositionTarget.texture},tAmbientOcclusion:{value:this._ssaoTarget.texture},resolution:{value:new l.Vector2}},vertexShader:Yt.vertex,fragmentShader:Yt.fragment,glslVersion:l.GLSL3});const h=this.supportsSsao(d)?this._ssaoBlurCombineTarget.texture:this._compositionTarget.texture;this._fxaaMaterial=new l.ShaderMaterial({uniforms:{tDiffuse:{value:h},tDepth:{value:this._compositionTarget.depthTexture},resolution:{value:new l.Vector2},inverseResolution:{value:new l.Vector2}},vertexShader:Kt.vertex,fragmentShader:Kt.fragment,extensions:{fragDepth:!0},glslVersion:l.GLSL3}),this.setupCompositionScene(),this.setupSsaoScene(),this.setupSsaoBlurCombineScene(),this.setupFxaaScene(),this._normalSceneBuilder=new Ln(this._normalScene),this._inFrontSceneBuilder=new Ln(this._inFrontScene)}set renderOptions(e){const t=this.ssaoParameters(e),n={...t};this.setSsaoParameters(n),this._renderOptions={...e,ssaoRenderParameters:{...t}}}set debugRenderTimings(e){this._debugRenderTimings=e}get debugRenderTimings(){return 
this._debugRenderTimings}addUiObject(e,t,n){this._uiObjects.push({object:e,screenPos:t,width:n.x,height:n.y})}removeUiObject(e){this._uiObjects=this._uiObjects.filter(t=>{const n=t.object;return e!==n})}ssaoParameters(e){var t;return null!==(t=null==e?void 0:e.ssaoRenderParameters)&&void 0!==t?t:{...N.ssaoRenderParameters}}get antiAliasingMode(){const{antiAliasing:e=N.antiAliasing}=this._renderOptions;return e}get multiSampleCountHint(){const{multiSampleCountHint:e=N.multiSampleCountHint}=this._renderOptions;return e}supportsSsao(e){return!Object(i.r)()&&(this._renderer.capabilities.isWebGL2||this._renderer.extensions.has("EXT_frag_depth"))&&e.sampleSize!==C.None}renderDetailedToDepthOnly(e){const t={renderMode:this._materialManager.getRenderMode()},n=new i.j(this._renderer);this._materialManager.setRenderMode(A.DepthBufferOnly);try{n.setRenderTarget(this._renderTarget),this.setVisibilityOfSectors(W.c.Simple,!1),this.traverseForRootSectorNode(this._originalScene),this.extractCadNodes(this._originalScene),this.clearTarget(this._renderTarget);const{hasBackElements:r,hasInFrontElements:o,hasGhostElements:i}=this.splitToScenes();r&&!i?this.renderNormalCadModelsFromBaseScene(e,this._renderTarget):r&&i&&(this.renderNormalCadModels(e,this._renderTarget),this._normalSceneBuilder.restoreOriginalScene()),o&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene())}finally{this._materialManager.setRenderMode(t.renderMode),n.resetState(),this.restoreCadNodes(),this.setVisibilityOfSectors(W.c.Simple,!0)}}render(e){this.setupRenderTargetSpectorDebugging(),this._debugRenderTimings&&wt.a.debug("============== RENDER BEGIN ==============");const t=this._renderer,n=this._originalScene,r=new 
i.j(t),o={autoClear:t.autoClear,clearAlpha:t.getClearAlpha(),renderMode:this._materialManager.getRenderMode()};t.info.autoReset=!1,t.info.reset(),r.autoClear=!1;try{r.setRenderTarget(this._renderTarget),this.updateRenderSize(t),t.info.autoReset=!1,t.info.reset(),r.autoClear=!1,this.traverseForRootSectorNode(n),this.extractCadNodes(n),this.clearTarget(this._ghostObjectRenderTarget),this.clearTarget(this._compositionTarget),this.clearTarget(this._customObjectRenderTarget),t.setClearAlpha(0),this.clearTarget(this._normalRenderedCadModelTarget),this.clearTarget(this._inFrontRenderedCadModelTarget),t.setClearAlpha(o.clearAlpha);const i={...this._lastFrameSceneState},{hasBackElements:a,hasInFrontElements:s,hasGhostElements:d}=this.splitToScenes(),l=n.children.length>0;this._lastFrameSceneState={hasBackElements:a,hasInFrontElements:s,hasGhostElements:d,hasCustomObjects:l},a&&!d?this.renderNormalCadModelsFromBaseScene(e):a&&d?(this.renderNormalCadModels(e),this._normalSceneBuilder.restoreOriginalScene(),this.renderGhostedCadModelsFromBaseScene(e)):!a&&d&&this.renderGhostedCadModelsFromBaseScene(e),s&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene()),l&&this.renderCustomObjects(n,e),t.capabilities.isWebGL2&&(!a&&i.hasBackElements&&this.explicitFlushRender(e,this._normalRenderedCadModelTarget),!d&&i.hasGhostElements&&this.explicitFlushRender(e,this._ghostObjectRenderTarget),!s&&i.hasInFrontElements&&this.explicitFlushRender(e,this._inFrontRenderedCadModelTarget),!l&&i.hasInFrontElements&&this.explicitFlushRender(e,this._customObjectRenderTarget));const c=this.supportsSsao(this.ssaoParameters(this._renderOptions));switch(this.antiAliasingMode){case 
T.FXAA:this.renderComposition(e,this._compositionTarget),r.autoClear=o.autoClear,c&&(this.renderSsao(this._ssaoTarget,e),this.renderPostProcessStep("ssao-blur-combine",this._ssaoBlurCombineTarget,this._ssaoBlurCombineScene)),this.renderPostProcessStep("fxaa",this._renderTarget,this._fxaaScene);break;case T.NoAA:t.autoClear=o.autoClear,c?(this.renderComposition(e,this._compositionTarget),this.renderSsao(this._ssaoTarget,e),this.renderPostProcessStep("ssao-blur-combine",this._renderTarget,this._ssaoBlurCombineScene)):this.renderComposition(e,this._renderTarget);break;default:throw new Error("Unsupported anti-aliasing mode: "+this.antiAliasingMode)}}finally{r.resetState(),this._materialManager.setRenderMode(o.renderMode),this.restoreCadNodes(),this._debugRenderTimings&&wt.a.debug("=============== RENDER END ===============")}}restoreCadNodes(){this._rootSectorNodeBuffer.forEach(e=>{e[1].add(e[0])}),this._rootSectorNodeBuffer.clear()}extractCadNodes(e){this._rootSectorNodeBuffer.forEach(t=>{if(t[1].parent!==e&&null!==t[1].parent&&t[1].parent.parent!==e)throw new Error("CadNode must be put at scene root");this._cadScene.add(t[0])})}setRenderTarget(e){this._renderTarget=e}getRenderTarget(){return this._renderTarget}setRenderTargetAutoSize(e){this._autoSetTargetSize=e}getRenderTargetAutoSize(){return this._autoSetTargetSize}clearTarget(e){this._renderer.setRenderTarget(e),this._renderer.clear()}explicitFlushRender(e,t){this._renderer.setRenderTarget(t),this.renderStep("flushRender",this._emptyScene,e)}splitToScenes(){const e={hasBackElements:!1,hasInFrontElements:!1,hasGhostElements:!1};this._rootSectorNodeBuffer.forEach(t=>{const 
n=t[1],r=this._materialManager.getModelBackTreeIndices(n.cadModelMetadata.modelIdentifier),o=this._materialManager.getModelInFrontTreeIndices(n.cadModelMetadata.modelIdentifier),i=this._materialManager.getModelGhostedTreeIndices(n.cadModelMetadata.modelIdentifier),a=r.count>0,s=o.count>0,d=i.count>0;e.hasBackElements=e.hasBackElements||a,e.hasInFrontElements=e.hasInFrontElements||s,e.hasGhostElements=e.hasGhostElements||d});const{hasBackElements:t,hasInFrontElements:n,hasGhostElements:r}=e;return this._rootSectorNodeBuffer.forEach(e=>{const o=e[0],i=e[1],a=this._materialManager.getModelBackTreeIndices(i.cadModelMetadata.modelIdentifier),s=this._materialManager.getModelInFrontTreeIndices(i.cadModelMetadata.modelIdentifier),d=new l.Object3D;d.applyMatrix4(o.matrix),t&&r&&this._normalScene.add(d);const c=new l.Object3D;c.applyMatrix4(o.matrix),n&&this._inFrontScene.add(c);const u=[e[0]];for(;u.length>0;){const e=u.pop(),o=e.userData.treeIndices;o?(n&&s.hasIntersectionWith(o)&&this._inFrontSceneBuilder.addElement(e,c),t&&!r||r&&a.hasIntersectionWith(o)&&this._normalSceneBuilder.addElement(e,d)):u.push(...e.children)}}),e}renderNormalCadModels(e,t=this._normalRenderedCadModelTarget){this._normalSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this.renderStep("normal",this._normalScene,e)}renderNormalCadModelsFromBaseScene(e,t=this._normalRenderedCadModelTarget){this._renderer.setRenderTarget(t),this.renderStep("normalCadModelsFromBaseScene",this._cadScene,e)}renderInFrontCadModels(e,t=this._inFrontRenderedCadModelTarget){this._inFrontSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._materialManager.setRenderMode(A.Effects),this.renderStep("infront",this._inFrontScene,e)}renderGhostedCadModelsFromBaseScene(e){this._renderer.setRenderTarget(this._ghostObjectRenderTarget),this._materialManager.setRenderMode(A.Ghost),this.renderStep("ghosted",this._cadScene,e)}renderCustomObjects(e,t){const 
n=e.autoUpdate;try{e.autoUpdate=!0,this._renderer.setRenderTarget(this._customObjectRenderTarget),this.renderStep("customobjects",e,t)}finally{e.autoUpdate=n}}updateRenderSize(e){const t=new l.Vector2;return e.getSize(t),this._renderTarget&&this._autoSetTargetSize&&t.x!==this._renderTarget.width&&t.y!==this._renderTarget.height&&this._renderTarget.setSize(t.x,t.y),t.x===this._normalRenderedCadModelTarget.width&&t.y===this._normalRenderedCadModelTarget.height||(this._normalRenderedCadModelTarget.setSize(t.x,t.y),this._inFrontRenderedCadModelTarget.setSize(t.x,t.y),this._customObjectRenderTarget.setSize(t.x,t.y),this._ghostObjectRenderTarget.setSize(t.x,t.y),this._compositionTarget.setSize(t.x,t.y),this._ssaoTarget.setSize(t.x,t.y),this._ssaoBlurCombineTarget.setSize(t.x,t.y),this._combineOutlineDetectionMaterial.uniforms.texelSize.value=new l.Vector2(this._outlineTexelSize/t.x,this._outlineTexelSize/t.y),this._combineOutlineDetectionMaterial.uniforms.resolution.value=t,this._ssaoMaterial.uniforms.resolution.value=t,this._ssaoBlurCombineMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.inverseResolution.value=new l.Vector2(1/t.x,1/t.y)),t}renderComposition(e,t){this._combineOutlineDetectionMaterial.uniforms.cameraNear.value=e.near,this._combineOutlineDetectionMaterial.uniforms.cameraFar.value=e.far,this.renderPostProcessStep("composition",t,this._compositionScene)}setSsaoParameters(e){var t;const n=N.ssaoRenderParameters;if(this._ssaoMaterial.uniforms.sampleRadius.value=e.sampleRadius,this._ssaoMaterial.uniforms.bias.value=e.depthCheckBias,e.sampleSize!==this.ssaoParameters(this._renderOptions).sampleSize){const r=null!==(t=null==e?void 0:e.sampleSize)&&void 
0!==t?t:n.sampleSize,o=this.createKernel(r);this._fxaaMaterial.uniforms.tDiffuse.value=e.sampleSize!==C.None?this._ssaoBlurCombineTarget.texture:this._compositionTarget.texture,this._ssaoMaterial.uniforms.kernel.value=o,this._ssaoMaterial.defines={MAX_KERNEL_SIZE:r},this._ssaoMaterial.needsUpdate=!0}}renderPostProcessStep(e,t,n){const r=this._renderer;if(r.setRenderTarget(t),this.renderStep(e,n,this._orthographicCamera),t===this._renderTarget){const e=r.getSize(new l.Vector2),t=new l.Vector2(r.domElement.clientWidth,r.domElement.clientHeight),n=new l.Vector2(e.x/t.x,e.y/t.y);r.autoClear=!1,this._uiObjects.forEach(e=>{const t=new l.Scene;t.add(e.object);const o=e.screenPos.clone().multiply(n),i=e.width*n.x,a=e.height*n.y;r.setViewport(o.x,o.y,i,a),r.clearDepth(),r.render(t,this._orthographicCamera)}),r.setViewport(0,0,e.x,e.y),r.autoClear=!0}}renderSsao(e,t){this._ssaoMaterial.uniforms.inverseProjectionMatrix.value=t.projectionMatrixInverse,this._ssaoMaterial.uniforms.projMatrix.value=t.projectionMatrix,this.renderPostProcessStep("ssao",e,this._ssaoScene)}renderStep(e,t,n){if(!this._debugRenderTimings)return void this._renderer.render(t,n);this.deepFlushRenderer();const r=performance.now();this._renderer.render(t,n),this.deepFlushRenderer(),wt.a.log(`Render stage '${e}' took ${performance.now()-r} ms`)}deepFlushRenderer(){const{buffer:e}=this._deepFlushRendererArgs,t=this._renderer.getContext(),n=this._renderer.getRenderTarget();t.flush(),t.finish(),null!==n&&this._renderer.readRenderTargetPixels(n,0,0,1,1,e)}createOutlineColorTexture(){const e=new Uint8Array(32),t=new l.DataTexture(e,8,1);return Fn(t.image.data,d.White,En.White),Fn(t.image.data,d.Black,En.Black),Fn(t.image.data,d.Cyan,En.Cyan),Fn(t.image.data,d.Blue,En.Blue),Fn(t.image.data,d.Green,Bn.Green),Fn(t.image.data,d.Red,Bn.Red),Fn(t.image.data,d.Orange,En.Orange),t}setupCompositionScene(){const e=this.createRenderTriangle(),t=new 
l.Mesh(e,this._combineOutlineDetectionMaterial);this._compositionScene.add(t)}setupFxaaScene(){const e=this.createRenderTriangle(),t=new l.Mesh(e,this._fxaaMaterial);this._fxaaScene.add(t)}setupSsaoScene(){const e=this.createRenderTriangle(),t=new l.Mesh(e,this._ssaoMaterial);this._ssaoScene.add(t)}setupSsaoBlurCombineScene(){const e=this.createRenderTriangle(),t=new l.Mesh(e,this._ssaoBlurCombineMaterial);this._ssaoBlurCombineScene.add(t)}createKernel(e){const t=[];for(let r=0;r<e;r++){const o=new l.Vector3;for(;o.length()<.5;)o.x=2*Math.random()-1,o.y=2*Math.random()-1,o.z=Math.random();o.normalize();let i=r/e;i=n(.1,1,i*i),o.multiplyScalar(i),t.push(o)}return t;function n(e,t,n){return e+(t-e)*(n=(n=n<0?0:n)>1?1:n)}}createRenderTriangle(){const e=new l.BufferGeometry,t=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),n=new Float32Array([0,0,2,0,0,2]);return e.setAttribute("position",new l.BufferAttribute(t,3)),e.setAttribute("uv",new l.BufferAttribute(n,2)),e}traverseForRootSectorNode(e){const t=[e];for(;t.length>0;){const e=t.pop();if(e instanceof W.d){const t=e.parent;t.visible&&this._rootSectorNodeBuffer.add([e,t])}else e instanceof l.Group||t.push(...e.children)}}setVisibilityOfSectors(e,t){this._originalScene.traverse(n=>{n instanceof W.e&&n.levelOfDetail===e&&(n.visible=t)})}setupRenderTargetSpectorDebugging(){!1}assignSpectorJsMetadataToRenderTarget(e,t){const n=this._renderer.getRenderTarget();try{this._renderer.setRenderTarget(e);const r=this._renderer.getContext();r.getParameter(r.FRAMEBUFFER_BINDING).__SPECTOR_Metadata=t}finally{this._renderer.setRenderTarget(n)}}}function zn(e,t,n){if(e&&t>1){const e=new l.WebGLMultisampleRenderTarget(0,0,{...n,ignoreDepth:!1});return e.samples=t,e}return new l.WebGLRenderTarget(0,0,n)}function Fn(e,t,n){e[4*t+0]=Math.floor(255*n.r),e[4*t+1]=Math.floor(255*n.g),e[4*t+2]=Math.floor(255*n.b),e[4*t+3]=255}class 
Ln{constructor(e){this.buffer=[],this.temporaryScene=e}addElement(e,t){this.buffer.push({object:e,parent:e.parent,sceneParent:t})}populateTemporaryScene(){this.buffer.forEach(e=>e.sceneParent.add(e.object))}restoreOriginalScene(){this.buffer.forEach(e=>{e.parent.add(e.object)}),this.buffer.length=0,this.temporaryScene.remove(...this.temporaryScene.children)}}function kn(e,t,n,r,o,i,a={}){R.a.init(!1!==a.logMetrics,e,t,{constructorOptions:a});const s=a.renderOptions||{},d=new Rn,l=new On(o,i,d,s),c=en(n,r,o,d,new bn(l),a),u=function(e,t){const n=new fn(e,t),r=new xn(t);return new mn(n,r)}(n,r);return new gn(c,l,u)}var Gn=n(21);
|
|
449
|
+
/*!
|
|
450
|
+
* Copyright 2021 Cognite AS
|
|
451
|
+
*/class Vn{constructor(e,t){switch(this._revealManager=t,e){case"cdf":this.addCadModel=e=>Vn.addCdfCadModel(e,t),this.addPointCloudModel=e=>Vn.addCdfPointCloudModel(e,t);break;case"local":this.addCadModel=e=>Vn.addLocalCadModel(e,t),this.addPointCloudModel=()=>{throw new Error("Local point cloud models are not supported")};break;default:Object(i.l)(e)}}static createLocalHelper(e,t,n){const r=function(e,t,n={}){return kn("local","local-dataSource-appId",new st.g,new st.e,e,t,n)}(e,t,n);return new Vn("local",r)}static createCdfHelper(e,t,n,r){const o=function(e,t,n,r={}){const o=e.getDefaultRequestHeaders()["x-cdp-app"]||"unknown",i=new st.c(e),a=new st.a(e);return kn(e.project,o,i,a,t,n,r)}(r,e,t,n);return new Vn("cdf",o)}static createCustomDataSourceHelper(e,t,n,r){const o=kn("custom-datasource","custom-datasource-app",r.getModelMetadataProvider(),r.getModelDataProvider(),e,t,n);return new Vn("cdf",o)}get revealManager(){return this._revealManager}static addLocalCadModel(e,t){if(void 0===e.localPath)throw new Error("addLocalCadModel only works with local models");const n=new st.f(e.localPath);return t.addModel("cad",n,{geometryFilter:e.geometryFilter})}static addCdfCadModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfCadModel only works with CDF hosted models");const n=new st.b(e.modelId,e.revisionId);return t.addModel("cad",n,{geometryFilter:e.geometryFilter})}static addCdfPointCloudModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfPointCloudModel only works with CDF hosted models");const n=new st.b(e.modelId,e.revisionId);return t.addModel("pointcloud",n)}}
|
|
452
|
+
/*!
|
|
453
|
+
* Copyright 2021 Cognite AS
|
|
454
|
+
*/const Un={16:"shift",17:"ctrl",18:"alt",27:"escape",32:"space",37:"left",38:"up",39:"right",40:"down",65:"a",66:"b",67:"c",68:"d",69:"e",70:"f",81:"q",83:"s",87:"w"};class jn{constructor(){this.keys={},this._disabled=!1,this.addEventListeners=()=>{this.clearPressedKeys(),window.addEventListener("keydown",this.onKeydown),window.addEventListener("keyup",this.onKeyup),window.addEventListener("blur",this.clearPressedKeys)},this.removeEventListeners=()=>{window.removeEventListener("keydown",this.onKeydown),window.removeEventListener("keyup",this.onKeyup),window.removeEventListener("blur",this.clearPressedKeys)},this.onKeydown=e=>{e.metaKey||e.altKey||e.ctrlKey||e.keyCode in Un&&(0===this.keys[Un[e.keyCode]]&&(this.keys[Un[e.keyCode]]=2),e.preventDefault())},this.onKeyup=e=>{e.keyCode in Un&&(this.keys[Un[e.keyCode]]=0)},this.clearPressedKeys=()=>{Object.keys(Un).forEach(e=>{this.keys[Un[e]]=0})},this.addEventListeners()}get disabled(){return this._disabled}set disabled(e){this._disabled=e,e?this.removeEventListeners():this.addEventListeners()}isPressed(e){return this.keys[e]>=1}comsumePressed(e){const t=2===this.keys[e];return t&&(this.keys[e]=1),t}}
|
|
455
|
+
/*!
|
|
456
|
+
* Copyright 2021 Cognite AS
|
|
457
|
+
*/const Wn=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");function qn(e,t,n){return new l.Vector2(t-e.offsetLeft,n-e.offsetTop)}function Hn(e,t){if(2!==t.length)throw new Error("getPinchInfo only works if touches.length === 2");const n=[t[0],t[1]].map(({clientX:t,clientY:n})=>qn(e,t,n));return{center:n[0].clone().add(n[1]).multiplyScalar(.5),distance:n[0].distanceTo(n[1]),offsets:n}}const Kn=Math.PI/360,Xn=10*Kn;class Yn extends l.EventDispatcher{constructor(e,t){super(),this.enabled=!0,this.enableDamping=!0,this.dampingFactor=.25,this.dynamicTarget=!0,this.minDistance=.8,this.minZoomDistance=.4,this.dollyFactor=.99,this.minPolarAngle=0,this.maxPolarAngle=Math.PI,this.minAzimuthAngle=-1/0,this.maxAzimuthAngle=1/0,this.panDollyMinDistanceFactor=10,this.firstPersonRotationFactor=.4,this.pointerRotationSpeedAzimuth=Kn,this.pointerRotationSpeedPolar=Kn,this.enableKeyboardNavigation=!0,this.keyboardRotationSpeedAzimuth=Xn,this.keyboardRotationSpeedPolar=Xn,this.mouseFirstPersonRotationSpeed=2*Kn,this.keyboardDollySpeed=2,this.keyboardPanSpeed=10,this.keyboardSpeedFactor=3,this.pinchEpsilon=2,this.pinchPanSpeed=1,this.EPSILON=.001,this.minZoom=0,this.maxZoom=1/0,this.orthographicCameraDollyFactor=.3,this.lookAtViewTarget=!1,this.useScrollTarget=!1,this.zoomToCursor=!0,this.minDeltaRatio=1,this.maxDeltaRatio=8,this.minDeltaDownscaleCoefficient=.1,this.maxDeltaDownscaleCoefficient=1,this._temporarilyDisableDamping=!1,this._firstPersonMode=!1,this._reusableVector3=new l.Vector3,this._accumulatedMouseMove=new l.Vector2,this._target=new l.Vector3,this._viewTarget=new l.Vector3,this._scrollTarget=new l.Vector3,this._targetEnd=new l.Vector3,this._spherical=new l.Spherical,this._sphericalEnd=new l.Spherical,this._deltaTarget=new l.Vector3,this._keyboard=new jn,this._offsetVector=new l.Vector3,this._panVector=new l.Vector3,this._raycaster=new 
l.Raycaster,this._targetFPS=30,this._targetFPSOverActualFPS=1,this._isFocused=!1,this.update=e=>{const{_camera:t,_target:n,_targetEnd:r,_spherical:o,_sphericalEnd:i,_deltaTarget:a,handleKeyboard:s,enableDamping:d,dampingFactor:l,EPSILON:c,_targetFPS:u,enabled:p}=this;if(!p)return!1;const m=Math.min(1/e,u);this._targetFPSOverActualFPS=u/m,s(),this._accumulatedMouseMove.lengthSq()>0&&(this.rotate(this._accumulatedMouseMove.x,this._accumulatedMouseMove.y),this._accumulatedMouseMove.set(0,0));let h=0;this._firstPersonMode?h+=this.calculateShortestDeltaTheta(i.theta,o.theta):h=i.theta-o.theta;const f=i.phi-o.phi,v=i.radius-o.radius;a.subVectors(r,n);let x=!1;const g=d&&!this._temporarilyDisableDamping?Math.min(l*this._targetFPSOverActualFPS,1):1;return this._temporarilyDisableDamping=!1,Math.abs(h)>c||Math.abs(f)>c||Math.abs(v)>c||Math.abs(a.x)>c||Math.abs(a.y)>c||Math.abs(a.z)>c?(o.set(o.radius+v*g,o.phi+f*g,o.theta+h*g),n.add(a.multiplyScalar(g)),x=!0):(o.copy(i),n.copy(r)),o.makeSafe(),t.position.setFromSpherical(o).add(n),t.lookAt(this.lookAtViewTarget?this._viewTarget:n),x&&this.triggerCameraChangeEvent(),x},this.getState=()=>{const{_target:e,_camera:t}=this;return{target:e.clone(),position:t.position.clone()}},this.setState=(e,t)=>{const 
n=e.clone().sub(t);this._targetEnd.copy(t),this._sphericalEnd.setFromVector3(n),this._target.copy(this._targetEnd),this._scrollTarget.copy(t),this._spherical.copy(this._sphericalEnd),this.update(1e3/this._targetFPS),this.triggerCameraChangeEvent()},this.setViewTarget=e=>{this._viewTarget.copy(e)},this.setScrollTarget=e=>{this._scrollTarget.copy(e)},this.triggerCameraChangeEvent=()=>{const{_camera:e,_target:t}=this;this.dispatchEvent({type:"cameraChange",camera:{position:e.position,target:t}})},this.convertPixelCoordinatesToNormalized=(e,t)=>({x:e/this._domElement.clientWidth*2-1,y:t/this._domElement.clientHeight*-2+1}),this.onMouseDown=e=>{if(this.enabled)switch(this._firstPersonMode=!1,this._sphericalEnd.copy(this._spherical),e.button){case l.MOUSE.LEFT:this.startMouseRotation(e);break;case l.MOUSE.RIGHT:e.preventDefault(),this.startMousePan(e)}},this.onMouseUp=e=>{this._accumulatedMouseMove.set(0,0)},this.onMouseWheel=e=>{if(!this.enabled)return;e.preventDefault();let t=0;if(e.wheelDelta)t=-e.wheelDelta/40;else if(e.detail)t=e.detail;else if(e.deltaY){const n=Wn?1:40;t=e.deltaY/n}const{x:n,y:r}=this.convertPixelCoordinatesToNormalized(e.offsetX,e.offsetY),o=t<0,i=this._camera.isPerspectiveCamera?this.getDollyDeltaDistance(o,Math.abs(t)):Math.sign(t)*this.orthographicCameraDollyFactor;this.dolly(n,r,i,!1)},this.onTouchStart=e=>{if(this.enabled)switch(e.preventDefault(),this._firstPersonMode=!1,this._sphericalEnd.copy(this._spherical),e.touches.length){case 1:this.startTouchRotation(e);break;case 2:this.startTouchPinch(e)}},this.onFocusChanged=e=>{this._isFocused="blur"!==e.type&&(this.isDescendant(this._domElement.parentElement,e.target)||document.activeElement===this._domElement),this._keyboard.disabled=!this._isFocused},this.isDescendant=(e,t)=>{let n=t.parentNode;for(;null!==n;){if(n===e)return!0;n=n.parentNode}return!1},this.onContextMenu=e=>{this.enabled&&e.preventDefault()},this.rotate=(e,t)=>{if(0===e&&0===t)return;const 
n=(this._firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedAzimuth)*e,r=(this._firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedPolar)*t;this._firstPersonMode?(this._temporarilyDisableDamping=!0,this.rotateFirstPersonMode(n,r)):this.rotateSpherical(n,r)},this.startMouseRotation=e=>{let t=qn(this._domElement,e.clientX,e.clientY);const n=e=>{const n=qn(this._domElement,e.clientX,e.clientY),r=t.clone().sub(n);this._accumulatedMouseMove.add(r),t=n},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startMousePan=e=>{let t=qn(this._domElement,e.clientX,e.clientY);const n=e=>{const n=qn(this._domElement,e.clientX,e.clientY),r=n.x-t.x,o=n.y-t.y;t=n,this.pan(r,o)},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startTouchRotation=e=>{const{_domElement:t}=this;let n=qn(t,e.touches[0].clientX,e.touches[0].clientY);const r=e=>{if(1!==e.touches.length)return;const r=qn(t,e.touches[0].clientX,e.touches[0].clientY);this.rotate(n.x-r.x,n.y-r.y),n=r},o=e=>{1!==e.touches.length&&a()},i=()=>{a()},a=()=>{document.removeEventListener("touchstart",o),document.removeEventListener("touchmove",r),document.removeEventListener("touchend",i)};document.addEventListener("touchstart",o),document.addEventListener("touchmove",r,{passive:!1}),document.addEventListener("touchend",i,{passive:!1})},this.startTouchPinch=e=>{const{_domElement:t}=this;let n=Hn(t,e.touches);const r=Hn(t,e.touches),o=this._spherical.radius,i=e=>{if(2!==e.touches.length)return;const i=Hn(t,e.touches),a=r.distance/i.distance;this._sphericalEnd.radius=Math.max(a*o,this.minDistance/5);const 
s=i.center.clone().sub(n.center);s.length()>this.pinchEpsilon&&(s.multiplyScalar(this.pinchPanSpeed),this.pan(s.x,s.y)),n=i},a=e=>{2!==e.touches.length&&d()},s=()=>{d()},d=()=>{document.removeEventListener("touchstart",a),document.removeEventListener("touchmove",i),document.removeEventListener("touchend",s)};document.addEventListener("touchstart",a),document.addEventListener("touchmove",i),document.addEventListener("touchend",s)},this.handleKeyboard=()=>{if(!this.enabled||!this.enableKeyboardNavigation||!this._isFocused)return;const{_keyboard:e,keyboardDollySpeed:t,keyboardPanSpeed:n,keyboardSpeedFactor:r}=this,o=this.keyboardRotationSpeedAzimuth*(Number(e.isPressed("left"))-Number(e.isPressed("right")));let i=this.keyboardRotationSpeedPolar*(Number(e.isPressed("up"))-Number(e.isPressed("down")));if(0!==o||0!==i){this._firstPersonMode=!0;const{_sphericalEnd:e}=this,t=e.phi;e.phi+=i,e.makeSafe(),i=e.phi-t,e.phi=t,this.rotateFirstPersonMode(o,i)}const a=e.isPressed("shift")?r:1,s=!!e.isPressed("w")||!e.isPressed("s")&&void 0;void 0!==s&&(this.dolly(0,0,this.getDollyDeltaDistance(s,t*a),!0),this._firstPersonMode=!0);const d=Number(e.isPressed("a"))-Number(e.isPressed("d")),l=Number(e.isPressed("e"))-Number(e.isPressed("q"));0===d&&0===l||(this._firstPersonMode=!0,this.pan(a*n*d,a*n*l))},this.rotateSpherical=(e,t)=>{const{_sphericalEnd:n}=this,r=l.MathUtils.clamp(n.theta+e,this.minAzimuthAngle,this.maxAzimuthAngle),o=l.MathUtils.clamp(n.phi+t,this.minPolarAngle,this.maxPolarAngle);n.theta=r,n.phi=o,n.makeSafe()},this.rotateFirstPersonMode=(e,t)=>{const{firstPersonRotationFactor:n,_reusableCamera:r,_reusableVector3:o,_sphericalEnd:i,_targetEnd:a}=this;r.position.setFromSpherical(i).add(a),r.lookAt(a),r.rotateX(n*t),r.rotateY(n*e);const 
s=a.distanceTo(r.position);r.getWorldDirection(o),a.addVectors(r.position,o.multiplyScalar(s)),i.setFromVector3(o.subVectors(r.position,a)),i.makeSafe()},this.pan=(e,t)=>{const{_domElement:n,_camera:r,_offsetVector:o,_target:i}=this;o.copy(r.position).sub(i);let a=Math.max(o.length(),this.panDollyMinDistanceFactor*this.minDistance);r.isPerspectiveCamera&&(a*=Math.tan(r.fov/2*Math.PI/180)),this.panLeft(2*e*a/n.clientHeight),this.panUp(2*t*a/n.clientHeight)},this.dollyOrthographicCamera=(e,t,n)=>{const r=this._camera;r.zoom*=1-n,r.zoom=l.MathUtils.clamp(r.zoom,this.minZoom,this.maxZoom),r.updateProjectionMatrix()},this.calculateNewRadiusAndTargetOffsetLerp=(e,t,n,r)=>{const{dynamicTarget:o,minZoomDistance:i,_raycaster:a,_targetEnd:s,_reusableCamera:d}=this,c=Math.tan(l.MathUtils.degToRad(90-.5*this._camera.fov)),u=Math.sqrt(c*c+e*e+t*t)/c,p=r.length(),m=n>0;a.setFromCamera({x:e,y:t},d);let h=p+n;h<i&&!m&&(h=p,o?(d.getWorldDirection(r),s.add(r.normalize().multiplyScalar(Math.abs(n)))):n=0);const f=-n*u;d.getWorldDirection(r),r.normalize().multiplyScalar(n);return{targetOffset:a.ray.direction.normalize().multiplyScalar(f).add(r),radius:h}},this.clampedMap=(e,t,n,r,o)=>e=e<t?r:e>n?o:l.MathUtils.mapLinear(e,t,n,r,o),this.calculateNewRadiusAndTargetOffsetScrollTarget=(e,t)=>{const{minZoomDistance:n,_reusableVector3:r,_target:o,_scrollTarget:i,_camera:a,minDeltaRatio:s,maxDeltaRatio:d,minDeltaDownscaleCoefficient:c,maxDeltaDownscaleCoefficient:u}=this,p=t.length(),m=e>0;m&&this.setScrollTarget(o);const h=r.subVectors(i,o),f=(new l.Vector3).subVectors(o,a.position),v=(new l.Vector3).subVectors(i,a.position),x=f.angleTo(v),g=h.clone().negate().angleTo(v.clone().negate());let b=e*(Math.sin(x)/Math.sin(g));const y=Math.abs(b/e),_=this.clampedMap(y,s,d,u,c);b*=_;let 
T=p+(e*=_);T<n&&(this._temporarilyDisableDamping=!0,(i.distanceTo(o)<n||T<=0)&&(b=0,T=p));return{targetOffset:h.negate().normalize().multiplyScalar(m?0:b),radius:T}},this.dollyWithWheelScroll=(e,t,n,r)=>{const{_targetEnd:o,_sphericalEnd:i,useScrollTarget:a,zoomToCursor:s}=this,d=n<0,c=new l.Vector3;let u=i.radius;if(s)if(a&&d){const{radius:e,targetOffset:t}=this.calculateNewRadiusAndTargetOffsetScrollTarget(n,r);u=e,c.copy(t)}else{const{radius:o,targetOffset:i}=this.calculateNewRadiusAndTargetOffsetLerp(e,t,n,r);u=o,c.copy(i)}else{const{radius:e,targetOffset:t}=this.calculateNewRadiusAndTargetOffsetLerp(0,0,n,r);u=e,c.copy(t)}o.add(c),i.radius=u},this.dollyPerspectiveCamera=(e,t,n,r=!1)=>{const{_reusableVector3:o,_targetEnd:i,_reusableCamera:a,_sphericalEnd:s,_camera:d}=this;a.copy(d),a.position.setFromSpherical(s).add(i),a.lookAt(i);const l=o.clone().setFromSpherical(s);r?(a.getWorldDirection(l),i.add(l.normalize().multiplyScalar(-n))):this.dollyWithWheelScroll(e,t,n,l)},this.dolly=(e,t,n,r)=>{const{_camera:o}=this;o.isOrthographicCamera?this.dollyOrthographicCamera(e,t,n):o.isPerspectiveCamera&&this.dollyPerspectiveCamera(e,t,n,r)},this.getDollyDeltaDistance=(e,t=1)=>{const{_sphericalEnd:n,dollyFactor:r}=this,o=r**t,i=e?o:1/o;return 
Math.max(n.radius,this.panDollyMinDistanceFactor*this.minDistance)*(i-1)},this.panLeft=e=>{const{_camera:t,_targetEnd:n,_panVector:r}=this;r.setFromMatrixColumn(t.matrix,0),r.multiplyScalar(-e),n.add(r)},this.panUp=e=>{const{_camera:t,_targetEnd:n,_panVector:r}=this;r.setFromMatrixColumn(t.matrix,1),r.multiplyScalar(e),n.add(r)},this._camera=e,this._reusableCamera=e.clone(),this._domElement=t,this._spherical.setFromVector3(e.position),this._sphericalEnd.copy(this._spherical),t.addEventListener("mousedown",this.onMouseDown),t.addEventListener("touchstart",this.onTouchStart),t.addEventListener("wheel",this.onMouseWheel),t.addEventListener("contextmenu",this.onContextMenu),t.addEventListener("focus",this.onFocusChanged),t.addEventListener("blur",this.onFocusChanged),window.addEventListener("mouseup",this.onMouseUp),window.addEventListener("mousedown",this.onFocusChanged),window.addEventListener("touchstart",this.onFocusChanged),this.dispose=()=>{t.removeEventListener("mousedown",this.onMouseDown),t.removeEventListener("wheel",this.onMouseWheel),t.removeEventListener("touchstart",this.onTouchStart),t.removeEventListener("contextmenu",this.onContextMenu),t.removeEventListener("focus",this.onFocusChanged),t.removeEventListener("blur",this.onFocusChanged),window.removeEventListener("mouseup",this.onMouseUp),window.removeEventListener("mousedown",this.onFocusChanged),window.removeEventListener("touchstart",this.onFocusChanged)}}calculateShortestDeltaTheta(e,t){const n=e%(2*Math.PI)-t%(2*Math.PI);let r=Math.min(Math.abs(n),2*Math.PI-Math.abs(n));return r*=(r===Math.abs(n)?1:-1)*Math.sign(n),r}}
|
|
458
|
+
/*!
|
|
459
|
+
* Copyright 2021 Cognite AS
|
|
460
|
+
*/class Zn{constructor(e,t,n){this._events={cameraChange:new i.d},this._raycaster=new l.Raycaster,this.isDisposed=!1,this._onClick=void 0,this._onWheel=void 0,this._cameraControlsOptions={...Zn.DefaultCameraControlsOptions},this.automaticNearFarPlane=!0,this.automaticControlsSensitivity=!0,this._updateNearAndFarPlaneBuffers={cameraPosition:new l.Vector3,cameraDirection:new l.Vector3,corners:new Array(new l.Vector3,new l.Vector3,new l.Vector3,new l.Vector3,new l.Vector3,new l.Vector3,new l.Vector3,new l.Vector3)},this._calculateCameraFarBuffers={nearPlaneCoplanarPoint:new l.Vector3,nearPlane:new l.Plane},this._camera=e,this._domElement=t,this._inputHandler=new i.f(t),this._modelRaycastCallback=n,this.setCameraControlsOptions(this._cameraControlsOptions),this._controls=new Yn(e,t),this._controls.minZoomDistance=Zn.DefaultMinZoomDistance,this._controls.addEventListener("cameraChange",e=>{const{position:t,target:n}=e.camera;this._events.cameraChange.fire({camera:{position:t.clone(),target:n.clone()}})}),e||(this._camera=new l.PerspectiveCamera(60,void 0,.1,1e4),this._camera.position.x=30,this._camera.position.y=10,this._camera.position.z=50,this._camera.lookAt(new l.Vector3))}on(e,t){switch(e){case"cameraChange":this._events.cameraChange.subscribe(t);break;default:Object(i.l)(e)}}off(e,t){switch(e){case"cameraChange":this._events.cameraChange.subscribe(t);break;default:Object(i.l)(e)}}fitCameraToBoundingBox(e,t,n=2){const r=e.getBoundingSphere(new l.Sphere),o=r.center,i=r.radius*n,a=new l.Vector3(0,0,-1);a.applyQuaternion(this._camera.quaternion);const s=new l.Vector3;s.copy(a).multiplyScalar(-i).add(o),this.moveCameraTo(s,o,t)}set cameraControlsEnabled(e){this._controls.enabled=e}get cameraControlsEnabled(){return this._controls.enabled}set keyboardNavigationEnabled(e){this._controls.enableKeyboardNavigation=e}get keyboardNavigationEnabled(){return this._controls.enableKeyboardNavigation}get cameraControls(){return 
this._controls}setCameraTarget(e,t=!1){if(this.isDisposed)return;const n=t?Zn.DefaultAnimationDuration:0;this.moveCameraTargetTo(e,n)}setCameraControlsState(e){this._controls.setState(e.position,e.target)}getCameraControlsState(){return this._controls.getState()}getCameraControlsOptions(){return this._cameraControlsOptions}setCameraControlsOptions(e){this._cameraControlsOptions={...Zn.DefaultCameraControlsOptions,...e},this.teardownControls(!1),this.setupControls()}moveCameraTo(e,t,n){if(this.isDisposed)return;const{_camera:r}=this;n=null!=n?n:this.calculateDefaultDuration(t.distanceTo(r.position));const o=this.calculateAnimationStartTarget(t),i={x:r.position.x,y:r.position.y,z:r.position.z,targetX:o.x,targetY:o.y,targetZ:o.z},a={x:e.x,y:e.y,z:e.z,targetX:t.x,targetY:t.y,targetZ:t.z},s=new l.Vector3,d=new l.Vector3,{tween:c,stopTween:u}=this.createTweenAnimation(i,a,n);c.onUpdate(()=>{this.isDisposed||(d.set(i.x,i.y,i.z),s.set(i.targetX,i.targetY,i.targetZ),this._camera&&this._controls.setState(d,s))}).onStop(()=>{this._controls.setState(d,s)}).onComplete(()=>{this.isDisposed||this._domElement.removeEventListener("pointerdown",u)}).start(M.a.now()),c.update(M.a.now())}moveCameraTargetTo(e,t){if(this.isDisposed)return;if(0===t)return void this._controls.setState(this._camera.position,e);const{_camera:n,_controls:r}=this;t=null!=t?t:this.calculateDefaultDuration(e.distanceTo(n.position));const o=this.calculateAnimationStartTarget(e),i={targetX:o.x,targetY:o.y,targetZ:o.z},a={targetX:e.x,targetY:e.y,targetZ:e.z},s=new 
l.Vector3,{tween:d,stopTween:c}=this.createTweenAnimation(i,a,t);d.onStart(()=>{r.lookAtViewTarget=!0,r.setState(this._camera.position,e)}).onUpdate(()=>{this.isDisposed||(s.set(i.targetX,i.targetY,i.targetZ),this._camera&&("zoomToCursor"===this._cameraControlsOptions.mouseWheelAction&&r.setScrollTarget(s),r.setViewTarget(s)))}).onStop(()=>{r.lookAtViewTarget=!1,r.setState(this._camera.position,s)}).onComplete(()=>{this.isDisposed||(r.lookAtViewTarget=!1,r.enableKeyboardNavigation=!0,r.setState(this._camera.position,s),this._domElement.removeEventListener("pointerdown",c))}).start(M.a.now()),d.update(M.a.now())}updateCameraNearAndFar(e,t){if(this.isDisposed)return;if(!this.automaticControlsSensitivity&&!this.automaticNearFarPlane)return;const{cameraPosition:n,cameraDirection:r,corners:o}=this._updateNearAndFarPlaneBuffers;!function(e,t){if(8!==(t=t||re()(0,8).map(e=>new l.Vector3)).length)throw new Error(`outBuffer must hold exactly 8 elements, but holds ${t.length} elemnents`);const n=e.min,r=e.max;t[0].set(n.x,n.y,n.z),t[1].set(r.x,n.y,n.z),t[2].set(n.x,r.y,n.z),t[3].set(n.x,n.y,r.z),t[4].set(r.x,r.y,n.z),t[5].set(r.x,r.y,r.z),t[6].set(r.x,n.y,r.z),t[7].set(n.x,r.y,r.z)}
|
|
384
461
|
/*!
|
|
385
462
|
* Copyright 2021 Cognite AS
|
|
386
463
|
*/
|
|
387
464
|
/*!
|
|
388
465
|
* Copyright 2021 Cognite AS
|
|
389
|
-
*/class nt{constructor(e){var t,n;this._subscription=new m.Subscription,this._boundAnimate=this.animate.bind(this),this._events={cameraChange:new o.d,click:new o.d,hover:new o.d,sceneRendered:new o.d,disposed:new o.d},this._models=[],this._extraObjects=[],this.isDisposed=!1,this.latestRequestId=-1,this.clock=new i.Clock,this._clippingNeedsUpdate=!1,this._updateNearAndFarPlaneBuffers={combinedBbox:new i.Box3,bbox:new i.Box3,cameraPosition:new i.Vector3,cameraDirection:new i.Vector3,nearPlaneCoplanarPoint:new i.Vector3,nearPlane:new i.Plane,corners:new Array(new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3)},this.startPointerEventListeners=()=>{const e=this.canvas;let t=!1,n=0,r=!1;const i=l()(t=>{this._events.hover.fire(Object(o.l)(t,e))},100),a=n=>{const{offsetX:i,offsetY:a}=Object(o.l)(n,e),{offsetX:s,offsetY:d}=Object(o.l)(n,e);t&&r&&Math.abs(i-s)+Math.abs(a-d)>4&&(r=!1)},s=d=>{const l=d.timeStamp-n;t&&r&&l<250&&this._events.click.fire(Object(o.l)(d,e)),t=!1,r=!1,e.removeEventListener("mousemove",a),e.removeEventListener("touchmove",a),e.removeEventListener("mouseup",s),e.removeEventListener("touchend",s),e.addEventListener("mousemove",i)},d=o=>{event=o,t=!0,r=!0,n=o.timeStamp,e.addEventListener("mousemove",a),e.addEventListener("touchmove",a),e.addEventListener("mouseup",s),e.addEventListener("touchend",s),e.removeEventListener("mousemove",i)};e.addEventListener("mousedown",d),e.addEventListener("touchstart",d),e.addEventListener("mousemove",i)},this._renderer=e.renderer||new i.WebGLRenderer,this._renderer.localClippingEnabled=!0,this._automaticNearFarPlane=void 0===e.automaticCameraNearFar||e.automaticCameraNearFar,this._automaticControlsSensitivity=void 
0===e.automaticControlsSensitivity||e.automaticControlsSensitivity,this.canvas.style.width="640px",this.canvas.style.height="480px",this.canvas.style.minWidth="100%",this.canvas.style.minHeight="100%",this.canvas.style.maxWidth="100%",this.canvas.style.maxHeight="100%",this._domElement=e.domElement||function(){const e=document.createElementNS("http://www.w3.org/1999/xhtml","div");return e.style.width="100%",e.style.height="100%",e}(),this._domElement.appendChild(this.canvas),this.spinner=new N(this.domElement),this.camera=new i.PerspectiveCamera(60,void 0,.1,1e4),this.camera.position.x=30,this.camera.position.y=10,this.camera.position.z=50,this.camera.lookAt(new i.Vector3),this.scene=new i.Scene,this.scene.autoUpdate=!1,this.controls=new tt(this.camera,this.canvas),this.controls.dollyFactor=.992,this.controls.minDistance=1,this.controls.maxDistance=100,this.controls.addEventListener("cameraChange",e=>{const{position:t,target:n}=e.camera;this._events.cameraChange.fire(t.clone(),n.clone())}),this.sdkClient=e.sdk,this.renderController=new C(this.camera),this._viewStateHelper=new Z(this,this.sdkClient);const r=function(e){var t;const n={internal:{}};n.internal={sectorCuller:e._sectorCuller};const{antiAliasing:r,multiSampleCount:i}=function(e){switch(e=e||"fxaa"){case"disabled":return{antiAliasing:h.a.NoAA,multiSampleCount:1};case"fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:1};case"msaa2":return{antiAliasing:h.a.NoAA,multiSampleCount:2};case"msaa4":return{antiAliasing:h.a.NoAA,multiSampleCount:4};case"msaa8":return{antiAliasing:h.a.NoAA,multiSampleCount:8};case"msaa16":return{antiAliasing:h.a.NoAA,multiSampleCount:16};case"msaa2+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:2};case"msaa4+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:4};case"msaa8+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:8};case"msaa16+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:16};default:Object(o.k)(e,"Unsupported anti-aliasing mode: 
"+e)}}(e.antiAliasingHint),a=function(e){const t={...h.j.ssaoRenderParameters};switch(e){case void 0:break;case"medium":t.sampleSize=h.h.Medium;break;case"high":t.sampleSize=h.h.High;break;case"veryhigh":t.sampleSize=h.h.VeryHigh;break;case"disabled":t.sampleSize=h.h.None;break;default:Object(o.k)(e,`Unexpected SSAO quality mode: '${e}'`)}return t}(e.ssaoQualityHint),s={enabled:null!==(t=e.enableEdges)&&void 0!==t?t:h.j.edgeDetectionParameters.enabled};return n.renderOptions={antiAliasing:r,multiSampleCountHint:i,ssaoRenderParameters:a,edgeDetectionParameters:s},n}(e);!0===e._localModels?this._revealManagerHelper=Ke.createLocalHelper(this._renderer,this.scene,r):this._revealManagerHelper=Ke.createCdfHelper(this._renderer,this.scene,r,this.sdkClient),this.startPointerEventListeners(),this.revealManager.setRenderTarget((null===(t=e.renderTargetOptions)||void 0===t?void 0:t.target)||null,null===(n=e.renderTargetOptions)||void 0===n?void 0:n.autoSetSize),this._subscription.add(Object(m.fromEventPattern)(e=>this.revealManager.on("loadingStateChanged",e),e=>this.revealManager.off("loadingStateChanged",e)).subscribe(t=>{this.spinner.loading=t.itemsLoaded!=t.itemsRequested,e.onLoading&&e.onLoading(t.itemsLoaded,t.itemsRequested,t.itemsCulled)},e=>Object(o.u)(e,{moduleName:"Cognite3DViewer",methodName:"constructor"}))),this.animate(0),Object(o.v)("construct3dViewer",{moduleName:"Cognite3DViewer",methodName:"constructor",constructorOptions:u()(e,["sdk","domElement","renderer","_sectorCuller"])})}get canvas(){return this.renderer.domElement}static isBrowserSupported(){return!0}get domElement(){return this._domElement}get renderer(){return this._renderer}get revealManager(){return this._revealManagerHelper.revealManager}get cadBudget(){return this.revealManager.cadBudget}set cadBudget(e){this.revealManager.cadBudget=e}get pointCloudBudget(){return this.revealManager.pointCloudBudget}set pointCloudBudget(e){this.revealManager.pointCloudBudget=e}get models(){return 
this._models.slice()}get cadLoadedStatistics(){return this.revealManager.cadLoadedStatistics}getVersion(){return"2.1.2"}dispose(){if(!this.isDisposed){this.isDisposed=!0,void 0!==this.latestRequestId&&cancelAnimationFrame(this.latestRequestId),this._subscription.unsubscribe(),this.revealManager.dispose(),this.domElement.removeChild(this.canvas),this.renderer.dispose();for(const e of this._models.values())e.dispose();this._models.splice(0),this.spinner.dispose(),this._events.disposed.fire()}}on(e,t){switch(e){case"click":this._events.click.subscribe(t);break;case"hover":this._events.hover.subscribe(t);break;case"cameraChange":this._events.cameraChange.subscribe(t);break;case"sceneRendered":this._events.sceneRendered.subscribe(t);break;case"disposed":this._events.disposed.subscribe(t);break;default:Object(o.k)(e)}}off(e,t){switch(e){case"click":this._events.click.unsubscribe(t);break;case"hover":this._events.hover.unsubscribe(t);break;case"cameraChange":this._events.cameraChange.unsubscribe(t);break;case"sceneRendered":this._events.sceneRendered.unsubscribe(t);break;case"disposed":this._events.disposed.unsubscribe(t);break;default:Object(o.k)(e)}}getViewState(){return this._viewStateHelper.getCurrentState()}setViewState(e){return this.models.filter(e=>e instanceof P).map(e=>e).forEach(e=>{e.styledNodeCollections.forEach(t=>e.unassignStyledNodeCollection(t.nodes)),e.styledNodeCollections.splice(0)}),this._viewStateHelper.setState(e)}async addModel(e){if(void 0!==e.localPath)throw new Error("addModel() only supports CDF hosted models. 
Use addCadModel() and addPointCloudModel() to use self-hosted models");switch(await this.determineModelType(e.modelId,e.revisionId)){case"cad":return this.addCadModel(e);case"pointcloud":return this.addPointCloudModel(e);default:throw new Error("Model is not supported")}}async addCadModel(e){let t;t=e.localPath?new ee(e.localPath):new J(this.sdkClient);const{modelId:n,revisionId:r}=e,o=await this._revealManagerHelper.addCadModel(e),i=new P(n,r,o,t);return this._models.push(i),this.scene.add(i),i}async addPointCloudModel(e){if(e.localPath)throw new T("localPath is not supported");if(e.geometryFilter)throw new T("geometryFilter is not supported for point clouds");const{modelId:t,revisionId:n}=e,r=await this._revealManagerHelper.addPointCloudModel(e),o=new R(t,n,r);return this._models.push(o),this.scene.add(o),o}removeModel(e){const t=this._models.indexOf(e);if(-1===t)throw new Error("Model is not added to viewer");switch(this._models.splice(t,1),this.scene.remove(e),this.renderController.redraw(),e.type){case"cad":const t=e;return void this.revealManager.removeModel(e.type,t.cadNode);case"pointcloud":const n=e;return void this.revealManager.removeModel(e.type,n.pointCloudNode);default:Object(o.k)(e.type,`Model type ${e.type} cannot be removed`)}}async determineModelType(e,t){const n=new ue(this.sdkClient),r=await n.getOutputs({modelId:e,revisionId:t,format:te.AnyFormat});return void 0!==r.findMostRecentOutput(te.RevealCadModel)?"cad":void 0!==r.findMostRecentOutput(te.EptPointCloud)?"pointcloud":""}addObject3D(e){this.isDisposed||(this.scene.add(e),e.updateMatrixWorld(!0),this._extraObjects.push(e),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera))}removeObject3D(e){if(this.isDisposed)return;this.scene.remove(e);const 
t=this._extraObjects.indexOf(e);t>=0&&this._extraObjects.splice(t,1),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera)}addUiObject(e,t,n){this.isDisposed||this.revealManager.addUiObject(e,t,n)}removeUiObject(e){this.isDisposed||this.revealManager.removeUiObject(e)}setBackgroundColor(e){this.isDisposed||(this.renderer.setClearColor(e),this.spinner.updateBackgroundColor(e),this.requestRedraw())}setClippingPlanes(e){this.revealManager.clippingPlanes=e,this._clippingNeedsUpdate=!0}setSlicingPlanes(e){this.setClippingPlanes(e)}getClippingPlanes(){return this.revealManager.clippingPlanes}getCamera(){return this.camera}getScene(){return this.scene}getCameraPosition(){return this.isDisposed?new i.Vector3(-1/0,-1/0,-1/0):this.controls.getState().position.clone()}getCameraTarget(){return this.isDisposed?new i.Vector3(-1/0,-1/0,-1/0):this.controls.getState().target.clone()}setCameraPosition(e){this.isDisposed||this.controls.setState(e,this.getCameraTarget())}setCameraTarget(e){this.isDisposed||this.controls.setState(this.getCameraPosition(),e)}get cameraControls(){return this.controls}get cameraControlsEnabled(){return this.controls.enabled}set cameraControlsEnabled(e){this.controls.enabled=e}loadCameraFromModel(e){const t=e.getCameraConfiguration();t?this.controls.setState(t.position,t.target):this.fitCameraToModel(e,0)}fitCameraToModel(e,t){const n=e.getModelBoundingBox(new i.Box3,!0);this.fitCameraToBoundingBox(n,t)}fitCameraToBoundingBox(e,t,n=2){const r=(new i.Vector3).lerpVectors(e.min,e.max,.5),o=.5*(new i.Vector3).subVectors(e.max,e.min).length(),a=new i.Sphere(r,o),s=a.center,d=a.radius*n,l=new i.Vector3(0,0,-1);l.applyQuaternion(this.camera.quaternion);const c=new 
i.Vector3;c.copy(l).multiplyScalar(-d).add(s),this.moveCameraTo(c,s,t)}requestRedraw(){this.revealManager.requestRedraw()}enableKeyboardNavigation(){this.controls.enableKeyboardNavigation=!0}disableKeyboardNavigation(){this.controls.enableKeyboardNavigation=!1}worldToScreen(e,t){this.camera.updateMatrixWorld();const n=new i.Vector3;return t?Object(p.a)(this.renderer,this.camera,e,n):Object(p.b)(this.renderer,this.camera,e,n),n.x<0||n.x>1||n.y<0||n.y>1||n.z<0||n.z>1?null:new i.Vector2(n.x,n.y)}async getScreenshot(e=this.canvas.width,t=this.canvas.height){if(this.isDisposed)throw new Error("Viewer is disposed");const{width:n,height:r}=this.canvas,o=this.camera.clone();rt(o,e,t),this.renderer.setSize(e,t),this.renderer.render(this.scene,o),this.revealManager.render(o);const i=this.renderer.domElement.toDataURL();return this.renderer.setSize(n,r),this.renderer.render(this.scene,this.camera),this.requestRedraw(),i}async getIntersectionFromPixel(e,t,n){const r=this.getModels("cad"),o=this.getModels("pointcloud"),i=r.map(e=>e.cadNode),a=o.map(e=>e.pointCloudNode),s={normalizedCoords:{x:e/this.renderer.domElement.clientWidth*2-1,y:t/this.renderer.domElement.clientHeight*-2+1},camera:this.camera,renderer:this.renderer,clippingPlanes:this.getClippingPlanes(),domElement:this.renderer.domElement},d=function(e,t){const n=[];for(const r of e){const e=x(r,t);e&&n.push(e)}return n.sort((e,t)=>e.distance-t.distance)}(i,s),l=O(a,s,null==n?void 0:n.pointIntersectionThreshold),c=[];if(l.length>0){const e=l[0];for(const t of o)if(t.pointCloudNode===e.pointCloudNode){const n={type:"pointcloud",model:t,point:e.point,pointIndex:e.pointIndex,distanceToCamera:e.distance};c.push(n);break}}if(d.length>0){const e=d[0];for(const t of r)if(t.cadNode===e.cadNode){const n={type:"cad",model:t,treeIndex:e.treeIndex,point:e.point,distanceToCamera:e.distance};c.push(n)}}return c.sort((e,t)=>e.distanceToCamera-t.distanceToCamera),c.length>0?c[0]:null}getModels(e){return 
this._models.filter(t=>t.type===e)}moveCameraTo(e,t,n){if(this.isDisposed)return;const{camera:r}=this;if(null==n){n=125*e.distanceTo(r.position),n=Math.min(Math.max(n,600),2500)}const o=new i.Raycaster;o.setFromCamera(new i.Vector2(0,0),r);const a=t.distanceTo(r.position),d=o.ray.direction.clone().multiplyScalar(a),l=o.ray.origin.clone().add(d),c={x:r.position.x,y:r.position.y,z:r.position.z,targetX:l.x,targetY:l.y,targetZ:l.z},u={x:e.x,y:e.y,z:e.z,targetX:t.x,targetY:t.y,targetZ:t.z},m=new s.a.Tween(c),h=e=>{if(this.isDisposed)return document.removeEventListener("keydown",h),void m.stop();("keydown"!==e.type||this.controls.enableKeyboardNavigation)&&(m.stop(),this.canvas.removeEventListener("pointerdown",h),this.canvas.removeEventListener("wheel",h),document.removeEventListener("keydown",h))};this.canvas.addEventListener("pointerdown",h),this.canvas.addEventListener("wheel",h),document.addEventListener("keydown",h);const p=new i.Vector3,f=new i.Vector3;m.to(u,n).easing(e=>s.a.Easing.Circular.Out(e)).onUpdate(()=>{this.isDisposed||(f.set(c.x,c.y,c.z),p.set(c.targetX,c.targetY,c.targetZ),this.camera&&(this.setCameraPosition(f),this.setCameraTarget(p)))}).onComplete(()=>{this.isDisposed||this.canvas.removeEventListener("pointerdown",h)}).start(s.a.now()).update(s.a.now())}async animate(e){if(this.isDisposed)return;this.latestRequestId=requestAnimationFrame(this._boundAnimate);const{display:t,visibility:n}=window.getComputedStyle(this.canvas);if("visible"===n&&"none"!==t){const{renderController:t}=this;s.a.update(e);if(this.resizeIfNecessary()&&this.requestRedraw(),this.controls.update(this.clock.getDelta()),t.update(),this.revealManager.update(this.camera),t.needsRedraw||this.revealManager.needsRedraw||this._clippingNeedsUpdate){const e=this.renderer.info.render.frame,n=Date.now();this.updateCameraNearAndFar(this.camera),this.revealManager.render(this.camera),t.clearNeedsRedraw(),this.revealManager.resetRedraw(),this._clippingNeedsUpdate=!1;const 
r=Date.now()-n;this._events.sceneRendered.fire({frameNumber:e,renderTime:r,renderer:this.renderer,camera:this.camera})}}}updateCameraNearAndFar(e){if(this.isDisposed)return;if(!this._automaticControlsSensitivity&&!this._automaticNearFarPlane)return;const{combinedBbox:t,bbox:n,cameraPosition:r,cameraDirection:o,corners:a,nearPlane:s,nearPlaneCoplanarPoint:d}=this._updateNearAndFarPlaneBuffers;t.makeEmpty(),this._models.forEach(e=>{e.getModelBoundingBox(n),n.isEmpty()||(t.expandByPoint(n.min),t.expandByPoint(n.max))}),this._extraObjects.forEach(e=>{n.setFromObject(e),n.isEmpty()||(t.expandByPoint(n.min),t.expandByPoint(n.max))}),function(e,t){if(8!==(t=t||[new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3]).length)throw new Error(`outBuffer must hold exactly 8 elements, but holds ${t.length} elemnents`);const n=e.min,r=e.max;t[0].set(n.x,n.y,n.z),t[1].set(r.x,n.y,n.z),t[2].set(n.x,r.y,n.z),t[3].set(n.x,n.y,r.z),t[4].set(r.x,r.y,n.z),t[5].set(r.x,r.y,r.z),t[6].set(r.x,n.y,r.z),t[7].set(n.x,r.y,r.z)}(t,a),e.getWorldPosition(r),e.getWorldDirection(o);let l=t.distanceToPoint(r);l/=Math.sqrt(1+Math.tan(e.fov/180*Math.PI/2)**2*(e.aspect**2+1)),l=Math.max(.1,l),d.copy(r).addScaledVector(o,l),s.setFromNormalAndCoplanarPoint(o,d);let c=-1/0;for(let e=0;e<8;++e)if(s.distanceToPoint(a[e])>=0){const t=a[e].distanceTo(r);c=Math.max(c,t)}c=Math.max(2*l,c);const u=t.min.distanceTo(t.max);t.containsPoint(r)&&(l=Math.min(.1,c/1e3)),this._automaticNearFarPlane&&(e.near=l,e.far=c,e.updateProjectionMatrix()),this._automaticControlsSensitivity&&(this.controls.minDistance=Math.min(Math.max(.02*u,.1*l),10))}resizeIfNecessary(){if(this.isDisposed)return!1;const e=this.renderer.getSize(new 
i.Vector2),t=e.width,n=e.height,r=0!==this.domElement.clientWidth?this.domElement.clientWidth:this.canvas.clientWidth,o=0!==this.domElement.clientHeight?this.domElement.clientHeight:this.canvas.clientHeight,a=this.renderer.getPixelRatio()*r,s=this.renderer.getPixelRatio()*o,d=a*s,l=d>14e5?Math.sqrt(14e5/d):1,c=a*l,u=s*l;return!(Math.abs(t-c)<.1&&Math.abs(n-u)<.1)&&(this.renderer.setSize(c,u),rt(this.camera,c,u),this.camera instanceof i.OrthographicCamera&&(this.controls.orthographicCameraDollyFactor=20/c,this.controls.keyboardDollySpeed=2/c),!0)}}function rt(e,t,n){e instanceof i.PerspectiveCamera?(e.aspect=t/n,e.updateProjectionMatrix()):e instanceof i.OrthographicCamera&&(e.left=-t,e.right=t,e.top=n,e.bottom=-n)}var ot=n(20);
|
|
466
|
+
*/(t,o),e.getWorldPosition(n),e.getWorldDirection(r);let i=this.calculateCameraNear(e,t,n);const a=this.calculateCameraFar(i,n,r,o),s=t.min.distanceTo(t.max);t.containsPoint(n)&&(i=Math.min(.1,a/1e3)),this.automaticNearFarPlane&&(e.near=i,e.far=a,e.updateProjectionMatrix()),this.automaticControlsSensitivity&&(this._controls.minDistance=Math.min(Math.max(.02*s,.1*i),Zn.DefaultMinDistance))}updateCameraControlsState(e){this._controls.update(e)}dispose(){this.isDisposed=!0,this._controls.dispose(),this.teardownControls()}calculateAnimationStartTarget(e){const{_raycaster:t,_camera:n}=this;t.setFromCamera(new l.Vector2,n);const r=e.distanceTo(n.position),o=t.ray.direction.clone().multiplyScalar(r);return t.ray.origin.clone().add(o)}createTweenAnimation(e,t,n){const r=new M.a.Tween(e),o=e=>{if(this.isDisposed)return document.removeEventListener("keydown",o),void r.stop();("keydown"!==e.type||this._controls.enableKeyboardNavigation)&&(r.stop(),this._domElement.removeEventListener("pointerdown",o),this._domElement.removeEventListener("wheel",o),document.removeEventListener("keydown",o))};this._domElement.addEventListener("pointerdown",o),this._domElement.addEventListener("wheel",o),document.addEventListener("keydown",o);return{tween:r.to(t,n).easing(e=>M.a.Easing.Circular.Out(e)),stopTween:o}}calculateCameraFar(e,t,n,r){const{nearPlane:o,nearPlaneCoplanarPoint:i}=this._calculateCameraFarBuffers;i.copy(t).addScaledVector(n,e),o.setFromNormalAndCoplanarPoint(n,i);let a=-1/0;for(let e=0;e<r.length;++e)if(o.distanceToPoint(r[e])>=0){const n=r[e].distanceTo(t);a=Math.max(a,n)}return a=Math.max(2*e,a),a}calculateCameraNear(e,t,n){let r=t.distanceToPoint(n);return r/=Math.sqrt(1+Math.tan(e.fov/180*Math.PI/2)**2*(e.aspect**2+1)),r=Math.max(.1,r),r}calculateNewTargetWithoutModel(e,t){const n=t.min.distanceTo(t.max);this._raycaster.setFromCamera(e,this._camera);return 
this._raycaster.ray.direction.clone().normalize().multiplyScalar(Math.max(this._camera.position.distanceTo(t.getCenter(new l.Vector3)),n)).add(this._camera.position)}async calculateNewTarget(e){var t,n;const{offsetX:r,offsetY:o}=e,{x:i,y:a}=this.convertPixelCoordinatesToNormalized(r,o),s=await this._modelRaycastCallback(r,o);return null!==(n=null===(t=s.intersection)||void 0===t?void 0:t.point)&&void 0!==n?n:this.calculateNewTargetWithoutModel({x:i,y:a},s.modelsBoundingBox)}teardownControls(e=!0){void 0!==this._onClick&&(this._inputHandler.off("click",this._onClick),this._onClick=void 0),void 0!==this._onWheel&&e&&(this._domElement.removeEventListener("wheel",this._onWheel),this._onWheel=void 0)}handleMouseWheelActionChange(e){const{_controls:t}=this;switch(null==e?void 0:e.mouseWheelAction){case"zoomToTarget":t.zoomToCursor=!1;break;case"zoomPastCursor":t.useScrollTarget=!1,t.zoomToCursor=!0;break;case"zoomToCursor":t.setScrollTarget(t.getState().target),t.useScrollTarget=!0,t.zoomToCursor=!0;break;case void 0:break;default:Object(i.l)(e.mouseWheelAction)}}setupControls(){let e=!1;const t=new l.Clock,n=async e=>{this._controls.enableKeyboardNavigation=!1;const t=await this.calculateNewTarget(e);this.setCameraTarget(t,!0)},r=async n=>{t.getDelta()>Zn.DefaultMinimalTimeBetweenRaycasts&&(e=!1);const r=!e&&n.deltaY<0,o="zoomToCursor"===this._cameraControlsOptions.mouseWheelAction;if(r&&o){e=!0;const t=await this.calculateNewTarget(n);this._controls.setScrollTarget(t)}};this._controls&&this.handleMouseWheelActionChange(this._cameraControlsOptions),this._cameraControlsOptions.changeCameraTargetOnClick&&void 0===this._onClick&&(this._inputHandler.on("click",n),this._onClick=n),void 0===this._onWheel&&(this._domElement.addEventListener("wheel",r),this._onWheel=r)}convertPixelCoordinatesToNormalized(e,t){return{x:e/this._domElement.clientWidth*2-1,y:t/this._domElement.clientHeight*-2+1}}calculateDefaultDuration(e){let t=125*e;return 
t=Math.min(Math.max(t,Zn.DefaultMinAnimationDuration),Zn.DefaultMaxAnimationDuration),t}}Zn.DefaultAnimationDuration=300,Zn.DefaultMinAnimationDuration=300,Zn.DefaultMaxAnimationDuration=1250,Zn.DefaultMinDistance=.8,Zn.DefaultMinZoomDistance=.4,Zn.DefaultMinimalTimeBetweenRaycasts=.08,Zn.DefaultCameraControlsOptions={mouseWheelAction:"zoomPastCursor",changeCameraTargetOnClick:!1};var Qn=n(29);
|
|
467
|
+
/*!
|
|
468
|
+
* Copyright 2021 Cognite AS
|
|
469
|
+
*/class Jn{constructor(e){this._client=e}async mapTreeIndicesToNodeIds(e,t,n){const r=[...$n(n,Jn.MaxItemsPerRequest)].map(async n=>this.postByTreeIndicesRequest(e,t,n));return(await Promise.all(r)).flat()}async mapNodeIdsToTreeIndices(e,t,n){const r=[...$n(n,Jn.MaxItemsPerRequest)].map(async n=>this.postByNodeIdsRequest(e,t,n));return(await Promise.all(r)).flat()}async determineTreeIndexAndSubtreeSizesByNodeIds(e,t,n){const r=n.map(e=>({id:e}));return(await this._client.revisions3D.retrieve3DNodes(e,t,r)).map(e=>({treeIndex:e.treeIndex,subtreeSize:e.subtreeSize}))}async determineNodeAncestorsByNodeId(e,t,n,r){const i=await this._client.revisions3D.list3DNodeAncestors(e,t,n,{limit:1e3}),a=i.items.find(e=>e.id===n);o()(void 0!==a,"Could not find ancestor for node with nodeId "+n),r=Math.min(a.depth,r);const s=i.items.find(e=>e.depth===a.depth-r);return o()(void 0!==a,`Could not find ancestor for node with nodeId ${n} at 'generation' ${r}`),{treeIndex:s.treeIndex,subtreeSize:s.subtreeSize}}async getBoundingBoxesByNodeIds(e,t,n){const r=[...$n(n,Jn.MaxItemsPerRequest)].map(async n=>this._client.revisions3D.retrieve3DNodes(e,t,n.map(e=>({id:e}))));return(await Promise.all(r)).flat().filter(e=>e.boundingBox).map(e=>Object(i.u)(e.boundingBox))}async postByTreeIndicesRequest(e,t,n){console.assert(n.length<=Jn.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/internalids/bytreeindices`,o=await this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new Qn.HttpError(o.status,o.data,o.headers)}async postByNodeIdsRequest(e,t,n){console.assert(n.length<=Jn.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/treeindices/byinternalids`,o=await this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new Qn.HttpError(o.status,o.data,o.headers)}}function*$n(e,t){let 
n=0;for(;n<e.length;){const r=Math.min(e.length-n,t);yield e.slice(n,n+r),n+=r}}
|
|
470
|
+
/*!
|
|
471
|
+
* Copyright 2021 Cognite AS
|
|
472
|
+
*/Jn.MaxItemsPerRequest=1e3;class er{constructor(){}mapTreeIndicesToNodeIds(e,t,n){return Promise.resolve(n)}mapNodeIdsToTreeIndices(e,t,n){return Promise.resolve(n)}determineTreeIndexAndSubtreeSizesByNodeIds(e,t,n){throw new Error("Not supported for local models")}determineNodeAncestorsByNodeId(e,t,n,r){throw new Error("Not supported for local models")}getBoundingBoxesByNodeIds(e,t,n){throw new Error("Not supported for local models")}}class tr{constructor(e){this._metadataProvider=new st.c(e),this._nodesApiClient=new Jn(e),this._modelDataClient=new st.a(e)}getNodesApiClient(){return this._nodesApiClient}getModelDataProvider(){return this._modelDataClient}getModelMetadataProvider(){return this._metadataProvider}}
|
|
473
|
+
/*!
|
|
474
|
+
* Copyright 2021 Cognite AS
|
|
475
|
+
*/class nr{getNodesApiClient(){return new er}getModelDataProvider(){return new st.e}getModelMetadataProvider(){throw new st.g}}
|
|
476
|
+
/*!
|
|
477
|
+
* Copyright 2021 Cognite AS
|
|
478
|
+
*/class rr{constructor(e){var t,n,r,o,a,s,d,c;this._subscription=new P.Subscription,this._boundAnimate=this.animate.bind(this),this._events={cameraChange:new i.d,click:new i.d,hover:new i.d,sceneRendered:new i.d,disposed:new i.d},this._models=[],this._extraObjects=[],this.isDisposed=!1,this.latestRequestId=-1,this.clock=new l.Clock,this._clippingNeedsUpdate=!1,this._updateNearAndFarPlaneBuffers={combinedBbox:new l.Box3,bbox:new l.Box3},this.startPointerEventListeners=()=>{const e=new i.f(this.domElement);e.on("click",e=>{this._events.click.fire(e)}),e.on("hover",e=>{this._events.hover.fire(e)})},this._renderer=e.renderer||new l.WebGLRenderer,this._renderer.localClippingEnabled=!0,this._automaticNearFarPlane=null===(t=e.automaticCameraNearFar)||void 0===t||t,this._automaticControlsSensitivity=null===(n=e.automaticControlsSensitivity)||void 0===n||n,this.canvas.style.width="640px",this.canvas.style.height="480px",this.canvas.style.minWidth="100%",this.canvas.style.minHeight="100%",this.canvas.style.maxWidth="100%",this.canvas.style.maxHeight="100%",this._domElement=e.domElement||function(){const e=document.createElementNS("http://www.w3.org/1999/xhtml","div");return e.style.width="100%",e.style.height="100%",e}(),this._domElement.appendChild(this.canvas),this.spinner=new K(this.domElement),this.spinner.placement=null!==(o=null===(r=e.loadingIndicatorStyle)||void 0===r?void 0:r.placement)&&void 0!==o?o:"topLeft",this.spinner.opacity=Math.max(.2,null!==(s=null===(a=e.loadingIndicatorStyle)||void 0===a?void 0:a.opacity)&&void 0!==s?s:1),this.camera=new l.PerspectiveCamera(60,void 0,.1,1e4),this.camera.position.x=30,this.camera.position.y=10,this.camera.position.z=50,this.camera.lookAt(new l.Vector3),this.scene=new l.Scene,this.scene.autoUpdate=!1,this._cameraManager=new 
// Minified Cognite3DViewer (class `rr`). The constructor above wired the
// renderer, DOM canvas, spinner, camera and scene; `Zn` below is the camera
// manager, constructed with a picking callback bound to this viewer.
Zn(this.camera,this.canvas,this.modelIntersectionCallback.bind(this)),this._cameraManager.automaticControlsSensitivity=this._automaticControlsSensitivity,this._cameraManager.automaticNearFarPlane=this._automaticNearFarPlane,this._cameraManager.on("cameraChange",e=>{const{position:t,target:n}=e.camera;this._events.cameraChange.fire(t.clone(),n.clone())});const u=function(e){var t;const n={continuousModelStreaming:e.continuousModelStreaming,internal:{}};n.internal={sectorCuller:e._sectorCuller};const{antiAliasing:r,multiSampleCount:o}=function(e){switch(e=e||"fxaa"){case"disabled":return{antiAliasing:T.NoAA,multiSampleCount:1};case"fxaa":return{antiAliasing:T.FXAA,multiSampleCount:1};case"msaa2":return{antiAliasing:T.NoAA,multiSampleCount:2};case"msaa4":return{antiAliasing:T.NoAA,multiSampleCount:4};case"msaa8":return{antiAliasing:T.NoAA,multiSampleCount:8};case"msaa16":return{antiAliasing:T.NoAA,multiSampleCount:16};case"msaa2+fxaa":return{antiAliasing:T.FXAA,multiSampleCount:2};case"msaa4+fxaa":return{antiAliasing:T.FXAA,multiSampleCount:4};case"msaa8+fxaa":return{antiAliasing:T.FXAA,multiSampleCount:8};case"msaa16+fxaa":return{antiAliasing:T.FXAA,multiSampleCount:16};default:Object(i.l)(e,"Unsupported anti-aliasing mode: "+e)}}(e.antiAliasingHint),a=function(e){const t={...N.ssaoRenderParameters};switch(e){case void 0:break;case"medium":t.sampleSize=C.Medium;break;case"high":t.sampleSize=C.High;break;case"veryhigh":t.sampleSize=C.VeryHigh;break;case"disabled":t.sampleSize=C.None;break;default:Object(i.l)(e,`Unexpected SSAO quality mode: '${e}'`)}return t}
// NOTE(review): the lines below, down to the "*/(e.ssaoQualityHint)"
// continuation, are diff-viewer residue (line numbers, '|', '+', '-', '@@'
// hunk markers and copyright banners) interleaved into the bundle by the
// rendering tool; they are NOT executable JavaScript and the minified
// expression above is cut mid-invocation. Preserved byte-for-byte.
|
|
390
479
|
/*!
|
|
391
480
|
* Copyright 2021 Cognite AS
|
|
392
481
|
*/
|
|
@@ -401,5 +490,5 @@ const de=(new i.Matrix4).set(1,0,0,0,0,0,1,0,0,-1,0,0,0,0,0,1);function le(e,t){
|
|
|
401
490
|
*/
|
|
402
491
|
/*!
|
|
403
492
|
* Copyright 2021 Cognite AS
|
|
404
|
-
*/}])}));
|
|
493
|
+
// Minified source resumes: the SSAO-parser function above is invoked with
// e.ssaoQualityHint, edge detection is configured, and the constructor picks
// a data source (local `nr`, caller-supplied custom, or CDF `tr`) before
// subscribing to loading-state events and starting the animate loop.
*/(e.ssaoQualityHint),s={enabled:null!==(t=e.enableEdges)&&void 0!==t?t:N.edgeDetectionParameters.enabled};return n.logMetrics=e.logMetrics,n.renderOptions={antiAliasing:r,multiSampleCountHint:o,ssaoRenderParameters:a,edgeDetectionParameters:s},n}(e);!0===e._localModels?(this._dataSource=new nr,this._cdfSdkClient=void 0,this._revealManagerHelper=Vn.createLocalHelper(this._renderer,this.scene,u)):void 0!==e.customDataSource?(this._dataSource=e.customDataSource,this._revealManagerHelper=Vn.createCustomDataSourceHelper(this._renderer,this.scene,u,e.customDataSource)):(this._dataSource=new tr(e.sdk),this._cdfSdkClient=e.sdk,this._revealManagerHelper=Vn.createCdfHelper(this._renderer,this.scene,u,e.sdk)),this.renderController=new G(this.camera),this.startPointerEventListeners(),this.revealManager.setRenderTarget((null===(d=e.renderTargetOptions)||void 0===d?void 0:d.target)||null,null===(c=e.renderTargetOptions)||void 0===c?void 0:c.autoSetSize),this._subscription.add(Object(P.fromEventPattern)(e=>this.revealManager.on("loadingStateChanged",e),e=>this.revealManager.off("loadingStateChanged",e)).subscribe(t=>{this.spinner.loading=t.itemsLoaded!=t.itemsRequested,e.onLoading&&e.onLoading(t.itemsLoaded,t.itemsRequested,t.itemsCulled)},e=>R.a.trackError(e,{moduleName:"Cognite3DViewer",methodName:"constructor"}))),this.animate(0),R.a.trackEvent("construct3dViewer",{constructorOptions:S()(e,["sdk","domElement","renderer","renderTargetOptions","onLoading","_sectorCuller"])})}get canvas(){return this.renderer.domElement}static isBrowserSupported(){return!0}get domElement(){return this._domElement}get renderer(){return this._renderer}get revealManager(){return this._revealManagerHelper.revealManager}get cadBudget(){return this.revealManager.cadBudget}set cadBudget(e){this.revealManager.cadBudget=e}get pointCloudBudget(){return this.revealManager.pointCloudBudget}set pointCloudBudget(e){this.revealManager.pointCloudBudget=e}get models(){return this._models.slice()}get 
cadLoadedStatistics(){return this.revealManager.cadLoadedStatistics}getVersion(){return"3.0.0-alpha.0"}setLogLevel(e){switch(e){case"none":this.setLogLevel("silent");break;default:wt.a.setLevel(e)}}dispose(){if(!this.isDisposed){this.isDisposed=!0,void 0!==this.latestRequestId&&cancelAnimationFrame(this.latestRequestId),this._subscription.unsubscribe(),this._cameraManager.dispose(),this.revealManager.dispose(),this.domElement.removeChild(this.canvas),this.renderer.dispose();for(const e of this._models.values())e.dispose();this._models.splice(0),this.spinner.dispose(),this._events.disposed.fire()}}on(e,t){switch(e){case"click":this._events.click.subscribe(t);break;case"hover":this._events.hover.subscribe(t);break;case"cameraChange":this._events.cameraChange.subscribe(t);break;case"sceneRendered":this._events.sceneRendered.subscribe(t);break;case"disposed":this._events.disposed.subscribe(t);break;default:Object(i.l)(e)}}off(e,t){switch(e){case"click":this._events.click.unsubscribe(t);break;case"hover":this._events.hover.unsubscribe(t);break;case"cameraChange":this._events.cameraChange.unsubscribe(t);break;case"sceneRendered":this._events.sceneRendered.unsubscribe(t);break;case"disposed":this._events.disposed.unsubscribe(t);break;default:Object(i.l)(e)}}setCameraControlsOptions(e){this._cameraManager.setCameraControlsOptions(e)}getCameraControlsOptions(){return this._cameraManager.getCameraControlsOptions()}getViewState(){return this.createViewStateHelper().getCurrentState()}setViewState(e){const t=this.createViewStateHelper();return this.models.filter(e=>e instanceof q).map(e=>e).forEach(e=>e.removeAllStyledNodeCollections()),t.setState(e)}async addModel(e){if(void 0!==e.localPath)throw new Error("addModel() only supports CDF hosted models. 
Use addCadModel() and addPointCloudModel() to use self-hosted models");switch(await this.determineModelType(e.modelId,e.revisionId)){case"cad":return this.addCadModel(e);case"pointcloud":return this.addPointCloudModel(e);default:throw new Error("Model is not supported")}}async addCadModel(e){const t=this._dataSource.getNodesApiClient(),{modelId:n,revisionId:r}=e,o=await this._revealManagerHelper.addCadModel(e),i=new q(n,r,o,t);return this._models.push(i),this.scene.add(i),i}async addPointCloudModel(e){if(e.localPath)throw new k("localPath is not supported");if(e.geometryFilter)throw new k("geometryFilter is not supported for point clouds");const{modelId:t,revisionId:n}=e,r=await this._revealManagerHelper.addPointCloudModel(e),o=new H(t,n,r);return this._models.push(o),this.scene.add(o),o}removeModel(e){const t=this._models.indexOf(e);if(-1===t)throw new Error("Model is not added to viewer");switch(this._models.splice(t,1),this.scene.remove(e),this.renderController.redraw(),e.type){case"cad":const t=e;return void this.revealManager.removeModel(e.type,t.cadNode);case"pointcloud":const n=e;return void this.revealManager.removeModel(e.type,n.pointCloudNode);default:Object(i.l)(e.type,`Model type ${e.type} cannot be removed`)}}async determineModelType(e,t){if(void 0===this._cdfSdkClient)throw new Error(this.determineModelType.name+"() is only supported when connecting to Cognite Data Fusion");const n=new st.b(e,t),r=(await this._dataSource.getModelMetadataProvider().getModelOutputs(n)).map(e=>e.format);return o(st.d.GltfCadModel)||o(st.d.RevealCadModel)?"cad":o(st.d.EptPointCloud)?"pointcloud":"";function o(e){return r.includes(e)}}addObject3D(e){this.isDisposed||(this.scene.add(e),e.updateMatrixWorld(!0),this._extraObjects.push(e),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera))}removeObject3D(e){if(this.isDisposed)return;this.scene.remove(e);const 
t=this._extraObjects.indexOf(e);t>=0&&this._extraObjects.splice(t,1),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera)}addUiObject(e,t,n){this.isDisposed||this.revealManager.addUiObject(e,t,n)}removeUiObject(e){this.isDisposed||this.revealManager.removeUiObject(e)}setBackgroundColor(e){this.isDisposed||(this.renderer.setClearColor(e),this.spinner.updateBackgroundColor(e),this.requestRedraw())}setClippingPlanes(e){this.revealManager.clippingPlanes=e,this._clippingNeedsUpdate=!0}setSlicingPlanes(e){this.setClippingPlanes(e)}getClippingPlanes(){return this.revealManager.clippingPlanes}getCamera(){return this.camera}getScene(){return this.scene}getCameraPosition(){return this.isDisposed?new l.Vector3(-1/0,-1/0,-1/0):this.camera.position.clone()}getCameraTarget(){return this.isDisposed?new l.Vector3(-1/0,-1/0,-1/0):this._cameraManager.getCameraControlsState().target.clone()}setCameraPosition(e){this.isDisposed||this._cameraManager.setCameraControlsState({position:e,target:this.getCameraTarget()})}setCameraTarget(e,t=!1){this.isDisposed||this._cameraManager.setCameraTarget(e,t)}get cameraControls(){return this._cameraManager.cameraControls}get cameraControlsEnabled(){return this._cameraManager.cameraControlsEnabled}set cameraControlsEnabled(e){this._cameraManager.cameraControlsEnabled=e}loadCameraFromModel(e){const t=e.getCameraConfiguration();t?this._cameraManager.setCameraControlsState({position:t.position,target:t.target}):this.fitCameraToModel(e,0)}fitCameraToModel(e,t){const n=e.getModelBoundingBox(new l.Box3,!0);this._cameraManager.fitCameraToBoundingBox(n,t)}fitCameraToBoundingBox(e,t,n=2){this._cameraManager.fitCameraToBoundingBox(e,t,n)}requestRedraw(){this.revealManager.requestRedraw()}enableKeyboardNavigation(){this._cameraManager.keyboardNavigationEnabled=!0}disableKeyboardNavigation(){this._cameraManager.keyboardNavigationEnabled=!1}worldToScreen(e,t){this.camera.updateMatrixWorld();const n=new l.Vector3;return 
t?Object(D.a)(this.camera,e,n):Object(D.b)(this.renderer,this.camera,e,n),n.x<0||n.x>1||n.y<0||n.y>1||n.z<0||n.z>1?null:new l.Vector2(n.x,n.y)}async getScreenshot(e=this.canvas.width,t=this.canvas.height){if(this.isDisposed)throw new Error("Viewer is disposed");const{width:n,height:r}=this.canvas,o=this.camera.clone();or(o,e,t),this.renderer.setSize(e,t),this.renderer.render(this.scene,o),this.revealManager.render(o);const i=this.renderer.domElement.toDataURL();return this.renderer.setSize(n,r),this.renderer.render(this.scene,this.camera),this.requestRedraw(),i}async getIntersectionFromPixel(e,t,n){const r=this.getModels("cad"),o=this.getModels("pointcloud"),i=r.map(e=>e.cadNode),a=o.map(e=>e.pointCloudNode),s={normalizedCoords:{x:e/this.renderer.domElement.clientWidth*2-1,y:t/this.renderer.domElement.clientHeight*-2+1},camera:this.camera,renderer:this.renderer,clippingPlanes:this.getClippingPlanes(),domElement:this.renderer.domElement},d=function(e,t){const n=[];for(const r of e){const e=B(r,t);e&&n.push(e)}return n.sort((e,t)=>e.distance-t.distance)}(i,s),l=Q(a,s,null==n?void 0:n.pointIntersectionThreshold),c=[];if(l.length>0){const e=l[0];for(const t of o)if(t.pointCloudNode===e.pointCloudNode){const n={type:"pointcloud",model:t,point:e.point,pointIndex:e.pointIndex,distanceToCamera:e.distance};c.push(n);break}}if(d.length>0){const e=d[0];for(const t of r)if(t.cadNode===e.cadNode){const n={type:"cad",model:t,treeIndex:e.treeIndex,point:e.point,distanceToCamera:e.distance};c.push(n)}}return c.sort((e,t)=>e.distanceToCamera-t.distanceToCamera),c.length>0?c[0]:null}getModels(e){return this._models.filter(t=>t.type===e)}createViewStateHelper(){if(void 0===this._cdfSdkClient)throw new Error(this.setViewState.name+"() is only supported when connecting to Cognite Data Fusion");return new ue(this,this._cdfSdkClient)}async 
animate(e){if(this.isDisposed)return;this.latestRequestId=requestAnimationFrame(this._boundAnimate);const{display:t,visibility:n}=window.getComputedStyle(this.canvas);if("visible"===n&&"none"!==t){const{renderController:t}=this;M.a.update(e);if(this.resizeIfNecessary()&&this.requestRedraw(),this._cameraManager.updateCameraControlsState(this.clock.getDelta()),t.update(),this.revealManager.update(this.camera),t.needsRedraw||this.revealManager.needsRedraw||this._clippingNeedsUpdate){const e=this.renderer.info.render.frame,n=Date.now();this.updateCameraNearAndFar(this.camera),this.revealManager.render(this.camera),t.clearNeedsRedraw(),this.revealManager.resetRedraw(),this._clippingNeedsUpdate=!1;const r=Date.now()-n;this._events.sceneRendered.fire({frameNumber:e,renderTime:r,renderer:this.renderer,camera:this.camera})}}}async modelIntersectionCallback(e,t){return{intersection:await this.getIntersectionFromPixel(e,t),modelsBoundingBox:this._updateNearAndFarPlaneBuffers.combinedBbox}}updateCameraNearAndFar(e){if(this.isDisposed)return;if(!this._automaticControlsSensitivity&&!this._automaticNearFarPlane)return;const{combinedBbox:t,bbox:n}=this._updateNearAndFarPlaneBuffers;t.makeEmpty(),this._models.forEach(e=>{e.getModelBoundingBox(n),n.isEmpty()||t.union(n)}),this._extraObjects.forEach(e=>{n.setFromObject(e),n.isEmpty()||t.union(n)}),this._cameraManager.updateCameraNearAndFar(e,t)}resizeIfNecessary(){if(this.isDisposed)return!1;const e=this.renderer.getSize(new l.Vector2),t=e.width,n=e.height,r=0!==this.domElement.clientWidth?this.domElement.clientWidth:this.canvas.clientWidth,o=0!==this.domElement.clientHeight?this.domElement.clientHeight:this.canvas.clientHeight,i=this.renderer.getPixelRatio()*r,a=this.renderer.getPixelRatio()*o,s=i*a,d=s>14e5?Math.sqrt(14e5/s):1,c=i*d,u=a*d;return!(Math.abs(t-c)<.1&&Math.abs(n-u)<.1)&&(this.renderer.setSize(c,u),or(this.camera,c,u),!0)}}function or(e,t,n){e instanceof l.PerspectiveCamera?(e.aspect=t/n,e.updateProjectionMatrix()):e 
instanceof l.OrthographicCamera&&(e.left=-t,e.right=t,e.top=n,e.bottom=-n)}}])}));
|
|
405
494
|
//# sourceMappingURL=index.map
|