@cognite/reveal 2.1.2 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/cad.d.ts +4 -5
- package/core/index.d.ts +7 -7
- package/core/src/datamodels/base/SupportedModelTypes.d.ts +4 -4
- package/core/src/datamodels/base/index.d.ts +6 -6
- package/core/src/datamodels/base/types.d.ts +14 -14
- package/core/src/datamodels/cad/CadManager.d.ts +45 -43
- package/core/src/datamodels/cad/CadModelFactory.d.ts +7 -7
- package/core/src/datamodels/cad/CadModelSectorLoadStatistics.d.ts +38 -38
- package/core/src/datamodels/cad/createCadManager.d.ts +9 -10
- package/core/src/datamodels/cad/picking.d.ts +28 -28
- package/core/src/datamodels/cad/rendering/RenderAlreadyLoadedGeometryProvider.d.ts +10 -9
- package/core/src/datamodels/cad/sector/CadModelClipper.d.ts +10 -10
- package/core/src/datamodels/cad/styling/AssetNodeCollection.d.ts +39 -39
- package/core/src/datamodels/cad/styling/InvertedNodeCollection.d.ts +24 -24
- package/core/src/datamodels/cad/styling/NodeCollectionDeserializer.d.ts +26 -26
- package/core/src/datamodels/cad/styling/PopulateIndexSetFromPagedResponseHelper.d.ts +24 -24
- package/core/src/datamodels/cad/styling/PropertyFilterNodeCollection.d.ts +61 -61
- package/core/src/datamodels/cad/styling/SinglePropertyFilterNodeCollection.d.ts +60 -60
- package/core/src/datamodels/cad/styling/index.d.ts +8 -8
- package/core/src/datamodels/pointcloud/PointCloudFactory.d.ts +9 -9
- package/core/src/datamodels/pointcloud/PointCloudManager.d.ts +26 -25
- package/core/src/datamodels/pointcloud/PointCloudMetadata.d.ts +11 -11
- package/core/src/datamodels/pointcloud/PointCloudMetadataRepository.d.ts +13 -13
- package/core/src/datamodels/pointcloud/PointCloudNode.d.ts +62 -62
- package/core/src/datamodels/pointcloud/PotreeGroupWrapper.d.ts +39 -39
- package/core/src/datamodels/pointcloud/PotreeNodeWrapper.d.ts +38 -38
- package/core/src/datamodels/pointcloud/createPointCloudManager.d.ts +6 -8
- package/core/src/datamodels/pointcloud/index.d.ts +5 -0
- package/core/src/datamodels/pointcloud/picking.d.ts +29 -29
- package/core/src/datamodels/pointcloud/types.d.ts +114 -114
- package/core/src/index.d.ts +19 -19
- package/core/src/internals.d.ts +15 -0
- package/core/src/migration.d.ts +8 -8
- package/core/src/public/RevealManager.d.ts +64 -61
- package/core/src/public/createRevealManager.d.ts +23 -24
- package/core/src/public/migration/Cognite3DModel.d.ts +339 -323
- package/core/src/public/migration/Cognite3DViewer.d.ts +555 -545
- package/core/src/public/migration/CogniteModelBase.d.ts +17 -17
- package/core/src/public/migration/CognitePointCloudModel.d.ts +121 -121
- package/core/src/public/migration/NodeIdAndTreeIndexMaps.d.ts +4 -4
- package/core/src/public/migration/NotSupportedInMigrationWrapperError.d.ts +11 -11
- package/core/src/public/migration/RenderController.d.ts +4 -4
- package/core/src/public/migration/types.d.ts +266 -240
- package/core/src/public/types.d.ts +46 -45
- package/core/src/{public/migration → storage}/RevealManagerHelper.d.ts +58 -58
- package/core/src/utilities/BoundingBoxClipper.d.ts +23 -23
- package/core/src/utilities/Spinner.d.ts +27 -28
- package/core/src/utilities/ViewStateHelper.d.ts +33 -33
- package/core/src/utilities/callActionWithIndicesAsync.d.ts +4 -4
- package/core/src/utilities/index.d.ts +8 -8
- package/core/src/utilities/reflection.d.ts +7 -7
- package/core/src/utilities/worldToViewport.d.ts +30 -31
- package/core/utilities.d.ts +4 -4
- package/extensions/datasource.d.ts +9 -0
- package/extensions/datasource.js +33 -0
- package/extensions/datasource.map +1 -0
- package/index.d.ts +8 -8
- package/index.js +142 -130
- package/index.map +1 -1
- package/package.json +1 -1
- package/packages/cad-geometry-loaders/index.d.ts +14 -20
- package/packages/cad-geometry-loaders/src/CadLoadingHints.d.ts +11 -11
- package/packages/cad-geometry-loaders/src/CadModelSectorBudget.d.ts +26 -26
- package/packages/cad-geometry-loaders/src/CadModelUpdateHandler.d.ts +44 -44
- package/packages/cad-geometry-loaders/src/sector/CachedRepository.d.ts +25 -25
- package/packages/cad-geometry-loaders/src/sector/ModelStateHandler.d.ts +12 -12
- package/packages/cad-geometry-loaders/src/sector/Repository.d.ts +9 -9
- package/packages/cad-geometry-loaders/src/sector/SectorLoader.d.ts +28 -28
- package/packages/cad-geometry-loaders/src/sector/SimpleAndDetailedToSector3D.d.ts +20 -20
- package/packages/cad-geometry-loaders/src/sector/culling/ByVisibilityGpuSectorCuller.d.ts +53 -53
- package/packages/cad-geometry-loaders/src/sector/culling/OccludingGeometryProvider.d.ts +6 -6
- package/packages/cad-geometry-loaders/src/sector/culling/OrderSectorsByVisibilityCoverage.d.ts +107 -107
- package/packages/cad-geometry-loaders/src/sector/culling/SectorCuller.d.ts +33 -33
- package/packages/cad-geometry-loaders/src/sector/culling/TakenSectorTree.d.ts +24 -24
- package/packages/cad-geometry-loaders/src/sector/culling/types.d.ts +71 -71
- package/packages/cad-geometry-loaders/src/sector/rxSectorUtilities.d.ts +12 -12
- package/packages/cad-geometry-loaders/src/sector/sectorUtilities.d.ts +20 -19
- package/packages/cad-geometry-loaders/src/utilities/PromiseUtils.d.ts +18 -18
- package/packages/cad-geometry-loaders/src/utilities/arrays.d.ts +5 -5
- package/packages/cad-geometry-loaders/src/utilities/groupMeshesByNumber.d.ts +8 -8
- package/packages/cad-geometry-loaders/src/utilities/rxOperations.d.ts +5 -5
- package/packages/cad-geometry-loaders/src/utilities/types.d.ts +25 -48
- package/packages/cad-parsers/index.d.ts +21 -20
- package/packages/cad-parsers/src/cad/CadSectorParser.d.ts +15 -15
- package/packages/cad-parsers/src/cad/LevelOfDetail.d.ts +8 -8
- package/packages/{cad-geometry-loaders/src/material-manager/rendering → cad-parsers/src/cad}/filterInstanceMesh.d.ts +6 -6
- package/packages/cad-parsers/src/cad/filterPrimitives.d.ts +9 -9
- package/packages/cad-parsers/src/cad/primitiveGeometries.d.ts +31 -31
- package/packages/{cad-geometry-loaders/src/material-manager/rendering → cad-parsers/src/cad}/triangleMeshes.d.ts +6 -6
- package/packages/cad-parsers/src/cad/types.d.ts +56 -56
- package/packages/cad-parsers/src/metadata/CadMetadataParser.d.ts +7 -7
- package/packages/cad-parsers/src/metadata/CadModelMetadata.d.ts +44 -44
- package/packages/cad-parsers/src/metadata/CadModelMetadataRepository.d.ts +16 -16
- package/packages/cad-parsers/src/metadata/MetadataRepository.d.ts +7 -6
- package/packages/cad-parsers/src/metadata/parsers/CadMetadataParserV8.d.ts +51 -51
- package/packages/cad-parsers/src/metadata/types.d.ts +41 -41
- package/packages/{cad-geometry-loaders → cad-parsers}/src/sector/RootSectorNode.d.ts +12 -12
- package/packages/{cad-geometry-loaders → cad-parsers}/src/sector/SectorNode.d.ts +21 -21
- package/packages/cad-parsers/src/utilities/SectorScene.d.ts +21 -21
- package/packages/cad-parsers/src/utilities/SectorSceneFactory.d.ts +12 -12
- package/packages/cad-parsers/src/{cad → utilities}/computeBoundingBoxFromAttributes.d.ts +9 -9
- package/packages/{cad-geometry-loaders → cad-parsers}/src/utilities/float32BufferToMatrix.d.ts +12 -12
- package/packages/cad-parsers/src/utilities/types.d.ts +48 -48
- package/packages/cad-styling/index.d.ts +9 -9
- package/packages/cad-styling/src/CombineNodeCollectionBase.d.ts +31 -31
- package/packages/cad-styling/src/IntersectionNodeCollection.d.ts +15 -15
- package/packages/cad-styling/src/NodeAppearance.d.ts +62 -62
- package/packages/cad-styling/src/NodeAppearanceProvider.d.ts +29 -29
- package/packages/cad-styling/src/NodeCollectionBase.d.ts +47 -47
- package/packages/cad-styling/src/TreeIndexNodeCollection.d.ts +23 -23
- package/packages/cad-styling/src/UnionNodeCollection.d.ts +15 -15
- package/packages/camera-manager/index.d.ts +4 -4
- package/packages/camera-manager/src/ComboControls.d.ts +84 -83
- package/packages/camera-manager/src/Keyboard.d.ts +14 -14
- package/packages/data-source/index.d.ts +6 -0
- package/packages/data-source/src/CdfDataSource.d.ts +19 -0
- package/packages/data-source/src/DataSource.d.ts +26 -0
- package/packages/data-source/src/LocalDataSource.d.ts +15 -0
- package/packages/logger/index.d.ts +5 -5
- package/packages/logger/src/Log.d.ts +5 -5
- package/packages/metrics/index.d.ts +5 -0
- package/packages/metrics/src/MetricsLogger.d.ts +21 -0
- package/packages/metrics/src/types.d.ts +7 -0
- package/packages/modeldata-api/index.d.ts +13 -8
- package/packages/modeldata-api/src/{CdfModelDataClient.d.ts → CdfModelDataProvider.d.ts} +15 -17
- package/packages/modeldata-api/src/CdfModelIdentifier.d.ts +17 -0
- package/packages/modeldata-api/src/CdfModelMetadataProvider.d.ts +19 -36
- package/packages/modeldata-api/src/CdfModelOutputsProvider.d.ts +15 -0
- package/packages/modeldata-api/src/{LocalModelDataClient.d.ts → LocalModelDataProvider.d.ts} +9 -10
- package/packages/modeldata-api/src/LocalModelIdentifier.d.ts +14 -0
- package/packages/modeldata-api/src/LocalModelMetadataProvider.d.ts +14 -19
- package/packages/modeldata-api/src/Model3DOutputList.d.ts +17 -17
- package/packages/modeldata-api/src/ModelIdentifier.d.ts +12 -0
- package/packages/modeldata-api/src/ModelMetadataProvider.d.ts +17 -0
- package/packages/modeldata-api/src/applyDefaultModelTransformation.d.ts +6 -6
- package/packages/modeldata-api/src/types.d.ts +41 -55
- package/packages/modeldata-api/src/utilities.d.ts +7 -7
- package/packages/nodes-api/index.d.ts +6 -7
- package/packages/nodes-api/src/NodesApiClient.d.ts +52 -43
- package/packages/nodes-api/src/NodesCdfClient.d.ts +24 -19
- package/packages/nodes-api/src/NodesLocalClient.d.ts +30 -25
- package/packages/nodes-api/src/types.d.ts +10 -14
- package/packages/rendering/index.d.ts +17 -0
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/CadMaterialManager.d.ts +37 -37
- package/packages/{cad-geometry-loaders → rendering}/src/InstancedMeshManager.d.ts +20 -20
- package/packages/{cad-geometry-loaders → rendering}/src/cameraconfig.d.ts +12 -12
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/EffectRenderManager.d.ts +99 -83
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/rendering}/NodeAppearanceTextureBuilder.d.ts +51 -50
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/RenderMode.d.ts +15 -15
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/createSimpleGeometryMesh.d.ts +6 -6
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/matCapTextureData.d.ts +5 -5
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/materials.d.ts +23 -23
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/primitives.d.ts +12 -12
- package/packages/{cad-parsers/src/cad → rendering/src/rendering}/shaders.d.ts +101 -101
- package/packages/{cad-geometry-loaders/src/material-manager → rendering/src}/rendering/types.d.ts +76 -76
- package/packages/{cad-geometry-loaders/src → rendering/src/sector}/CadNode.d.ts +54 -54
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/NodeTransformProvider.d.ts +13 -13
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/NodeTransformTextureBuilder.d.ts +23 -23
- package/packages/{cad-geometry-loaders/src/material-manager/styling → rendering/src/transform}/TransformOverrideBuffer.d.ts +21 -21
- package/packages/rendering/src/utilities/types.d.ts +26 -0
- package/packages/tools/index.d.ts +15 -12
- package/packages/tools/src/AxisView/AxisViewTool.d.ts +32 -32
- package/packages/tools/src/AxisView/types.d.ts +98 -98
- package/packages/tools/src/Cognite3DViewerToolBase.d.ts +25 -25
- package/packages/tools/src/DebugCameraTool.d.ts +20 -20
- package/packages/tools/src/DebugLoadedSectorsTool.d.ts +23 -23
- package/packages/tools/src/ExplodedViewTool.d.ts +12 -12
- package/packages/tools/src/Geomap/Geomap.d.ts +14 -14
- package/packages/tools/src/Geomap/GeomapTool.d.ts +21 -21
- package/packages/tools/src/Geomap/MapConfig.d.ts +195 -195
- package/packages/tools/src/HtmlOverlay/BucketGrid2D.d.ts +28 -0
- package/packages/tools/src/HtmlOverlay/HtmlOverlayTool.d.ts +159 -0
- package/packages/tools/src/Timeline/Keyframe.d.ts +38 -0
- package/packages/tools/src/Timeline/TimelineTool.d.ts +79 -0
- package/packages/tools/src/Timeline/types.d.ts +13 -0
- package/packages/tools/src/types.d.ts +4 -4
- package/packages/utilities/index.d.ts +24 -24
- package/packages/utilities/src/CameraConfiguration.d.ts +10 -10
- package/packages/utilities/src/IndexSet.d.ts +1 -1
- package/packages/utilities/src/NumericRange.d.ts +21 -21
- package/packages/utilities/src/RandomColors.d.ts +34 -0
- package/packages/utilities/src/WebGLRendererStateHelper.d.ts +15 -15
- package/packages/utilities/src/assertNever.d.ts +7 -7
- package/packages/utilities/src/cache/MemoryRequestCache.d.ts +22 -22
- package/packages/utilities/src/cache/MostFrequentlyUsedCache.d.ts +19 -19
- package/packages/utilities/src/cache/RequestCache.d.ts +13 -13
- package/packages/utilities/src/datastructures/DynamicDefragmentedBuffer.d.ts +22 -22
- package/packages/utilities/src/determinePowerOfTwoDimensions.d.ts +11 -11
- package/packages/utilities/src/disposeAttributeArrayOnUpload.d.ts +17 -17
- package/packages/utilities/src/events/EventTrigger.d.ts +13 -13
- package/packages/utilities/src/events/clickOrTouchEventOffset.d.ts +13 -13
- package/packages/utilities/src/events/index.d.ts +5 -5
- package/packages/utilities/src/indexset/IndexSet.d.ts +26 -26
- package/packages/utilities/src/indexset/IntermediateIndexNode.d.ts +26 -26
- package/packages/utilities/src/indexset/LeafIndexNode.d.ts +16 -16
- package/packages/utilities/src/isMobileOrTablet.d.ts +4 -4
- package/packages/utilities/src/networking/isTheSameDomain.d.ts +11 -11
- package/packages/utilities/src/objectTraversal.d.ts +8 -8
- package/packages/utilities/src/packFloat.d.ts +6 -6
- package/packages/utilities/src/revealEnv.d.ts +10 -10
- package/packages/utilities/src/three/AutoDisposeGroup.d.ts +17 -17
- package/packages/utilities/src/three/BoundingBoxLOD.d.ts +27 -27
- package/packages/utilities/src/three/getBox3CornerPoints.d.ts +5 -5
- package/packages/utilities/src/transformCameraConfiguration.d.ts +6 -6
- package/packages/utilities/src/types.d.ts +10 -10
- package/packages/utilities/src/workers/WorkerPool.d.ts +14 -14
- package/tools.d.ts +7 -7
- package/tools.js +70 -123
- package/tools.map +1 -1
- package/packages/tools/src/HtmlOverlayTool.d.ts +0 -88
- package/packages/utilities/src/metrics.d.ts +0 -15
package/tools.js
CHANGED
|
@@ -1,281 +1,228 @@
|
|
|
1
|
-
!function(e,
|
|
1
|
+
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var r=t();for(var n in r)("object"==typeof exports?exports:e)[n]=r[n]}}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="https://apps-cdn.cogniteapp.com/@cognite/reveal-parser-worker/1.2.0/",r(r.s=65)}([function(e,t){e.exports=require("three")},function(e,t,r){"use strict";
|
|
2
2
|
/*!
|
|
3
3
|
* Copyright 2021 Cognite AS
|
|
4
4
|
*/
|
|
5
|
-
function
|
|
5
|
+
function n(e,t){if(t(e))for(let r=0;r<e.children.length;r++)n(e.children[r],t)}function o(e,t){if(void 0===e)return;const{position:r,target:n}=e;return r.applyMatrix4(t),n.applyMatrix4(t),{position:r,target:n}}r.d(t,"t",(function(){return n})),r.d(t,"s",(function(){return o})),r.d(t,"d",(function(){return c})),r.d(t,"l",(function(){return a})),r.d(t,"k",(function(){return l})),r.d(t,"h",(function(){return d})),r.d(t,"m",(function(){return h})),r.d(t,"e",(function(){return w})),r.d(t,"q",(function(){return x})),r.d(t,"c",(function(){return S})),r.d(t,"a",(function(){return E})),r.d(t,"b",(function(){return P})),r.d(t,"o",(function(){return D})),r.d(t,"p",(function(){return T})),r.d(t,"i",(function(){return A})),r.d(t,"j",(function(){return O})),r.d(t,"f",(function(){return z})),r.d(t,"g",(function(){return $})),r.d(t,"n",(function(){return j})),r.d(t,"r",(function(){return F}));var i=r(0);
|
|
6
6
|
/*!
|
|
7
7
|
* Copyright 2021 Cognite AS
|
|
8
|
-
*/
|
|
9
|
-
/*!
|
|
10
|
-
* Copyright 2021 Cognite AS
|
|
11
|
-
*/t.d(n,"y",(function(){return r})),t.d(n,"x",(function(){return o})),t.d(n,"d",(function(){return a})),t.d(n,"l",(function(){return i})),t.d(n,"k",(function(){return s})),t.d(n,"h",(function(){return l})),t.d(n,"m",(function(){return d})),t.d(n,"e",(function(){return h})),t.d(n,"r",(function(){return x})),t.d(n,"c",(function(){return C})),t.d(n,"a",(function(){return M})),t.d(n,"b",(function(){return D})),t.d(n,"o",(function(){return R})),t.d(n,"q",(function(){return N})),t.d(n,"i",(function(){return z})),t.d(n,"j",(function(){return B})),t.d(n,"f",(function(){return G})),t.d(n,"g",(function(){return U})),t.d(n,"u",(function(){return Y})),t.d(n,"w",(function(){return X})),t.d(n,"t",(function(){return Z})),t.d(n,"v",(function(){return K})),t.d(n,"p",(function(){return H})),t.d(n,"n",(function(){return J})),t.d(n,"s",(function(){return A}));class a{constructor(){this._listeners=[]}subscribe(e){this._listeners.push(e)}unsubscribe(e){const n=this._listeners.indexOf(e);-1!==n&&this._listeners.splice(n,1)}unsubscribeAll(){this._listeners.splice(0)}fire(...e){this._listeners.forEach(n=>n(...e))}}
|
|
8
|
+
*/class s{static color(e){const t=s._colors.get(e);if(void 0!==t)return t;const r=s.generateRandomColor();return s._colors.set(e,r),r}static colorRGB(e){const t=s.color(e);return[Math.floor(255*t.r),Math.floor(255*t.g),Math.floor(255*t.b)]}static colorCSS(e){const[t,r,n]=s.colorRGB(e);return`rgb(${t}, ${r}, ${n})`}static generateRandomColor(){const e=Math.random(),t=.4+.6*Math.random(),r=.3+.4*Math.random();return(new i.Color).setHSL(e,t,r)}}
|
|
12
9
|
/*!
|
|
13
10
|
* Copyright 2021 Cognite AS
|
|
14
11
|
*/
|
|
12
|
+
function a(e,t){const r=t.getBoundingClientRect();if(e instanceof MouseEvent)return{offsetX:e.clientX-r.left,offsetY:e.clientY-r.top};if(e.changedTouches.length>0){const t=e.changedTouches[0];return{offsetX:t.clientX-r.left,offsetY:t.clientY-r.top}}return{offsetX:-1,offsetY:-1}}
|
|
15
13
|
/*!
|
|
16
14
|
* Copyright 2021 Cognite AS
|
|
17
|
-
*/
|
|
15
|
+
*/s._colors=new Map;class c{constructor(){this._listeners=[]}subscribe(e){this._listeners.push(e)}unsubscribe(e){const t=this._listeners.indexOf(e);-1!==t&&this._listeners.splice(t,1)}unsubscribeAll(){this._listeners.splice(0)}fire(...e){this._listeners.forEach(t=>t(...e))}}
|
|
18
16
|
/*!
|
|
19
17
|
* Copyright 2021 Cognite AS
|
|
20
|
-
*/
|
|
18
|
+
*/
|
|
21
19
|
/*!
|
|
22
20
|
* Copyright 2021 Cognite AS
|
|
23
|
-
*/function
|
|
24
|
-
/*!
|
|
25
|
-
* Copyright 2021 Cognite AS../NumericRange
|
|
26
|
-
*/class p{constructor(e){this.range=e,this.maxSubtreeDepth=0,this.count=e.count}static fromInterval(e,n){return new p(l.createFromInterval(e,n))}traverse(e){e(this.range)}contains(e){return this.range.contains(e)}addRange(e){return this.range.intersectsOrCoinciding(e)?new p(this.range.union(e)):v.fromIndexNodesAndBalance(this,new p(e))}removeRange(e){if(!e.intersects(this.range))return this;if(this.range.isInside(e))return;let n=void 0,t=void 0;return this.range.from<e.from&&(n=l.createFromInterval(this.range.from,e.from-1)),this.range.toInclusive>e.toInclusive&&(t=l.createFromInterval(e.toInclusive+1,this.range.toInclusive)),null!=n&&null!=t?v.fromIndexNodesAndBalance(new p(n),new p(t)):null!=n?new p(n):null!=t?new p(t):void 0}hasIntersectionWith(e){return e.range.intersects(this.range)}soak(e){return this.range.intersectsOrCoinciding(e)?[void 0,this.range.union(e)]:[this,e]}clone(){return new p(this.range)}}class h{constructor(e){if(null==e)this.rootNode=void 0;else if(e instanceof l)this.addRange(e);else for(const n of e)this.add(n)}forEachRange(e){this.rootNode&&this.rootNode.traverse(e)}add(e){const n=new l(e,1);this.addRange(n)}addRange(e){this.rootNode?this.rootNode=this.rootNode.addRange(e):this.rootNode=new p(e)}remove(e){const n=new l(e,1);this.removeRange(n)}removeRange(e){this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))}contains(e){return!!this.rootNode&&this.rootNode.contains(e)}get count(){return this.rootNode?this.rootNode.count:0}toRangeArray(){const e=[];return this.forEachRange(n=>{e.push(n)}),e}toIndexArray(){const e=[];return this.rootNode&&this.forEachRange(n=>{n.forEach(n=>{e.push(n)})}),e}toPlainSet(){const e=this.toIndexArray();return new Set(e)}invertedRanges(){const e=this.toRangeArray(),n=[];for(let t=0;t<e.length-1;t++)e[t].toInclusive+1>=e[t+1].from||n.push(l.createFromInterval(e[t].toInclusive+1,e[t+1].from));return n}unionWith(e){return 
this.rootNode?e.forEachRange(e=>{this.rootNode=this.rootNode.addRange(e)}):this.rootNode=e.rootNode,this}differenceWith(e){return this.rootNode&&e.forEachRange(e=>{var n;this.rootNode=null===(n=this.rootNode)||void 0===n?void 0:n.removeRange(e)}),this}hasIntersectionWith(e){if(e instanceof h)return void 0!==this.rootNode&&void 0!==e.rootNode&&this.rootNode.hasIntersectionWith(e.rootNode);for(const n of e)if(this.contains(n))return!0;return!1}intersectWith(e){if(this.rootNode&&e.rootNode){if(this.rootNode.range.from<e.rootNode.range.from){const n=l.createFromInterval(this.rootNode.range.from,e.rootNode.range.from-1);if(this.rootNode=this.rootNode.removeRange(n),!this.rootNode)return this}if(this.rootNode.range.toInclusive>e.rootNode.range.toInclusive){const n=l.createFromInterval(e.rootNode.range.toInclusive+1,this.rootNode.range.toInclusive);this.rootNode=this.rootNode.removeRange(n)}e.invertedRanges().forEach(e=>{this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))})}else this.rootNode&&(this.rootNode=void 0);return this}clear(){this.rootNode=void 0}clone(){const e=new h;return this.rootNode&&(e.rootNode=this.rootNode.clone()),e}}
|
|
21
|
+
*/function l(e,t){throw new Error(t||"Unexpected object: "+e)}
|
|
27
22
|
/*!
|
|
28
23
|
* Copyright 2021 Cognite AS
|
|
29
|
-
*/
|
|
24
|
+
*/class d{constructor(e,t){if(t<0)throw new Error("Range cannot have negative number of elements");this.from=e,this.count=t,this.toInclusive=e+t-1}static createFromInterval(e,t){return new d(e,t-e+1)}*values(){for(let e=this.from;e<=this.toInclusive;++e)yield e}toArray(){return Array.from(this.values())}equal(e){return this.from===e.from&&this.count===e.count}contains(e){return e>=this.from&&e<=this.toInclusive}intersects(e){return this.from<=e.toInclusive&&this.toInclusive>=e.from}intersectsOrCoinciding(e){return this.from<=e.toInclusive+1&&this.toInclusive+1>=e.from}intersectionWith(e){return this.intersects(e)?d.createFromInterval(Math.max(this.from,e.from),Math.min(this.toInclusive,e.toInclusive)):void 0}isInside(e){return this.from>=e.from&&this.toInclusive<=e.toInclusive}union(e){return d.createFromInterval(Math.min(this.from,e.from),Math.max(this.toInclusive,e.toInclusive))}forEach(e){for(let t=this.from;t<=this.toInclusive;++t)e(t)}str(){return"("+this.from+", "+this.toInclusive+")"}}
|
|
30
25
|
/*!
|
|
31
26
|
* Copyright 2021 Cognite AS
|
|
32
|
-
*/
|
|
27
|
+
*/function h(e){const t=Math.max(1,m(Math.sqrt(e)));return{width:t,height:Math.max(1,m(e/t))}}const u=Math.log(2);function m(e){return Math.pow(2,Math.ceil(Math.log(e)/u))}var p=r(7),f=r.n(p);class g{constructor(e,t){this.left=e,this.right=t,this.maxSubtreeDepth=Math.max(this.left.maxSubtreeDepth,this.right.maxSubtreeDepth)+1,this.range=d.createFromInterval(this.left.range.from,this.right.range.toInclusive),this.count=this.left.count+this.right.count}static fromIndexNodesAndBalance(e,t){return e.range.from>t.range.toInclusive+1?new g(t,e).balance():e.range.toInclusive+1<t.range.from?new g(e,t).balance():void f()(!1,"Internal error in IndexSet: Overlapping nodes")}traverse(e){this.left.traverse(e),this.right.traverse(e)}contains(e){return!!this.range.contains(e)&&(this.left.contains(e)||this.right.contains(e))}addRange(e){if(!e.intersectsOrCoinciding(this.range)){if(e.from<this.range.from){const t=this.left.addRange(e);return g.fromIndexNodesAndBalance(t,this.right)}{const t=this.right.addRange(e);return g.fromIndexNodesAndBalance(this.left,t)}}const t=e.intersectsOrCoinciding(this.left.range),r=e.intersectsOrCoinciding(this.right.range);if(t&&r){const[t,r]=this.left.soak(e),[n,o]=this.right.soak(e),i=r.union(o);if(void 0===t&&void 0===n)return new v(i);if(void 0===t&&void 0!==n)return n.addRange(i);if(void 0===n&&void 0!==t)return t.addRange(i);return g.fromIndexNodesAndBalance(t,n).addRange(i)}return t?g.fromIndexNodesAndBalance(this.left.addRange(e),this.right):r?g.fromIndexNodesAndBalance(this.left,this.right.addRange(e)):this.left.maxSubtreeDepth<this.right.maxSubtreeDepth?g.fromIndexNodesAndBalance(this.left.addRange(e),this.right):g.fromIndexNodesAndBalance(this.left,this.right.addRange(e))}removeRange(e){if(!e.intersects(this.range))return this;const[t,r]=this.soak(e);let n=void 0,o=void 0;if(r.from<e.from&&(n=d.createFromInterval(r.from,e.from-1)),r.toInclusive>e.toInclusive&&(o=d.createFromInterval(e.toInclusive+1,r.toInclusive)),void 0===t)return void 
0!==n&&void 0!==o?g.fromIndexNodesAndBalance(new v(n),new v(o)):null!=n?new v(n):null!=o?new v(o):void 0;{let e=t;return void 0!==n&&(e=e.addRange(n)),void 0!==o&&(e=e.addRange(o)),e}}balance(){const e=this.left.maxSubtreeDepth,t=this.right.maxSubtreeDepth;if(t+2<=e){const e=this.left.rotateSmallerRight();return new g(e,this.right).rotateRight().balance()}if(e+2<=t){const e=this.right.rotateSmallerLeft();return new g(this.left,e).rotateLeft().balance()}return this}clone(){return g.fromIndexNodesAndBalance(this.left.clone(),this.right.clone())}hasIntersectionWith(e){return!!e.range.intersects(this.range)&&(this.range.isInside(e.range)?e.hasIntersectionWith(this):!(!this.left.range.intersects(e.range)||!this.left.hasIntersectionWith(e))||!(!this.right.range.intersects(e.range)||!this.right.hasIntersectionWith(e)))}soak(e){let[t,r]=[this.left,e],[n,o]=[this.right,e];if(this.right.range.isInside(e)&&this.left.range.isInside(e))return[void 0,e];this.left.range.intersectsOrCoinciding(e)&&([t,r]=this.left.soak(e)),this.right.range.intersectsOrCoinciding(e)&&([n,o]=this.right.soak(e));const i=r.union(o);if(null==n)return[t,i];if(null==t)return[n,i];return[g.fromIndexNodesAndBalance(t,n),i]}rotateRight(){return"right"in this.left?new g(this.left.left,new g(this.left.right,this.right)):this}rotateLeft(){return"left"in this.right?new g(new g(this.left,this.right.left),this.right.right):this}rotateSmallerLeft(){if(this.left.maxSubtreeDepth>this.right.maxSubtreeDepth){let e=this.rotateRight();return e=e.rotateSmallerLeft(),e}return this}rotateSmallerRight(){if(this.right.maxSubtreeDepth>this.left.maxSubtreeDepth){let e=this.rotateLeft();return e=e.rotateSmallerRight(),e}return this}}
|
|
33
28
|
/*!
|
|
34
|
-
* Copyright 2021 Cognite AS
|
|
35
|
-
*/
|
|
29
|
+
* Copyright 2021 Cognite AS../NumericRange
|
|
30
|
+
*/class v{constructor(e){this.range=e,this.maxSubtreeDepth=0,this.count=e.count}static fromInterval(e,t){return new v(d.createFromInterval(e,t))}traverse(e){e(this.range)}contains(e){return this.range.contains(e)}addRange(e){return this.range.intersectsOrCoinciding(e)?new v(this.range.union(e)):g.fromIndexNodesAndBalance(this,new v(e))}removeRange(e){if(!e.intersects(this.range))return this;if(this.range.isInside(e))return;let t=void 0,r=void 0;return this.range.from<e.from&&(t=d.createFromInterval(this.range.from,e.from-1)),this.range.toInclusive>e.toInclusive&&(r=d.createFromInterval(e.toInclusive+1,this.range.toInclusive)),null!=t&&null!=r?g.fromIndexNodesAndBalance(new v(t),new v(r)):null!=t?new v(t):null!=r?new v(r):void 0}hasIntersectionWith(e){return e.range.intersects(this.range)}soak(e){return this.range.intersectsOrCoinciding(e)?[void 0,this.range.union(e)]:[this,e]}clone(){return new v(this.range)}}class w{constructor(e){if(null==e)this.rootNode=void 0;else if(e instanceof d)this.addRange(e);else for(const t of e)this.add(t)}forEachRange(e){this.rootNode&&this.rootNode.traverse(e)}add(e){const t=new d(e,1);this.addRange(t)}addRange(e){this.rootNode?this.rootNode=this.rootNode.addRange(e):this.rootNode=new v(e)}remove(e){const t=new d(e,1);this.removeRange(t)}removeRange(e){this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))}contains(e){return!!this.rootNode&&this.rootNode.contains(e)}get count(){return this.rootNode?this.rootNode.count:0}toRangeArray(){const e=[];return this.forEachRange(t=>{e.push(t)}),e}toIndexArray(){const e=[];return this.rootNode&&this.forEachRange(t=>{t.forEach(t=>{e.push(t)})}),e}toPlainSet(){const e=this.toIndexArray();return new Set(e)}invertedRanges(){const e=this.toRangeArray(),t=[];for(let r=0;r<e.length-1;r++)e[r].toInclusive+1>=e[r+1].from||t.push(d.createFromInterval(e[r].toInclusive+1,e[r+1].from-1));return t}unionWith(e){return 
this.rootNode?e.forEachRange(e=>{this.rootNode=this.rootNode.addRange(e)}):this.rootNode=e.rootNode,this}differenceWith(e){return this.rootNode&&e.forEachRange(e=>{var t;this.rootNode=null===(t=this.rootNode)||void 0===t?void 0:t.removeRange(e)}),this}hasIntersectionWith(e){if(e instanceof w)return void 0!==this.rootNode&&void 0!==e.rootNode&&this.rootNode.hasIntersectionWith(e.rootNode);for(const t of e)if(this.contains(t))return!0;return!1}intersectWith(e){if(this.rootNode&&e.rootNode){if(this.rootNode.range.from<e.rootNode.range.from){const t=d.createFromInterval(this.rootNode.range.from,e.rootNode.range.from-1);if(this.rootNode=this.rootNode.removeRange(t),!this.rootNode)return this}if(this.rootNode.range.toInclusive>e.rootNode.range.toInclusive){const t=d.createFromInterval(e.rootNode.range.toInclusive+1,this.rootNode.range.toInclusive);this.rootNode=this.rootNode.removeRange(t)}e.invertedRanges().forEach(e=>{this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))})}else this.rootNode&&(this.rootNode=void 0);return this}clear(){this.rootNode=void 0}clone(){const e=new w;return this.rootNode&&(e.rootNode=this.rootNode.clone()),e}}
|
|
36
31
|
/*!
|
|
37
32
|
* Copyright 2021 Cognite AS
|
|
38
|
-
*/const
|
|
33
|
+
*/function x(e,t,r){const n=B(e);if(0==n)return;const o=_(0,-e);let i=C(M(n));const s=n/y(i);s<1&&(i-=1),i+=127,t[r]=128*o+C(i*y(-1)),t[r+1]=128*b(i,2)+b(C(128*s),128),t[r+2]=C(b(C(s*y(15)),y(8))),t[r+3]=C(y(23)*b(s,y(-15)))}function _(e,t){return t<e?0:1}function y(e){return Math.pow(2,e)}function b(e,t){return e-t*C(e/t)}function C(e){return Math.floor(e)}function M(e){return Math.log(e)/Math.log(2)}function B(e){return Math.abs(e)}
|
|
39
34
|
/*!
|
|
40
35
|
* Copyright 2021 Cognite AS
|
|
41
|
-
*/
|
|
36
|
+
*/class S{constructor(e,t){this._numFilled=0,this._batchIdCounter=0,this._batchMap=new Map,this._type=t;const r=Math.pow(2,Math.ceil(Math.log2(e)));this._buffer=new t(r)}get length(){return this._numFilled}get buffer(){return this._buffer}add(e){let t=!1;if(this._numFilled+e.length>this._buffer.length){const r=Math.pow(2,Math.ceil(Math.log2(this._numFilled+e.length)));this.allocateNewBuffer(r),t=!0}this._buffer.set(e,this._numFilled);const r=this.createBatch(e);return this._numFilled+=e.length,{batchId:r,bufferIsReallocated:t}}remove(e){const t=this._batchMap.get(e);if(!t)throw new Error("batch does not exist in buffer");this._buffer.copyWithin(t.from,t.from+t.count,this.buffer.length),this._numFilled-=t.count,this._currentTail===t&&(this._currentTail=t.prev);const r=t.prev,n=t.next;r&&(r.next=n),n&&(n.prev=r);let o=n;for(;o;)o.from-=t.count,o=o.next;this._batchMap.delete(e)}createBatch(e){const t={from:this._numFilled,count:e.length,prev:this._currentTail,next:void 0};this._currentTail&&(this._currentTail.next=t),this._currentTail=t;const r=this._batchIdCounter;return this._batchIdCounter++,this._batchMap.set(r,t),r}allocateNewBuffer(e){const t=new this._type(e);t.set(this._buffer),this._buffer=t}}
|
|
42
37
|
/*!
|
|
43
38
|
* Copyright 2021 Cognite AS
|
|
44
|
-
*/
|
|
39
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Shared placeholder geometry assigned to meshes after their real geometry
// has been disposed, so the meshes stay valid without holding GPU memory.
const I = new i.BufferGeometry();
// THREE.Group with manual reference counting: when the last reference is
// released the group disposes the geometries of all of its child meshes.
class E extends i.Group {
  constructor() {
    super(...arguments);
    this._isDisposed = false;
    this._referenceCount = 0;
  }

  // Registers one more owner of this group.
  reference() {
    this.ensureNotDisposed();
    this._referenceCount++;
  }

  // Releases one owner; disposes the group when the count reaches zero.
  dereference() {
    this.ensureNotDisposed();
    if (this._referenceCount === 0) {
      throw new Error('No references');
    }
    this._referenceCount--;
    if (this._referenceCount === 0) {
      this.dispose();
    }
  }

  // Frees the geometry of every child mesh, swapping in the shared empty
  // placeholder so the mesh objects remain usable.
  dispose() {
    this.ensureNotDisposed();
    this._isDisposed = true;
    const meshes = this.children.filter((child) => child instanceof i.Mesh);
    for (const mesh of meshes) {
      if (mesh.geometry !== undefined) {
        mesh.geometry.dispose();
        mesh.geometry = I;
      }
    }
  }

  ensureNotDisposed() {
    if (this._isDisposed) {
      throw new Error('Already disposed/dereferenced');
    }
  }
}
|
|
45
40
|
/*!
|
|
46
41
|
* Copyright 2021 Cognite AS
|
|
47
|
-
*/class
|
|
42
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Scratch objects reused across updateCurrentLevel() calls to avoid
// per-frame allocations.
const k={camPos:new i.Vector3,bounds:new i.Box3};
// Level-of-detail switcher similar to THREE.LOD, but measuring the distance
// from the camera to a bounding box rather than to a single point.
class P extends i.Object3D{
// e: bounding box used for the distance computation (cloned, not referenced).
constructor(e){super(),this._activeLevel=0,this._levels=[],this.isLOD=!0,this.autoUpdate=!0,this._boundingBox=e.clone(),this.type="BoundingBoxLOD"}
setBoundingBox(e){this._boundingBox.copy(e)}
// Adds an LOD level shown when the camera is at least |t| away; the level
// list is kept sorted with the largest distance first.
addLevel(e,t=0){this._levels.push({object:e,distance:Math.abs(t)}),this._levels.sort((e,t)=>t.distance-e.distance),e.visible=!1,this.add(e)}
// Index of the active level counted from the nearest level (the internal
// list is stored farthest-first, hence the reversal).
getCurrentLevel(){return this._levels.length>0?this._levels.length-this._activeLevel-1:0}
update(e){this.updateCurrentLevel(e)}
// Activates the first level whose distance threshold is met by the camera's
// distance to the world-transformed bounding box (falls back to the closest
// level). Perspective-camera zoom scales the effective distance.
updateCurrentLevel(e){const t=this._levels,{camPos:r,bounds:n}=k;n.copy(this._boundingBox).applyMatrix4(this.matrixWorld);const o=e instanceof i.PerspectiveCamera?e.zoom:1;if(t.length>0){r.setFromMatrixPosition(e.matrixWorld);const i=n.distanceToPoint(r)/o;t[this._activeLevel].object.visible=!1,this._activeLevel=t.findIndex(e=>i>=e.distance),this._activeLevel=this._activeLevel>=0?this._activeLevel:t.length-1,t[this._activeLevel].object.visible=!0}}
}
|
|
48
43
|
/*!
|
|
49
44
|
* Copyright 2021 Cognite AS
|
|
50
|
-
*/
|
|
45
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Returns the eight corner points of the axis-aligned box `e`, ordered with
// z varying fastest, then y, then x (min before max on each axis).
function D(e) {
  const corners = [];
  for (const x of [e.min.x, e.max.x]) {
    for (const y of [e.min.y, e.max.y]) {
      for (const z of [e.min.z, e.max.z]) {
        corners.push(new i.Vector3(x, y, z));
      }
    }
  }
  return corners;
}
|
|
51
46
|
/*!
|
|
52
47
|
* Copyright 2021 Cognite AS
|
|
53
|
-
*/var
|
|
48
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Heuristic user-agent sniffing (the classic detectmobilebrowsers.com regex
// pair): returns true when the current browser appears to run on a mobile
// device or tablet. The second regex is only matched against the first four
// characters of the UA string, as the upstream snippet prescribes.
// NOTE(review): UA sniffing is inherently fragile and this device list
// predates many modern devices — verify before relying on the result.
function T(){let e=!1;var t;return t=navigator.userAgent||navigator.vendor||window.opera,(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino|android|ipad|playbook|silk/i.test(t)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(t.substr(0,4)))&&(e=!0),e}
|
|
54
49
|
/*!
|
|
55
50
|
* Copyright 2021 Cognite AS
|
|
56
|
-
*/class
|
|
51
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Wraps a WebGLRenderer and records the first-seen value of every renderer
// setting it modifies, so resetState() can restore the renderer exactly as
// it was before this helper touched it.
class A {
  constructor(renderer) {
    this._originalState = {};
    this._renderer = renderer;
  }

  // Stores `values` into the saved state without overwriting entries that
  // were captured earlier (first capture wins).
  _captureOnce(values) {
    this._originalState = { ...values, ...this._originalState };
  }

  setClearColor(color, alpha) {
    this._captureOnce({
      clearColor: this._renderer.getClearColor(new i.Color()),
      clearAlpha: this._renderer.getClearAlpha()
    });
    this._renderer.setClearColor(color, alpha);
  }

  setSize(width, height) {
    this._captureOnce({ size: this._renderer.getSize(new i.Vector2()) });
    this._renderer.setSize(width, height);
  }

  set localClippingEnabled(enabled) {
    this._captureOnce({ localClippingEnabled: this._renderer.localClippingEnabled });
    this._renderer.localClippingEnabled = enabled;
  }

  set autoClear(enabled) {
    this._captureOnce({ autoClear: this._renderer.autoClear });
    this._renderer.autoClear = enabled;
  }

  setRenderTarget(target) {
    this._captureOnce({ renderTarget: this._renderer.getRenderTarget() });
    this._renderer.setRenderTarget(target);
  }

  // Restores every captured setting (in the same order as before: autoClear,
  // clear color, clipping, size, render target) and forgets the state.
  resetState() {
    const state = this._originalState;
    if (state.autoClear !== undefined) {
      this._renderer.autoClear = state.autoClear;
    }
    if (state.clearColor !== undefined) {
      this._renderer.setClearColor(state.clearColor, state.clearAlpha);
    }
    if (state.localClippingEnabled !== undefined) {
      this._renderer.localClippingEnabled = state.localClippingEnabled;
    }
    if (state.size !== undefined) {
      this._renderer.setSize(state.size.width, state.size.height);
    }
    if (state.renderTarget !== undefined) {
      this._renderer.setRenderTarget(state.renderTarget);
    }
    this._originalState = {};
  }
}
var N = r(21);
|
|
57
52
|
/*!
|
|
58
53
|
* Copyright 2021 Cognite AS
|
|
59
|
-
*/
|
|
54
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Mutable module configuration: base path prepended when resolving the
// parser-worker script URL (empty string means "use the bundler's default").
const F={publicPath:""};
|
|
60
55
|
/*!
|
|
61
56
|
* Copyright 2021 Cognite AS
|
|
62
|
-
*/
|
|
57
|
+
*/var R=r(10);
|
|
63
58
|
/*!
|
|
64
59
|
* Copyright 2021 Cognite AS
|
|
65
|
-
*/
|
|
66
|
-
const{VERSION:L,MIXPANEL_TOKEN:W}={VERSION:"2.1.2",WORKER_VERSION:"1.2.0",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d",IS_DEVELOPMENT_MODE:!1};let j=!0;const q={VERSION:L,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const n=16*Math.random()|0;return("x"==e?n:3&n|8).toString(16)}))};function H(e,n,t,r){j=e,j&&(k.a.init(W,{disable_cookie:!0,disable_persistence:!0,ip:!1,property_blacklist:["$city","$region","mp_country_code","$geo_source","$timezone","mp_lib","$lib_version","$device_id","$user_id","$current_url","$screen_width","$screen_height","$referrer","$referring_domain","$initial_referrer","$initial_referring_domain"]}),k.a.reset(),k.a.identify("reveal-single-user"),n&&(q.project=n),t&&(q.application=t),K("init",r))}function K(e,n){if(!j)return;const t={...q,...n};k.a.track(e,t)}function X(e,n){K("loadModel",{...e,modelIdentifier:n})}function Y(e,n){F.a.error(e),K("error",{message:e.message,name:e.name,stack:e.stack,...n})}function Z(e){K("cameraNavigated",e)}
|
|
60
|
+
*/class O{constructor(){this.workerList=[];const e=this.determineNumberOfWorkers();for(let t=0;t<e;t++){const e={worker:Object(N.wrap)(this.createWorker()),activeJobCount:0,messageIdCounter:0};this.workerList.push(e)}(async function(e){let t;try{t=await e.getVersion()}catch(e){t="1.1.0"}const r="1.2.0",[n,o,i]=r.split(".").map(e=>parseInt(e,10)),[s,a,c]=t.split(".").map(e=>parseInt(e,10)),l=`Update your local copy of @cognite/reveal-parser-worker. Required version is ${r}. Received ${t}.`;if(n!==s)throw new Error(l);if(a<o)throw new Error(l);if(a===o&&c<i)throw new Error(l)}
|
|
67
61
|
/*!
|
|
68
62
|
* Copyright 2021 Cognite AS
|
|
69
|
-
*/function
|
|
63
|
+
*/)(this.workerList[0].worker).catch(e=>R.a.error(e)),this.workerObjUrl&&URL.revokeObjectURL(this.workerObjUrl)}static get defaultPool(){return O._defaultPool=O._defaultPool||new O,O._defaultPool}createWorker(){const e=(F.publicPath||r.p)+"reveal.parser.worker.js",t={name:"reveal.parser #"+this.workerList.length};if(function(e,t=location.origin){const r=e=>!e.match(/^.*\/\//);if(r(t))throw new Error("isTheSameDomain: the second argument must be an absolute url or omitted. Received "+t);if(r(e))return!0;try{const r=[e,t].map(e=>e.startsWith("//")?"https:"+e:e).map(e=>new URL(e));return r[0].host===r[1].host}catch(r){return console.error(`can not create URLs for ${e} and ${t}`,r),!1}}(e))return new Worker(e,t);if(!this.workerObjUrl){const t=new Blob([`importScripts(${JSON.stringify(e)});`],{type:"text/javascript"});this.workerObjUrl=URL.createObjectURL(t)}return new Worker(this.workerObjUrl,t)}async postWorkToAvailable(e){const t=this.workerList.reduce((e,t)=>e.activeJobCount>t.activeJobCount?t:e,this.workerList[0]);t.activeJobCount+=1;return await(async()=>{try{return await e(t.worker)}finally{t.activeJobCount-=1}})()}determineNumberOfWorkers(){return Math.max(2,Math.min(4,window.navigator.hardwareConcurrency||2))}}class V{constructor(e){this._value=e,this._lastAccessTime=Date.now()}get value(){return this.touch(),this._value}get lastAccessTime(){return this._lastAccessTime}touch(){this._lastAccessTime=Date.now()}}class z{constructor(e=50,t,r=10){this._data=new Map,this._maxElementsInCache=e,this._defaultCleanupCount=r,this._removeCallback=t}has(e){return this._data.has(e)}forceInsert(e,t){this.isFull()&&this.cleanCache(this._defaultCleanupCount),this.insert(e,t)}insert(e,t){if(!(this._data.size<this._maxElementsInCache))throw new Error("Cache full, please clean Cache and retry adding data");this._data.set(e,new V(t))}remove(e){if(void 0!==this._removeCallback){const t=this._data.get(e);void 0!==t&&this._removeCallback(t.value)}this._data.delete(e)}get(e){const 
t=this._data.get(e);if(void 0!==t)return t.value;throw new Error(`Cache element ${e} does not exist`)}isFull(){return!(this._data.size<this._maxElementsInCache)}cleanCache(e){const t=Array.from(this._data.entries());t.sort((e,t)=>t[1].lastAccessTime-e[1].lastAccessTime);for(let r=0;r<e;r++){const e=t.pop();if(void 0===e)return;this.remove(e[0])}}clear(){if(void 0!==this._removeCallback)for(const e of this._data.values())this._removeCallback(e.value);this._data.clear()}}
|
|
70
64
|
/*!
|
|
71
65
|
* Copyright 2021 Cognite AS
|
|
72
|
-
*/
|
|
66
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Least-frequently-used style cache: every get() bumps a per-key retrieval
// counter, and when an insert pushes the cache past its capacity the
// least-retrieved entries are evicted, invoking the optional dispose
// callback for each evicted value.
class $ {
  constructor(capacity, disposeCallback) {
    this._cache = new Map();
    this._retrieves = new Map();
    this._capacity = capacity;
    this._disposeCallback = disposeCallback;
  }

  // Looks up `key`; the access is counted even when the key is absent.
  get(key) {
    const hits = this._retrieves.get(key) || 0;
    this._retrieves.set(key, hits + 1);
    return this._cache.get(key);
  }

  // Inserts or overwrites `key`. A brand-new key that exceeds the capacity
  // triggers eviction of the coldest entries; returns whether `key` itself
  // survived the eviction.
  set(key, value) {
    const skipEviction = this._cache.has(key) || this._capacity < this._cache.size;
    this._cache.set(key, value);
    if (skipEviction) {
      return true;
    }
    this.ensureWithinCapacity();
    return this._cache.has(key);
  }

  // Drops `key` and its retrieval count, disposing the stored value.
  // Returns true when something was actually removed.
  remove(key) {
    this._retrieves.delete(key);
    const value = this._cache.get(key);
    if (value === undefined) {
      return false;
    }
    if (this._disposeCallback !== undefined) {
      this._disposeCallback(value);
    }
    this._cache.delete(key);
    return true;
  }

  // Empties the cache, disposing every remaining value.
  clear() {
    if (this._disposeCallback !== undefined) {
      for (const value of this._cache.values()) {
        this._disposeCallback(value);
      }
    }
    this._retrieves.clear();
    this._cache.clear();
  }

  // Evicts the least-retrieved keys until size is back within capacity.
  ensureWithinCapacity() {
    const excess = this._cache.size - this._capacity;
    if (excess <= 0) {
      return;
    }
    const coldestKeys = Array.from(this._cache.keys())
      .map((key) => ({ key, retrivalCount: this._retrieves.get(key) || 0 }))
      .sort((left, right) => left.retrivalCount - right.retrivalCount)
      .slice(0, excess)
      .map((entry) => entry.key);
    for (const key of coldestKeys) {
      this.remove(key);
    }
  }
}
|
|
73
67
|
/*!
|
|
74
68
|
* Copyright 2021 Cognite AS
|
|
75
|
-
*/
|
|
76
|
-
class s{constructor(e){this._changedEvent=new a.d,this._classToken=e}get classToken(){return this._classToken}on(e,n){i()("changed"===e),this._changedEvent.subscribe(n)}off(e,n){i()("changed"===e),this._changedEvent.unsubscribe(n)}notifyChanged(){this._changedEvent.fire()}}class l extends s{constructor(e,n){super(e),this._cachedCombinedIndexSet=void 0,this._nodeCollections=[],this._changedUnderlyingNodeCollectionHandler=this.makeDirty.bind(this),n&&n.forEach(e=>this.add(e))}add(e){e.on("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.push(e),this.makeDirty()}remove(e){const n=this._nodeCollections.indexOf(e);if(n<0)throw new Error("Could not find set");e.off("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.splice(n,1),this.makeDirty()}clear(){this._nodeCollections.forEach(e=>e.clear())}makeDirty(){void 0!==this._cachedCombinedIndexSet&&(this._cachedCombinedIndexSet=void 0,this.notifyChanged())}getIndexSet(){var e;return this._cachedCombinedIndexSet=null!==(e=this._cachedCombinedIndexSet)&&void 0!==e?e:this.createCombinedIndexSet(),this._cachedCombinedIndexSet}get isLoading(){return this._nodeCollections.some(e=>e.isLoading)}}class d extends l{constructor(e){super(d.classToken,e)}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new a.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let n=1;n<this._nodeCollections.length;++n)e.unionWith(this._nodeCollections[n].getIndexSet());return e}}d.classToken="UnionNodeCollection";class c extends l{constructor(e){super(d.classToken,e)}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new a.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let 
n=1;n<this._nodeCollections.length;++n)e.intersectWith(this._nodeCollections[n].getIndexSet());return e}}c.classToken="IntersectionNodeCollection";
|
|
69
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Constructor-style holder for a lazily assigned array (used as an output
// parameter): `array` stays null until a producer fills it in.
function j(){this.array=null}
|
|
77
70
|
/*!
|
|
78
71
|
* Copyright 2021 Cognite AS
|
|
79
|
-
*/
|
|
80
|
-
class u extends s{constructor(e){super(u.classToken),e instanceof a.e?this._treeIndices=e:(a.h,this._treeIndices=new a.e(e))}updateSet(e){this._treeIndices=e,this.notifyChanged()}clear(){this._treeIndices=new a.e,this.notifyChanged()}getIndexSet(){return this._treeIndices}get isLoading(){return!1}serialize(){return{token:this.classToken,state:this._treeIndices.toRangeArray()}}}u.classToken="TreeIndexNodeCollection",function(e){e[e.NoOutline=0]="NoOutline",e[e.White=1]="White",e[e.Black=2]="Black",e[e.Cyan=3]="Cyan",e[e.Blue=4]="Blue",e[e.Green=5]="Green",e[e.Red=6]="Red",e[e.Orange=7]="Orange"}(r||(r={}));const m={visible:!0,outlineColor:r.White},f={visible:!0,renderInFront:!0},v={Default:{visible:!0,renderGhosted:!1,renderInFront:!1,outlineColor:r.NoOutline,color:[0,0,0]},Outlined:m,Hidden:{visible:!1},InFront:f,Ghosted:{visible:!0,renderGhosted:!0},Highlighted:{...f,visible:!0,color:[100,100,255],...m}};
|
|
72
|
+
*/},function(e,t,r){"use strict";r.d(t,"a",(function(){return l})),r.d(t,"b",(function(){return h})),r.d(t,"g",(function(){return f})),r.d(t,"f",(function(){return g})),r.d(t,"e",(function(){return v})),r.d(t,"c",(function(){return w})),r.d(t,"d",(function(){return u})),r.d(t,"n",(function(){return M})),r.d(t,"o",(function(){return B})),r.d(t,"p",(function(){return I})),r.d(t,"q",(function(){return S})),r.d(t,"m",(function(){return k})),r.d(t,"i",(function(){return P})),r.d(t,"t",(function(){return T})),r.d(t,"k",(function(){return R})),r.d(t,"x",(function(){return N})),r.d(t,"r",(function(){return $})),r.d(t,"w",(function(){return V})),r.d(t,"j",(function(){return D})),r.d(t,"u",(function(){return A})),r.d(t,"v",(function(){return z})),r.d(t,"s",(function(){return j})),r.d(t,"h",(function(){return d})),r.d(t,"l",(function(){return L}));var n=r(0),o=r(1),i=r(24),s=r.n(i);
|
|
81
73
|
/*!
|
|
82
74
|
* Copyright 2021 Cognite AS
|
|
83
75
|
*/
|
|
84
|
-
class
|
|
76
|
+
class a{constructor(e,t,r,n,o){this.version=e,this.maxTreeIndex=t,this.root=n,this.sectors=o,this.unit=r}get sectorCount(){return this.sectors.size}getSectorById(e){return this.sectors.get(e)}getAllSectors(){return[...this.sectors.values()]}getSectorsContainingPoint(e){const t=[];return Object(o.t)(this.root,r=>!!r.bounds.containsPoint(e)&&(t.push(r),!0)),t}getSectorsIntersectingBox(e){const t=[];return Object(o.t)(this.root,r=>!!r.bounds.intersectsBox(e)&&(t.push(r),!0)),t}getBoundsOfMostGeometry(){if(0===this.root.children.length)return this.root.bounds;const e=[],t=[];Object(o.t)(this.root,r=>(0===r.children.length&&(t.push(r.bounds.min.toArray(),r.bounds.max.toArray()),e.push(r.bounds,r.bounds)),!0));const r=Math.min(t.length,4),i=s()(t,r,"kmpp",10),a=new Array(i.idxs.length).fill(0),c=a.map(e=>new n.Box3);i.idxs.map(e=>a[e]++);const l=a.reduce((e,t,r)=>(t>e.count&&(e.count=t,e.idx=r),e),{count:0,idx:-1}).idx;i.idxs.forEach((t,r)=>{a[t]++,c[t].expandByPoint(e[r].min),c[t].expandByPoint(e[r].max)});const d=c.filter((e,t)=>!(t===l||!e.intersectsBox(c[l])));if(d.length>0){const e=c[l].clone();return d.forEach(t=>{e.expandByPoint(t.min),e.expandByPoint(t.max)}),e}return c[l]}getSectorsIntersectingFrustum(e,t){const r=(new n.Matrix4).multiplyMatrices(e,t),i=(new n.Frustum).setFromProjectionMatrix(r),s=[];return Object(o.t)(this.root,e=>!!i.intersectsBox(e.bounds)&&(s.push(e),!0)),s}}
|
|
85
77
|
/*!
|
|
86
78
|
* Copyright 2021 Cognite AS
|
|
87
|
-
*/
|
|
79
|
+
*/function c(e){const t=new Map,r=[];e.sectors.forEach(e=>{const o=function(e){const t=function(e){if(!e.facesFile)return{quadSize:-1,coverageFactors:{xy:-1,yz:-1,xz:-1},recursiveCoverageFactors:{xy:-1,yz:-1,xz:-1},fileName:null,downloadSize:e.indexFile.downloadSize};return{...e.facesFile,recursiveCoverageFactors:e.facesFile.recursiveCoverageFactors||e.facesFile.coverageFactors}}(e),r=e.boundingBox,o=r.min.x,i=r.min.y,s=r.min.z,a=r.max.x,c=r.max.y,l=r.max.z;return{id:e.id,path:e.path,depth:e.depth,bounds:new n.Box3(new n.Vector3(o,i,s),new n.Vector3(a,c,l)),estimatedDrawCallCount:e.estimatedDrawCallCount,estimatedRenderCost:e.estimatedTriangleCount||0,indexFile:{...e.indexFile},facesFile:t,children:[]}}(e);t.set(e.id,o),r[e.id]=e.parentId});for(const e of t.values()){const n=r[e.id];if(-1===n)continue;t.get(n).children.push(e)}const o=t.get(0);if(!o)throw new Error("Root sector not found, must have ID 0");!function e(t,r){!function(e){return-1===e.facesFile.coverageFactors.xy}(t)?t.children.forEach(r=>e(r,t.facesFile)):(t.facesFile.coverageFactors.xy=r.recursiveCoverageFactors.xy,t.facesFile.coverageFactors.yz=r.recursiveCoverageFactors.yz,t.facesFile.coverageFactors.xz=r.recursiveCoverageFactors.xz,t.facesFile.recursiveCoverageFactors.xy=r.recursiveCoverageFactors.xy,t.facesFile.recursiveCoverageFactors.yz=r.recursiveCoverageFactors.yz,t.facesFile.recursiveCoverageFactors.xz=r.recursiveCoverageFactors.xz,t.children.forEach(t=>e(t,r)))}
|
|
88
80
|
/*!
|
|
89
81
|
* Copyright 2021 Cognite AS
|
|
90
|
-
*/
|
|
91
|
-
class i extends r.Group{constructor(e,n,t){super(),this._lod=o.a.Discarded,this._updatedTimestamp=Date.now(),this.name=`Sector ${n} [id=${e}]`,this.sectorId=e,this.sectorPath=n,this.bounds=t,this.depth=function(e){let n=0;for(let t=0;t<e.length;++t)n+="/"===e[t]?1:0;return n-1}
|
|
82
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// NOTE(review): the statements before `class l` are the tail of the v8
// metadata converter `c` that begins earlier in the bundle (the diff
// rendering split the definition): the recursive coverage-factor fixup is
// invoked on the root, the unit defaults to "Meters", and the assembled
// SectorScene (`a`) is returned.
(o,o.facesFile);const i=null!==e.unit?e.unit:"Meters";return new a(e.version,e.maxTreeIndex,i,o,t)}
// Parser facade for CAD metadata JSON: only format version 8 is supported;
// a missing version field and unknown versions raise distinct errors.
class l{
parse(e){const t=e.version;switch(t){case 8:return c(e);case void 0:throw new Error('Metadata must contain a "version"-field');default:throw new Error(`Version ${t} is not supported`)}}
}
|
|
92
83
|
/*!
|
|
93
84
|
* Copyright 2021 Cognite AS
|
|
94
|
-
*/
|
|
85
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Conversion factors from the distance units a CAD model may declare to
// meters (used to build the unit-scaling part of the model matrix).
const d = new Map(
  Object.entries({
    Meters: 1,
    Centimeters: 0.01,
    Millimeters: 0.001,
    Micrometers: 1e-6,
    Kilometers: 1000,
    Feet: 0.3048,
    Inches: 0.0254,
    Yards: 0.9144,
    Miles: 1609.34,
    Mils: 2.54e-5,
    Microinches: 2.54e-8
  })
);
|
|
95
86
|
/*!
|
|
96
87
|
* Copyright 2021 Cognite AS
|
|
97
|
-
*/
|
|
88
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Repository that assembles everything needed to add a CAD model: fetches
// the scene descriptor (default "scene.json") through the data provider,
// parses it, and derives the unit-scaled model matrix, its inverse and the
// camera configuration. Each load gets a fresh sequential model identifier.
class h{
// (model metadata provider, model data provider, scene parser, blob name)
constructor(e,t,r,n="scene.json"){this._currentModelIdentifier=0,this._modelMetadataProvider=e,this._modelDataProvider=t,this._cadSceneParser=r,this._blobFileName=n}
// The three provider requests are issued up front and awaited later so they
// run concurrently. The inline helper scales by the unit factor from `d`
// and throws for units missing from that table.
async loadData(e){const t=this._modelMetadataProvider.getModelUri(e),r=this._modelMetadataProvider.getModelMatrix(e),i=this._modelMetadataProvider.getModelCamera(e),s=await t,a=await this._modelDataProvider.getJsonFile(s,this._blobFileName),c=this._cadSceneParser.parse(a),l=function(e,t){const r=d.get(e);if(void 0===r)throw new Error(`Unknown model unit '${e}'`);return(new n.Matrix4).makeScale(r,r,r).multiply(t)}(c.unit,await r),h=(new n.Matrix4).copy(l).invert(),u=await i;return{modelIdentifier:""+this._currentModelIdentifier++,modelBaseUrl:s,geometryClipBox:null,modelMatrix:l,inverseModelMatrix:h,cameraConfiguration:Object(o.s)(u,l),scene:c}}
}
var u,m=r(7),p=r.n(m);
|
|
98
89
|
/*!
|
|
99
90
|
* Copyright 2021 Cognite AS
|
|
100
91
|
*/
|
|
101
|
-
class
|
|
92
|
+
// Factory building a SectorScene (`a`) from a parsed sector tree: traverses
// the tree once to index every sector by id. Only format version 8 is
// accepted; `p` is the assert helper, `o.t` the depth-first traversal.
class f {
  createSectorScene(version, maxTreeIndex, unit, root) {
    p()(8 === version, 'Only version 8 is currently supported');
    const sectorsById = new Map();
    Object(o.t)(root, (sector) => {
      sectorsById.set(sector.id, sector);
      return true;
    });
    return new a(version, maxTreeIndex, unit, root, sectorsById);
  }
}
|
|
102
93
|
/*!
|
|
103
94
|
* Copyright 2021 Cognite AS
|
|
104
|
-
|
|
105
|
-
/*!
|
|
106
|
-
* Copyright 2021 Cognite AS
|
|
107
|
-
*/const{boxGeometry:c,boxGeometryBoundingBox:u}=(()=>{const e=new r.BoxBufferGeometry(1,1,1,1,1,1);try{const n={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{boxGeometry:n,boxGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{quadGeometry:m,quadGeometryBoundingBox:f}=(()=>{const e=new r.PlaneBufferGeometry(1,1,1,1);try{const n={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{quadGeometry:n,quadGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{trapeziumGeometry:v,trapeziumGeometryBoundingBox:p}=(()=>{const e=[0,0,0,1,1,1,2,2,2,3,3,3];return{trapeziumGeometry:{index:new r.BufferAttribute(new Uint16Array([0,1,3,0,3,2]),1),position:new r.BufferAttribute(new Float32Array(e),3)},trapeziumGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{coneGeometry:h,coneGeometryBoundingBox:x}=(()=>{const e=[];e.push(-1,1,-1),e.push(-1,-1,-1),e.push(1,1,-1),e.push(1,-1,-1),e.push(1,1,1),e.push(1,-1,1);const n=new Uint16Array([1,2,0,1,3,2,3,4,2,3,5,4]);return{coneGeometry:{index:new r.BufferAttribute(n,1),position:new r.BufferAttribute(new Float32Array(e),3)},coneGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{torusLodGeometries:g,torusGeometryBoundingBox:b}=(()=>{const e=(e,n)=>[e,2*n*Math.PI],n=[{tubularSegments:9,radialSegments:18},{tubularSegments:5,radialSegments:12},{tubularSegments:4,radialSegments:5}].map(({tubularSegments:n,radialSegments:t})=>function(e,n,t=((e,n)=>[e,n,0])){const o=[],i=[],a=1/e,s=1/n;for(let r=0;r<=n;r++)for(let n=0;n<=e;n++){const[e,i,l]=t(n*a,r*s);o.push(e||0,i||0,l||0)}for(let t=1;t<=n;t++)for(let n=1;n<=e;n++){const r=(e+1)*t+n-1,o=(e+1)*(t-1)+n-1,a=(e+1)*(t-1)+n,s=(e+1)*t+n;i.push(r,o,s),i.push(o,a,s)}return{index:new r.Uint16BufferAttribute(i,1),position:new r.Float32BufferAttribute(o,3)}}(t,n,e));return{torusLodGeometries:n,torusGeometryBoundingBox:(new 
r.Box3).setFromArray(n[n.length-1].position.array)}})(),{nutGeometry:y,nutGeometryBoundingBox:_}=(()=>{const e=new r.CylinderBufferGeometry(.5,.5,1,6);try{e.applyMatrix4((new r.Matrix4).makeRotationX(-Math.PI/2));const n={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return{nutGeometry:n,nutGeometryBoundingBox:(new r.Box3).setFromArray(n.position.array)}}finally{e.dispose()}})();var T=t(6),I=t.n(T);
|
|
108
|
-
/*!
|
|
109
|
-
* Copyright 2021 Cognite AS
|
|
110
|
-
*/
|
|
111
|
-
const C={centerA:new r.Vector3,centerB:new r.Vector3,sphere:new r.Sphere,box:new r.Box3};const w={vertex1:new r.Vector3,vertex2:new r.Vector3,vertex3:new r.Vector3,vertex4:new r.Vector3};const S={instanceMatrix:new r.Matrix4};const M={center:new r.Vector3,size:new r.Vector3};
|
|
95
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Runtime remnant of a TypeScript enum: populates `u` (level of detail)
// with bidirectional name<->value mappings
// (Discarded = 0, Simple = 1, Detailed = 2).
!function(e){e[e.Discarded=0]="Discarded",e[e.Simple=1]="Simple",e[e.Detailed=2]="Detailed"}(u||(u={}));
|
|
112
96
|
/*!
|
|
113
97
|
* Copyright 2021 Cognite AS
|
|
114
98
|
*/
|
|
115
|
-
|
|
99
|
+
class g extends n.Group{constructor(e,t,r){super(),this._lod=u.Discarded,this._updatedTimestamp=Date.now(),this.name=`Sector ${t} [id=${e}]`,this.sectorId=e,this.sectorPath=t,this.bounds=r,this.depth=function(e){let t=0;for(let r=0;r<e.length;++r)t+="/"===e[r]?1:0;return t-1}
|
|
116
100
|
/*!
|
|
117
101
|
* Copyright 2021 Cognite AS
|
|
118
|
-
*/
|
|
102
|
+
*/(t)}get levelOfDetail(){return this._lod}get group(){return this._group}get updatedTimestamp(){return this._updatedTimestamp}updateGeometry(e,t){this.resetGeometry(),this._group=e,void 0!==this._group&&this._group.reference(),this._lod=t,this._updatedTimestamp=Date.now(),this.visible=this._lod!==u.Discarded,this.updateMatrixWorld(!0)}resetGeometry(){void 0!==this._group&&(this._group.dereference(),this.remove(this._group)),this._group=void 0,this._lod=u.Discarded,this._updatedTimestamp=Date.now()}}class v extends g{constructor(e){const t=e.scene.root.bounds.clone();t.applyMatrix4(e.modelMatrix),super(0,"/",t);const{scene:r,modelMatrix:n}=e;this.sectorNodeMap=new Map,function e(t,r,n,o){const i=t.bounds.clone();i.applyMatrix4(o);const s=new g(t.id,t.path,i);s.name="Sector "+t.id,r.add(s),s.matrixAutoUpdate=!1,s.updateMatrixWorld(!0),n.set(t.id,s);for(const r of t.children)e(r,s,n,o)}
|
|
119
103
|
/*!
|
|
120
104
|
* Copyright 2021 Cognite AS
|
|
121
|
-
*/
|
|
122
|
-
function q(e,n,t,o){const i=[],a=e.filter(e=>null===o||function(e,n){const{p:t,box:r}=H;r.makeEmpty();for(let n=0;n<e.vertices.length;n+=3)t.set(e.vertices[n+0],e.vertices[n+1],e.vertices[n+2]),r.expandByPoint(t);return n.intersectsBox(r)}
|
|
105
|
+
*/(r.root,this,this.sectorNodeMap,n),this.matrixAutoUpdate=!1,this.setModelTransformation(n)}setModelTransformation(e){this.matrix.copy(e),this.updateMatrixWorld(!0)}getModelTransformation(e=new n.Matrix4){return e.copy(this.matrix)}}class w{constructor(e=o.j.defaultPool){this.workerPool=e}parseI3D(e){return this.parseDetailed(e)}parseF3D(e){return this.parseSimple(e)}parseCTM(e){return this.parseCtm(e)}async parseSimple(e){return this.workerPool.postWorkToAvailable(async t=>t.parseQuads(e))}async parseDetailed(e){return this.workerPool.postWorkToAvailable(async t=>t.parseSector(e))}async parseCtm(e){return this.workerPool.postWorkToAvailable(async t=>t.parseCtm(e))}}
|
|
123
106
|
/*!
|
|
124
107
|
* Copyright 2021 Cognite AS
|
|
125
|
-
*/
|
|
108
|
+
/*!
 * Copyright 2021 Cognite AS
 */
// Preallocated scratch objects shared by the clip-box filtering helpers in
// this module, avoiding vector/box allocations per primitive:
// x: cylinder/cone-style primitives (two centers + swept spheres),
// _: trapezium-style primitives (four vertices),
// y: instanced primitives (one transform), b: ellipsoid-style primitives.
const x={centerA:new n.Vector3,centerB:new n.Vector3,sphere:new n.Sphere,box:new n.Box3};
const _={vertex1:new n.Vector3,vertex2:new n.Vector3,vertex3:new n.Vector3,vertex4:new n.Vector3};
const y={instanceMatrix:new n.Matrix4};
const b={center:new n.Vector3,size:new n.Vector3};
|
|
126
109
|
/*!
|
|
127
110
|
* Copyright 2021 Cognite AS
|
|
128
111
|
*/
|
|
129
|
-
const
|
|
112
|
+
function C(e,t,r,o){const i=Array.from(t.values()).reduce((e,t)=>Math.max(e,t.offset+t.size),0),s=e.length/i,a=new Float32Array(e.buffer),c=new n.Box3,l=new Uint8Array(e.length);let d=0;for(let t=0;t<s;++t)if(o(t,i,a,c),r.intersectsBox(c)){const r=e.subarray(t*i,(t+1)*i);l.set(r,d*i),d++}return l.slice(0,d*i)}function M(e,t,r,n){if(null===n)return e;const o=t.get("instanceMatrix");return p()(void 0!==o),C(e,t,n,(e,t,n,i)=>{!function(e,t,r,n,o,i){const{instanceMatrix:s}=y,a=(n*r+e.offset)/t.BYTES_PER_ELEMENT;s.set(t[a+0],t[a+4],t[a+8],t[a+12],t[a+1],t[a+5],t[a+9],t[a+13],t[a+2],t[a+6],t[a+10],t[a+14],t[a+3],t[a+7],t[a+11],t[a+15]),i.copy(o).applyMatrix4(s)}(o,n,t,e,r,i)})}function B(e,t,r,n="radiusA",o="radiusB"){if(null===r)return e;const i=t.get("centerA"),s=t.get("centerB"),a=t.get(n),c=t.get(o);return p()(void 0!==i&&void 0!==s&&void 0!==a&&void 0!==c),C(e,t,r,(e,t,r,n)=>{!function(e,t,r,n,o,i,s,a){const{centerA:c,centerB:l,sphere:d,box:h}=x;function u(e,t=0){const r=(s*i+e.offset)/o.BYTES_PER_ELEMENT;return o[r+t]}c.set(u(e,0),u(e,1),u(e,2)),l.set(u(t,0),u(t,1),u(t,2));const m=u(r),p=u(n);d.set(c,m),d.getBoundingBox(a),d.set(l,p),d.getBoundingBox(h),a.expandByPoint(h.min),a.expandByPoint(h.max)}(i,s,a,c,r,t,e,n)})}function S(e,t,r){if(null===r)return e;const n=t.get("vertex1"),o=t.get("vertex2"),i=t.get("vertex3"),s=t.get("vertex4");return p()(void 0!==n&&void 0!==o&&void 0!==i&&void 0!==s),C(e,t,r,(e,t,r,a)=>{!function(e,t,r,n,o,i,s,a){const{vertex1:c,vertex2:l,vertex3:d,vertex4:h}=_;function u(e,t=0){const r=(s*i+e.offset)/o.BYTES_PER_ELEMENT;return o[r+t]}c.set(u(e,0),u(e,1),u(e,2)),l.set(u(t,0),u(t,1),u(t,2)),d.set(u(r,0),u(r,1),u(r,2)),h.set(u(n,0),u(n,1),u(n,2)),a.setFromPoints([c,l,d,h])}(n,o,i,s,r,t,e,a)})}function I(e,t,r,n="horizontalRadius",o="verticalRadius"){if(null===r)return e;const i=t.get("center"),s=t.get(n),a=t.get(o),c=t.get("height");return p()(void 0!==i&&void 0!==s&&void 0!==a&&void 
0!==c),C(e,t,r,(e,t,r,n)=>{!function(e,t,r,n,o,i,s,a){const{center:c,size:l}=b;function d(e,t=0){const r=(s*i+e.offset)/o.BYTES_PER_ELEMENT;return o[r+t]}c.set(d(e,0),d(e,1),d(e,2));const h=d(t),u=d(r),m=d(n),p=2*Math.max(h,u,m);l.set(p,p,p),a.setFromCenterAndSize(c,l)}(i,s,a,c,r,t,e,n)})}
|
|
130
113
|
/*!
|
|
131
114
|
* Copyright 2021 Cognite AS
|
|
132
115
|
*/
|
|
133
|
-
|
|
134
|
-
/*!
|
|
135
|
-
* Copyright 2021 Cognite AS
|
|
136
|
-
*/
|
|
137
|
-
class ee{constructor(e){this.materialManager=e}transformSimpleSector(e,n,t,r){const o=this.materialManager.getModelMaterials(e);return I()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve(Q(t,n.bounds,o,r))}transformDetailedSector(e,n,t,r){const o=this.materialManager.getModelMaterials(e);return I()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve($(t,n,o,r))}}
|
|
138
|
-
/*!
|
|
139
|
-
* Copyright 2021 Cognite AS
|
|
140
|
-
*/const ne=(new r.Matrix4).identity();class te{constructor(){this._events={changed:new s.d}}on(e,n){switch(e){case"changed":this._events.changed.subscribe(n);break;default:Object(s.k)(e,`Unsupported event: '${e}'`)}}off(e,n){switch(e){case"changed":this._events.changed.unsubscribe(n);break;default:Object(s.k)(e,`Unsupported event: '${e}'`)}}setNodeTransform(e,n){this._events.changed.fire("set",e,n)}resetNodeTransform(e){this._events.changed.fire("reset",e,ne)}}
|
|
116
|
+
const E={p:new n.Vector3,instanceMatrix:new n.Matrix4,baseBounds:new n.Box3,instanceBounds:new n.Box3};function k(e,t,r,n){if(null===n)return r;const{p:o,instanceMatrix:i,baseBounds:s,instanceBounds:a}=E;s.makeEmpty();for(let n=r.triangleOffset;n<r.triangleOffset+r.triangleCount;++n){const r=t[3*n+0],i=t[3*n+1],a=t[3*n+2];o.set(e[r+0],e[r+1],e[r+2]),s.expandByPoint(o),o.set(e[i+0],e[i+1],e[i+2]),s.expandByPoint(o),o.set(e[a+0],e[a+1],e[a+2]),s.expandByPoint(o)}let c=0;const l=r.treeIndices.length,d=new Float32Array(r.instanceMatrices.length),h=new Float32Array(l),u=new Uint8Array(4*l);for(let e=0;e<l;++e)if(m=r.instanceMatrices,p=e,i.set(m[p+0],m[p+4],m[p+8],m[p+12],m[p+1],m[p+5],m[p+9],m[p+13],m[p+2],m[p+6],m[p+10],m[p+14],m[p+3],m[p+7],m[p+11],m[p+15]),a.copy(s).applyMatrix4(i),n.intersectsBox(a)){const t=r.instanceMatrices.subarray(16*e,16*(e+1)),n=r.colors.subarray(4*e,4*(e+1)),o=r.treeIndices[e];d.set(t,16*c),u.set(n,4*c),h[c]=o,c++}var m,p;if(l===c)return r;return{triangleCount:r.triangleCount,triangleOffset:r.triangleOffset,instanceMatrices:d.slice(0,16*c),colors:u.slice(0,4*c),treeIndices:h.slice(0,c)}}
|
|
141
117
|
/*!
|
|
142
118
|
* Copyright 2021 Cognite AS
|
|
143
|
-
*/
|
|
144
|
-
/*!
|
|
145
|
-
* Copyright 2021 Cognite AS
|
|
146
|
-
*/function ie(e){const n=new Array(e.length);return e.forEach((t,r)=>{n[r]=r>0?n[r-1]+e[r-1]:0}),n}
|
|
147
|
-
/*!
|
|
148
|
-
* Copyright 2021 Cognite AS
|
|
149
|
-
*/class ae{constructor(e,n,t){this._modelSectorProvider=e,this._modelDataParser=n,this._modelDataTransformer=t,this._consumedSectorCache=new s.f(50,e=>{void 0!==e.group&&e.group.dereference()}),this._ctmFileCache=new s.g(10)}clear(){this._consumedSectorCache.clear(),this._ctmFileCache.clear()}async loadSector(e){var n,t;const r=this.wantedSectorCacheKey(e);try{if(this._consumedSectorCache.has(r))return this._consumedSectorCache.get(r);switch(e.levelOfDetail){case o.a.Detailed:{const t=await this.loadDetailedSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,t),null===(n=null==t?void 0:t.group)||void 0===n||n.reference(),t}case o.a.Simple:{const n=await this.loadSimpleSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,n),null===(t=null==n?void 0:n.group)||void 0===t||t.reference(),n}case o.a.Discarded:return{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:e.levelOfDetail,instancedMeshes:[],group:void 0};default:Object(s.k)(e.levelOfDetail)}}catch(e){throw this._consumedSectorCache.remove(r),Object(s.u)(e,{methodName:"loadSector",moduleName:"CachedRepository"}),e}}async loadSimpleSectorFromNetwork(e){const n=await this._modelSectorProvider.getBinaryFile(e.modelBaseUrl,e.metadata.facesFile.fileName),t=await this._modelDataParser.parseF3D(new Uint8Array(n)),r=await this._modelDataTransformer.transformSimpleSector(e.modelIdentifier,e.metadata,t,e.geometryClipBox);return{...e,group:r.sectorMeshes,instancedMeshes:r.instancedMeshes}}async loadI3DFromNetwork(e,n){const t=await this._modelSectorProvider.getBinaryFile(e,n);return this._modelDataParser.parseI3D(new Uint8Array(t))}async loadCtmsFromNetwork(e,n){const t=await Promise.all(n.map(n=>this.loadCtmFileFromNetwork(e,n)));return n.reduce((e,n,r)=>e.set(n,t[r]),new Map)}async loadDetailedSectorFromNetwork(e){const n=e.metadata.indexFile,t=this.loadI3DFromNetwork(e.modelBaseUrl,n.fileName),r=this.loadCtmsFromNetwork(e.modelBaseUrl,n.peripheralFiles),o=await t,i=await 
r,a=this.finalizeDetailed(o,i),s=await this._modelDataTransformer.transformDetailedSector(e.modelIdentifier,e.metadata,a,e.geometryClipBox);return{...e,group:s.sectorMeshes,instancedMeshes:s.instancedMeshes}}async loadCtmFileFromNetwork(e,n){const t=this.ctmFileCacheKey(e,n),r=this._ctmFileCache.get(t);if(void 0!==r)return r;const o=this._modelSectorProvider.getBinaryFile(e,n).then(e=>this._modelDataParser.parseCTM(new Uint8Array(e)));return this._ctmFileCache.set(t,o),o}finalizeDetailed(e,n){const{instanceMeshes:t,triangleMeshes:r}=e,o=(()=>{const{fileIds:e,colors:t,triangleCounts:o,treeIndices:i}=r,a=[];for(const{id:r,meshIndices:s}of re(e)){const e=s.map(e=>o[e]),l=ie(e),d=`mesh_${r}.ctm`,{indices:c,vertices:u,normals:m}=n.get(d),f=new Uint8Array(3*c.length),v=new Float32Array(c.length);for(let n=0;n<s.length;n++){const r=s[n],o=i[r],a=l[n],d=e[n],[u,m,p]=[t[4*r+0],t[4*r+1],t[4*r+2]];for(let e=a;e<a+d;e++)for(let n=0;n<3;n++){const t=c[3*e+n];v[t]=o,f[3*t]=u,f[3*t+1]=m,f[3*t+2]=p}}const p={colors:f,fileId:r,treeIndices:v,indices:c,vertices:u,normals:m};a.push(p)}return a})(),i=(()=>{const{fileIds:e,colors:r,treeIndices:o,triangleCounts:i,triangleOffsets:a,instanceMatrices:s}=t,l=[];for(const{id:t,meshIndices:d}of re(e)){const e=`mesh_${t}.ctm`,c=n.get(e),u=c.indices,m=c.vertices,f=[],v=new Float64Array(d.map(e=>a[e])),p=new Float64Array(d.map(e=>i[e]));for(const{id:e,meshIndices:n}of re(v)){const t=p[n[0]],i=new Float32Array(16*n.length),a=new Float32Array(n.length),l=new Uint8Array(4*n.length);for(let e=0;e<n.length;e++){const t=d[n[e]],c=o[t],u=s.subarray(16*t,16*t+16);i.set(u,16*e),a[e]=c;const m=r.subarray(4*t,4*t+4);l.set(m,4*e)}f.push({triangleCount:t,triangleOffset:e,instanceMatrices:i,colors:l,treeIndices:a})}const h={fileId:t,indices:u,vertices:m,instances:f};l.push(h)}return 
l})();return{treeIndexToNodeIdMap:e.treeIndexToNodeIdMap,nodeIdToTreeIndexMap:e.nodeIdToTreeIndexMap,primitives:e.primitives,instanceMeshes:i,triangleMeshes:o}}wantedSectorCacheKey(e){return e.modelIdentifier+"."+e.metadata.id+"."+e.levelOfDetail}ctmFileCacheKey(e,n){return e+"."+n}}
|
|
150
|
-
/*!
|
|
151
|
-
* Copyright 2021 Cognite AS
|
|
152
|
-
*/var se,le;!function(e){e[e.NoAA=0]="NoAA",e[e.FXAA=1]="FXAA"}(se||(se={})),function(e){e[e.Medium=32]="Medium",e[e.High=64]="High",e[e.VeryHigh=128]="VeryHigh",e[e.None=1]="None",e[e.Default=32]="Default"}(le||(le={}));const de={antiAliasing:se.FXAA,multiSampleCountHint:1,ssaoRenderParameters:{sampleSize:le.Default,sampleRadius:1,depthCheckBias:.0125},edgeDetectionParameters:{enabled:!0}};
|
|
153
|
-
/*!
|
|
154
|
-
* Copyright 2021 Cognite AS
|
|
155
|
-
*/class ce{}ce.Black=new r.Color("rgb(0, 0, 0)"),ce.White=new r.Color("rgb(255, 255, 255)"),ce.Cyan=new r.Color("rgb(102, 213, 234)"),ce.Blue=new r.Color("rgb(77, 106, 242)"),ce.Purple=new r.Color("rgb(186, 82, 212)"),ce.Pink=new r.Color("rgb(232, 64, 117)"),ce.Orange=new r.Color("rgb(238, 113, 53)"),ce.Yellow=new r.Color("rgb(246, 189, 65)"),ce.VeryLightGray=new r.Color("rgb(247, 246, 245)"),ce.LightGray=new r.Color("rgb(242, 241, 240)");class ue{}ue.Red=new r.Color("rgb(235,0,4)"),ue.Green=new r.Color("rgb(46,164,79)");var me=t(5),fe=t(3),ve=t.n(fe);
|
|
119
|
+
*/const{boxGeometry:P,boxGeometryBoundingBox:D}=(()=>{const e=new n.BoxBufferGeometry(1,1,1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{boxGeometry:t,boxGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{quadGeometry:T,quadGeometryBoundingBox:A}=(()=>{const e=new n.PlaneBufferGeometry(1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{quadGeometry:t,quadGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{trapeziumGeometry:N,trapeziumGeometryBoundingBox:F}=(()=>{const e=[0,0,0,1,1,1,2,2,2,3,3,3];return{trapeziumGeometry:{index:new n.BufferAttribute(new Uint16Array([0,1,3,0,3,2]),1),position:new n.BufferAttribute(new Float32Array(e),3)},trapeziumGeometryBoundingBox:(new n.Box3).setFromArray(e)}})(),{coneGeometry:R,coneGeometryBoundingBox:O}=(()=>{const e=[];e.push(-1,1,-1),e.push(-1,-1,-1),e.push(1,1,-1),e.push(1,-1,-1),e.push(1,1,1),e.push(1,-1,1);const t=new Uint16Array([1,2,0,1,3,2,3,4,2,3,5,4]);return{coneGeometry:{index:new n.BufferAttribute(t,1),position:new n.BufferAttribute(new Float32Array(e),3)},coneGeometryBoundingBox:(new n.Box3).setFromArray(e)}})(),{torusLodGeometries:V,torusGeometryBoundingBox:z}=(()=>{const e=(e,t)=>[e,2*t*Math.PI],t=[{tubularSegments:9,radialSegments:18},{tubularSegments:5,radialSegments:12},{tubularSegments:4,radialSegments:5}].map(({tubularSegments:t,radialSegments:r})=>function(e,t,r=((e,t)=>[e,t,0])){const o=[],i=[],s=1/e,a=1/t;for(let n=0;n<=t;n++)for(let t=0;t<=e;t++){const[e,i,c]=r(t*s,n*a);o.push(e||0,i||0,c||0)}for(let r=1;r<=t;r++)for(let t=1;t<=e;t++){const n=(e+1)*r+t-1,o=(e+1)*(r-1)+t-1,s=(e+1)*(r-1)+t,a=(e+1)*r+t;i.push(n,o,a),i.push(o,s,a)}return{index:new n.Uint16BufferAttribute(i,1),position:new n.Float32BufferAttribute(o,3)}}(r,t,e));return{torusLodGeometries:t,torusGeometryBoundingBox:(new 
n.Box3).setFromArray(t[t.length-1].position.array)}})(),{nutGeometry:$,nutGeometryBoundingBox:j}=(()=>{const e=new n.CylinderBufferGeometry(.5,.5,1,6);try{e.applyMatrix4((new n.Matrix4).makeRotationX(-Math.PI/2));const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return{nutGeometry:t,nutGeometryBoundingBox:(new n.Box3).setFromArray(t.position.array)}}finally{e.dispose()}})();
|
|
156
120
|
/*!
|
|
157
121
|
* Copyright 2021 Cognite AS
|
|
158
122
|
*/
|
|
159
|
-
|
|
160
|
-
/*!
|
|
161
|
-
* Copyright 2021 Cognite AS
|
|
162
|
-
*/class Ie{constructor(e,n,t,o){var i,a;this._lastFrameSceneState={hasBackElements:!0,hasInFrontElements:!0,hasGhostElements:!0,hasCustomObjects:!0},this._rootSectorNodeBuffer=new Set,this._outlineTexelSize=2,this._autoSetTargetSize=!1,this._uiObjects=[],this._renderer=e,this._renderOptions=o,this._materialManager=t,this._orthographicCamera=new r.OrthographicCamera(-1,1,1,-1,-1,1),this._renderTarget=null,this._originalScene=n,this._cadScene=new r.Scene,this._cadScene.autoUpdate=!1,this._normalScene=new r.Scene,this._normalScene.autoUpdate=!1,this._inFrontScene=new r.Scene,this._inFrontScene.autoUpdate=!1,this._compositionScene=new r.Scene,this._compositionScene.autoUpdate=!1,this._fxaaScene=new r.Scene,this._fxaaScene.autoUpdate=!1,this._ssaoScene=new r.Scene,this._ssaoScene.autoUpdate=!1,this._ssaoBlurScene=new r.Scene,this._ssaoBlurScene.autoUpdate=!1,this._emptyScene=new r.Scene,this._emptyScene.autoUpdate=!1;const s=e.capabilities.isWebGL2,l=this.createOutlineColorTexture();this._inFrontRenderedCadModelTarget=Ce(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._inFrontRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._inFrontRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._inFrontRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._normalRenderedCadModelTarget=Ce(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._normalRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._normalRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._normalRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._ghostObjectRenderTarget=Ce(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._ghostObjectRenderTarget.depthTexture=new 
r.DepthTexture(0,0),this._ghostObjectRenderTarget.depthTexture.format=r.DepthFormat,this._ghostObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._customObjectRenderTarget=Ce(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._customObjectRenderTarget.depthTexture=new r.DepthTexture(0,0),this._customObjectRenderTarget.depthTexture.format=r.DepthFormat,this._customObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._compositionTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._compositionTarget.depthTexture=new r.DepthTexture(0,0),this._compositionTarget.depthTexture.format=r.DepthFormat,this._compositionTarget.depthTexture.type=r.UnsignedIntType,this._ssaoTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoTarget.depthTexture.format=r.DepthFormat,this._ssaoTarget.depthTexture.type=r.UnsignedIntType,this._ssaoBlurTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoBlurTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoBlurTarget.depthTexture.format=r.DepthFormat,this._ssaoBlurTarget.depthTexture.type=r.UnsignedIntType,this._combineOutlineDetectionMaterial=new r.ShaderMaterial({vertexShader:xe.vertex,fragmentShader:xe.fragment,uniforms:{tFront:{value:this._inFrontRenderedCadModelTarget.texture},tFrontDepth:{value:this._inFrontRenderedCadModelTarget.depthTexture},tBack:{value:this._normalRenderedCadModelTarget.texture},tBackDepth:{value:this._normalRenderedCadModelTarget.depthTexture},tCustom:{value:this._customObjectRenderTarget.texture},tCustomDepth:{value:this._customObjectRenderTarget.depthTexture},tGhost:{value:this._ghostObjectRenderTarget.texture},tGhostDepth:{value:this._ghostObjectRenderTarget.depthTexture},tOutlineColors:{value:l},resolution:{value:new r.Vector2(0,0)},texelSize:{value:new 
r.Vector2(0,0)},cameraNear:{value:.1},cameraFar:{value:1e4},edgeStrengthMultiplier:{value:2.5},edgeGrayScaleIntensity:{value:.1}},extensions:{fragDepth:!0},defines:{EDGES:null!==(a=null===(i=this._renderOptions.edgeDetectionParameters)||void 0===i?void 0:i.enabled)&&void 0!==a?a:de.edgeDetectionParameters.enabled}});const d=this.createNoiseTexture(),c=this.ssaoParameters(this._renderOptions),u=c.sampleSize,m=this.createKernel(u),f=c.sampleRadius,v=c.depthCheckBias;this._ssaoMaterial=new r.ShaderMaterial({uniforms:{tDepth:{value:this._compositionTarget.depthTexture},tNoise:{value:d},kernel:{value:m},sampleRadius:{value:f},bias:{value:v},projMatrix:{value:new r.Matrix4},inverseProjectionMatrix:{value:new r.Matrix4},resolution:{value:new r.Vector2}},defines:{MAX_KERNEL_SIZE:u},vertexShader:be.vertex,fragmentShader:be.fragment}),this._ssaoBlurMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:this._compositionTarget.texture},tAmbientOcclusion:{value:this._ssaoTarget.texture},resolution:{value:new r.Vector2}},vertexShader:ye.vertex,fragmentShader:ye.fragment});const p=this.supportsSsao(c)?this._ssaoBlurTarget.texture:this._compositionTarget.texture;this._fxaaMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:p},tDepth:{value:this._compositionTarget.depthTexture},resolution:{value:new r.Vector2},inverseResolution:{value:new r.Vector2}},vertexShader:ge.vertex,fragmentShader:ge.fragment,extensions:{fragDepth:!0}}),this.setupCompositionScene(),this.setupSsaoScene(),this.setupSsaoBlurCombineScene(),this.setupFxaaScene(),this._normalSceneBuilder=new Se(this._normalScene),this._inFrontSceneBuilder=new Se(this._inFrontScene)}set renderOptions(e){const n=this.ssaoParameters(e),t={...n};this.setSsaoParameters(t),this._renderOptions={...e,ssaoRenderParameters:{...n}}}addUiObject(e,n,t){this._uiObjects.push({object:e,screenPos:n,width:t.x,height:t.y})}removeUiObject(e){this._uiObjects=this._uiObjects.filter(n=>{const t=n.object;return e!==t})}ssaoParameters(e){var 
n;return null!==(n=null==e?void 0:e.ssaoRenderParameters)&&void 0!==n?n:{...de.ssaoRenderParameters}}get antiAliasingMode(){const{antiAliasing:e=de.antiAliasing}=this._renderOptions;return e}get multiSampleCountHint(){const{multiSampleCountHint:e=de.multiSampleCountHint}=this._renderOptions;return e}supportsSsao(e){return!Object(s.q)()&&(this._renderer.capabilities.isWebGL2||this._renderer.extensions.has("EXT_frag_depth"))&&e.sampleSize!==le.None}renderDetailedToDepthOnly(e){const n={renderMode:this._materialManager.getRenderMode()},t=new s.i(this._renderer);this._materialManager.setRenderMode(Te.a.DepthBufferOnly);try{t.setRenderTarget(this._renderTarget),this.setVisibilityOfSectors(o.a.Simple,!1),this.traverseForRootSectorNode(this._originalScene),this.extractCadNodes(this._originalScene),this.clearTarget(this._renderTarget);const{hasBackElements:r,hasInFrontElements:i,hasGhostElements:a}=this.splitToScenes();r&&!a?this.renderNormalCadModelsFromBaseScene(e,this._renderTarget):r&&a&&(this.renderNormalCadModels(e,this._renderTarget),this._normalSceneBuilder.restoreOriginalScene()),i&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene())}finally{this._materialManager.setRenderMode(n.renderMode),t.resetState(),this.restoreCadNodes(),this.setVisibilityOfSectors(o.a.Simple,!0)}}render(e){const n=this._renderer,t=this._originalScene,r=new 
s.i(n),o={autoClear:n.autoClear,clearAlpha:n.getClearAlpha(),renderMode:this._materialManager.getRenderMode()};n.info.autoReset=!1,n.info.reset(),r.autoClear=!1;try{r.setRenderTarget(this._renderTarget),this.updateRenderSize(n),n.info.autoReset=!1,n.info.reset(),r.autoClear=!1,this.traverseForRootSectorNode(t),this.extractCadNodes(t),this.clearTarget(this._ghostObjectRenderTarget),this.clearTarget(this._compositionTarget),this.clearTarget(this._customObjectRenderTarget),n.setClearAlpha(0),this.clearTarget(this._normalRenderedCadModelTarget),this.clearTarget(this._inFrontRenderedCadModelTarget),n.setClearAlpha(o.clearAlpha);const i={...this._lastFrameSceneState},{hasBackElements:a,hasInFrontElements:s,hasGhostElements:l}=this.splitToScenes(),d=t.children.length>0;this._lastFrameSceneState={hasBackElements:a,hasInFrontElements:s,hasGhostElements:l,hasCustomObjects:d},a&&!l?this.renderNormalCadModelsFromBaseScene(e):a&&l?(this.renderNormalCadModels(e),this._normalSceneBuilder.restoreOriginalScene(),this.renderGhostedCadModelsFromBaseScene(e)):!a&&l&&this.renderGhostedCadModelsFromBaseScene(e),s&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene()),d&&this.renderCustomObjects(t,e),n.capabilities.isWebGL2&&(!a&&i.hasBackElements&&this.explicitFlushRender(e,this._normalRenderedCadModelTarget),!l&&i.hasGhostElements&&this.explicitFlushRender(e,this._ghostObjectRenderTarget),!s&&i.hasInFrontElements&&this.explicitFlushRender(e,this._inFrontRenderedCadModelTarget),!d&&i.hasInFrontElements&&this.explicitFlushRender(e,this._customObjectRenderTarget));const c=this.supportsSsao(this.ssaoParameters(this._renderOptions));switch(this.antiAliasingMode){case se.FXAA:this.renderComposition(n,e,this._compositionTarget),r.autoClear=o.autoClear,c&&(this.renderSsao(n,this._ssaoTarget,e),this.renderPostProcessStep(n,this._ssaoBlurTarget,this._ssaoBlurScene)),this.renderPostProcessStep(n,this._renderTarget,this._fxaaScene);break;case 
se.NoAA:n.autoClear=o.autoClear,c?(this.renderComposition(n,e,this._compositionTarget),this.renderSsao(n,this._ssaoTarget,e),this.renderPostProcessStep(n,this._renderTarget,this._ssaoBlurScene)):this.renderComposition(n,e,this._renderTarget);break;default:throw new Error("Unsupported anti-aliasing mode: "+this.antiAliasingMode)}}finally{r.resetState(),this._materialManager.setRenderMode(o.renderMode),this.restoreCadNodes()}}restoreCadNodes(){this._rootSectorNodeBuffer.forEach(e=>{e[1].add(e[0])}),this._rootSectorNodeBuffer.clear()}extractCadNodes(e){this._rootSectorNodeBuffer.forEach(n=>{if(n[1].parent!==e&&null!==n[1].parent&&n[1].parent.parent!==e)throw new Error("CadNode must be put at scene root");this._cadScene.add(n[0])})}setRenderTarget(e){this._renderTarget=e}getRenderTarget(){return this._renderTarget}setRenderTargetAutoSize(e){this._autoSetTargetSize=e}getRenderTargetAutoSize(){return this._autoSetTargetSize}clearTarget(e){this._renderer.setRenderTarget(e),this._renderer.clear()}explicitFlushRender(e,n){this._renderer.setRenderTarget(n),this._renderer.render(this._emptyScene,e)}splitToScenes(){const e={hasBackElements:!1,hasInFrontElements:!1,hasGhostElements:!1};this._rootSectorNodeBuffer.forEach(n=>{const t=n[1],r=this._materialManager.getModelBackTreeIndices(t.cadModelMetadata.modelIdentifier),o=this._materialManager.getModelInFrontTreeIndices(t.cadModelMetadata.modelIdentifier),i=this._materialManager.getModelGhostedTreeIndices(t.cadModelMetadata.modelIdentifier),a=r.count>0,s=o.count>0,l=i.count>0;e.hasBackElements=e.hasBackElements||a,e.hasInFrontElements=e.hasInFrontElements||s,e.hasGhostElements=e.hasGhostElements||l});const{hasBackElements:n,hasInFrontElements:t,hasGhostElements:o}=e;return this._rootSectorNodeBuffer.forEach(e=>{const i=e[0],a=e[1],s=this._materialManager.getModelBackTreeIndices(a.cadModelMetadata.modelIdentifier),l=this._materialManager.getModelInFrontTreeIndices(a.cadModelMetadata.modelIdentifier),d=new 
r.Object3D;d.applyMatrix4(i.matrix),n&&o&&this._normalScene.add(d);const c=new r.Object3D;c.applyMatrix4(i.matrix),t&&this._inFrontScene.add(c);const u=[e[0]];for(;u.length>0;){const e=u.pop(),r=e.userData.treeIndices;r?(t&&l.hasIntersectionWith(r)&&this._inFrontSceneBuilder.addElement(e,c),n&&!o||o&&s.hasIntersectionWith(r)&&this._normalSceneBuilder.addElement(e,d)):u.push(...e.children)}}),e}renderNormalCadModels(e,n=this._normalRenderedCadModelTarget){this._normalSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(n),this._renderer.render(this._normalScene,e)}renderNormalCadModelsFromBaseScene(e,n=this._normalRenderedCadModelTarget){this._renderer.setRenderTarget(n),this._renderer.render(this._cadScene,e)}renderInFrontCadModels(e,n=this._inFrontRenderedCadModelTarget){this._inFrontSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(n),this._materialManager.setRenderMode(Te.a.Effects),this._renderer.render(this._inFrontScene,e)}renderGhostedCadModelsFromBaseScene(e){this._renderer.setRenderTarget(this._ghostObjectRenderTarget),this._materialManager.setRenderMode(Te.a.Ghost),this._renderer.render(this._cadScene,e)}renderCustomObjects(e,n){this._renderer.setRenderTarget(this._customObjectRenderTarget),this._renderer.render(e,n)}updateRenderSize(e){const n=new r.Vector2;return e.getSize(n),this._renderTarget&&this._autoSetTargetSize&&n.x!==this._renderTarget.width&&n.y!==this._renderTarget.height&&this._renderTarget.setSize(n.x,n.y),n.x===this._normalRenderedCadModelTarget.width&&n.y===this._normalRenderedCadModelTarget.height||(this._normalRenderedCadModelTarget.setSize(n.x,n.y),this._inFrontRenderedCadModelTarget.setSize(n.x,n.y),this._customObjectRenderTarget.setSize(n.x,n.y),this._ghostObjectRenderTarget.setSize(n.x,n.y),this._compositionTarget.setSize(n.x,n.y),this._ssaoTarget.setSize(n.x,n.y),this._ssaoBlurTarget.setSize(n.x,n.y),this._combineOutlineDetectionMaterial.uniforms.texelSize.value=new 
r.Vector2(this._outlineTexelSize/n.x,this._outlineTexelSize/n.y),this._combineOutlineDetectionMaterial.uniforms.resolution.value=n,this._ssaoMaterial.uniforms.resolution.value=n,this._ssaoBlurMaterial.uniforms.resolution.value=n,this._fxaaMaterial.uniforms.resolution.value=n,this._fxaaMaterial.uniforms.inverseResolution.value=new r.Vector2(1/n.x,1/n.y)),n}renderComposition(e,n,t){this._combineOutlineDetectionMaterial.uniforms.cameraNear.value=n.near,this._combineOutlineDetectionMaterial.uniforms.cameraFar.value=n.far,this.renderPostProcessStep(e,t,this._compositionScene)}setSsaoParameters(e){var n;const t=de.ssaoRenderParameters;if(this._ssaoMaterial.uniforms.sampleRadius.value=e.sampleRadius,this._ssaoMaterial.uniforms.bias.value=e.depthCheckBias,e.sampleSize!==this.ssaoParameters(this._renderOptions).sampleSize){const r=null!==(n=null==e?void 0:e.sampleSize)&&void 0!==n?n:t.sampleSize,o=this.createKernel(r);this._fxaaMaterial.uniforms.tDiffuse.value=e.sampleSize!==le.None?this._ssaoBlurTarget.texture:this._compositionTarget.texture,this._ssaoMaterial.uniforms.kernel.value=o,this._ssaoMaterial.defines={MAX_KERNEL_SIZE:r},this._ssaoMaterial.needsUpdate=!0}}renderPostProcessStep(e,n,t){if(e.setRenderTarget(n),e.render(t,this._orthographicCamera),n===this._renderTarget){const n=e.getSize(new r.Vector2),t=new r.Vector2(e.domElement.clientWidth,e.domElement.clientHeight),o=new r.Vector2(n.x/t.x,n.y/t.y);e.autoClear=!1,this._uiObjects.forEach(n=>{const t=new r.Scene;t.add(n.object);const i=n.screenPos.clone().multiply(o),a=n.width*o.x,s=n.height*o.y;e.setViewport(i.x,i.y,a,s),e.clearDepth(),e.render(t,this._orthographicCamera)}),e.setViewport(0,0,n.x,n.y),e.autoClear=!0}}renderSsao(e,n,t){this._ssaoMaterial.uniforms.inverseProjectionMatrix.value=t.projectionMatrixInverse,this._ssaoMaterial.uniforms.projMatrix.value=t.projectionMatrix,this.renderPostProcessStep(e,n,this._ssaoScene)}createOutlineColorTexture(){const e=new Uint8Array(32),n=new r.DataTexture(e,8,1);return 
we(n.image.data,me.e.White,ce.White),we(n.image.data,me.e.Black,ce.Black),we(n.image.data,me.e.Cyan,ce.Cyan),we(n.image.data,me.e.Blue,ce.Blue),we(n.image.data,me.e.Green,ue.Green),we(n.image.data,me.e.Red,ue.Red),we(n.image.data,me.e.Orange,ce.Orange),n}setupCompositionScene(){const e=this.createRenderTriangle(),n=new r.Mesh(e,this._combineOutlineDetectionMaterial);this._compositionScene.add(n)}setupFxaaScene(){const e=this.createRenderTriangle(),n=new r.Mesh(e,this._fxaaMaterial);this._fxaaScene.add(n)}setupSsaoScene(){const e=this.createRenderTriangle(),n=new r.Mesh(e,this._ssaoMaterial);this._ssaoScene.add(n)}setupSsaoBlurCombineScene(){const e=this.createRenderTriangle(),n=new r.Mesh(e,this._ssaoBlurMaterial);this._ssaoBlurScene.add(n)}createNoiseTexture(){const e=new Float32Array(65536);for(let n=0;n<16384;n++){const t=4*n,r=2*Math.random()-1,o=2*Math.random()-1,i=2*Math.random()-1;e[t]=r,e[t+1]=o,e[t+2]=i,e[t+3]=1}const n=new r.DataTexture(e,128,128,r.RGBAFormat,r.FloatType);return n.wrapS=r.RepeatWrapping,n.wrapT=r.RepeatWrapping,n}createKernel(e){const n=[];for(let o=0;o<e;o++){const i=new r.Vector3;for(;i.length()<.5;)i.x=2*Math.random()-1,i.y=2*Math.random()-1,i.z=Math.random();i.normalize();let a=o/e;a=t(.1,1,a*a),i.multiplyScalar(a),n.push(i)}return n;function t(e,n,t){return e+(n-e)*(t=(t=t<0?0:t)>1?1:t)}}createRenderTriangle(){const e=new r.BufferGeometry,n=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),t=new Float32Array([0,0,2,0,0,2]);return e.setAttribute("position",new r.BufferAttribute(n,3)),e.setAttribute("uv",new r.BufferAttribute(t,2)),e}traverseForRootSectorNode(e){const n=[e];for(;n.length>0;){const e=n.pop();if(e instanceof a){const n=e.parent;n.visible&&this._rootSectorNodeBuffer.add([e,n])}else e instanceof r.Group||n.push(...e.children)}}setVisibilityOfSectors(e,n){this._originalScene.traverse(t=>{t instanceof i&&t.levelOfDetail===e&&(t.visible=n)})}}function Ce(e,n,t){if(e&&n>1){const e=new r.WebGLMultisampleRenderTarget(0,0,t);return 
e.samples=n,e}return new r.WebGLRenderTarget(0,0,t)}function we(e,n,t){e[4*n+0]=Math.floor(255*t.r),e[4*n+1]=Math.floor(255*t.g),e[4*n+2]=Math.floor(255*t.b),e[4*n+3]=255}class Se{constructor(e){this.buffer=[],this.temporaryScene=e}addElement(e,n){this.buffer.push({object:e,parent:e.parent,sceneParent:n})}populateTemporaryScene(){this.buffer.forEach(e=>e.sceneParent.add(e.object))}restoreOriginalScene(){this.buffer.forEach(e=>{e.parent.add(e.object)}),this.buffer.length=0,this.temporaryScene.remove(...this.temporaryScene.children)}}
|
|
123
|
+
function L(e,t,r,i){const s=[],a=e.filter(e=>null===i||function(e,t){const{p:r,box:n}=G;n.makeEmpty();for(let t=0;t<e.vertices.length;t+=3)r.set(e.vertices[t+0],e.vertices[t+1],e.vertices[t+2]),n.expandByPoint(r);return t.intersectsBox(n)}
|
|
163
124
|
/*!
|
|
164
125
|
* Copyright 2021 Cognite AS
|
|
165
|
-
*/
|
|
126
|
+
*/(e,i));for(const e of a){const i=new n.BufferGeometry,a=new n.Uint32BufferAttribute(e.indices.buffer,1).onUpload(o.n),c=new n.Float32BufferAttribute(e.vertices.buffer,3).onUpload(o.n),l=new n.Uint8BufferAttribute(e.colors.buffer,3).onUpload(o.n),d=new n.Float32BufferAttribute(e.treeIndices.buffer,1).onUpload(o.n);i.setIndex(a),i.setAttribute("color",l),i.setAttribute("position",c),i.setAttribute("treeIndex",d),i.boundingBox=t.clone(),i.boundingSphere=new n.Sphere,t.getBoundingSphere(i.boundingSphere);const h=new n.Mesh(i,r);h.name="Triangle mesh "+e.fileId,h.userData.treeIndices=new Set(e.treeIndices),s.push(h)}return s}const G={p:new n.Vector3,box:new n.Box3}},,,,function(e,t,r){"use strict";r.d(t,"a",(function(){return n})),r.d(t,"b",(function(){return o})),r.d(t,"c",(function(){return d})),r.d(t,"d",(function(){return h})),r.d(t,"f",(function(){return p})),r.d(t,"g",(function(){return f})),r.d(t,"h",(function(){return g})),r.d(t,"e",(function(){return i}));class n{constructor(e){this.client=e}get headers(){return this.client.getDefaultRequestHeaders()}async getBinaryFile(e,t){const r=`${e}/${t}`,n={...this.client.getDefaultRequestHeaders(),Accept:"*/*"};return(await async function(e,t,r=3){let n;for(let o=0;o<r;o++)try{return await fetch(e,t)}catch(e){void 0!==n&&(n=e)}throw n}(r,{headers:n,method:"GET"})).arrayBuffer()}async getJsonFile(e,t){return(await this.client.get(`${e}/${t}`)).data}}class o{constructor(e,t,r){this.revealInternalId=Symbol(`${e}/${t}[${r}]`),this.modelId=e,this.revisionId=t,this.modelFormat=r}toString(){return`${o.name} (${String(this.revealInternalId)} - ${this.modelFormat})`}}var i,s=r(0);
|
|
166
127
|
/*!
|
|
167
128
|
* Copyright 2021 Cognite AS
|
|
168
|
-
|
|
129
|
+
*/!function(e){e.EptPointCloud="ept-pointcloud",e.RevealCadModel="reveal-directory",e.AnyFormat="all-outputs"}(i||(i={}));
|
|
169
130
|
/*!
|
|
170
131
|
* Copyright 2021 Cognite AS
|
|
171
132
|
*/
|
|
172
|
-
|
|
133
|
+
const a=(new s.Matrix4).set(1,0,0,0,0,0,1,0,0,-1,0,0,0,0,0,1);function c(e,t){switch(t){case i.RevealCadModel:e.premultiply(a);break;case i.EptPointCloud:break;default:throw new Error("Unknown model format '"+t)}}
|
|
173
134
|
/*!
|
|
174
135
|
* Copyright 2021 Cognite AS
|
|
175
|
-
*/(e);this._transformOverrideIndexTexture=t.transformOverrideIndexTexture,this._transformOverrideBuffer=new De(this.handleNewTransformTexture.bind(this)),this._transformProvider=n,this._transformProvider.on("changed",this._handleTransformChangedBound)}dispose(){this._transformOverrideBuffer.dispose(),this._transformOverrideIndexTexture.dispose(),this._transformProvider.off("changed",this._handleTransformChangedBound)}get needsUpdate(){return this._needsUpdate}get overrideTransformIndexTexture(){return this._transformOverrideIndexTexture}get transformLookupTexture(){return this._transformOverrideBuffer.dataTexture}build(){this._needsUpdate=!1}setNodeTransform(e,n){const t=this._transformOverrideBuffer.addOverrideTransform(e.from,n);e.forEach(e=>this.setOverrideIndex(e,t)),this._needsUpdate=!0}resetNodeTransform(e){this._transformOverrideBuffer.removeOverrideTransform(e.from),e.forEach(e=>this.setOverrideIndex(e,-1)),this._needsUpdate=!0}setOverrideIndex(e,n){const t=this._transformOverrideIndexTexture.image.data;t[3*e+0]=n+1>>16,t[3*e+1]=n+1>>8,t[3*e+2]=n+1>>0,this._transformOverrideIndexTexture.needsUpdate=!0}handleNewTransformTexture(){this._needsUpdate=!0}handleTransformChanged(e,n,t){switch(e){case"set":this.setNodeTransform(n,t);break;case"reset":this.resetNodeTransform(n);break;default:Object(s.k)(e,`Unexpected change type '${e}'`)}}}const Ne=new 
Image;Ne.src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAABgGlDQ1BJQ0MgcHJvZmlsZQAAKJFjYGCqSCwoyGFhYGDIzSspCnJ3UoiIjFJgv8PAzcDDIMRgxSCemFxc4BgQ4MOAE3y7xsAIoi/rgsxqOqd2d+pGwehjat+yq+1cc3DrAwPulNTiZAYGRg4gOyWlODkXyAbp0UsuKCoBsucA2brlJQUg9hkgW6QI6EAg+wGInQ5hfwGxk8BsJg6wmpAgZyBbBsgWSIKwdUDsdAjbBsROzkhMAbJB/tKBuAEMuIJdFAzNDXx1HQk4nFSQm1MKswMUWjypeaHBQFoIiGUYghlcGBQYDBnMGQwYfBl0GYCWl6RWlIAUO+cXVBZlpmeUKDgCQzdVwTk/t6C0JLVIR8EzL1lPR8HIwNAApA4UbxDjPweBbWAUO48Qy5rMwGDxhoGBuQohlrKcgWGLPQODeDBCTH020EnvGRh2hBckFiXCHc/4jYUQvzjN2AjC5nFiYGC99///ZzUGBvZJDAx/J/7//3vR//9/FwPtv8PAcCAHALbUa33lfYEHAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5AobCyAEEhU0UQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAACAASURBVHjalV3bkiLXESzoC9cZdle7lixF7IMj7Ad/qn/RVliybMmyZhkGGKCBBj+s6ig7yTqNJmJjbzPN6XOpysrKqjP429/+djUzu16vNhgMbDAY2HA4tLIsbTKZWFmW1rat7fd7O5/P1ratXS4Xw5+5Xq82HA47v1+vVzOz9Hcz6/zc5XKxtm07z/Ev/34elz9vOBzaYDCwtm3tfD7b6XSy6/Vq5/M5fU9RFFbXtZVlaXVdm5mlZ1wuFyvLMn2GP78oCjMzO51Odjqd0rNxrP5VFEXnl7+nf4bP4+VySe/jn+Hj97nzMU+nUyuKwo7HY3onfGZRFDabzWyxWNjpdLLVamVN06Tvw3nHL5xb/LfL5WIlDsJ/90Vu29bKsrTr9Wpt23YWHxfocrl0Jtf/HQeE/68GpDaBP5tfwsdzPp/tcrnYcDi0qqpsNptZVVVW17UVRZEWwZ91PB7TJuHF8gnxBR0MBukQ+Lh9kx2Px84iFUVhZVlaWZbpM3Hjq7nCeTezNObL5WLn81keoLqubTqd2vl8ts1mY03TpHGphY/mFte69P/gTaBOLf7CBR0Oh9a2rQ2Hw5tF8pfljYaf65/hz8Hv8/8/nU5p0cuytNFoZKPRyKqqSiceFxInECfRTzlaFPydLZSP1Z8zHo9tNBqlzXA6naxpGjsej3Y4HNJmqOu680z/bJ7H6/WaNu3lcrHD4ZDmDdfBF9/MbL1e236/l3OFm1ptAt4MJU4Un27frfhwPvG+0GaWTheamMgMqQ2AL+QL0DSNnc/nNPnT6dTG47FVVWWXy+XmM/CXLxr/P1shXxh2P/w8f0devNFolMbcNI01TWObzSZtVLcqfDh83t1FuYXiuSrL0sbjsRVFYa+vr2lO+DRHi8//jgei4wLYLKEFwIfipsAHKlPEbsD/7j/DE3q9Xu10OtnxeEy+ej6fp5PuC+obEn0qnlb/HceMC41+GBfbD4JbFDTZuFkQE7jFGQwGaZO6RXh9fU2bt67rjoVyrDUYDOxwOEi/79akqiprmsb2+33CJYy5/J0iF8DWouMCcKH5FLA7wMlmP+0P9wnCF1bPRdxxOp3scDiYmSVTy2bUzb0vBO9uXEj8N5wsPgkM3HDh8eTi+7CP5w3sG3c6nVrTNHY4HGy321ld1+k0O25wPKHG65vEN5SDPv8+drO5Lwad1+vVSpw0BSR48ZWJ50lEk46m3jcLY4jz+Wz7/d7MzCaTiY3H405UgSi9LEuJotmPIwD033E8
yiziBLKlUT4Ux6Asqf8aj8c2Ho/tfD6nk+6Wwt0cR09u2aqqSt9zPB5vcJjCWJEVUAel7DMXKkrAk4MbQ/0M/r/7bNwg+/3ertdr8u24iH7a/d9wlzv6xs9UoSJvCPb/yl/ySeF/ZzPt74KuyDcbbtCyLG02m9nxeLTlcmnr9boTauM40d0cDoe0+BGuUqFzzhI44C5VWKJMmzrx/H3RTvOXc2DXtq3tdrt0OqbT6c3pcx/M5h9NI5rhnAnk08LjRovC40Bcge+IGwMxA4NqPiyOazzeX61WVlVVQvg4FneLDo755OM40MKyJYxcQbIA/NDIfCCCxYHyKcdFcfLIB7jf761pGiuKwubzeQJG7mv9xLMlYB/NII5fjMOo6KSrU65cCm9qhSXYwuAYGF9UVWXv3r2zpmlsuVzadrtNmGE0Gtn5fLamaTqRUUTm8EG455397yWHfAzsGDUiaFOxpm8QRpwO8Nq2tel0mtCvLzqSKMpcqZBMvVhkFpU7UBtMPVtZumizoFtSB8ldl5/4wWCQNsJ6vbbL5WJffPGFtW1rx+Mx+5loYdW7RiAZfy/ZxOMH9G0ChejdAvjv/iL7/d6KorDFYiERvZ98NGEK6KD5V0RPNFF4Chn0KbPPoFfNQy7WZvCLOMCt3uFw6LiFN2/e2MvLi/3jH/+wh4eHRA5dr1cry7IzN7zROWTNYRt0ryX7cwYZiNpVjM+cAVoBZ7aOx2MCOz7JfuJ9UdD0c6zOuABPm0LCjD944ftOewRsmRPhRVbg0RfFf7aqKptMJtY0TWfDOU56//697XY7Wy6XKWR0V6rcNLu6vq8bkolPj4rpmRLOoU8ndy6Xi+12O2vb1h4fH7t+B0I5PNFMaiDVrHw/x+/3+EF2MfckTnI4QJ0wB4Uq9h6Px50owefVreBkMkkU99PTk202G5vP5535UGPhOWGgGkUGQ4WIOeTLgUIflLNTnkXbbDZmZvbw8NAhNHzx0fw7y4exPsb+vFkQATMw5BOuACQyg8oN8IRGG94XmRlK3NA4Vmf0PBJifsPNvLOg7969s7qubbPZdDKnmLBjfKBctTqk/ueSzRxSnNGu4ZPmg3FSZ7PZWF3XNp/POz4nonHVCceQBicqOtF8Mpmq7ov1owhIAVLlShStnOjWX13faDRKC8zP8twGxvxuPXe7na3X60SJq3dCfKA4EA7fE/ZQ8TsnaCIziMSOZ8VeX19TbO8vzwvINGeExvHvjq5V7K8yYYrQ4mRQxI71bYK+UJFdElq/3W53Q/e6yXdSrCiK5PNPp1PSCaxWK5tOpwkcKmJKhZ6RBTCz3yxAn+lQdDCe+uPxaNvt1kajkU0mk87i4knHiVfiCPydQV4U4qgdz36Pn63Qfk5U0cccKs4BN3pVVck94pdbShfc8OINh0M7nU42Go3s7du39unTJ5vNZglLME7jw9PHGg45XufJZ3yA3+chXtM0tt1uE4/vQM99Gy40U7ycuUOqGK0RAy4kXSLTG0UtDB7v/cq5iQhLIFbwhA/O8WKxsMFgkEgfprI9RHRxzvv37ztJIU6JMwvIhwA35WAwsKECCYzWVU7AxRCHw8HW63VKeLgUC+N7NufI/CkWUqmJWJiCoCjiANRpxZdX38efo/QRymVE6Nx/d5CM3/P4+GiTycS2220KC3EhfQ18jtq2taIo7O3bt0kXwGAwEogo63S9Xm0YERbKr+BEOFW52WxSrtuFCxzjq8VnQQbG9Zg34J3tCFqZP/+ZSCSCAhTMp0fRzT0hJIet/Hmoe8D/H4/H9vDwYOv12g6Hww2NzVwJsozD4dDevXtnu92uIx9Dy+hzhO+KVH7CJzn5EJ9G1sVtt1ur69pGo1FSv6DpR9dRVVVnctACKHGo2tEMSO/RwanwMQJFvFlzz+bf2TqwtcS/V1Vlj4+PdjgcUrgcRV58iHy8ZVnaYrGw19fXThTmohJXL+XY3Ov1+hkDRKecdw/G+a+vr4m4iDRwatAe9+Mm
UdIuJoFyC6iUOQr98gJFGUx10nPkl+JReBPg5vcIabVadUx4n7ADuRNUS202Gzufz8nKsAVQhziNF3eof7CLL9mMuOnfbrdmZimp4+ZfWQ5W1Mznc/v48aN98cUXHTm1ynqxG1J4RAFAxQJGGkUlO+9zAyqrxtR0pGr2uVqv1wkXMKGj8hN+WPCzLpdLcr/r9TqtmzrpYRiLA2X/iYvvO8w1aXjy0VfxwB3I+NdoNLIPHz7YH/7wh5scO6pwGRiyzDuaZLWwyqzzpEesopKOq39ncKkApH/PbrfroPhc3p6BNOZO6rq26/VqDw8PNhgMkjtQqqUIGJd9oQ9ugP1+b/v9PpERTO0qMoTB0Xa7tR9//DHt4FyYxSpkxA/olljVq15YkUIKKKq6BUVQRZOMIR4mgvDnnOqNSKvISiFuSlRuWdrpdLLFYmHPz89Ja8GgV2kkzew3RVBfbYCreKqqsqqqOmlNJngY6WOat21b+/nnn+/i4BlguctAMKjSooq34CKRKNcfbQLFlnKaWlkdfj4DwkjHwGOPimNQiPL4+Jjk6FhowuPthPosjMBkA7oGlzdPp9MbAsd3Ji4UZsMGg4FVVSUFlszIKbUNF1cwsFHWR20Qj6Mj/95HG7MUS/ECnEhjidY9GUac25zcnj/XaxT8oPr7qkKT5AI4z81MHypS5/N5h+hRIQqKOB28RJOAAFBtRD5VeLr4pRTZhAQNppvVZKiSN1VIweOLijAUd8CJNrXhlb7BP8utB6eGHWedz2ebTCZJQj4ejyXNjfNTIkGhTpGHfF6Nw3G+CtkYbEVaOvWiKoOnJNwKP/wenj4KD9Umjfw8vw9L3vkU42HDwlGmw/F70b36IiMmYJbwfD7bbDZLHA1GYgpwllEs7IN+fX1NzBXLtPGlMQRyfX9RFNY0Ta+eD/16tBH59KF/yxE3OZDFG6Ev0xidzD4pNi42RywqJa3EppEa2zcBbgbXHbgMzzcHR0+yLgB9//F4tLZtUx4aK2AVJ+6boCgK+/rrr62ua/v3v/+deO5cJQ/69hz1yieFcUWfNPwegicifaJows2zqoD2PzuvojaHUjnl1Fio2/C/Y2n85XKx+Xxuz8/PxhaesVfJOx+R6uFwSFW4zuDxALlmzwd0OBxukkkKAEbmkpU8+L2cM2DSKYrFGZmzrDqXSYwKY1URLZ9YP4G4cLl6hByHgGNgsanPjeOj6XRq2+3WHh4e0uexrqLkl/IvL2FySZcvPgM+9QKXy8U+ffpkq9WqUzShkK7K0eOCKgEkAz6up1eIPIoM1AZVglgGyQpIKq0CLzjTvlEdZq5Gg78HRbIOqN0K7HY7Ox6PnSYZiKFKPAFu/p308eJM1tPhLlLNFdCSMBOYyzxGNGu0qBiKRdnLKAZWYZwSkOQKZHJ6AVUHGdG9rEvgRY4KeCMXgfPpGUcPw7mQZOgL5pkjL832ej1H/FzCzaDPQz6UfXHlkKrdizQA+GL4ghxloOBEFZDk6GEUdXJFUo4+jYpJ8d9yMi0Oadn8I7B2F9u38dSzXE42HA5vCkvTBmDT7QWbTiQoAQW/HCZ1ol2uMm7RIvVJuHmTOTuZE2gqmRZPsHJlKr/PHUmQ/FK4Rsm1ImuXqnZ/xU+oc2D8owplMNLwn314eEjtZLzeMM0xL5JnqNxkqDx8TjSCp52zd7nYnXsLsNWI2D/k2ZUKKccPRMkfLjyJ0txK4II1D4qLjwpsWEDqgLFv4VVvJnw39P++CXBDDdn0+A9wvR738olKoZSIFF0BLti9ihs+wVztg1qDvuyXeo/xeJxCXdxAqstIJJ5ly5UTpkaAlCloLiqN8hNctcz5icvlYrPZrNOEwjfBkJFq27Y2m81SzI9FnKr4k09nTnuPANF9HLdZi4Af73LvG6QYOHXKIzc2mUzsm2++sY8fP9r79+9vElzRxnTMpLALS+Aj8osl8vyeyrQr/V+fwPV8PicrwBT6EHUA3toENfuo7OU0YyS85EVQ
fLyLF1QuXrkS1NBj6OoRBkqhFL+vNoNXLj0+PtpoNLLFYtHBEpjb4OTX27dv7e3bt1KDF9U25MApWzelYWSQGPH8OF6k7Ou67qiGLpfL5w2AqNETCKpok30sPihqRKTy2MhmqdJmBk94ir755hv78ssvE17Bz0Okq1yIcguo1nVtvppkNO2TycQ+fvxoHz9+tIeHh45Qlq1AZL6VGIWbS/KpVwkrnD+2eniYL5eLjUajToOptm2tdPPvyNBjf25r1hcWqTCNY2EVEqk+A5z08e9tmsZWq5X99a9/tePxaE9PTx2dgT+PGT8mlHDCttut/fTTTzYcDlPnTbeInA7m/LoTZOwOo3K1HL5BjMRKZ5U5RGV0LoLCVjWj0cg2m42dTqfkEko8PQ6ClJ9SG4CTFUxCqMRJLn8fsX44hv/85z82Ho/t3bt39vT0ZOfzuZPt8g2Ai5ir9vGNxCdOjccnc7vd2g8//GBFUdh6vb5h5WazmRVFkbSTPJaorgC/T4leGOzhZlel6Vym3ratjUajBPRTcaj/53Q6vYnvlc9X1Tw5LNCnsuWBcg6ew6h//vOfKdvogBA3gdI15j6L2TvF6jHj6d08eB6qqrJvvvnG6rq2H3/80ZbL5Q0oVJlXThMrMQlrJllWx+6DtY9t26YsoYPoIZoJ7JEbMVY5ti0nwuTkiWK2nNRBkz2ZTGyxWNycYG+Vys/B2FmVl+XUOWhyVeZSpWN5wh2bIJnDTa9U+KiqozgyQFwRFb6oSiZ059gAMymCfGe4egcnU1GcESUZDYw7kESly6rJw/l8tj/96U+22+3su+++k/42om+jvn8Rr99X94d/5kaYPkeHw8H+9a9/2XQ6TU0vPV1clmVi5fb7fW/TB6VC4p5AUSfXKA/ifQp8zVNdgNenI9qPkjLKX7IuPurioWrxVaoVZU7ffvutDQYDm8/nlqtl5LHhaYmaODKxpXL9GAarDcWfsd/vbblcpnJvfLfRaJTK55yKVhY3IqIi6xm5VWy84e9eVVWKdkrfTd54IOpEGZUtMUGEZicqcmBLgKbVuQh/OW+Z9t1333W0hgrxRp/HYaoytRhD89hUY0iuWeBwTG3Q0+lkr6+v6fmLxcI+fPhgm83Gfv755xSJMf7hpJhqapWzIPxOvtZt237WBHJm7J48uiIrlComCoEUIsZCUew+ggWWub44qh4hkorj5Kl4H8uz2TJhroK7lUZ0uP/ZS+q8NNwrhI/Ho/3yyy/SKubo9ahcTrXDZxDZtq0N3Tep7FfUCJonCZUmyryyKe5rxcKgyReeFcecgeOxMIHD1icaC5pNVSfJERFHC6qFjurA4l1APW/Pc8RiD6y8Vhb4HlzDVmvojQc6JcPEnSsNXh9B5C/uL6nSxrmmCqpejtO5yhqoTKBqOq1MJfcGwA2FixlpFKOmknyC/Tnr9bqjl1SSfC7OURgod2VMpIN0fFWi2eXYkxdN9aTJJWFUzV8uWuC4FjckM4ZRkiWXjkZlLH6WWlgUuJ5Op5s5wGiAMcY9m2AwGNhms7GffvrJ5vO5LZdLKaHHknyl+mG2U9HFqtWvg/6SgQTGieoyBv4QlexQdWgsBlVsHy+a6nkTpWijsA+tgCJSfOH9RKCF8EnCfAEjcUzecASB4FBxCOfz2Z6enmy5XIbKKAdseEVM1KJXsYaROAf0C+VNOOSnA82OYth4QfvKnnIl3oo8Yl5A8d3qciYOz9j/4+8fPnywtm3t06dPYbNJb9TEuQDFGCr3qe4pYHenzDluOi/66NMWqjnCMfszXPMxVHIuJaDgU88Lyrl9lk0pWravSZOipZX1iEIh1WeIy8j++Mc/2rt37zob5i9/+Yu9efMmxfGYUWM6XJVzRxFErhZBgUzPVXg1MfdaUA0gmAuJtJZwZU0pmaNIY8YxMwMl3iSqzInDtIjUQD+NUYYCVaoalyMb9e9///vfb3ICP/74ox0OBxsOh/bnP//ZvvvuO1utVimdiu3yFcmk
4vGcDEyFocj6cTpYpYdz8jAFpr3DSMlgSBVH8I5V3b8UMkZRI4d47ttQXaNEoz4ZKGTEujj+xWxjdLWN/9mrnvH7PIEzHA7t+++/t6ZprK7rRON++eWX9vT0ZKvV6qZzuYPG6OJIRSQp8KzawOKBVA20ouqoTjEo0P2DweC3W8OiFit9SQYEWHxrBloJVMx4MYdvvijs5CSN6tatiis5KuCESFQMwwLO6/Vqz8/PKWxaLpf21Vdf2ddff935P3w/DN0QtEUWLFfYij6c8QBGNFHSTV31w1xI6R0mMPnDk6PoUMwaMv8eTSbHuXhd6n6/T63Trtdr6nRRlmXKq/PNWhENnLsoMkoOcRcPlGe7fM2tw7fffmur1aqzkeu67txxyFp+VgZxqjfXtj833qiLisIFLCsbDoefK4PQz6LWTlXiRMSL6mad6zjuz/nw4UNqjb5YLOzt27epIcVkMrGqquz5+TnRpNgSjYWbDNhU+baiVVWXMt/MHP7t93v74YcfOnODdyqrOgB2P6p1m7pfKMfwKc0gWjoFNPH5/n2lDz7qzRu1M3HFMJssjKXV9XH4fGfDvIs2K1mc+mT1S9QyTlX9qMZXbNX4NCnwxBskYt0U6IpOqNoErDFQtRSKEIoSUMql4zNKVI0yv85EB1fBqElFLgE3QVQfuFwuOy3QvQml3yh2PB5tt9vdtJFXyZwPHz4kda/n5r///nvJYURavQitRz2Vo43FFoIvlc718EdXzCrhnKYhCuf5Kh/8zBIJBqXaiQonUQnLDCBe8Ij6PA5z/FQ4BtlsNqkhRV3X9t///je1qOGaObYsZpawgn+/X7qUaxAZ9SpWrKfCRlFDZgav6FZyIDC3WH3C0qgRVC4LW+IV7JiocGsQ5feRo+b+OyigZI2Ain85MnAFMHbQVoWYLBnf7Xa23++TyDOq2GVwmkue5Pj9SHmj7jFWlkOF3SpiUalsdb08W6Xo4quOK+eXUehU7XjuUKGUQmx6o4nHGN7DySipoZA6jo1Lzxj5Rr4wEoIq/35P7X5UnMJzGgFCdVpz4bmy3HiAWQ/gn1MyEMKya9XESN0BHHX1iLpx5nwZRwxsrlk+heCQQafKOjIzqQQUTMsqnkQdHLWJIqAYbRw0/4zq++4GRveIa4J9gnAtzufz5zCQrxyNBAfRtaROK7JQgweh2DA+obw4yuRyiKbAmbe25WtquPBC1R2quFkBPEVDR907VPUuz4OqnVCp+JzVUdfVMW2P710q/R/XnDMAyl1IpGJgpdRljkC1heer3DHJxI2YFOePqV60ZNyePmqujIvIUvMINHI4misHj274VMmbeyMOjBpUcgnnpm3b35pFs89TDQ4RJGKygs04mxp1kll5rFgsjCjw55TPYyHlzYvSFfKRikaNQ+kf7uldFNVERh1B1FgiU88/E3USVXON71Fyu1a+sJHboirqFHco07C4ee5pdcIhFPtADFm9OJSRME8IF22qhheRwkhpJVU4qWRyUcZPAVyOTriXT1RxHc1hdKkF/l8qDkV5U0Qm5O74UbsZJx19cZ9ciSMNJn4Y8CgaV4E0Vvuo+r9IucMKKRaV8viQ94hYuYgLUPqLaIMqljJXycQ5m+v1+pkH8ELBXK99VQShysQU2ENAGCUqolIybHTI2r2ojDsq4+IcRVSmpWTZWLeg5GdcFNt3e6dqO8/AOXcJBrfr442jxoAZRD8Upep8He20qMqHVbjYPFLRsDnyRWUhWX+HJ00JKRR6jpizHMOmQF6OGIquuFPm+vemhu9RPXEndbd6qgQ/9QfwH4yuU8stvAJxDIByeepIIBFZiqjMC8eGiS2+kCqaVMVrRGPmLGOkQFI0sYpU+m4qVcomfAa/nyLGVFrYN0ip2r2xsvXmssGgxRuqilUVCw5Y8QlKIJmbVL+N0wkirgdkd6FSshwb9/nsXDvZvqIM1UcwKrfrE3qqht2RNVRVRe4KS6xcjV5E9eBVSRHuAaTQOde+K8CV
09Mz2VEUhX311Ve22+3s5eUl1DKiKeT7CCIAmMv65ZIykc+/B7UrwWxf63ylKFYVUS4yLYoiXWBdeqjG3bE4BMtJxHjCOe2p+HgGJBGAi7qH+vdVVZVu3PYaO7QyKu7nEDcqCv09JjqSzvWJXxXax0qg6OLN6Fq4SCugoqK6rq3EzlH4gGjxlXiRlSxR2ZJqO6N6/uViVwasl8vFnp6eUvrZXYK6twdZRSVAUQg9KtDM5RlyTF/OiiiNAja84LsVVBeQHPDFNUobwGvF8Yvr4SNRJWfcUMjQNwGK31d3BEcnxuldNO+uK4hy9pxhU1fZRMUhCh/0RS8qNOy70DraCEx5K7wRqaT4GS5tL8vSht4XL+nEodWp6qun/FtEVXJcmruOViUz7hVEKCTvZg41hMqn8wmLFiPXJY0roFV003dbiapm5iwq9xlSBFhUiIMXgHhziF/HPEwdI1AIijtG3UjJiD2K9bnIUXHYSrBxT8UrdtbKNV7Kxd65f0MxSw6kjkajpG7OJWByBTAqP8CAMAoh1cbmZ/lt706hpwsjfACOCvF+Geb4FVDDU6dSyhHTpvxylAlD369YQdWbUJnqKOLIMYcMYplg8hYy3jDS9YxKvxhFBSqncE+95b38AY+j0ybOY2inObFYg5tF5VDy7yE41G5lV6C6hUfAUNXvRT0M2R+r5JdyWYqS9XE0TZNKyTysvsfUR+xkFMfnXGFOBIpaSrdq3uByiD1lPUbMceqqrRmb+SjH3vcrwheKo1cNqxEdqz7B3JQ6QtC5tiyqZOt8Ptt6vU79f9R7KCwUldRFSap7Fj4ShvqzveAmrTveTOFuICpsiDaDMrNRWzmVD1d3DnCLWbUYSpiRk3vnJOARWo+IHbZOrkC+p01LBDKjjmdRoqyvJ5HKRzRNk3I1g8Hg840hTqhgtkvJm3Pp29yt2arFuco55HrxR9agj77NTaA6QUgcKUsX3R8YgVfl76PkUK4WIbpmPqdm4s3tEY/7/4QBMJTZ7Xb28PDQAYNcsKAaL0R1A4rDz5n7KL2r0tT3mMSofCxqdhHJp3MW5B5iJ9rYUeY191m5cJjdMeZo/E5h0gj+NkF1XSc3wFe4YciF+YEoXle7mJMYkUmMmlUpsibCGSqppNLXCjhFobA6ibnCE/WluPvIGuQkYQyGcyGv/7lpms4NsKk83L/cDex2O5tMJp2SJoxVc02fVLMGJpSiiWYqNgeaIq18X46dpdNqQXONnbiqKZJ5Ky5enWKlpVTRj5KeM15T7tijG7/C9+Zyb5zwtm1tPB6nDaAWgAs91WlU1Tw8aWweuQeuyoJFDFvuPh3+d59kLC9jNTKGhHi6WFiBCTPmKaJ6h5wkLHJBCnQyDxM9A83/aDS6vZ+YpVIOEA6Hw02Ha97BUf/AiJDgMrH5fJ4aJHJ+PufjVReNnFtRVCu+W45lU3oFZAlzl0UpU50Doip05HSuqrrKVWYNh0M7HA6J6r/BReyf/V6e3W4X1qlH9fY5lk0t0HQ6tTdv3nQuprynK7mimqN2qY58lSI2pwdkqlbpDFi4mqNpVYlYdFCiSuZc7iWnMt7tdlbX9U29x2Aw+CwI4YUejUbWNE26RURl9iKdWa6aGF+wbVt7eXnpXFDJNQiRHu6epBTToEhVq7Erqll1GlWtc90VcVCcmwAACrxJREFUcL2EqkTKZRBzQFCRT9GB8Dkuy9Kapkl3QWFpXcIBKtvlzRk2m83NzlftYLGtKcf8zA7iRluv1/by8pImq67rTtuZnNQqyjoq/psxB/t9ZX1UTWOuKIOtjGoBn5PC33PrKiuBVM9F7u+42WzS/UBSpqZiXu9rb/a5QyXX1qnMH19+hKlYXBQmSBCFs8o36uARsWtoPXCRlannTF/fla7chk3VCCpuoS+TmOt6nsMG7Io4GVZVVSqv9w2AUYY/o1SXGPnkTSYT2263aTPwSWFTGpV95+hf1P+rCc6FjSpe9uew9IuRvVqAqL8P9zJW
dDdrHVUxS870MyBFF8J4Symv2PWWZWmfPn36LesHFqkTskdKF9T3bzab1C2rz4z1UZJ9SFhlBXMKl6jxgqKwOcyMmLk+di66mSPi4/siDH+uK7F4kRWNzkQdvmNVVbZerztrGLahy+n9zMym06nt9/tOKzmFUrmnPi+OuiRSmWVG50p8krs9O6pEVmxlpFKK8EYkic/5dNVeNqoSqus69fCNGL7IdWC3Fr8BHq+miSKPYU7m5AObTCa2Wq06N4pHcW7OOkSMW8TIqYxWdJ2NUteoNHJkbfqijOhEq8ZPqr5PYQR8Vl3XNp1OO4DNDxsKXyJq29+nLEtbrVZWlmVqvhX1D7xer583gKqwwZZxzgp6Sze+4p2JHvbj2EOPTZfKl0eFFLk7DJX5VUxkBEpzXEaEG/pqGThSyfVVwK5gLr9TuYic7tD5G2d08eq6CD+VuZOGoGg+n9vz87ONRqMU80aFnkwv50qjWHUU+SpFJed4dSX3UpEEbiZ1KUVfZjJK5UZ8Apd3I/D2Cx2jqMK1fCyju1wuKXez3W5tNptJt6jmpezrlIUuYjKZ2HK5tPfv33dMlCpcVNfAcZ4g0ujxhsmlaCPhQy4yyYlVuNlUbnHVpsuVdedqIvF6HRVq8vO5OqooCvvll19sOp3enPpcTeJQyb7xA/Dk+HWt7mOwQTKaV9XiPIcbchp8Zaa5aVVOcRRtLKWZU9fh3KO94xI0/gzuzqVOIp78XHdTzgU4abdcLq0sS5tOp507oHNjTyAwusJN9brx8isPDVkurnLUEUDknZm701eRK4rDiKIZhQ8Uq5kDsrmMmxqzN6pi8Mr1+pEWIddxDAmf5+dnO5/PNp1ObyKjHLk1GAw+9wiKZMiqD79fcbbdbq1pmrQJItZNycmia1lytQB48nM5/z7tnGqIpU5ZjuPIJXUUJ6HqF3jTYKgcye/w5Ltia7PZJL8fSe1yX8Mo/FKCD5y42Wxmq9UqScndEmB/2wikReRNn8lVdGjU0JHj9Rxaxw2YUzf16e+jcJjzFZFiKgqVOc+CiZ7VamXT6TTUL0YbIR1s1vb1vTzKyCeTiT0/P3d680Vl11wZpPLbOc38PVU2fRKzqJomB/b6yuD6tP+sqWQ3k0uW4S88YF7csVqtbDKZhAdC3aF4g/VyA4/MOJIX4/HYPn36FE6kMl8RSufIQW08NUG/pwVLpHO8J5uYk55xJBRtXGY7VSWVqkvgQtjn52cbj8dJ5BlhJqar+d6mYdSaVeW+Fdjy27D9/pwoFOpD9VGGL7rWJaex6xOM5iYqp8LN5e9VQUjUExA3nUr2cGYUw1JefGYEo45hkcCljKpflDhCoW7v6z8YDOz5+dnevHmTyqO4x4ACWmoDclu5HE6IFiXCNUq4mdMf5OoK1bWuOYzAvRhVG1hOqfv/1XVt+/3eXl5ebDabpZOv7nlmJXbOIpWR7+U+/1wQya3NncN+eXmxh4cHq+s6AURWDLPPUyxd1FiSFwzVRMpMcx1AdIVaX/dwRZnnupcoSZjqHRxlD/HZTvFuNpvO4vdZS9WxhX+mvAeJRxODcWxRFDYajWw4HNp6vbaHh4d0zx76H54wXxyWROcqd1XpuUoCRQ2ZFXGkZNX3CDgjyTdrG6LOnbzo3PK2qirbbDa22Wzs8fGxo0Jm85+7H5FTyGkDRL6PW8FGBQ58Unx3bjYbO5/PNpvNkjqIawNY0x6d4qhrR7Qhojau0QbpSwblqn4iV6O6nEXEjkqQ+aZ+eXmxpmns8fGxk91TafHcGDEqkMkg7sHTp2Pnxccd5pLv3W5n5/PZFovFjQZfgSN1+yX7RwVMWcmjJkmZXmVdVDzdF3bmLABzIuqwqXa9g8HAVquVtW1r8/m8c/Lv6Q+g6juVdH2owj9uL9ZHKnCrWD/ps9ksXQzl5ozJEG51rkIgJELUBY2KZ8iVb0Wtb6IoQOUT+qxErjsp5wkwDewxvl9mvVgskqRbnXh+h2itIkay7CNbOGd9
j1XAD59Op3Y8Hu35+dlms5nNZrMOYo40dgzwVA6BrQbr6iNFM+sDI+4i14uP9ZN9rWf58xURVJalrddra5rGZrNZquML1TwBfZ/bqNgsItUF9PEAaNpzzYgVZ+9hYlVVtt/vrWkaWywWhjeWRrdps2nnhApLtSMBqQJeEXmUGwsveqT46esn7OJV7HjWtq09PT3ZYDCw+Xze6besOpfcE+Jx5MWHqCiKWz2A6pLdRyxwLaAqw/YXOxwOtlwubTqd2mw2kyVOqmBD5erZp6F5QyyTYxoVgRLJp7CiKCKeGIfkxJ0+9u12mzR8k8kkdWtTauWoiimX9IqIrMvl8tkF5KRPLFmKwoyopy6bYb8O1q2BcwaqVQuesoho6Us65W4fizaYAlJ9esIICPL1tNjy7XA4pBK8+XzeEXDyHCh53D2ZyQivpeqhCLhwL/uIo88lYlRxhZNGdV3b4XCw9XptVVUlgiMifBSC5jHeZLqCzmbMeEY99nJJHQaz7KZUHsRPddM09vr6mrR7KAePxDmRVcoplCOLgPihZFKGOeiI5eoTSOQ2B1LIzhhuNhsrisKm02mnjJk7fkeiFS5Nd1PN16hFt33mJGTKKkUdRqIWMd6Ea7fb2eVySQvPsvFIxaT+LZd3ENKvGzY3WYC+Xvi5+3RyjOE9HILr2cbjsZ1OJ3t9fbXdbmfT6TS5C9fKqVtKWCTCpEeuVWzU01+BwL7iFnUZtYtlmqZJal1vKslhncp9qL7DvQKP4F4mNP2dBhG5erbcxN3LDCr0GsXW4/HYRqORnc9ne319tfV6bePxONHKHisr3MGXQTGZpe7tU5MaXegUsYUM8HxDe/Vzasv+q16Pr7/Dkxm1z4k4GLbWag1yjGGHCu7jAiJFLg+QO4iwv1b4gusFPGw8nU623+/tf//7n5VlabPZrAOUONpgppEznNgGv09MosSuSrGMFul4PHbuPB4Oh+m0R00ocxRuX0kZ1yoii6i6qysLkiWC+pJDSqacawKVa7nGhEsiKn7lCxwnOIj0AhVPQPmFF03TdBZQET54jWr0/qq6B822T/7pdLLD4dBxU2VZdnw8/pzqtJbLQEYUL46Hay9YNa2ypZ0NoNQsfRyzMj0KZavFjSwJ3+zpP1NVVZpUpFAxjPJNgTeg4Wd7Qkq5O743gMeHoahvxuPxmGRaZVmmghnVERSf7xs10laouxiiu5zQEqhEUt9NJWbWZQL7Up4KKEZ3/+QAS3TTJpeFMRPpk435hsvlkjbCer3u+NeyLFPTiaIoUl0DXlLBp7pt29RV43g82vF4TKebGUH06RGhxuKPXIUx8wf3MH2clbw3UeSf+X/9B04mXw6cfAAAAABJRU5ErkJggg==";var ze=Ne;
|
|
136
|
+
*/class l{constructor(e,t,r){this.modelId=e,this.revisionId=t,this.outputs=r}findMostRecentOutput(e,t){const r=this.outputs.filter(r=>r.format===e&&(!t||-1!==t.indexOf(r.version)));return r.length>0?r.reduce((e,t)=>t.version>e.version?t:e):void 0}}
|
|
176
137
|
/*!
|
|
177
138
|
* Copyright 2021 Cognite AS
|
|
178
|
-
*/
|
|
139
|
+
*/class d{constructor(e){this._client=e}async getModelMatrix(e){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:t,revisionId:r,modelFormat:n}=e,i=await this._client.revisions3D.retrieve(t,r),a=new s.Matrix4;return i.rotation&&a.makeRotationFromEuler(new s.Euler(...i.rotation)),c(a,n),a}async getModelCamera(e){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:t,revisionId:r}=e,n=await this._client.revisions3D.retrieve(t,r);if(n.camera&&n.camera.position&&n.camera.target){const{position:e,target:t}=n.camera;return{position:new s.Vector3(e[0],e[1],e[2]),target:new s.Vector3(t[0],t[1],t[2])}}}async getModelUri(e){if(!(e instanceof o))throw new Error(`Model must be a ${o.name}, but got ${e.toString()}`);const{modelId:t,revisionId:r,modelFormat:n}=e,i=(await this.getOutputs(e)).findMostRecentOutput(n);if(!i)throw new Error(`Model '${t}/${r}' is not compatible with this version of Reveal, because no outputs for format '(${n})' was found. If this model works with a previous version of Reveal it must be reprocessed to support this version.`);const s=i.blobId;return`${this._client.getBaseUrl()}${this.getRequestPath(s)}`}async getOutputs(e){const{modelId:t,revisionId:r,modelFormat:n}=e,o=`/api/v1/projects/${this._client.project}/3d/models/${t}/revisions/${r}/outputs`,i=void 0!==n?{params:{format:n}}:void 0,s=await this._client.get(o,i);if(200===s.status)return new l(t,r,s.data.items);throw new Error(`Unexpected response ${s.status} (payload: '${s.data})`)}getRequestPath(e){return`/api/v1/projects/${this._client.project}/3d/files/${e}`}}
|
|
179
140
|
/*!
|
|
180
141
|
* Copyright 2021 Cognite AS
|
|
181
|
-
*/
|
|
182
|
-
class Be{constructor(){this._events={materialsChanged:new s.d},this._renderMode=Te.a.Color,this.materialsMap=new Map,this._clippingPlanes=[]}get clippingPlanes(){return this._clippingPlanes}set clippingPlanes(e){this._clippingPlanes=e;for(const e of this.materialsMap.keys())this.updateClippingPlanesForModel(e);this.triggerMaterialsChanged()}on(e,n){switch(e){case"materialsChanged":this._events.materialsChanged.subscribe(n);break;default:Object(s.k)(e,"Unexpected event '"+e)}}off(e,n){switch(e){case"materialsChanged":this._events.materialsChanged.unsubscribe(n);break;default:Object(s.k)(e,"Unexpected event '"+e)}}addModelMaterials(e,n){const t=new me.c,o=new Me(n+1,t);o.build();const i=new te,a=new Re(n+1,i);a.build();const s=Fe()(()=>this.updateMaterials(e),75,{leading:!0,trailing:!0}),l=()=>this.updateTransforms(e);t.on("changed",s),i.on("changed",l);const d=function(e,n,t,o,i){const a=new r.Texture(ze);a.needsUpdate=!0;const s=new r.ShaderMaterial({name:"Primitives (Box)",clipping:!0,clippingPlanes:n,extensions:{fragDepth:!0},vertexShader:he.boxPrimitive.vertex,fragmentShader:he.boxPrimitive.fragment,side:r.DoubleSide,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},transparent:!1}),l=new r.ShaderMaterial({name:"Primitives (Circle)",clipping:!0,clippingPlanes:n,extensions:{fragDepth:!0},vertexShader:he.circlePrimitive.vertex,fragmentShader:he.circlePrimitive.fragment,side:r.DoubleSide,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},transparent:!1}),d=new r.ShaderMaterial({name:"Primitives (Nuts)",clipping:!0,clippingPlanes:n,vertexShader:he.nutPrimitive.vertex,fragmentShader:he.nutPrimitive.fragment,side:r.DoubleSide,transparent:!1}),c=new r.ShaderMaterial({name:"Primitives (Quads)",clipping:!0,clippingPlanes:n,vertexShader:he.quadPrimitive.vertex,fragmentShader:he.quadPrimitive.fragment,side:r.DoubleSide,transparent:!1}),u=new r.ShaderMaterial({name:"Primitives (General rings)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new 
r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.generalRingPrimitive.vertex,fragmentShader:he.generalRingPrimitive.fragment,side:r.DoubleSide,transparent:!1}),m=new r.ShaderMaterial({name:"Primitives (Cone)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.conePrimitive.vertex,fragmentShader:he.conePrimitive.fragment,side:r.DoubleSide,transparent:!1}),f=new r.ShaderMaterial({name:"Primitives (Eccentric cone)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.eccentricConePrimitive.vertex,fragmentShader:he.eccentricConePrimitive.fragment,side:r.DoubleSide,transparent:!1}),v=new r.ShaderMaterial({name:"Primitives (Ellipsoid segments)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.ellipsoidSegmentPrimitive.vertex,fragmentShader:he.ellipsoidSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),p=new r.ShaderMaterial({name:"Primitives (General cylinder)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.generalCylinderPrimitive.vertex,fragmentShader:he.generalCylinderPrimitive.fragment,side:r.DoubleSide,transparent:!1}),h=new r.ShaderMaterial({name:"Primitives (Trapezium)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.trapeziumPrimitive.vertex,fragmentShader:he.trapeziumPrimitive.fragment,side:r.DoubleSide,transparent:!1}),x=new r.ShaderMaterial({name:"Primitives (Torus segment)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0,derivatives:!0},vertexShader:he.torusSegmentPrimitive.vertex,fragmentShader:he.torusSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),g=new r.ShaderMaterial({name:"Primitives (Spherical 
segment)",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:he.ellipsoidSegmentPrimitive.vertex,fragmentShader:he.ellipsoidSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),b=new r.ShaderMaterial({name:"Triangle meshes",clipping:!0,clippingPlanes:n,extensions:{derivatives:!0},side:r.DoubleSide,fragmentShader:he.detailedMesh.fragment,vertexShader:he.detailedMesh.vertex,transparent:!1}),y={box:s,circle:l,nut:d,generalRing:u,quad:c,cone:m,eccentricCone:f,sphericalSegment:g,torusSegment:x,generalCylinder:p,trapezium:h,ellipsoidSegment:v,instancedMesh:new r.ShaderMaterial({name:"Instanced meshes",clipping:!0,clippingPlanes:n,extensions:{derivatives:!0},side:r.DoubleSide,fragmentShader:he.instancedMesh.fragment,vertexShader:he.instancedMesh.vertex,transparent:!1}),triangleMesh:b,simple:new r.ShaderMaterial({name:"Low detail material",clipping:!0,clippingPlanes:n,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},side:r.FrontSide,fragmentShader:he.simpleMesh.fragment,vertexShader:he.simpleMesh.vertex,transparent:!1})};for(const n of Object.values(y))Oe(n,t,o,i,a,e);return{...y}}(this._renderMode,this._clippingPlanes,o.overrideColorPerTreeIndexTexture,a.overrideTransformIndexTexture,a.transformLookupTexture);this.materialsMap.set(e,{materials:d,perModelClippingPlanes:[],nodeAppearanceProvider:t,nodeTransformProvider:i,nodeAppearanceTextureBuilder:o,nodeTransformTextureBuilder:a,updateMaterialsCallback:s,updateTransformsCallback:l}),this.updateClippingPlanesForModel(e)}getModelMaterials(e){return this.getModelMaterialsWrapper(e).materials}getModelNodeAppearanceProvider(e){return this.getModelMaterialsWrapper(e).nodeAppearanceProvider}getModelNodeTransformProvider(e){return this.getModelMaterialsWrapper(e).nodeTransformProvider}getModelDefaultNodeAppearance(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.getDefaultAppearance()}setModelClippingPlanes(e,n){const 
t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);t.perModelClippingPlanes=n,this.updateClippingPlanesForModel(e),this.triggerMaterialsChanged()}updateClippingPlanesForModel(e){const n=this.materialsMap.get(e);if(void 0===n)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);const t=[...n.perModelClippingPlanes,...this.clippingPlanes];Ee(n.materials,e=>{e.clipping=!0,e.clipIntersection=!1,e.clippingPlanes=t})}setModelDefaultNodeAppearance(e,n){this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.setDefaultAppearance(n),this.updateMaterials(e)}getModelBackTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.regularNodeTreeIndices}getModelInFrontTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.infrontNodeTreeIndices}getModelGhostedTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.ghostedNodeTreeIndices}setRenderMode(e){this._renderMode=e;const n=e===Te.a.Ghost,t=e!==Te.a.DepthBufferOnly;this.applyToAllMaterials(r=>{r.uniforms.renderMode.value=e,r.colorWrite=t,r.transparent=n})}getRenderMode(){return this._renderMode}updateMaterials(e){const n=this.getModelMaterialsWrapper(e);if(n.nodeAppearanceTextureBuilder.needsUpdate){const{nodeAppearanceTextureBuilder:e}=n;e.build()}this.triggerMaterialsChanged()}updateTransforms(e){const n=this.getModelMaterialsWrapper(e);if(n.nodeTransformTextureBuilder.needsUpdate){const{nodeTransformTextureBuilder:e,materials:t}=n;e.build();const o=e.transformLookupTexture,i=new r.Vector2(o.image.width,o.image.height);Ee(t,e=>{e.uniforms.transformOverrideTexture.value=o,e.uniforms.transformOverrideTextureSize.value=i})}this.triggerMaterialsChanged()}getModelMaterialsWrapper(e){const n=this.materialsMap.get(e);if(void 0===n)throw new Error(`Model ${e} has not been added to 
MaterialManager`);return n}applyToAllMaterials(e){for(const n of this.materialsMap.values()){Ee(n.materials,e)}}triggerMaterialsChanged(){this._events.materialsChanged.fire()}}function Ee(e,n){n(e.box),n(e.circle),n(e.generalRing),n(e.nut),n(e.quad),n(e.cone),n(e.eccentricCone),n(e.sphericalSegment),n(e.torusSegment),n(e.generalCylinder),n(e.trapezium),n(e.ellipsoidSegment),n(e.instancedMesh),n(e.triangleMesh),n(e.simple)}
|
|
142
|
+
*/class h{constructor(e){this._client=e}async getOutputs(e){const{modelId:t,revisionId:r,modelFormat:n}=e,o=`/api/v1/projects/${this._client.project}/3d/models/${t}/revisions/${r}/outputs`,i=void 0!==n?{params:{format:n}}:void 0,s=await this._client.get(o,i);if(200===s.status)return new l(t,r,s.data.items);throw new Error(`Unexpected response ${s.status} (payload: '${s.data})`)}}var u=r(16);
|
|
183
143
|
/*!
|
|
184
144
|
* Copyright 2021 Cognite AS
|
|
185
|
-
*/const
|
|
145
|
+
*/async function m(e){const t=await fetch(e);if(!t.ok){const e={};throw t.headers.forEach((t,r)=>{e[t]=r}),new u.HttpError(t.status,t.body,e)}return t}
|
|
186
146
|
/*!
|
|
187
147
|
* Copyright 2021 Cognite AS
|
|
188
|
-
*/
|
|
189
|
-
function Ue({cadModelsMetadata:e,loadingHints:n}){return e.length>0&&!0!==n.suspendLoading}var Ve=t(24),ke=t.n(Ve);
|
|
148
|
+
*/class p{get headers(){return{}}async getBinaryFile(e,t){return(await m(`${e}/${t}`)).arrayBuffer()}async getJsonFile(e,t){return(await m(`${e}/${t}`)).json()}}
|
|
190
149
|
/*!
|
|
191
150
|
* Copyright 2021 Cognite AS
|
|
192
|
-
*/
|
|
193
|
-
class Le{static async*raceUntilAllCompleted(e){const n=new Map(e.map(e=>[e,e.then(()=>[e])]));for(;n.size>0;){const[e]=await Promise.race(n.values());n.delete(e),yield e}}}var We=t(10);
|
|
194
|
-
/*!
|
|
195
|
-
* Copyright 2021 Cognite AS
|
|
196
|
-
*/class je{constructor(e,n,t,r,o){this._sectorRepository=e,this._sectorCuller=n,this._modelStateHandler=t,this._collectStatisticsCallback=r,this._progressCallback=o}async*loadSectors(e){if(e.cameraInMotion)return[];const n=this._sectorCuller.determineSectors(e);this._collectStatisticsCallback(n.spentBudget);const t=this._modelStateHandler.hasStateChanged.bind(this._modelStateHandler),r=n.wantedSectors.filter(t),o=new qe(this._progressCallback);o.start(r.length);for(const n of ke()(r,20)){const t=await this.filterSectors(e,n,o),r=this.startLoadingBatch(t,o);for await(const e of Le.raceUntilAllCompleted(r))this._modelStateHandler.updateState(e),yield e}}async filterSectors(e,n,t){const r=await this._sectorCuller.filterSectorsToLoad(e,n);return t.reportNewSectorsCulled(n.length-r.length),r}startLoadingBatch(e,n){return e.map(async e=>{try{return await this._sectorRepository.loadSector(e)}catch(n){We.a.error("Failed to load sector",e,"error:",n);return function(e){return{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:o.a.Discarded,group:void 0,instancedMeshes:void 0}}(e)}finally{n.reportNewSectorsLoaded(1)}})}}class qe{constructor(e){this._sectorsScheduled=0,this._sectorsLoaded=0,this._sectorsCulled=0,this._progressCallback=e}start(e){this._sectorsScheduled=e,this._sectorsLoaded=0,this._sectorsCulled=0,this.triggerCallback()}reportNewSectorsLoaded(e){this._sectorsLoaded+=e,this.triggerCallback()}reportNewSectorsCulled(e){this._sectorsCulled+=e,this._sectorsLoaded+=e,this.triggerCallback()}triggerCallback(){this._progressCallback(this._sectorsLoaded,this._sectorsScheduled,this._sectorsCulled)}}
|
|
151
|
+
*/class f{constructor(e){this.revealInternalId=Symbol(e),this.localPath=e}toString(){return`${f.name} (${this.localPath})`}}
|
|
197
152
|
/*!
|
|
198
153
|
* Copyright 2021 Cognite AS
|
|
199
|
-
*/
|
|
154
|
+
*/class g{getModelUri(e){if(!(e instanceof f))throw new Error(`Model must be a ${f.name}, but got ${e.toString()}`);return Promise.resolve(`${location.origin}/${e.localPath}`)}async getModelMatrix(e){if(!(e instanceof f))throw new Error(`Model must be a ${f.name}, but got ${e.toString()}`);const t=new s.Matrix4;return c(t,i.RevealCadModel),t}getModelCamera(e){if(!(e instanceof f))throw new Error(`Model must be a ${f.name}, but got ${e.toString()}`);return Promise.resolve(void 0)}}
|
|
200
155
|
/*!
|
|
201
156
|
* Copyright 2021 Cognite AS
|
|
202
|
-
*/
|
|
157
|
+
*/},function(e,t){e.exports=require("assert")},function(e,t,r){"use strict";r.d(t,"a",(function(){return c}));var n=r(12),o=r.n(n),i=r(10);const{VERSION:s,MIXPANEL_TOKEN:a}={VERSION:"2.2.0",WORKER_VERSION:"1.2.0",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d",IS_DEVELOPMENT_MODE:!1};class c{constructor(e,t,r){o.a.init(a,{disable_cookie:!0,disable_persistence:!0,ip:!1,property_blacklist:["$city","$region","mp_country_code","$geo_source","$timezone","mp_lib","$lib_version","$device_id","$user_id","$current_url","$screen_width","$screen_height","$referrer","$referring_domain","$initial_referrer","$initial_referring_domain"]}),o.a.reset(),o.a.identify("reveal-single-user"),this._sessionProps={VERSION:s,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))},e&&(this._sessionProps.project=e),t&&(this._sessionProps.application=t),this.innerTrackEvent("init",r)}static init(e,t,r,n){if(void 0===globalThis.revealMetricsLogger&&e){const e=new c(t,r,n);globalThis.revealMetricsLogger={metricsLogger:e}}}innerTrackEvent(e,t){const r={...this._sessionProps,...t};o.a.track(e,r)}static trackEvent(e,t){globalThis.revealMetricsLogger&&globalThis.revealMetricsLogger.metricsLogger.innerTrackEvent(e,t)}static trackCreateTool(e){c.trackEvent("toolCreated",{toolName:e})}static trackLoadModel(e,t){c.trackEvent("loadModel",{...e,modelIdentifier:t})}static trackCadModelStyled(e,t){c.trackEvent("cadModelStyleAssigned",{nodeCollectionClassToken:e,style:t})}static trackError(e,t){i.a.error(e),this.trackEvent("error",{message:e.message,name:e.name,stack:e.stack,...t})}static trackCameraNavigation(e){c.trackEvent("cameraNavigated",e)}}
|
|
203
158
|
/*!
|
|
204
159
|
* Copyright 2021 Cognite AS
|
|
205
|
-
*/
|
|
160
|
+
*/},,function(e,t,r){"use strict";var n=r(22);
|
|
206
161
|
/*!
|
|
207
162
|
* Copyright 2021 Cognite AS
|
|
208
|
-
*/
|
|
163
|
+
*/t.a=n},function(e,t){e.exports=require("@tweenjs/tween.js")},function(e,t){e.exports=require("mixpanel-browser")},function(e,t,r){"use strict";r.d(t,"a",(function(){return i})),r.d(t,"b",(function(){return s}));var n=r(0);
|
|
209
164
|
/*!
|
|
210
165
|
* Copyright 2021 Cognite AS
|
|
211
166
|
*/
|
|
212
|
-
|
|
167
|
+
const o={renderSize:new n.Vector2,position:new n.Vector3};function i(e,t,r=new n.Vector3){const{position:i}=o;i.copy(t),i.project(e);const s=(i.x+1)/2,a=(1-i.y)/2;return r.set(s,a,i.z)}function s(e,t,r,s=new n.Vector3){i(t,r,s);const{renderSize:a}=o;e.getSize(a);const c=e.domElement,{width:l,height:d}=c.getBoundingClientRect();return s.x=Math.round(s.x*l),s.y=Math.round(s.y*d),s}},function(e,t){e.exports=require("lodash/cloneDeep")},function(e,t){e.exports=require("geo-three")},function(e,t){e.exports=require("@cognite/sdk-core")},function(e,t,r){"use strict";var n=r(13);r.d(t,"d",(function(){return n.b}));var o=r(18);r.d(t,"a",(function(){return o.a}));var i=r(1);r.d(t,"c",(function(){return i.k})),r.d(t,"b",(function(){return i.d}));r(6);
|
|
213
168
|
/*!
|
|
214
169
|
* Copyright 2021 Cognite AS
|
|
215
|
-
*/
|
|
216
|
-
/*!
|
|
217
|
-
* Copyright 2021 Cognite AS
|
|
218
|
-
*/},,,function(e,n,t){"use strict";var r=t(22);
|
|
219
|
-
/*!
|
|
220
|
-
* Copyright 2021 Cognite AS
|
|
221
|
-
*/n.a=r},function(e,n){e.exports=require("mixpanel-browser")},function(e,n){e.exports=require("@tweenjs/tween.js")},function(e,n,t){"use strict";t.d(n,"a",(function(){return i})),t.d(n,"b",(function(){return a}));var r=t(0);
|
|
170
|
+
*/},function(e,t,r){"use strict";r.d(t,"a",(function(){return o}));var n=r(0);
|
|
222
171
|
/*!
|
|
223
172
|
* Copyright 2021 Cognite AS
|
|
224
173
|
*/
|
|
225
|
-
const o={renderSize:new r.Vector2,position:new r.Vector3};function i(e,n,t,i=new r.Vector3){const{renderSize:a,position:s}=o,l=e.domElement;e.getSize(a),s.copy(t),s.project(n);const{width:d,height:c}=l.getBoundingClientRect(),u=a.width/d,m=a.height/c,f=(s.x+1)/(1*u*2),v=(1-s.y)/(1*m*2);return i.set(f,v,s.z)}function a(e,n,t,a=new r.Vector3){i(e,n,t,a);const{renderSize:s}=o;e.getSize(s);const l=e.getPixelRatio();return a.x=Math.round(a.x*s.width/l),a.y=Math.round(a.y*s.height/l),a}},function(e,n){e.exports=require("lodash/cloneDeep")},function(e,n){e.exports=require("geo-three")},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int 
RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * instanceMatrix * vec4(normalize(normal), 0.0)).xyz;\n //v_normal = normal;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n vViewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}"},function(e,n,t){"use strict";t.d(n,"a",(function(){return o}));var r=t(0);
|
|
174
|
+
class o{constructor(e){this._clippingPlanes=[new n.Plane,new n.Plane,new n.Plane,new n.Plane,new n.Plane,new n.Plane],this._box=e||new n.Box3,this.updatePlanes()}set minX(e){this._box.min.x=e,this.updatePlanes()}get minX(){return this._box.min.x}set minY(e){this._box.min.y=e,this.updatePlanes()}get minY(){return this._box.min.y}set minZ(e){this._box.min.z=e,this.updatePlanes()}get minZ(){return this._box.min.z}set maxX(e){this._box.max.x=e,this.updatePlanes()}get maxX(){return this._box.max.x}set maxY(e){this._box.max.y=e,this.updatePlanes()}get maxY(){return this._box.max.y}set maxZ(e){this._box.max.z=e,this.updatePlanes()}get maxZ(){return this._box.max.z}updatePlanes(){this._clippingPlanes[0].setFromNormalAndCoplanarPoint(new n.Vector3(1,0,0),new n.Vector3(this.minX,0,0)),this._clippingPlanes[1].setFromNormalAndCoplanarPoint(new n.Vector3(-1,0,0),new n.Vector3(this.maxX,0,0)),this._clippingPlanes[2].setFromNormalAndCoplanarPoint(new n.Vector3(0,1,0),new n.Vector3(0,this.minY,0)),this._clippingPlanes[3].setFromNormalAndCoplanarPoint(new n.Vector3(0,-1,0),new n.Vector3(0,this.maxY,0)),this._clippingPlanes[4].setFromNormalAndCoplanarPoint(new n.Vector3(0,0,1),new n.Vector3(0,0,this.minZ)),this._clippingPlanes[5].setFromNormalAndCoplanarPoint(new n.Vector3(0,0,-1),new n.Vector3(0,0,this.maxZ))}get clippingPlanes(){return this._clippingPlanes}}},function(e,t){e.exports=require("lodash/range")},,function(e,t){e.exports=require("comlink")},function(e,t){e.exports=require("loglevel")},function(e,t){e.exports=require("lodash/debounce")},function(e,t){e.exports=require("skmeans")},,,,,,,,function(e,t){e.exports=require("lodash/merge")},,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,function(e,t,r){"use strict";r.r(t),r.d(t,"HtmlOverlayTool",(function(){return u})),r.d(t,"ExplodedViewTool",(function(){return p})),r.d(t,"DebugCameraTool",(function(){return f})),r.d(t,"AxisViewTool",(function(){return S})),r.d(t,"Corner",(function(){return g})),r.d(t,"GeomapTool",(function(){return 
z})),r.d(t,"MapProviders",(function(){return I})),r.d(t,"MapboxMode",(function(){return E})),r.d(t,"MapboxStyle",(function(){return k})),r.d(t,"MapboxImageFormat",(function(){return D})),r.d(t,"BingMapType",(function(){return T})),r.d(t,"BingMapImageFormat",(function(){return A})),r.d(t,"HereMapType",(function(){return N})),r.d(t,"HereMapScheme",(function(){return F})),r.d(t,"HereMapImageFormat",(function(){return R})),r.d(t,"TimelineTool",(function(){return j})),r.d(t,"Keyframe",(function(){return $})),r.d(t,"Cognite3DViewerToolBase",(function(){return i})),r.d(t,"DebugLoadedSectorsTool",(function(){return G}));var n=r(0),o=r(17);
|
|
226
175
|
/*!
|
|
227
176
|
* Copyright 2021 Cognite AS
|
|
228
177
|
*/
|
|
229
|
-
/* NOTE(review): "removed"-side copy of the clipping-box class from the diff
   (identical logic, module alias `r` instead of `n` for the three.js import),
   followed by webpack modules whose default exports are GLSLify shader-source
   strings used by the CAD renderer:
   - a trivial uv-passthrough vertex shader;
   - fragment shaders (Quad / TriangleMesh / InstancedMesh variants) that look
     up per-treeIndex visibility and override color from data textures, apply
     clipping-plane slicing (isSliced), and dispatch on renderMode (color,
     normal, tree-index packing, depth, ghost, LOD, geometry-type debug);
   - vertex shaders that unpack a per-node 4x3 transform override from an RGBA
     float-packed texture (determineMatrixOverride / unpackVec4ToFloat).
   The last module (constructMatrix shader) is truncated by the chunk boundary.
   Line breaks below fall inside string literals — this is diff-rendering text,
   so the code is reproduced byte-identical and not restructured. */
class o{constructor(e){this._clippingPlanes=[new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane],this._box=e||new r.Box3,this.updatePlanes()}set minX(e){this._box.min.x=e,this.updatePlanes()}get minX(){return this._box.min.x}set minY(e){this._box.min.y=e,this.updatePlanes()}get minY(){return this._box.min.y}set minZ(e){this._box.min.z=e,this.updatePlanes()}get minZ(){return this._box.min.z}set maxX(e){this._box.max.x=e,this.updatePlanes()}get maxX(){return this._box.max.x}set maxY(e){this._box.max.y=e,this.updatePlanes()}get maxY(){return this._box.max.y}set maxZ(e){this._box.max.z=e,this.updatePlanes()}get maxZ(){return this._box.max.z}updatePlanes(){this._clippingPlanes[0].setFromNormalAndCoplanarPoint(new r.Vector3(1,0,0),new r.Vector3(this.minX,0,0)),this._clippingPlanes[1].setFromNormalAndCoplanarPoint(new r.Vector3(-1,0,0),new r.Vector3(this.maxX,0,0)),this._clippingPlanes[2].setFromNormalAndCoplanarPoint(new r.Vector3(0,1,0),new r.Vector3(0,this.minY,0)),this._clippingPlanes[3].setFromNormalAndCoplanarPoint(new r.Vector3(0,-1,0),new r.Vector3(0,this.maxY,0)),this._clippingPlanes[4].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,1),new r.Vector3(0,0,this.minZ)),this._clippingPlanes[5].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,-1),new r.Vector3(0,0,this.maxZ))}get clippingPlanes(){return this._clippingPlanes}}},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,n,t){"use strict";var r=t(13);t.d(n,"d",(function(){return r.b}));var o=t(18);t.d(n,"a",(function(){return o.a}));var i=t(1);t.d(n,"c",(function(){return i.k})),t.d(n,"b",(function(){return i.d}))},function(e,n){e.exports=require("comlink")},function(e,n){e.exports=require("loglevel")},function(e,n){e.exports=require("lodash/throttle")},function(e,n){e.exports=require("lodash/chunk")},,function(e,n,t){"use 
strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 
cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, 
outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D 
matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n updateFragmentColor(renderMode, color, v_treeIndex, v_normal, gl_FragCoord.z, matCapTexture, GeometryType.Quad);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = 
vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec3 color;\nattribute float treeIndex;\nattribute vec4 matrix0;\nattribute vec4 matrix1;\nattribute vec4 matrix2;\nattribute vec4 matrix3;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n \n mat4 
treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = treeIndex;\n v_color = color;\n v_normal = normalize(normalMatrix * (inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(normal), 0.0)).xyz);\n mat4 instanceMatrix = mat4(matrix0, matrix1, matrix2, matrix3);\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst 
GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == 
RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int 
RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform 
sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main()\n{\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.TriangleMesh);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / 
dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec3 color;\nattribute float treeIndex; \n\nvarying vec3 v_color;\nvarying float v_treeIndex;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_color = color;\n v_treeIndex = treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n 
treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(position, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int 
RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor 
= isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.InstancedMesh);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 
256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; 
i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_color = a_color;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * modelMatrix * vec4(transformed, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n v_treeIndex = a_treeIndex;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + 
e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if 
(renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, 
outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform 
sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n vec3 normal = normalize( v_normal );\n if (dist > 0.25)\n discard;\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D 
transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 
a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_normal;\n\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\nvarying float v_treeIndex;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_xy = vec2(position.x, position.y);\n v_treeIndex = a_treeIndex;\n\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec3 normal = normalize( v_normal );\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = v_V;\n float height = length(v_centerA.xyz - v_centerB.xyz);\n float R2 = v_centerA.w;\n float dR = R2 - R1;\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n #if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n #else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n #endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR / height;\n float dRdRL2Inv = dRLInv * dRLInv;\n a -= D.z * D.z * dRdRL2Inv;\n b -= dRLInv * (E.z * D.z * dRLInv + R1 * D.z);\n c -= dRLInv * (E.z * E.z * dRLInv + 2.0 * R1 * E.z);\n }\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0) {\n discard;\n }\n\n float sqrtd = 
sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n bool isInner = false;\n\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n #if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n if (R1 != R2)\n {\n // Find normal vector\n vec3 n = -normalize(W.xyz);\n vec3 P1 = v_centerB.xyz;\n vec3 P2 = v_centerA.xyz;\n vec3 A = cross(P1 - p, P2 - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = P1 + R1 * t;\n vec3 o2 = P2 + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n }\n else\n {\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n }\n #endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// 
https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n 
float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_color;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 
modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float maxRadius = max(a_radiusA, a_radiusB);\n float leftUpScale = maxRadius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (maxRadius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0.0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n float radiusB = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusB, 0.0)).xyz);\n float radiusA = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusA, 0.0)).xyz);\n\n // We pack radii as w-components of v_centerB\n v_centerB.xyz = 
mul3(modelViewMatrix, centerB);\n v_centerB.w = radiusB;\n\n v_V.xyz = -cross(v_U.xyz, v_W.xyz);\n v_V.w = surfacePoint.y;\n\n v_centerA.xyz = mul3(modelViewMatrix, centerA);\n v_centerA.w = radiusA;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default='#define GLSLIFY 1\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float 
projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 
= 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number 
/ 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 
0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\n\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\nvarying float height;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, 
v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n mat3 basis = mat3(U.xyz, V.xyz, axis.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, axis.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerA.xyz;\n vec3 E = diff * basis;\n float L = height;\n vec3 D = rayDirection * basis;\n\n float R1 = v_centerA.w;\n float R2 = v_centerB.w;\n float dR = R2 - R1;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy)-R1*R1;\n float L2Inv = 1.0/(L*L);\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR/L;\n float dRdRL2Inv = dRLInv*dRLInv;\n a -= D.z*D.z*dRdRL2Inv;\n b -= dRLInv*(E.z*D.z*dRLInv + R1*D.z);\n c -= dRLInv*(E.z*E.z*dRLInv + 2.0*R1*E.z);\n }\n\n // Additional terms when one of the center points is displaced orthogonal to normal vector\n vec2 displacement = ((v_centerB.xyz-v_centerA.xyz)*basis).xy; // In the basis where displacement is in XY only\n float displacementLengthSquared = dot(displacement, displacement);\n a += D.z*(D.z*displacementLengthSquared - 2.0*L*dot(D.xy, displacement))*L2Inv;\n b += (D.z*E.z*displacementLengthSquared - L*(D.x*E.z*displacement.x + D.y*E.z*displacement.y + D.z*E.x*displacement.x + D.z*E.y*displacement.y))*L2Inv;\n c += E.z*(E.z*displacementLengthSquared - 2.*L*dot(E.xy, displacement))*L2Inv;\n\n // Calculate a dicriminant of the above quadratic equation (factor 2 removed from all b-terms above)\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long eccentric cone\n if (d < 0.0) {\n discard;\n }\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the 
smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n // Check the smallest root, it is closest camera. Only test if the z-component is outside the truncated eccentric cone\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n bool isInner = false;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Either intersection point is behind starting point (happens inside the cone),\n // or the intersection point is outside the end caps. This is not a valid solution.\n isInner = true;\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector\n vec3 n = normalize(-axis.xyz);\n vec3 v_centerA = v_centerA.xyz;\n vec3 v_centerB = v_centerB.xyz;\n vec3 A = cross(v_centerA - p, v_centerB - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = v_centerA + R1 * t;\n vec3 o2 = v_centerB + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = 
mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n 
float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_normal;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\nvarying float height;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 normalWithOffset = normalize((modelTransformOffset * vec4(a_normal, 0)).xyz);\n\n float uniformScaleFactor = length(mul3(modelMatrix, normalize(vec3(1.0))));\n\n height = dot(centerA - centerB, 
normalWithOffset) * uniformScaleFactor;\n\n vec3 lDir;\n vec3 center = 0.5 * (centerA + centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n // Find the coordinates of centerA and centerB projected down to the end cap plane\n vec3 maxCenterProjected = centerA - dot(centerA, normalWithOffset) * normalWithOffset;\n vec3 minCenterProjected = centerB - dot(centerB, normalWithOffset) * normalWithOffset;\n float distanceBetweenProjectedCenters = length(maxCenterProjected - minCenterProjected);\n\n lDir = normalWithOffset;\n float dirSign = 1.0;\n if (dot(objectToCameraModelSpace, lDir) < 0.0) { // direction vector looks away, flip it\n dirSign = -1.0;\n lDir *= -1.;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n // compute basis for cone\n axis.xyz = -normalWithOffset;\n U.xyz = cross(objectToCameraModelSpace, axis.xyz);\n V.xyz = cross(U.xyz, axis.xyz);\n // Transform to camera space\n axis.xyz = normalize(normalMatrix * axis.xyz);\n U.xyz = normalize(normalMatrix * U.xyz);\n V.xyz = normalize(normalMatrix * V.xyz);\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radiusA * (position.x + 1.0) * 0.0025 / height);\n#endif\n\n v_centerA.xyz = mul3(viewMatrix, mul3(modelMatrix, centerA));\n v_centerB.xyz = mul3(viewMatrix, mul3(modelMatrix, centerB));\n\n float radiusA = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusA, 0.0)).xyz);\n float radiusB = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusB, 0.0)).xyz);\n\n // Pack radii as w components of v_centerA and v_centerB\n v_centerA.w = radiusA;\n v_centerB.w = radiusB;\n\n float radiusIncludedDisplacement = 0.5*(2.0*max(a_radiusA, a_radiusB) + 
distanceBetweenProjectedCenters);\n vec3 surfacePoint = center + mat3(0.5 * height * lDir * (1.0 / uniformScaleFactor), radiusIncludedDisplacement*left, radiusIncludedDisplacement*up) * newPosition;\n vec3 transformed = surfacePoint;\n\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n axis.w = surfacePoint.z;\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // 
Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, 
renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include 
<packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(sphereNormal.xyz);\n\n float vRadius = center.w;\n float ratio = vRadius / hRadius;\n mat3 basis = mat3(U.xyz, V.xyz, sphereNormal.xyz);\n mat3 scaledBasis = mat3(ratio * U.xyz, ratio * V.xyz, sphereNormal.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, sphereNormal.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - center.xyz;\n vec3 E = diff * scaledBasis;\n vec3 D = rayDirection * scaledBasis;\n\n float a = dot(D, D);\n float b = dot(E, D);\n float c = dot(E, E) - vRadius*vRadius;\n\n // discriminant of sphere equation (factor 2 removed from b above)\n float d = b*b - a*c;\n if(d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the first 
point, check the other point\n\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector in local space\n normal = vec3(p - center.xyz) * basis;\n normal.z = normal.z * (hRadius / vRadius) * (hRadius / vRadius);\n // Transform into camera space\n normal = normalize(basis * normal);\n if (dot(normal, rayDirection) > 0.) {\n normal = -normal;\n }\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b 
* 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\nuniform mat4 inverseNormalMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_center;\nattribute vec3 a_normal;\nattribute float a_horizontalRadius;\nattribute float a_verticalRadius;\nattribute float a_height;\n\nvarying float v_treeIndex;\n// We pack vRadius as w-component of center\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\n// U, V, axis represent the 3x3 sphere basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 
transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerWithOffset = mul3(modelTransformOffset, a_center).xyz;\n\n vec3 normalWithOffset = (modelTransformOffset * vec4(a_normal, 0)).xyz;\n\n vec3 lDir;\n float distanceToCenterOfSegment = a_verticalRadius - a_height * 0.5;\n vec3 centerOfSegment = centerWithOffset + normalWithOffset * distanceToCenterOfSegment;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 objectToCameraModelSpace = inverseNormalMatrix * vec3(0.0, 0.0, 1.0);\n#else\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - centerOfSegment;\n#endif\n\n vec3 newPosition = position;\n\n float bb = dot(objectToCameraModelSpace, normalWithOffset);\n if (bb < 0.0) { // direction vector looks away, flip it\n lDir = -normalWithOffset;\n } else { // direction vector already looks in my direction\n lDir = normalWithOffset;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_verticalRadius * (position.x + 1.0) * 0.0025 / a_height);\n#endif\n\n // Negative angle means height larger than radius,\n // so we should have full size so we can render the largest part of the ellipsoid segment\n float ratio = max(0.0, 1.0 - a_height / a_verticalRadius);\n // maxRadiusOfSegment is the radius of the circle (projected ellipsoid) when ellipsoid segment is seen from above\n float maxRadiusOfSegment = a_horizontalRadius * sqrt(1.0 - 
ratio * ratio);\n\n vec3 displacement = vec3(newPosition.x*a_height*0.5, maxRadiusOfSegment*newPosition.y, maxRadiusOfSegment*newPosition.z);\n vec3 surfacePoint = centerOfSegment + mat3(lDir, left, up) * displacement;\n vec3 transformed = surfacePoint;\n\n v_treeIndex = a_treeIndex;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n center.xyz = mul3(modelViewMatrix, centerWithOffset);\n center.w = a_verticalRadius; // Pack radius into w-component\n hRadius = a_horizontalRadius;\n height = a_height;\n v_color = a_color;\n\n // compute basis\n sphereNormal.xyz = normalMatrix * normalWithOffset;\n U.xyz = normalMatrix * up;\n V.xyz = normalMatrix * left;\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n sphereNormal.w = surfacePoint.z;\n\n // TODO should perhaps be a different normal?\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n'},function(e,n,t){"use strict";t.r(n),n.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\n// TODO general cylinder and cone are very similar and used\n// the same shader in the old code. Consider de-duplicating\n// parts of this code\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform float dataTextureWidth;\nuniform float dataTextureHeight;\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_surfacePointY;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = vec4(normalize(cross(W.xyz, U.xyz)), v_surfacePointY);\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make 
sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n vec3 planeACenter = vec3(0.0, 0.0, v_planeA.w);\n vec3 planeANormal = v_planeA.xyz;\n vec3 planeBCenter = vec3(0.0, 0.0, v_planeB.w);\n vec3 planeBNormal = v_planeB.xyz;\n bool isInner = false;\n\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle || isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n 
float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / 
overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radius;\nattribute vec3 a_color;\n// slicing plane attributes\nattribute vec4 a_planeA;\nattribute vec4 a_planeB;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_surfacePointY;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * 
modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float leftUpScale = a_radius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n // We pack radii as w-components of v_centerB\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n float radius = length((modelToTransformOffset * vec4(a_localXAxis * a_radius, 0.0)).xyz);\n\n centerB = centerB - dir;\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radius;\n\n vec4 planeA = a_planeA;\n planeA.w = length((modelToTransformOffset * vec4(planeA.xyz * planeA.w, 0.0)).xyz);\n\n vec4 planeB = 
a_planeB;\n planeB.w = length((modelToTransformOffset * vec4(planeB.xyz * planeB.w, 0.0)).xyz);\n\n v_planeA = planeA;\n v_planeB = planeB;\n v_surfacePointY = surfacePoint.y;\n v_centerB.w = radius;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = 
float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = 
floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int 
geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if 
(any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n float theta = atan(v_xy.y, v_xy.x);\n vec3 normal = normalize( v_normal );\n if (theta < v_angle) {\n theta += 2.0 * PI;\n }\n if (dist > 0.25 || dist < 0.25 * v_oneMinusThicknessSqr || theta >= v_angle + v_arcAngle) {\n discard;\n }\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign 
* exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = 
unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_angle;\nattribute float a_arcAngle;\nattribute float a_thickness;\nattribute vec3 a_normal;\n\nvarying float v_treeIndex;\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n v_treeIndex = a_treeIndex;\n v_oneMinusThicknessSqr = (1.0 - a_thickness) * (1.0 - a_thickness);\n v_xy = vec2(position.x, position.y);\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * 
treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * 
hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_arcAngle;\nattribute float a_radius;\nattribute float a_tubeRadius;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n // normalized theta and phi are packed into positions\n float theta = position.x * a_arcAngle;\n float phi = position.y;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n vec3 pos3 = vec3(0);\n\n pos3.x = (a_radius + a_tubeRadius*cos(phi)) * cosTheta;\n pos3.y = (a_radius + a_tubeRadius*cos(phi)) * sinTheta;\n pos3.z = a_tubeRadius*sin(phi);\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n vec3 transformed = (instanceMatrix * vec4(pos3, 1.0)).xyz;\n\n // Calculate normal vectors if we're not picking\n vec3 center = (instanceMatrix * vec4(a_radius * cosTheta, 
a_radius * sinTheta, 0.0, 1.0)).xyz;\n vec3 objectNormal = normalize(transformed.xyz - center);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n\n vViewPosition = modelViewPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n 
return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 
255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], 
matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_vertex1;\nattribute vec3 a_vertex2;\nattribute vec3 a_vertex3;\nattribute vec3 a_vertex4;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n vec3 transformed;\n // reduce the avarage branchings\n if (position.x < 1.5) {\n transformed = position.x == 0.0 ? a_vertex1 : a_vertex2;\n } else {\n transformed = position.x == 2.0 ? a_vertex3 : a_vertex4;\n }\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 objectNormal = cross(a_vertex1 - a_vertex2, a_vertex1 - a_vertex3);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 
173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \nmat3 G[9];\n// hard coded matrix values!!!! as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45\nconst mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );\nconst mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );\nconst mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );\nconst mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );\nconst mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );\nconst mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );\nconst mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );\nconst mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );\nconst mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 
0.3333333432674408, 0.3333333432674408 );\n\nfloat edgeDetectionFilter(sampler2D baseTexture, vec2 uv, vec2 resolution) {\n\n vec2 texel = vec2(1.0 / resolution.x, 1.0 / resolution.y);\n\n\tG[0] = g0,\n\tG[1] = g1,\n\tG[2] = g2,\n\tG[3] = g3,\n\tG[4] = g4,\n\tG[5] = g5,\n\tG[6] = g6,\n\tG[7] = g7,\n\tG[8] = g8;\n\n\tmat3 I;\n\tfloat cnv[9];\n\tvec3 neighbour;\n\n\t/* fetch the 3x3 neighbourhood and use the RGB vector's length as intensity value */\n\tfor (int i=0; i<3; i++) {\n\t\tfor (int j=0; j<3; j++) {\n\t\t\tneighbour = texture2D(baseTexture, uv + texel * vec2(float(i)-1.0,float(j)-1.0) ).rgb;\n\t\t\tI[i][j] = length(neighbour);\n\t\t}\n\t}\n\n\t/* calculate the convolution values for all the masks */\n\tfor (int i=0; i<9; i++) {\n\t\tfloat dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);\n\t\tcnv[i] = dp3 * dp3;\n\t}\n\n\tfloat M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);\n\tfloat S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);\n\n float edgeStrength = sqrt(M/S);\n\n return edgeStrength;\n}\n\n#include <packing>\n\nvarying vec2 vUv;\n\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nuniform sampler2D tFront;\nuniform sampler2D tFrontDepth;\n\nuniform sampler2D tBack;\nuniform sampler2D tBackDepth;\n\nuniform sampler2D tCustom;\nuniform sampler2D tCustomDepth;\n\nuniform sampler2D tGhost;\nuniform sampler2D tGhostDepth;\n\nuniform sampler2D tOutlineColors;\n\nuniform float cameraNear;\nuniform float cameraFar;\n\nuniform vec2 resolution;\n\nuniform float edgeStrengthMultiplier;\nuniform float edgeGrayScaleIntensity;\n\nconst float infinity = 1e20;\n\nfloat computeFloatEncodedOutlineIndex(float bitEncodedFloat){\n return floatBitsSubset(floor((bitEncodedFloat * 255.0) + 0.5), 3, 6);\n}\n\nvec4 computeNeighborOutlineIndices(sampler2D colorTexture){\n float outlineIndex0 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv0).a);\n float outlineIndex1 = 
computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv1).a);\n float outlineIndex2 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv2).a);\n float outlineIndex3 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv3).a);\n\n return vec4(outlineIndex0, outlineIndex1, outlineIndex2, outlineIndex3);\n}\n\nfloat toViewZ(float depth, float near, float far){\n float normalizedDepth = depth * 2.0 - 1.0;\n return 2.0 * near * far / (far + near - normalizedDepth * (far - near)); \n}\n\nvec4 computeNeighborAlphas(sampler2D colorTexture){\n float alpha0 = texture2D(colorTexture, vUv0).a;\n float alpha1 = texture2D(colorTexture, vUv1).a;\n float alpha2 = texture2D(colorTexture, vUv2).a;\n float alpha3 = texture2D(colorTexture, vUv3).a;\n\n return vec4(alpha0, alpha1, alpha2, alpha3);\n}\n\nvoid main() {\n vec4 frontAlbedo = texture2D(tFront, vUv);\n vec4 backAlbedo = texture2D(tBack, vUv);\n vec4 customAlbedo = texture2D(tCustom, vUv);\n vec4 ghostAlbedo = texture2D(tGhost, vUv);\n\n float frontDepth = texture2D(tFrontDepth, vUv).r;\n float backDepth = texture2D(tBackDepth, vUv).r; \n float customDepth = texture2D(tCustomDepth, vUv).r;\n float ghostDepth = texture2D(tGhostDepth, vUv).r;\n\n // This is a hack to make sure that all textures are initialized\n // If a texture is unused, it will have a clear value of 0.0.\n // Without this we've seen issues with MSAA where resizing render targets\n // causes depth to cleared to either 1 or 0 depending on the device/browser\n customDepth = customDepth > 0.0 ? customDepth : 1.0; \n backDepth = backDepth > 0.0 ? backDepth : 1.0;\n ghostDepth = ghostDepth > 0.0 ? ghostDepth : 1.0;\n frontDepth = frontDepth > 0.0 ? 
frontDepth : 1.0; \n\n if(all(greaterThanEqual(vec4(backDepth, customDepth, ghostDepth, frontDepth), vec4(1.0)))){\n discard;\n }\n \n // Decompose and clamp \"ghost\" color\n vec4 clampedGhostAlbedo = vec4(max(ghostAlbedo.rgb, 0.5), min(ghostAlbedo.a, 0.8));\n\n float frontOutlineIndex = computeFloatEncodedOutlineIndex(frontAlbedo.a);\n vec4 frontNeighborIndices = computeNeighborOutlineIndices(tFront);\n\n // There exsists fragments of rendered objects within the edge width that should have border\n if(any(equal(frontNeighborIndices, vec4(0.0))) && frontOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(frontNeighborIndices.x, frontNeighborIndices.y), max(frontNeighborIndices.z, frontNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = frontDepth;\n#endif\n return;\n }\n\n // texture has drawn fragment\n if(frontDepth < 1.0){\n float customDepthTest = step(customDepth, backDepth); // zero if back is in front\n\n float a = customDepthTest > 0.0 ? 
ceil(customAlbedo.a) * 0.5 : ceil(backAlbedo.a) * 0.5;\n\n gl_FragColor = vec4(frontAlbedo.rgb, 1.0) * (1.0 - a) + (vec4(backAlbedo.rgb, 1.0) * (1.0 - customDepthTest) + vec4(customAlbedo.rgb, 1.0) * customDepthTest) * a;\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = texture2D(tFrontDepth, vUv).r;\n#endif\n return;\n }\n\n if (customDepth >= backDepth) {\n float backOutlineIndex = computeFloatEncodedOutlineIndex(backAlbedo.a);\n vec4 backNeighborIndices = computeNeighborOutlineIndices(tBack);\n\n if( any(equal(backNeighborIndices, vec4(0.0))) && backOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(backNeighborIndices.x, backNeighborIndices.y), max(backNeighborIndices.z, backNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tBackDepth, vUv).r;\n#endif\n return;\n }\n }\n \n float edgeStrength = 0.0;\n#if defined(EDGES)\n if (!any(equal(computeNeighborAlphas(tBack), vec4(0.0)))) {\n float depthEdge = toViewZ(backDepth, cameraNear, cameraFar);\n edgeStrength = (1.0 - smoothstep(10.0, 40.0, depthEdge)) * edgeDetectionFilter(tBack, vUv, resolution) * edgeStrengthMultiplier;\n }\n#endif\n\n // Combine color from ghost, back and custom object\n vec4 color = backAlbedo;\n float depth = backDepth;\n if (customDepth < backDepth && ghostDepth == 1.0) {\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * backAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else if (customDepth < backDepth && ghostDepth < 1.0) {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n vec3 modelAlbedo = mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s);\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * modelAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else {\n float s = (1.0 - 
step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n color = vec4(mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s), backAlbedo.a);\n depth = mix(backDepth, ghostDepth, s);\n }\n \n gl_FragColor = color * (1.0 - edgeStrength) + vec4(vec3(edgeGrayScaleIntensity) * edgeStrength, 1.0);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = depth;\n#endif\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\n// selection outline\nuniform vec2 texelSize;\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nvoid main() {\n vUv = uv;\n\n // selection outline\n vUv0 = vec2(uv.x + texelSize.x, uv.y);\n vUv1 = vec2(uv.x - texelSize.x, uv.y);\n vUv2 = vec2(uv.x, uv.y + texelSize.y);\n vUv3 = vec2(uv.x, uv.y - texelSize.y);\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 v_rgbSE;\nvarying vec2 v_rgbM;\n\nuniform vec2 inverseResolution;\nuniform vec2 resolution;\nuniform sampler2D tDiffuse;\nuniform sampler2D tDepth;\n\n#ifndef FXAA_REDUCE_MIN\n #define FXAA_REDUCE_MIN (1.0/ 128.0)\n#endif\n#ifndef FXAA_REDUCE_MUL\n #define FXAA_REDUCE_MUL (1.0 / 8.0)\n#endif\n#ifndef FXAA_SPAN_MAX\n #define FXAA_SPAN_MAX 8.0\n#endif\n\nvec4 fxaa(sampler2D tex, vec2 fragCoord,\n vec2 resolution, vec2 inverseResolution,\n vec2 v_rgbNW, vec2 v_rgbNE,\n vec2 v_rgbSW, vec2 v_rgbSE,\n vec2 v_rgbM) {\n vec4 color;\n\n vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;\n vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;\n vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;\n vec3 rgbSE = texture2D(tex, v_rgbSE).xyz;\n vec4 texColor = texture2D(tex, v_rgbM);\n vec3 
rgbM = texColor.xyz;\n\n vec3 luma = vec3(0.299, 0.587, 0.114);\n float lumaNW = dot(rgbNW, luma);\n float lumaNE = dot(rgbNE, luma);\n float lumaSW = dot(rgbSW, luma);\n float lumaSE = dot(rgbSE, luma);\n float lumaM = dot(rgbM, luma);\n float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));\n float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));\n\n mediump vec2 dir;\n dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));\n dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));\n\n float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *\n (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);\n\n float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);\n dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),\n max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),\n dir * rcpDirMin));\n\n vec4 rgbA = 0.5 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (1.0 / 3.0 - 0.5))) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (2.0 / 3.0 - 0.5))));\n vec4 rgbB = rgbA * 0.5 + 0.25 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * -0.5)) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * 0.5)));\n\n float lumaB = dot(rgbB.rgb, luma);\n if ((lumaB < lumaMin) || (lumaB > lumaMax)) {\n color = rgbA;\n } else {\n color = rgbB;\n }\n return color;\n}\n\nvoid main() {\n gl_FragColor = fxaa(tDiffuse, v_fragCoord, \n resolution, inverseResolution, \n v_rgbNW, v_rgbNE, v_rgbSW, v_rgbSE, v_rgbM);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tDepth, v_uv).r;\n#endif\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nuniform vec2 resolution;\nuniform vec2 inverseResolution;\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 
v_rgbSE;\nvarying vec2 v_rgbM;\n\nvoid main() {\n v_fragCoord = uv * resolution;\n v_rgbNW = (v_fragCoord + vec2(-1.0, -1.0)) * inverseResolution;\n v_rgbNE = (v_fragCoord + vec2(1.0, -1.0)) * inverseResolution;\n v_rgbSW = (v_fragCoord + vec2(-1.0, 1.0)) * inverseResolution;\n v_rgbSE = (v_fragCoord + vec2(1.0, 1.0)) * inverseResolution;\n v_rgbM = vec2(v_fragCoord * inverseResolution);\n v_uv = uv;\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nuniform mat4 projMatrix;\nuniform mat4 inverseProjectionMatrix;\n\nuniform vec3 kernel[MAX_KERNEL_SIZE];\n\nuniform sampler2D tDepth;\nuniform sampler2D tNoise;\n\nuniform vec2 resolution;\n\nuniform float sampleRadius;\nuniform float bias;\n\nvec3 viewPosFromDepth(float depth, vec2 uv) {\n // Depth to clip space: [0, 1] -> [-1, 1]\n float z = depth * 2.0 - 1.0;\n\n // Fragment in clip space\n vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = inverseProjectionMatrix * clipSpacePosition;\n\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n\n return viewSpacePosition.xyz;\n}\n\nvec3 computeWorldNormalFromDepth(sampler2D depthTexture, vec2 resolution, vec2 uv, float sampleDepth){\n float dx = 1.0 / resolution.x;\n float dy = 1.0 / resolution.y;\n\n vec2 uv1 = uv + vec2(dx, 0.0); // right\n float d1 = texture2D(depthTexture, uv1).r; \n\n vec2 uv2 = uv + vec2(0.0, dy); // up\n float d2 = texture2D(depthTexture, uv2).r;\n\n vec2 uv3 = uv + vec2(-dx, 0.0); // left\n float d3 = texture2D(depthTexture, uv3).r;\n\n vec2 uv4 = uv + vec2(0.0, -dy); // down\n float d4 = texture2D(depthTexture, uv4).r;\n\n bool horizontalSampleCondition = abs(d1 - sampleDepth) < abs(d3 - sampleDepth);\n\n float horizontalSampleDepth = horizontalSampleCondition ? d1 : d3;\n vec2 horizontalSampleUv = horizontalSampleCondition ? 
uv1 : uv3;\n\n bool verticalSampleCondition = abs(d2 - sampleDepth) < abs(d4 - sampleDepth);\n\n float verticalSampleDepth = verticalSampleCondition ? d2 : d4;\n vec2 verticalSampleUv = verticalSampleCondition ? uv2 : uv4;\n\n vec3 viewPos = viewPosFromDepth(sampleDepth, vUv);\n \n vec3 viewPos1 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv) : viewPosFromDepth(verticalSampleDepth, verticalSampleUv);\n vec3 viewPos2 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(verticalSampleDepth, verticalSampleUv) : viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv);\n\n return normalize(cross(viewPos1 - viewPos, viewPos2 - viewPos));\n}\n\nvoid main(){\n float d = texture2D(tDepth, vUv).r;\n\n vec3 viewNormal = computeWorldNormalFromDepth(tDepth, resolution, vUv, d);\n\n vec3 viewPosition = viewPosFromDepth(d, vUv);\n\n vec2 noiseScale = vec2( resolution.x / 128.0, resolution.y / 128.0 );\n vec3 randomVec = normalize(texture2D(tNoise, vUv * noiseScale).xyz);\n\n vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));\n\n vec3 bitangent = cross(viewNormal, tangent);\n\n mat3 TBN = mat3(tangent, bitangent, viewNormal);\n\n float occlusion = 0.0;\n\n for (int i = 0; i < MAX_KERNEL_SIZE; i++){\n \n vec3 sampleVector = TBN * kernel[i];\n sampleVector = viewPosition + sampleVector * sampleRadius;\n\n vec4 offset = projMatrix * vec4(sampleVector, 1.0);\n offset.xyz /= offset.w;\n offset.xyz = offset.xyz * 0.5 + 0.5;\n\n float realDepth = texture2D(tDepth, offset.xy).r;\n vec3 realPos = viewPosFromDepth(realDepth, offset.xy);\n\n float rangeCheck = smoothstep(0.0, 1.0, sampleRadius / length(viewPosition - realPos));\n\n occlusion += (realPos.z >= sampleVector.z + bias ? 
1.0 : 0.0) * rangeCheck;\n }\n\n float occlusionFactor = 1.0 - clamp(occlusion / float(MAX_KERNEL_SIZE), 0.0, 1.0);\n\n gl_FragColor = vec4(vec3(occlusionFactor), 1.0);\n}"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// Copyright Cognite (C) 2019 Cognite\n//\n// Efficient Gaussian blur based on technique described by Daniel Rákos in\n// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n//\n\nvarying vec2 vUv;\n\nuniform sampler2D tDiffuse;\nuniform sampler2D tAmbientOcclusion;\n\nuniform vec2 resolution;\n\nvoid main() {\n float blurredAO = 0.5 * (\n 2.0 * texture2D(tAmbientOcclusion, vUv).r * 0.2270270270 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703\n );\n\n gl_FragColor = vec4(texture2D(tDiffuse, vUv).rgb * blurredAO, 1.0);\n}\n\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) 
{\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp float v_visible;\nvarying lowp vec2 v_seed;\n\nvarying vec3 v_viewPosition;\n\nvoid main() {\n \n if(v_visible != 1.0 || isSliced(v_viewPosition)){\n discard;\n }\n\n float v = rand2d(gl_FragCoord.xy + v_seed);\n if (v >= v_coverageFactor) {\n discard;\n }\n\n gl_FragColor = vec4(v_color, 1.0);\n}\n"},function(e,n,t){"use strict";t.r(n),n.default="#define GLSLIFY 1\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nattribute mediump float a_sectorId;\nattribute lowp vec3 a_coverageFactor;\nattribute lowp float a_visible;\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp vec2 v_seed;\nvarying lowp float v_visible;\n\nvarying vec3 v_viewPosition;\n\nvoid main()\n{\n v_visible = a_visible;\n v_color = packIntToColor(a_sectorId);\n v_coverageFactor = abs(dot(a_coverageFactor, normal));\n // A seed to ensure that two overlapping sectors A and B \n // doesn't produce the same noise pattern\n v_seed = vec2(a_sectorId / 255.0, a_sectorId / 65025.0);\n\n vec4 mvPosition = modelViewMatrix * instanceMatrix * vec4( position, 1.0 );\n\n v_viewPosition = mvPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 
1.0);\n}\n"},,,,,,function(e,n){e.exports=require("lodash/merge")},function(e,n,t){"use strict";
|
|
230
178
|
/*!
|
|
231
179
|
* Copyright 2021 Cognite AS
|
|
232
180
|
*/
|
|
233
|
-
|
|
181
|
+
class i{constructor(){this._disposedEvent=new o.b,this._disposed=!1}on(e,t){switch(e){case"disposed":this._disposedEvent.subscribe(t);break;default:Object(o.c)(e)}}off(e,t){switch(e){case"disposed":this._disposedEvent.unsubscribe(t);break;default:Object(o.c)(e)}}dispose(){if(this._disposed)throw new Error("Already disposed");this._disposed=!0,this._disposedEvent.fire(),this._disposedEvent.unsubscribeAll()}ensureNotDisposed(){if(this._disposed)throw new Error("The tool has been disposed")}}var s=r(19),a=r.n(s);
|
|
234
182
|
/*!
|
|
235
183
|
* Copyright 2021 Cognite AS
|
|
236
184
|
*/
|
|
237
|
-
|
|
185
|
+
class c{constructor(e,t){this._removedElements=new Set,this._dimensions=t,this._cells=a()(0,t[0]*t[1]).map(()=>new Array),this._bounds=e}insert(e,t){if(!this._bounds.intersectsBox(e))throw new Error("Element to be added must be partially inside grid");if(this._removedElements.has(t))throw new Error("Re-adding previously taken elements is currently not supported");for(const r of this.cellsIntersecting(e))r.push({bounds:e,element:t})}*overlappingElements(e){if(!this._bounds.intersectsBox(e))return;const t=new Set;for(const r of this.cellsIntersecting(e))for(const n of r)t.has(n.element)||this._removedElements.has(n.element)||!e.intersectsBox(n.bounds)||(t.add(n.element),yield n.element)}*removeOverlappingElements(e){if(this._bounds.intersectsBox(e))for(const t of this.cellsIntersecting(e))for(let r=0;r<t.length;r++){const n=t[r];!this._removedElements.has(n.element)&&e.intersectsBox(n.bounds)&&(this._removedElements.add(n.element),yield n.element)}}*cellsIntersecting(e){const{min:t,max:r}=this._bounds,o=this._dimensions[0],i=this._dimensions[1],s=(e.min.x-t.x)/(r.x-t.x),a=(e.max.x-t.x)/(r.x-t.x),c=(e.min.y-t.y)/(r.y-t.y),l=(e.max.y-t.y)/(r.y-t.y),d=n.MathUtils.clamp,h=d(Math.floor(o*s),0,o-1),u=d(Math.floor(o*a),0,o-1),m=d(Math.floor(i*c),0,i-1),p=d(Math.floor(i*l),0,i-1);for(let e=m;e<=p;++e)for(let t=h;t<=u;++t)yield this._cells[e*o+t]}}var l=r(8),d=r(23),h=r.n(d);
|
|
238
186
|
/*!
|
|
239
187
|
* Copyright 2021 Cognite AS
|
|
240
188
|
*/
|
|
189
|
+
class u extends i{constructor(e,t){super(),this._htmlOverlays=new Map,this._compositeOverlays=[],this._preallocatedVariables={camPos:new n.Vector3,camNormal:new n.Vector3,point:new n.Vector3,nearPlane:new n.Plane,farPlane:new n.Plane,position2D:new n.Vector2},this._onSceneRenderedHandler=this.onSceneRendered.bind(this),this._onViewerDisposedHandler=this.onViewerDisposed.bind(this),this._options=null!=t?t:{},this._viewer=e,this._viewer.on("sceneRendered",this._onSceneRenderedHandler),this._viewer.on("disposed",this._onViewerDisposedHandler),this.scheduleUpdate=h()(()=>this.forceUpdate(),20),l.a.trackCreateTool("HtmlOverlayTool")}get viewerDomElement(){return this._viewer.domElement}get viewerCamera(){return this._viewer.getCamera()}get viewerRenderer(){return this._viewer.renderer}get elements(){return Array.from(this._htmlOverlays.entries()).map(([e,t])=>({element:e,position3D:t.position3D}))}dispose(){this._viewer.off("sceneRendered",this._onSceneRenderedHandler),this._viewer.off("disposed",this._onViewerDisposedHandler),this.clear(),super.dispose()}add(e,t,r={}){if(this.ensureNotDisposed(),this.viewerDomElement.contains(e))throw new Error("Element is already attached to viewer");e.style.visibility="hidden",this.viewerDomElement.appendChild(e);const o=getComputedStyle(e);if("absolute"!==o.position)throw this.viewerDomElement.removeChild(e),new Error(`htmlElement style must have a position of absolute. 
but was '${o.position}'`);const i={position3D:t,options:r,state:{position2D:new n.Vector2,width:-1,height:-1,visible:!0}};this._htmlOverlays.set(e,i),this.scheduleUpdate()}remove(e){if(this.ensureNotDisposed(),!this.viewerDomElement.contains(e)||!this._htmlOverlays.has(e))throw new Error("Element is not attached to viewer");this.viewerDomElement.removeChild(e),this._htmlOverlays.delete(e)}clear(){const e=Array.from(this._htmlOverlays.keys());for(const t of e)this.remove(t)}forceUpdate(){if(this.ensureNotDisposed(),this.cleanupClusterElements(),0===this._htmlOverlays.size)return;this.updateNewElementSizes();const e=this.viewerCamera,t=this.viewerRenderer,{camPos:r,camNormal:n,point:i,nearPlane:s,farPlane:a,position2D:c}=this._preallocatedVariables;e.getWorldPosition(r),e.getWorldDirection(n),i.copy(r).addScaledVector(n,e.near),s.setFromNormalAndCoplanarPoint(n,i),i.copy(r).addScaledVector(n,e.far),a.setFromNormalAndCoplanarPoint(n,i),this._htmlOverlays.forEach((n,i)=>{const{position3D:l,options:{positionUpdatedCallback:d,userData:h},state:u}=n,m=s.distanceToPoint(l)>=0&&a.distanceToPoint(l)<=0,{x:p,y:f}=Object(o.d)(t,e,l);if(m?(u.position2D.set(p,f),u.visible=!0):u.visible=!1,d){c.set(p,f);const e=r.distanceTo(l);d(i,c,l,e,h)}}),this.clusterElements(),this.commitDOMChanges()}updateNewElementSizes(){this._htmlOverlays.forEach((e,t)=>{if(-1===e.state.width){const r=t.getBoundingClientRect();e.state.width=r.width,e.state.height=r.height}})}commitDOMChanges(){const e=this.viewerRenderer.domElement,t=e.offsetLeft,r=e.offsetTop;this._htmlOverlays.forEach((e,n)=>{const{state:o}=e;n.style.left=o.position2D.x+t+"px",n.style.top=o.position2D.y+r+"px",o.visible&&"visible"!==n.style.visibility?function(e){e.style.visibility="visible",e.style.opacity="1",e.style.transition="opacity 0.2s linear"}(n):o.visible||"hidden"===n.style.visibility||function(e){e.style.visibility="hidden",e.style.opacity="0",e.style.transition="visibility 0s 0.2s, opacity 0.2s 
linear"}(n)}),this._compositeOverlays.forEach(e=>{this.viewerDomElement.appendChild(e)})}clusterElements(){const e=this._options.clusteringOptions;if(void 0!==e)switch(e.mode){case"overlapInScreenSpace":this.clusterByOverlapInScreenSpace(e.createClusterElementCallback);break;default:Object(o.c)(e.mode,"Unsupported clustering mode: '"+e.mode)}}cleanupClusterElements(){this._compositeOverlays.forEach(e=>{this.viewerDomElement.removeChild(e)}),this._compositeOverlays.splice(0)}clusterByOverlapInScreenSpace(e){const t=this.viewerRenderer.domElement,r=(o=t.getBoundingClientRect(),(i=null!=i?i:new n.Box2).min.set(o.left,o.top),i.max.set(o.right,o.bottom),i);var o,i;const s=r.getSize(new n.Vector2);r.set(new n.Vector2(0,0),s);const a=new c(r,[10,10]);for(const[e,t]of this._htmlOverlays.entries()){const{state:n}=t,o=m(t);n.visible&&o.intersectsBox(r)&&a.insert(o,{htmlElement:e,...t})}const l=new n.Box2,d=new n.Vector2;for(const t of this._htmlOverlays.values()){const{state:n}=t;if(m(t,l),!n.visible||!l.intersectsBox(r))continue;const o=Array.from(a.removeOverlappingElements(l));if(o.length>1){const t=o.reduce((e,t)=>e.add(t.state.position2D),d.set(0,0)).divideScalar(o.length),r=e(o.map(e=>({htmlElement:e.htmlElement,userData:e.options.userData})));o.forEach(e=>e.state.visible=!1),this.addComposite(r,t)}}}addComposite(e,t){const r=this.viewerRenderer.domElement;e.style.visibility="visible",e.style.left=t.x+r.offsetLeft+"px",e.style.top=t.y+r.offsetTop+"px",this._compositeOverlays.push(e)}onSceneRendered(){this.forceUpdate()}onViewerDisposed(){this.dispose()}}function m(e,t){const{state:r}=e;return(t=null!=t?t:new n.Box2).min.set(r.position2D.x,r.position2D.y),t.max.set(r.position2D.x+r.width,r.position2D.y+r.height),t}
|
|
241
190
|
/*!
|
|
242
191
|
* Copyright 2021 Cognite AS
|
|
243
|
-
*/
|
|
244
|
-
class i{constructor(){this._disposedEvent=new o.b,this._disposed=!1}on(e,n){switch(e){case"disposed":this._disposedEvent.subscribe(n);break;default:Object(o.c)(e)}}off(e,n){switch(e){case"disposed":this._disposedEvent.unsubscribe(n);break;default:Object(o.c)(e)}}dispose(){if(this._disposed)throw new Error("Already disposed");this._disposed=!0,this._disposedEvent.fire(),this._disposedEvent.unsubscribeAll()}ensureNotDisposed(){if(this._disposed)throw new Error("The tool has been disposed")}}
|
|
192
|
+
*/class p extends i{constructor(e,t){super(),this._cadModel=t,this._rootTreeIndex=e,this.preloadBoundingBoxData(t,e),l.a.trackCreateTool("ExplodedViewTool")}get readyPromise(){return this._treeBoundingBoxdata.then()}async expand(e){const t=await this._treeBoundingBoxdata;await Promise.all(t.map(({treeIndex:t,direction:r,transform:n})=>0===e?(this._cadModel.resetNodeTransformByTreeIndex(t),Promise.resolve(0)):(n.setPosition(r.x*e,r.y*e,r.z*e),this._cadModel.setNodeTransformByTreeIndex(t,n))))}reset(){this._cadModel.resetNodeTransformByTreeIndex(this._rootTreeIndex,!0)}preloadBoundingBoxData(e,t){const r=e.getBoundingBoxByTreeIndex(t).then(e=>e.getCenter(new n.Vector3)),o=e.getSubtreeTreeIndices(t).then(e=>{if(e.count>1e3)throw new Error(`Subtree size of ${e.count} is too large (max size = 1000)`);return e}).then(t=>Promise.all(t.toArray().map(async t=>({subTreeIndex:t,subTreeIndexBoundingBoxCenter:(await e.getBoundingBoxByTreeIndex(t)).getCenter(new n.Vector3)}))));this._treeBoundingBoxdata=Promise.all([r,o]).then(e=>{const[t,r]=e;return r.map(({subTreeIndex:e,subTreeIndexBoundingBoxCenter:r})=>({treeIndex:e,direction:(new n.Vector3).subVectors(r,t),transform:new n.Matrix4}))}).then(async t=>(await Promise.all(t.map(t=>e.setNodeTransformByTreeIndex(t.treeIndex,t.transform))),t))}}
|
|
245
193
|
/*!
|
|
246
194
|
* Copyright 2021 Cognite AS
|
|
247
|
-
*/class
|
|
195
|
+
*/class f extends i{constructor(e){super(),this._onViewerDisposedHandler=this.onViewerDisposed.bind(this),this._viewer=e,this._viewer.on("disposed",this._onViewerDisposedHandler)}get viewerCamera(){return this._viewer.getCamera()}dispose(){this._viewer.off("disposed",this._onViewerDisposedHandler),super.dispose()}showCameraHelper(){this.hideCameraHelper(),this._cameraHelper=new n.CameraHelper(this.viewerCamera.clone()),this._viewer.addObject3D(this._cameraHelper)}hideCameraHelper(){void 0!==this._cameraHelper&&(this._viewer.removeObject3D(this._cameraHelper),this._cameraHelper=void 0)}onViewerDisposed(){this.dispose()}}var g,v=r(14),w=r.n(v),x=r(32),_=r.n(x),y=r(11),b=r.n(y);!function(e){e[e.TopRight=0]="TopRight",e[e.TopLeft=1]="TopLeft",e[e.BottomLeft=2]="BottomLeft",e[e.BottomRight=3]="BottomRight"}(g||(g={}));const C={ringLabel:"N",labelDelta:Math.PI,fontSize:void 0,fontColor:new n.Color(16711680),tickColor:new n.Color(9737364)},M={label:"",fontSize:void 0,fontColor:new n.Color(3355443),outlineSize:void 0,outlineColor:new n.Color(3355443),faceColor:new n.Color(9737364)},B={size:128,position:{corner:g.BottomRight,padding:new n.Vector2},animationSpeed:200,faces:{xPositiveFace:{...M,label:"Right"},xNegativeFace:{...M,label:"Left"},yPositiveFace:{...M,label:"Up"},yNegativeFace:{...M,label:"Down"},zPositiveFace:{...M,label:"Front"},zNegativeFace:{...M,label:"Back"}},compass:C};
|
|
248
196
|
/*!
|
|
249
197
|
* Copyright 2021 Cognite AS
|
|
250
|
-
*/
|
|
198
|
+
*/
|
|
199
|
+
class S extends i{constructor(e,t){super(),this._dynamicUpdatePosition=()=>{},this._updateClickDiv=()=>{},this._screenPosition=new n.Vector2,this._boxFaceGeometry=new n.PlaneGeometry(.85,.85,1,1),this._raycastCamera=new n.OrthographicCamera(-1,1,1,-1,-1,1),this._raycaster=new n.Raycaster,this._viewer=e,this._layoutConfig=_()(w()(B),t),this._axisGroup=new n.Group,this._interactiveObjects=this.createAxisCross(this._axisGroup),[this._updateClickDiv,this._disposeClickDiv]=this.createClickDiv(e),this.addAxisBoxToViewer(this._axisGroup,this._layoutConfig.position),l.a.trackCreateTool("AxisViewTool")}dispose(){super.dispose(),this._viewer.removeUiObject(this._axisGroup),this._disposeClickDiv()}createClickDiv(e){if(null===e.domElement.querySelector("canvas"))throw new Error("Could not find canvas");const t=document.createElement("div");t.style.position="absolute",t.style.height=this._layoutConfig.size+"px",t.style.width=this._layoutConfig.size+"px",t.style.zIndex="1";let r=0,n=0;return t.addEventListener("mousedown",t=>{const o=new MouseEvent("mousedown",{clientX:t.clientX,clientY:t.clientY,button:t.button});r=t.clientX,n=t.clientY,e.renderer.domElement.dispatchEvent(o)}),t.addEventListener("mousemove",t=>{const r=new MouseEvent("mousemove",{clientX:t.clientX,clientY:t.clientY,button:t.button});e.renderer.domElement.dispatchEvent(r)}),t.addEventListener("contextmenu",e=>e.preventDefault()),t.addEventListener("mouseup",t=>{const o=new MouseEvent("mouseup",{clientX:t.clientX,clientY:t.clientY,button:t.button}),i=e.domElement.getBoundingClientRect(),s=t.clientX-i.left,a=t.clientY-i.top;Math.abs(r-t.clientX)+Math.abs(n-t.clientY)<=10&&!this.handleClick(s,a,i)&&e.renderer.domElement.dispatchEvent(o)}),e.domElement.appendChild(t),[()=>{t.style.left=this._screenPosition.x+"px",t.style.bottom=this._screenPosition.y+"px"},()=>{e.domElement.removeChild(t)}]}addAxisBoxToViewer(e,t){const r=this._layoutConfig.size;if(function(e){return void 0!==e.xAbsolute&&void 
0!==e.yAbsolute}(t))this._screenPosition.x=t.xAbsolute,this._screenPosition.y=t.yAbsolute;else{switch(t.corner){case g.BottomRight:this._screenPosition.y=t.padding.y,this._dynamicUpdatePosition=()=>{this._screenPosition.x=this._viewer.renderer.domElement.clientWidth-t.padding.x-r};break;case g.TopRight:this._dynamicUpdatePosition=()=>{this._screenPosition.x=this._viewer.renderer.domElement.clientWidth-t.padding.x-r,this._screenPosition.y=this._viewer.renderer.domElement.clientHeight-t.padding.y-r};break;case g.TopLeft:this._screenPosition.x=t.padding.x,this._dynamicUpdatePosition=()=>{this._screenPosition.y=this._viewer.renderer.domElement.clientHeight-t.padding.y-r};break;case g.BottomLeft:this._screenPosition.x=t.padding.x,this._screenPosition.y=t.padding.y;break;default:throw new Error("Unknown corner position for Axis Cross: Corner = "+t.corner)}this._dynamicUpdatePosition()}this._viewer.addUiObject(e,this._screenPosition,new n.Vector2(r,r))}handleClick(e,t,r){const o=2*(e-this._screenPosition.x)/this._layoutConfig.size-1,i=2*(r.height-t-this._screenPosition.y)/this._layoutConfig.size-1;this._raycaster.setFromCamera({x:o,y:i},this._raycastCamera);const s=new n.Vector3(o,i,1),a=new n.Vector3(0,0,-1).normalize();this._raycaster.set(s,a);const c=this._raycaster.intersectObjects(this._interactiveObjects);if(!(c.length>0))return!1;const l=c[0].object.position.clone().normalize(),d=c[0].object.userData.upVector.clone();return this.moveCameraTo(this._viewer.getCamera(),this._viewer.cameraControls,l,d),!0}createAxisCross(e){const t=this.createBoxFaces();e.add(...t);const r=this.createCompass();return e.add(r),this.setupTransformOnRender(e),t}setupTransformOnRender(e){e.children[0].onBeforeRender=()=>{this._dynamicUpdatePosition(),e.quaternion.copy(this._viewer.getCamera().quaternion).invert(),e.updateMatrixWorld(),this._updateClickDiv()}}createBoxFaces(){const e=this._layoutConfig.faces;return[this.createBoxFace(new 
n.Vector3(1,0,0),e.xPositiveFace),this.createBoxFace(new n.Vector3(-1,0,0),e.xNegativeFace),this.createBoxFace(new n.Vector3(0,1,0),e.yPositiveFace,new n.Vector3(0,0,-1)),this.createBoxFace(new n.Vector3(0,-1,0),e.yNegativeFace,new n.Vector3(0,0,1)),this.createBoxFace(new n.Vector3(0,0,1),e.zPositiveFace),this.createBoxFace(new n.Vector3(0,0,-1),e.zNegativeFace)]}createCompass(){const e=new n.PlaneGeometry(2.1,2.1,1,1),t=new n.Mesh(e,new n.MeshBasicMaterial({map:this.createCompassTexture(),side:n.DoubleSide,transparent:!0})),r=Math.sin(this._layoutConfig.compass.labelDelta),o=Math.cos(this._layoutConfig.compass.labelDelta);return t.position.y=-.5,t.up.copy(new n.Vector3(r,0,o)),t.lookAt(0,0,0),t}createCompassTexture(){var e;const t=this._layoutConfig.compass,r=this._layoutConfig.size,o=document.createElement("canvas");o.width=r,o.height=r;const i=o.getContext("2d"),s=r/2,a=s-s/4,c=r/32,l=r/8;if(i.strokeStyle=t.tickColor.getStyle(),i.lineWidth=r/16,i.setLineDash([c,l]),i.beginPath(),i.arc(s,s,a,0,2*Math.PI),i.stroke(),t.ringLabel&&t.ringLabel.length>0){const n=null!==(e=t.fontSize)&&void 0!==e?e:r/5;i.font=`bold ${n}px Arial`,i.textAlign="center",i.fillStyle=t.fontColor.getStyle(),i.fillText(t.ringLabel,s,s*(1/4)+n/3)}return new n.CanvasTexture(o)}getFaceTexture(e,t){var r,o;const i=t/2,s=document.createElement("canvas");s.width=i,s.height=i;const a=s.getContext("2d");a.fillStyle=e.outlineColor.getStyle(),a.fillRect(0,0,i,i),a.fillStyle=e.faceColor.getStyle();const c=null!==(r=e.outlineSize)&&void 0!==r?r:i/32;if(a.fillRect(c,c,i-2*c,i-2*c),a.fill(),""!==e.label){const t=null!==(o=e.fontSize)&&void 0!==o?o:i/3;a.font=`bold ${t}px Arial`,a.textAlign="center",a.fillStyle=e.fontColor.getStyle(),a.fillText(e.label,i/2,i/2+t/3)}return new n.CanvasTexture(s)}createBoxFace(e,t,r=new n.Vector3(0,1,0)){const o=new n.Mesh(this._boxFaceGeometry,new n.MeshBasicMaterial({map:this.getFaceTexture(t,this._layoutConfig.size)}));return 
o.position.copy(e.multiplyScalar(.5*this._boxFaceGeometry.parameters.width)),o.lookAt(e.multiplyScalar(2)),o.userData.upVector=r,o}moveCameraTo(e,t,r,o){const i=e.position.clone(),s=t.getState().target,a=i.clone().sub(s),c=a.length(),l=a.clone().normalize(),d=Math.acos(l.dot(r)),h={t:0},u=new b.a.Tween(h),m=r.clone(),p=e.quaternion.clone(),f=(new n.Quaternion).setFromRotationMatrix((new n.Matrix4).makeBasis(o.clone().cross(m),o,m));if(p.angleTo(f)<1e-6)return;const g=new n.Vector3,v=new n.Quaternion;let w;u.to({t:1},this._layoutConfig.animationSpeed).onUpdate(()=>{g.copy(l).multiplyScalar(Math.sin((1-h.t)*d)/Math.sin(d)).add(r.clone().multiplyScalar(Math.sin(h.t*d)/Math.sin(d))),g.multiplyScalar(c),g.add(s),v.slerpQuaternions(p,f,h.t),e.position.copy(g),e.setRotationFromQuaternion(v)}).start(b.a.now()).onStart(()=>{w=t.enabled,t.enabled=!1}).onComplete(()=>{t.setState(e.position,s),t.enabled=w}).update(b.a.now())}}var I,E,k,P,D,T,A,N,F,R,O=r(15);
|
|
251
200
|
/*!
|
|
252
201
|
* Copyright 2021 Cognite AS
|
|
253
|
-
|
|
202
|
+
*/!function(e){e.BingMap="BingMap",e.HereMap="HereMap",e.MapboxMap="MapboxMap",e.OpenStreetMap="OpenStreetMap"}(I||(I={})),function(e){e[e.Style=100]="Style",e[e.Map_Id=101]="Map_Id"}(E||(E={})),function(e){e.Streets="mapbox/streets-v10",e.Outdoor="mapbox/outdoors-v10",e.Light="mapbox/light-v9",e.Dark="mapbox/dark-v9",e.Satellite="mapbox/satellite-v9",e.Satellite_Streets="mapbox/satellite-streets-v10",e.Navigation_Day="mapbox/navigation-preview-day-v4",e.Navigation_Night="mapbox/navigation-preview-night-v4",e.Navigation_Guide_Day="mapbox/navigation-guidance-day-v4",e.Navigation_Guide_Night="mapbox/navigation-guidance-night-v4"}(k||(k={})),function(e){e.Streets="mapbox.mapbox-streets-v7",e.Satellite="mapbox.satellite",e.Terrain="mapbox.mapbox-terrain-v2",e.Traffic="mapbox.mapbox-traffic-v1",e.TerrainRGB="mapbox.terrain-rgb"}(P||(P={})),function(e){e.PNG="png",e.PNG32="png32",e.PNG64="png64",e.PNG128="png128",e.PNG256="png256",e.JPG70="jpg70",e.JPG80="jpg80",e.JPG90="jpg90",e.PNGRAW="pngraw"}(D||(D={})),function(e){e.Aerial="a",e.Road="r",e.Aerial_Labels="h",e.Oblique="o",e.Oblique_Labels="b"}(T||(T={})),function(e){e.GIF="gif",e.JPEG="jpeg",e.PNG="png"}(A||(A={})),function(e){e.Aerial="aerial",e.Base="base",e.Pano="pano",e.Traffic="traffic"}(N||(N={})),function(e){e.Day="normal.day",e.Night="normal.night",e.Terrain="terrain.day",e.Satellite="satellite.day"}(F||(F={})),function(e){e.PNG="png",e.PNG8="png8",e.JPG="jpg"}(R||(R={}));
|
|
254
203
|
/*!
|
|
255
204
|
* Copyright 2021 Cognite AS
|
|
256
205
|
*/
|
|
257
|
-
class
|
|
206
|
+
class V{constructor(e,t){this._intervalId=0,this._onCameraChange=this.handleCameraChange.bind(this),this._viewer=e;const r=this.getMapProvider(t);this._map=new O.MapView(O.MapView.PLANAR,r,r),this._viewer.addObject3D(this._map);const n=O.UnitsUtils.datumsToSpherical(t.latlong.latitude,t.latlong.longitude),o=this._viewer.models[0].getModelBoundingBox();this._map.position.set(-n.x,o.min.y,n.y),this._map.updateMatrixWorld(!0),this.requestRedraw(1e4),this._viewer.on("cameraChange",this._onCameraChange)}requestRedraw(e){0==this._intervalId&&(this._intervalId=setInterval(()=>{this._viewer.requestRedraw()},100),setTimeout(()=>{clearInterval(this._intervalId),this._intervalId=0},e))}getMapProvider(e){let t;switch(e.provider){case I.BingMap:t=new O.BingMapsProvider(e.APIKey,e.type);break;case I.HereMap:t=new O.HereMapsProvider(e.APIKey,e.appCode,e.style,e.scheme,e.imageFormat,e.size);break;case I.MapboxMap:t=new O.MapBoxProvider(e.APIKey,e.id,e.mode,e.tileFormat);break;case I.OpenStreetMap:t=new O.OpenStreetMapsProvider;break;default:throw new Error("Unsupported map provider")}return t}latLongToWorldCoordinates(e){return O.UnitsUtils.datumsToSpherical(e.latitude,e.longitude)}handleCameraChange(){this.requestRedraw(1e3)}dispose(){this._viewer.removeObject3D(this._map),this._viewer.off("cameraChange",this._onCameraChange)}}
|
|
258
207
|
/*!
|
|
259
208
|
* Copyright 2021 Cognite AS
|
|
260
|
-
|
|
209
|
+
*/class z extends i{constructor(e,t){super(),this._viewer=e,this._maps=new V(this._viewer,t),l.a.trackCreateTool("AxisViewTool")}latLongToWorldCoordinates(e){this._maps.latLongToWorldCoordinates(e)}dispose(){super.dispose(),this._maps.dispose()}}
|
|
261
210
|
/*!
|
|
262
211
|
* Copyright 2021 Cognite AS
|
|
263
|
-
*/
|
|
264
|
-
class N{constructor(e,n){this._intervalId=0,this._onCameraChange=this.handleCameraChange.bind(this),this._viewer=e;const t=this.getMapProvider(n);this._map=new R.MapView(R.MapView.PLANAR,t,t),this._viewer.addObject3D(this._map);const r=R.UnitsUtils.datumsToSpherical(n.latlong.latitude,n.latlong.longitude),o=this._viewer.models[0].getModelBoundingBox();this._map.position.set(-r.x,o.min.y,r.y),this._map.updateMatrixWorld(!0),this.requestRedraw(1e4),this._viewer.on("cameraChange",this._onCameraChange)}requestRedraw(e){0==this._intervalId&&(this._intervalId=setInterval(()=>{this._viewer.requestRedraw()},100),setTimeout(()=>{clearInterval(this._intervalId),this._intervalId=0},e))}getMapProvider(e){let n;switch(e.provider){case y.BingMap:n=new R.BingMapsProvider(e.APIKey,e.type);break;case y.HereMap:n=new R.HereMapsProvider(e.APIKey,e.appCode,e.style,e.scheme,e.imageFormat,e.size);break;case y.MapboxMap:n=new R.MapBoxProvider(e.APIKey,e.id,e.mode,e.tileFormat);break;case y.OpenStreetMap:n=new R.OpenStreetMapsProvider;break;default:throw new Error("Unsupported map provider")}return n}latLongToWorldCoordinates(e){return R.UnitsUtils.datumsToSpherical(e.latitude,e.longitude)}handleCameraChange(){this.requestRedraw(1e3)}dispose(){this._viewer.removeObject3D(this._map),this._viewer.off("cameraChange",this._onCameraChange)}}
|
|
212
|
+
*/class ${constructor(e,t){this._nodeCollectionAndAppearance=[],this._model=e,this._date=t}getKeyframeDate(){return this._date}activate(){this._nodeCollectionAndAppearance.forEach((e,t)=>{this._model.assignStyledNodeCollection(e.nodes,e.nodeAppearance)})}deactivate(){this._nodeCollectionAndAppearance.forEach((e,t)=>{this._model.unassignStyledNodeCollection(e.nodes)})}assignStyledNodeCollection(e,t){l.a.trackCadModelStyled(e.classToken,t);if(-1!==this._nodeCollectionAndAppearance.findIndex(t=>t.nodes===e))throw new Error("Node collection as already been assigned, use updateStyledNodeCollection() to update the appearance");this._nodeCollectionAndAppearance.push({nodes:e,nodeAppearance:t})}unassignStyledNodeCollection(e){const t=this._nodeCollectionAndAppearance.findIndex(t=>t.nodes===e);if(-1===t)throw new Error("Node collection has not been assigned to model");this._nodeCollectionAndAppearance.splice(t,1)}}
|
|
265
213
|
/*!
|
|
266
214
|
* Copyright 2021 Cognite AS
|
|
267
|
-
*/class
|
|
215
|
+
*/class j extends i{constructor(e){super(),this._playback=void 0,this._events={dateChanged:new o.b},this._model=e,this._keyframes=new Array}subscribe(e,t){switch(e){case"dateChanged":this._events.dateChanged.subscribe(t);break;default:Object(o.c)(e,`Unsupported event: '${e}'`)}}unsubscribe(e,t){switch(e){case"dateChanged":this._events.dateChanged.unsubscribe(t);break;default:Object(o.c)(e,`Unsupported event: '${e}'`)}}createKeyframe(e){const t=new $(this._model,e);return this._keyframes.push(t),this.sortKeyframesByDates(),t}getKeyframeByDate(e){return this._keyframes.find(t=>t.getKeyframeDate()===e)}removeKeyframe(e){const t=this._keyframes.findIndex(t=>t===e);t>-1&&this._keyframes.splice(t,1)}removeKeyframeByDate(e){const t=this._keyframes.findIndex(t=>t.getKeyframeDate()===e);t>-1&&this._keyframes.splice(t,1)}play(e,t,r){this.stop();const n={dateInMs:e.getTime()},o={dateInMs:t.getTime()},i=new b.a.Tween(n).to(o,r);let s=-1;i.onUpdate(()=>{const r=new Date(n.dateInMs),o=s;for(;s<this._keyframes.length-1&&this._keyframes[s+1].getKeyframeDate().getTime()<=r.getTime();)s++;s!==o&&(-1!==o&&this._keyframes[o].deactivate(),this._keyframes[s].activate()),this._events.dateChanged.fire({date:r,activeKeyframe:this._keyframes[s],startDate:e,endDate:t})}),this._playback=i,i.start()}stop(){void 0!==this._playback&&(this._playback.stop(),this._playback=void 0)}pause(){void 0!==this._playback&&this._playback.isPlaying()&&this._playback.pause()}resume(){void 0!==this._playback&&this._playback.isPaused()&&this._playback.resume()}getAllKeyframes(){return this._keyframes.slice()}dispose(){super.dispose(),this._events.dateChanged.unsubscribeAll()}sortKeyframesByDates(){this._keyframes.length>1&&this._keyframes.sort((e,t)=>e.getKeyframeDate().getTime()-t.getKeyframeDate().getTime())}}var L=r(2);
|
|
268
216
|
/*!
|
|
269
217
|
* Copyright 2021 Cognite AS
|
|
270
218
|
*/
|
|
271
219
|
/*!
|
|
272
220
|
* Copyright 2021 Cognite AS
|
|
273
|
-
*/
|
|
274
|
-
class A extends i{constructor(e,n={}){super(),this._boundingBoxes=new r.Group,this._viewer=e,this._viewer.addObject3D(this._boundingBoxes),this._options={},this.setOptions(n)}setOptions(e){this._options={showDetailedSectors:!0,showDiscardedSectors:!1,showSimpleSectors:!0,colorBy:"lod",leafsOnly:!1,sectorPathFilterRegex:".*",...e}}dispose(){this._viewer.removeObject3D(this._boundingBoxes)}showSectorBoundingBoxes(e){this._model=e,this.updateBoundingBoxes()}updateBoundingBoxes(){if(this._boundingBoxes.clear(),void 0===this._model)return;this._model.getModelTransformation(this._boundingBoxes.matrix);const e=[];e[O.a.Discarded]=this._options.showDiscardedSectors,e[O.a.Simple]=this._options.showSimpleSectors,e[O.a.Detailed]=this._options.showDetailedSectors;const n=[];this._model.cadNode.traverse(t=>{if(B(t)){const r=t;this.isSectorAcceptedByCurrentFilter(r)&&e[r.levelOfDetail]&&(!this._options.leafsOnly||function(e){return!e.children.some(e=>B(e))}
|
|
221
|
+
*/class G extends i{constructor(e,t={}){super(),this._boundingBoxes=new n.Group,this._viewer=e,this._viewer.addObject3D(this._boundingBoxes),this._options={},this.setOptions(t)}setOptions(e){this._options={showDetailedSectors:!0,showDiscardedSectors:!1,showSimpleSectors:!0,colorBy:"lod",leafsOnly:!1,sectorPathFilterRegex:".*",...e}}dispose(){this._viewer.removeObject3D(this._boundingBoxes)}showSectorBoundingBoxes(e){this._model=e,this.updateBoundingBoxes()}updateBoundingBoxes(){if(this._boundingBoxes.clear(),void 0===this._model)return;this._model.getModelTransformation(this._boundingBoxes.matrix);const e=[];e[L.d.Discarded]=this._options.showDiscardedSectors,e[L.d.Simple]=this._options.showSimpleSectors,e[L.d.Detailed]=this._options.showDetailedSectors;const t=[];this._model.cadNode.traverse(r=>{if(q(r)){const n=r;this.isSectorAcceptedByCurrentFilter(n)&&e[n.levelOfDetail]&&(!this._options.leafsOnly||function(e){return!e.children.some(e=>q(e))}
|
|
275
222
|
/*!
|
|
276
223
|
* Copyright 2021 Cognite AS
|
|
277
224
|
*/
|
|
278
225
|
/*!
|
|
279
226
|
* Copyright 2021 Cognite AS
|
|
280
|
-
*/(
|
|
227
|
+
*/(n))&&t.push(n)}}),t.forEach(e=>{const r=this.createBboxNodeFor(e,t);this._boundingBoxes.add(r)}),this._boundingBoxes.updateMatrixWorld(!0),this._viewer.requestRedraw()}isSectorAcceptedByCurrentFilter(e){return new RegExp(this._options.sectorPathFilterRegex).test(e.sectorPath)}createBboxNodeFor(e,t){const r=this._options;const i=function(){switch(r.colorBy){case"depth":{const t=Math.min(1,e.depth/8);return new n.Color(U.green).lerpHSL(U.red,t)}case"lod":switch(e.levelOfDetail){case L.d.Simple:return U.yellow;case L.d.Detailed:return U.green;case L.d.Discarded:return U.red;default:Object(o.c)(e.levelOfDetail)}case"loadedTimestamp":{const r=[...t].sort((e,t)=>e.updatedTimestamp-t.updatedTimestamp),o=r.findIndex(t=>t===e),i=(r.length-1-o)/Math.max(r.length-1,1);return new n.Color(U.green).lerpHSL(U.red,i)}case"random":return(new n.Color).setHSL(Math.random(),1,.5);default:Object(o.c)(r.colorBy)}}();return new n.Box3Helper(e.bounds,i)}}const U={green:new n.Color("#00ff00"),yellow:new n.Color("yellow"),red:new n.Color("red")};function q(e){return e.name.match(/^Sector \d+$/)}}])}));
|
|
281
228
|
//# sourceMappingURL=tools.map
|