@cognite/reveal 2.0.0-beta.3 → 2.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +81 -6
- package/core/cad.d.ts +5 -0
- package/core/index.d.ts +7 -0
- package/{datamodels → core/src/datamodels}/base/SupportedModelTypes.d.ts +4 -4
- package/{datamodels → core/src/datamodels}/base/index.d.ts +6 -6
- package/{datamodels → core/src/datamodels}/base/types.d.ts +14 -13
- package/{datamodels → core/src/datamodels}/cad/CadManager.d.ts +43 -50
- package/{datamodels → core/src/datamodels}/cad/CadModelFactory.d.ts +7 -11
- package/{datamodels → core/src/datamodels}/cad/CadModelSectorLoadStatistics.d.ts +38 -38
- package/core/src/datamodels/cad/createCadManager.d.ts +10 -0
- package/{datamodels → core/src/datamodels}/cad/picking.d.ts +28 -28
- package/{datamodels → core/src/datamodels}/cad/rendering/RenderAlreadyLoadedGeometryProvider.d.ts +9 -10
- package/{datamodels → core/src/datamodels}/cad/sector/CadModelClipper.d.ts +10 -10
- package/{datamodels → core/src/datamodels}/cad/styling/AssetNodeCollection.d.ts +39 -39
- package/{datamodels → core/src/datamodels}/cad/styling/InvertedNodeCollection.d.ts +24 -24
- package/{datamodels → core/src/datamodels}/cad/styling/NodeCollectionDeserializer.d.ts +26 -26
- package/{datamodels → core/src/datamodels}/cad/styling/PopulateIndexSetFromPagedResponseHelper.d.ts +24 -25
- package/{datamodels → core/src/datamodels}/cad/styling/PropertyFilterNodeCollection.d.ts +61 -61
- package/{datamodels → core/src/datamodels}/cad/styling/SinglePropertyFilterNodeCollection.d.ts +60 -63
- package/{datamodels → core/src/datamodels}/cad/styling/index.d.ts +8 -13
- package/{datamodels → core/src/datamodels}/pointcloud/PointCloudFactory.d.ts +9 -9
- package/{datamodels → core/src/datamodels}/pointcloud/PointCloudManager.d.ts +25 -22
- package/{datamodels → core/src/datamodels}/pointcloud/PointCloudMetadata.d.ts +11 -11
- package/{datamodels → core/src/datamodels}/pointcloud/PointCloudMetadataRepository.d.ts +13 -12
- package/{datamodels → core/src/datamodels}/pointcloud/PointCloudNode.d.ts +62 -61
- package/{datamodels → core/src/datamodels}/pointcloud/PotreeGroupWrapper.d.ts +39 -33
- package/{datamodels → core/src/datamodels}/pointcloud/PotreeNodeWrapper.d.ts +38 -38
- package/core/src/datamodels/pointcloud/createPointCloudManager.d.ts +8 -0
- package/{datamodels → core/src/datamodels}/pointcloud/picking.d.ts +29 -29
- package/{datamodels → core/src/datamodels}/pointcloud/types.d.ts +114 -114
- package/core/src/index.d.ts +19 -0
- package/{migration.d.ts → core/src/migration.d.ts} +8 -8
- package/{public → core/src/public}/RevealManager.d.ts +61 -62
- package/{public → core/src/public}/createRevealManager.d.ts +24 -24
- package/{public → core/src/public}/migration/Cognite3DModel.d.ts +323 -297
- package/{public → core/src/public}/migration/Cognite3DViewer.d.ts +545 -535
- package/{public → core/src/public}/migration/CogniteModelBase.d.ts +17 -17
- package/{public → core/src/public}/migration/CognitePointCloudModel.d.ts +121 -127
- package/{public → core/src/public}/migration/NodeIdAndTreeIndexMaps.d.ts +4 -4
- package/{public → core/src/public}/migration/NotSupportedInMigrationWrapperError.d.ts +11 -11
- package/{public → core/src/public}/migration/RenderController.d.ts +4 -4
- package/core/src/public/migration/RevealManagerHelper.d.ts +58 -0
- package/{public → core/src/public}/migration/types.d.ts +240 -222
- package/core/src/public/types.d.ts +45 -0
- package/{utilities → core/src/utilities}/BoundingBoxClipper.d.ts +23 -26
- package/core/src/utilities/Spinner.d.ts +28 -0
- package/{utilities → core/src/utilities}/ViewStateHelper.d.ts +33 -33
- package/{utilities → core/src/utilities}/callActionWithIndicesAsync.d.ts +4 -4
- package/core/src/utilities/index.d.ts +8 -0
- package/{utilities → core/src/utilities}/reflection.d.ts +7 -13
- package/core/src/utilities/worldToViewport.d.ts +31 -0
- package/{utilities/events/clickOrTouchEventOffset.d.ts → core/utilities.d.ts} +4 -4
- package/index.d.ts +8 -20
- package/index.js +176 -121
- package/index.map +1 -1
- package/package.json +47 -120
- package/packages/cad-geometry-loaders/index.d.ts +20 -0
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/CadLoadingHints.d.ts +11 -11
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/CadModelSectorBudget.d.ts +26 -21
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/CadModelUpdateHandler.d.ts +44 -42
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/CadNode.d.ts +54 -59
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/InstancedMeshManager.d.ts +20 -20
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/cameraconfig.d.ts +12 -12
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/CadMaterialManager.d.ts +37 -39
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/rendering/EffectRenderManager.d.ts +83 -83
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/rendering/createSimpleGeometryMesh.d.ts +6 -6
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/rendering/filterInstanceMesh.d.ts +6 -6
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/rendering/triangleMeshes.d.ts +6 -6
- package/packages/cad-geometry-loaders/src/material-manager/rendering/types.d.ts +76 -0
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/styling/NodeAppearanceTextureBuilder.d.ts +50 -51
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/styling/NodeTransformProvider.d.ts +13 -13
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/styling/NodeTransformTextureBuilder.d.ts +23 -23
- package/{datamodels/cad → packages/cad-geometry-loaders/src/material-manager}/styling/TransformOverrideBuffer.d.ts +21 -21
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/CachedRepository.d.ts +25 -26
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/ModelStateHandler.d.ts +12 -12
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/Repository.d.ts +9 -9
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/RootSectorNode.d.ts +12 -12
- package/packages/cad-geometry-loaders/src/sector/SectorLoader.d.ts +28 -0
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/SectorNode.d.ts +21 -20
- package/packages/cad-geometry-loaders/src/sector/SimpleAndDetailedToSector3D.d.ts +20 -0
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/ByVisibilityGpuSectorCuller.d.ts +53 -53
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/OccludingGeometryProvider.d.ts +6 -6
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/OrderSectorsByVisibilityCoverage.d.ts +107 -108
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/SectorCuller.d.ts +33 -33
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/TakenSectorTree.d.ts +24 -24
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/culling/types.d.ts +71 -68
- package/packages/cad-geometry-loaders/src/sector/rxSectorUtilities.d.ts +12 -0
- package/{datamodels/cad → packages/cad-geometry-loaders/src}/sector/sectorUtilities.d.ts +19 -20
- package/packages/cad-geometry-loaders/src/utilities/PromiseUtils.d.ts +18 -0
- package/{utilities → packages/cad-geometry-loaders/src/utilities}/arrays.d.ts +5 -5
- package/{datamodels/cad/rendering → packages/cad-geometry-loaders/src/utilities}/float32BufferToMatrix.d.ts +12 -12
- package/{datamodels/cad/sector → packages/cad-geometry-loaders/src/utilities}/groupMeshesByNumber.d.ts +8 -8
- package/{utilities → packages/cad-geometry-loaders/src/utilities}/rxOperations.d.ts +5 -7
- package/{utilities → packages/cad-geometry-loaders/src/utilities}/types.d.ts +48 -67
- package/packages/cad-parsers/index.d.ts +20 -0
- package/{datamodels/cad/sector → packages/cad-parsers/src/cad}/CadSectorParser.d.ts +15 -16
- package/{datamodels/cad/sector → packages/cad-parsers/src/cad}/LevelOfDetail.d.ts +8 -8
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/RenderMode.d.ts +15 -15
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/computeBoundingBoxFromAttributes.d.ts +9 -9
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/filterPrimitives.d.ts +9 -9
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/matCapTextureData.d.ts +5 -5
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/materials.d.ts +23 -23
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/primitiveGeometries.d.ts +31 -31
- package/packages/cad-parsers/src/cad/primitives.d.ts +12 -0
- package/{datamodels/cad/rendering → packages/cad-parsers/src/cad}/shaders.d.ts +101 -101
- package/packages/cad-parsers/src/cad/types.d.ts +56 -0
- package/{datamodels/cad/parsers → packages/cad-parsers/src/metadata}/CadMetadataParser.d.ts +7 -7
- package/{datamodels/cad → packages/cad-parsers/src/metadata}/CadModelMetadata.d.ts +44 -36
- package/packages/cad-parsers/src/metadata/CadModelMetadataRepository.d.ts +16 -0
- package/{datamodels/base → packages/cad-parsers/src/metadata}/MetadataRepository.d.ts +6 -6
- package/{datamodels/cad → packages/cad-parsers/src/metadata}/parsers/CadMetadataParserV8.d.ts +51 -50
- package/packages/cad-parsers/src/metadata/types.d.ts +41 -0
- package/{datamodels/cad/sector → packages/cad-parsers/src/utilities}/SectorScene.d.ts +21 -21
- package/packages/cad-parsers/src/utilities/SectorSceneFactory.d.ts +12 -0
- package/packages/cad-parsers/src/utilities/types.d.ts +48 -0
- package/packages/cad-styling/index.d.ts +9 -0
- package/{datamodels/cad/styling → packages/cad-styling/src}/CombineNodeCollectionBase.d.ts +31 -31
- package/{datamodels/cad/styling → packages/cad-styling/src}/IntersectionNodeCollection.d.ts +15 -15
- package/{datamodels/cad → packages/cad-styling/src}/NodeAppearance.d.ts +62 -66
- package/{datamodels/cad/styling → packages/cad-styling/src}/NodeAppearanceProvider.d.ts +29 -29
- package/{datamodels/cad/styling → packages/cad-styling/src}/NodeCollectionBase.d.ts +47 -47
- package/{datamodels/cad/styling → packages/cad-styling/src}/TreeIndexNodeCollection.d.ts +23 -24
- package/{datamodels/cad/styling → packages/cad-styling/src}/UnionNodeCollection.d.ts +15 -15
- package/packages/camera-manager/index.d.ts +4 -0
- package/{combo-camera-controls → packages/camera-manager/src}/ComboControls.d.ts +83 -83
- package/{combo-camera-controls → packages/camera-manager/src}/Keyboard.d.ts +14 -14
- package/packages/logger/index.d.ts +5 -0
- package/packages/logger/src/Log.d.ts +5 -0
- package/packages/modeldata-api/index.d.ts +8 -0
- package/packages/modeldata-api/src/CdfModelDataClient.d.ts +17 -0
- package/{utilities/networking/CdfModelDataClient.d.ts → packages/modeldata-api/src/CdfModelMetadataProvider.d.ts} +36 -45
- package/packages/modeldata-api/src/LocalModelDataClient.d.ts +10 -0
- package/packages/modeldata-api/src/LocalModelMetadataProvider.d.ts +19 -0
- package/{utilities/networking → packages/modeldata-api/src}/Model3DOutputList.d.ts +17 -18
- package/{utilities/networking → packages/modeldata-api/src}/applyDefaultModelTransformation.d.ts +6 -6
- package/packages/modeldata-api/src/types.d.ts +55 -0
- package/packages/modeldata-api/src/utilities.d.ts +7 -0
- package/packages/nodes-api/index.d.ts +7 -0
- package/packages/nodes-api/src/NodesApiClient.d.ts +43 -0
- package/packages/nodes-api/src/NodesCdfClient.d.ts +19 -0
- package/packages/nodes-api/src/NodesLocalClient.d.ts +25 -0
- package/packages/nodes-api/src/types.d.ts +14 -0
- package/packages/tools/index.d.ts +12 -0
- package/{tools → packages/tools/src}/AxisView/AxisViewTool.d.ts +32 -32
- package/{tools → packages/tools/src}/AxisView/types.d.ts +98 -98
- package/{tools → packages/tools/src}/Cognite3DViewerToolBase.d.ts +25 -25
- package/{tools → packages/tools/src}/DebugCameraTool.d.ts +20 -20
- package/{tools → packages/tools/src}/DebugLoadedSectorsTool.d.ts +23 -22
- package/{tools → packages/tools/src}/ExplodedViewTool.d.ts +12 -12
- package/packages/tools/src/Geomap/Geomap.d.ts +14 -0
- package/packages/tools/src/Geomap/GeomapTool.d.ts +21 -0
- package/packages/tools/src/Geomap/MapConfig.d.ts +195 -0
- package/{tools → packages/tools/src}/HtmlOverlayTool.d.ts +88 -88
- package/packages/tools/src/types.d.ts +4 -0
- package/packages/utilities/index.d.ts +24 -0
- package/packages/utilities/src/CameraConfiguration.d.ts +10 -0
- package/{utilities → packages/utilities/src}/IndexSet.d.ts +1 -1
- package/{utilities → packages/utilities/src}/NumericRange.d.ts +21 -21
- package/{utilities → packages/utilities/src}/WebGLRendererStateHelper.d.ts +15 -15
- package/{utilities → packages/utilities/src}/assertNever.d.ts +7 -7
- package/{utilities → packages/utilities/src}/cache/MemoryRequestCache.d.ts +22 -22
- package/{utilities → packages/utilities/src/cache}/MostFrequentlyUsedCache.d.ts +19 -19
- package/{utilities → packages/utilities/src}/cache/RequestCache.d.ts +13 -13
- package/{utilities → packages/utilities/src}/datastructures/DynamicDefragmentedBuffer.d.ts +22 -22
- package/{utilities → packages/utilities/src}/determinePowerOfTwoDimensions.d.ts +11 -11
- package/{utilities → packages/utilities/src}/disposeAttributeArrayOnUpload.d.ts +17 -17
- package/{utilities → packages/utilities/src}/events/EventTrigger.d.ts +13 -13
- package/packages/utilities/src/events/clickOrTouchEventOffset.d.ts +13 -0
- package/{utilities → packages/utilities/src}/events/index.d.ts +5 -5
- package/{utilities → packages/utilities/src}/indexset/IndexSet.d.ts +26 -26
- package/{utilities → packages/utilities/src}/indexset/IntermediateIndexNode.d.ts +26 -26
- package/{utilities → packages/utilities/src}/indexset/LeafIndexNode.d.ts +16 -16
- package/{utilities → packages/utilities/src}/isMobileOrTablet.d.ts +4 -4
- package/{utilities → packages/utilities/src}/metrics.d.ts +15 -15
- package/{utilities → packages/utilities/src}/networking/isTheSameDomain.d.ts +11 -11
- package/{utilities → packages/utilities/src}/objectTraversal.d.ts +8 -8
- package/{utilities → packages/utilities/src}/packFloat.d.ts +6 -6
- package/{revealEnv.d.ts → packages/utilities/src/revealEnv.d.ts} +10 -10
- package/{utilities → packages/utilities/src}/three/AutoDisposeGroup.d.ts +17 -17
- package/packages/utilities/src/three/BoundingBoxLOD.d.ts +27 -0
- package/{utilities → packages/utilities/src}/three/getBox3CornerPoints.d.ts +5 -5
- package/{utilities → packages/utilities/src}/transformCameraConfiguration.d.ts +6 -6
- package/packages/utilities/src/types.d.ts +10 -0
- package/{utilities → packages/utilities/src}/workers/WorkerPool.d.ts +14 -14
- package/tools.d.ts +7 -13
- package/tools.js +252 -12
- package/tools.map +1 -1
- package/combo-camera-controls/index.d.ts +0 -4
- package/datamodels/cad/CadModelMetadataRepository.d.ts +0 -14
- package/datamodels/cad/createCadManager.d.ts +0 -13
- package/datamodels/cad/index.d.ts +0 -8
- package/datamodels/cad/rendering/primitives.d.ts +0 -7
- package/datamodels/cad/rendering/types.d.ts +0 -25
- package/datamodels/cad/sector/SimpleAndDetailedToSector3D.d.ts +0 -20
- package/datamodels/cad/sector/rxSectorUtilities.d.ts +0 -27
- package/datamodels/cad/sector/types.d.ts +0 -113
- package/datamodels/pointcloud/createPointCloudManager.d.ts +0 -10
- package/datamodels/pointcloud/index.d.ts +0 -5
- package/internals.d.ts +0 -27
- package/public/types.d.ts +0 -121
- package/utilities/Spinner.d.ts +0 -13
- package/utilities/index.d.ts +0 -13
- package/utilities/networking/CogniteClientNodeIdAndTreeIndexMapper.d.ts +0 -14
- package/utilities/networking/HttpHeadersProvider.d.ts +0 -7
- package/utilities/networking/LocalModelDataClient.d.ts +0 -23
- package/utilities/networking/types.d.ts +0 -37
- package/utilities/networking/utilities.d.ts +0 -13
- package/utilities/three/dumpRendererToImage.d.ts +0 -5
- package/utilities/three/index.d.ts +0 -6
- package/utilities/worldToViewport.d.ts +0 -11
package/index.js
CHANGED
|
@@ -1,350 +1,405 @@
|
|
|
1
|
-
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var n=t();for(var r in n)("object"==typeof exports?exports:e)[r]=n[r]}}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="https://apps-cdn.cogniteapp.com/@cognite/reveal-parser-worker/1.
|
|
1
|
+
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var n=t();for(var r in n)("object"==typeof exports?exports:e)[r]=n[r]}}("undefined"!=typeof self?self:this,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var o=t[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)n.d(r,o,function(t){return e[t]}.bind(null,o));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="https://apps-cdn.cogniteapp.com/@cognite/reveal-parser-worker/1.2.0/",n(n.s=64)}([function(e,t){e.exports=require("three")},function(e,t,n){"use strict";
|
|
2
2
|
/*!
|
|
3
3
|
* Copyright 2021 Cognite AS
|
|
4
4
|
*/
|
|
5
|
-
|
|
5
|
+
function r(e,t){if(t(e))for(let n=0;n<e.children.length;n++)r(e.children[n],t)}function o(e,t){if(void 0===e)return;const{position:n,target:r}=e;return n.applyMatrix4(t),r.applyMatrix4(t),{position:n,target:r}}
|
|
6
6
|
/*!
|
|
7
7
|
* Copyright 2021 Cognite AS
|
|
8
|
-
*/
|
|
9
|
-
function r(e,t){const n=t.clone().project(e);return n.y=-n.y,n.addScalar(1).multiplyScalar(.5),n}function o(e,t,n){const o=e.getPixelRatio(),i=e.domElement,a=r(t,n);return{x:Math.round(a.x*(i.width/o)),y:Math.round(a.y*(i.height/o))}}n.d(t,"a",(function(){return r})),n.d(t,"b",(function(){return o}))},function(e,t){e.exports=require("@cognite/sdk-core")},function(e,t){e.exports=require("mixpanel-browser")},function(e,t){e.exports=require("lodash/range")},function(e,t,n){"use strict";
|
|
8
|
+
*/function i(e,t){const n=t.getBoundingClientRect();if(e instanceof MouseEvent)return{offsetX:e.clientX-n.left,offsetY:e.clientY-n.top};if(e.changedTouches.length>0){const t=e.changedTouches[0];return{offsetX:t.clientX-n.left,offsetY:t.clientY-n.top}}return{offsetX:-1,offsetY:-1}}
|
|
10
9
|
/*!
|
|
11
10
|
* Copyright 2021 Cognite AS
|
|
12
|
-
*/
|
|
13
|
-
function r(e,t){throw new Error(t||"Unexpected object: "+e)}n.d(t,"a",(function(){return r}))},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = 
max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_2315452051 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2315452051; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, 
vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( 
clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// TODO consider fixing Three.js so it is possible to pass a mat4:\n// see https://stackoverflow.com/questions/38853096/webgl-how-to-bind-values-to-a-mat4-attribute\n\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n column_0,\n column_1,\n column_2,\n column_3\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, 
byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], 
matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * instanceMatrix * vec4(normalize(normal), 0.0)).xyz;\n //v_normal = normal;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n vViewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}"},function(e,t,n){"use strict";n.d(t,"a",(function(){return r}));
|
|
11
|
+
*/n.d(t,"y",(function(){return r})),n.d(t,"x",(function(){return o})),n.d(t,"d",(function(){return a})),n.d(t,"l",(function(){return i})),n.d(t,"k",(function(){return s})),n.d(t,"h",(function(){return d})),n.d(t,"m",(function(){return l})),n.d(t,"e",(function(){return v})),n.d(t,"r",(function(){return x})),n.d(t,"c",(function(){return I})),n.d(t,"a",(function(){return S})),n.d(t,"b",(function(){return R})),n.d(t,"o",(function(){return N})),n.d(t,"q",(function(){return D})),n.d(t,"i",(function(){return z})),n.d(t,"j",(function(){return F})),n.d(t,"f",(function(){return k})),n.d(t,"g",(function(){return L})),n.d(t,"u",(function(){return Y})),n.d(t,"w",(function(){return X})),n.d(t,"t",(function(){return Z})),n.d(t,"v",(function(){return K})),n.d(t,"p",(function(){return H})),n.d(t,"n",(function(){return Q})),n.d(t,"s",(function(){return O}));class a{constructor(){this._listeners=[]}subscribe(e){this._listeners.push(e)}unsubscribe(e){const t=this._listeners.indexOf(e);-1!==t&&this._listeners.splice(t,1)}unsubscribeAll(){this._listeners.splice(0)}fire(...e){this._listeners.forEach(t=>t(...e))}}
|
|
14
12
|
/*!
|
|
15
13
|
* Copyright 2021 Cognite AS
|
|
16
14
|
*/
|
|
17
|
-
class r{constructor(){this._listeners=[]}subscribe(e){this._listeners.push(e)}unsubscribe(e){const t=this._listeners.indexOf(e);-1!==t&&this._listeners.splice(t,1)}unsubscribeAll(){this._listeners.splice(0)}fire(...e){this._listeners.forEach(t=>t(...e))}}},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t){e.exports=require("lodash/debounce")},function(e,t){e.exports=require("lodash/omit")},function(e,t){e.exports=require("lodash/throttle")},function(e,t){e.exports=require("skmeans")},function(e,t){e.exports=require("comlink")},,,,function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int 
RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n 
vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_2315452051 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2315452051; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 
0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i 
];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n updateFragmentColor(renderMode, color, v_treeIndex, v_normal, gl_FragCoord.z, matCapTexture, GeometryType.Quad);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = 
treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec3 color;\nattribute float treeIndex;\nattribute vec4 matrix0;\nattribute vec4 matrix1;\nattribute vec4 matrix2;\nattribute vec4 matrix3;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying 
vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n \n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = treeIndex;\n v_color = color;\n v_normal = normalize(normalMatrix * (inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(normal), 0.0)).xyz);\n mat4 instanceMatrix = mat4(matrix0, matrix1, matrix2, matrix3);\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = 
mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - 
grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nconst int MAX_ITER_529295689 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_529295689; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / 
dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main()\n{\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = 
derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.TriangleMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n 
);\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec3 color;\nattribute float treeIndex; \n\nvarying vec3 v_color;\nvarying float v_treeIndex;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_color = color;\n v_treeIndex = treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(position, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), 
dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 
colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_1535977339 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_1535977339; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, 
vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( 
clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.InstancedMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// TODO consider fixing Three.js so it is possible to pass a mat4:\n// see https://stackoverflow.com/questions/38853096/webgl-how-to-bind-values-to-a-mat4-attribute\n\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n column_0,\n column_1,\n column_2,\n column_3\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, 
byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], 
matrixElements[11], 1\n );\n}\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_color = a_color;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * modelMatrix * vec4(transformed, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n v_treeIndex = a_treeIndex;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = 
mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - 
grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_2315452051 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2315452051; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n 
!renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, 
treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n vec3 normal = normalize( v_normal );\n if (dist > 0.25)\n discard;\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// TODO consider fixing Three.js so it is possible to pass a mat4:\n// see https://stackoverflow.com/questions/38853096/webgl-how-to-bind-values-to-a-mat4-attribute\n\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n column_0,\n column_1,\n column_2,\n column_3\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = 
vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_normal;\n\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\nvarying float v_treeIndex;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D 
transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_xy = vec2(position.x, position.y);\n v_treeIndex = a_treeIndex;\n\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nconst int MAX_ITER_870892966 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = 
int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_870892966; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), 
vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = 
max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if 
(!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec3 normal = normalize( v_normal );\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = v_V;\n float height = length(v_centerA.xyz - v_centerB.xyz);\n float R2 = v_centerA.w;\n float dR = R2 - R1;\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n #if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n #else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n #endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR / height;\n float dRdRL2Inv = dRLInv * dRLInv;\n a -= D.z * D.z * dRdRL2Inv;\n b -= dRLInv * (E.z * D.z * dRLInv + R1 * D.z);\n c -= dRLInv * (E.z * E.z * dRLInv + 2.0 * R1 * E.z);\n }\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0) {\n discard;\n }\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n bool isInner = false;\n\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) 
{\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n #if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n if (R1 != R2)\n {\n // Find normal vector\n vec3 n = -normalize(W.xyz);\n vec3 P1 = v_centerB.xyz;\n vec3 P2 = v_centerA.xyz;\n vec3 A = cross(P1 - p, P2 - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = P1 + R1 * t;\n vec3 o2 = P2 + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n }\n else\n {\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n }\n #endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = 
floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], 
matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_color;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float maxRadius = max(a_radiusA, a_radiusB);\n float leftUpScale = maxRadius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // 
direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (maxRadius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0.0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n float radiusB = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusB, 0.0)).xyz);\n float radiusA = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusA, 0.0)).xyz);\n\n // We pack radii as w-components of v_centerB\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radiusB;\n\n v_V.xyz = -cross(v_U.xyz, v_W.xyz);\n v_V.w = surfacePoint.y;\n\n v_centerA.xyz = mul3(modelViewMatrix, centerA);\n v_centerA.w = radiusA;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 
cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, 
projectionMatrix);\n}\n\n#endif\n\nconst int MAX_ITER_1062606552 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_1062606552; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && 
isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 
* amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\n\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\nvarying float height;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n mat3 basis = mat3(U.xyz, V.xyz, axis.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, axis.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerA.xyz;\n vec3 E = diff * basis;\n float L = height;\n vec3 D = rayDirection * basis;\n\n float R1 = v_centerA.w;\n float R2 = v_centerB.w;\n float dR = R2 - R1;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy)-R1*R1;\n float L2Inv = 1.0/(L*L);\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR/L;\n float dRdRL2Inv = dRLInv*dRLInv;\n a -= D.z*D.z*dRdRL2Inv;\n b -= dRLInv*(E.z*D.z*dRLInv + R1*D.z);\n c -= dRLInv*(E.z*E.z*dRLInv + 2.0*R1*E.z);\n }\n\n // Additional terms when one of the center points is displaced orthogonal to normal vector\n vec2 displacement = ((v_centerB.xyz-v_centerA.xyz)*basis).xy; // In the basis where displacement is in XY only\n float displacementLengthSquared = dot(displacement, displacement);\n a += D.z*(D.z*displacementLengthSquared - 
2.0*L*dot(D.xy, displacement))*L2Inv;\n b += (D.z*E.z*displacementLengthSquared - L*(D.x*E.z*displacement.x + D.y*E.z*displacement.y + D.z*E.x*displacement.x + D.z*E.y*displacement.y))*L2Inv;\n c += E.z*(E.z*displacementLengthSquared - 2.*L*dot(E.xy, displacement))*L2Inv;\n\n // Calculate a dicriminant of the above quadratic equation (factor 2 removed from all b-terms above)\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long eccentric cone\n if (d < 0.0) {\n discard;\n }\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n // Check the smallest root, it is closest camera. Only test if the z-component is outside the truncated eccentric cone\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n bool isInner = false;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Either intersection point is behind starting point (happens inside the cone),\n // or the intersection point is outside the end caps. 
This is not a valid solution.\n isInner = true;\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector\n vec3 n = normalize(-axis.xyz);\n vec3 v_centerA = v_centerA.xyz;\n vec3 v_centerB = v_centerB.xyz;\n vec3 A = cross(v_centerA - p, v_centerB - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = v_centerA + R1 * t;\n vec3 o2 = v_centerB + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 
transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute 
float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_normal;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\nvarying float height;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 normalWithOffset = normalize((modelTransformOffset * vec4(a_normal, 0)).xyz);\n\n float uniformScaleFactor = length(mul3(modelMatrix, normalize(vec3(1.0))));\n\n height = dot(centerA - centerB, normalWithOffset) * uniformScaleFactor;\n\n vec3 lDir;\n vec3 center = 0.5 * (centerA + centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n // Find the coordinates of centerA and centerB projected down to the end cap plane\n vec3 maxCenterProjected = centerA - dot(centerA, normalWithOffset) * normalWithOffset;\n vec3 minCenterProjected = centerB - dot(centerB, normalWithOffset) * normalWithOffset;\n float distanceBetweenProjectedCenters = length(maxCenterProjected - 
minCenterProjected);\n\n lDir = normalWithOffset;\n float dirSign = 1.0;\n if (dot(objectToCameraModelSpace, lDir) < 0.0) { // direction vector looks away, flip it\n dirSign = -1.0;\n lDir *= -1.;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n // compute basis for cone\n axis.xyz = -normalWithOffset;\n U.xyz = cross(objectToCameraModelSpace, axis.xyz);\n V.xyz = cross(U.xyz, axis.xyz);\n // Transform to camera space\n axis.xyz = normalize(normalMatrix * axis.xyz);\n U.xyz = normalize(normalMatrix * U.xyz);\n V.xyz = normalize(normalMatrix * V.xyz);\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radiusA * (position.x + 1.0) * 0.0025 / height);\n#endif\n\n v_centerA.xyz = mul3(viewMatrix, mul3(modelMatrix, centerA));\n v_centerB.xyz = mul3(viewMatrix, mul3(modelMatrix, centerB));\n\n float radiusA = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusA, 0.0)).xyz);\n float radiusB = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusB, 0.0)).xyz);\n\n // Pack radii as w components of v_centerA and v_centerB\n v_centerA.w = radiusA;\n v_centerB.w = radiusB;\n\n float radiusIncludedDisplacement = 0.5*(2.0*max(a_radiusA, a_radiusB) + distanceBetweenProjectedCenters);\n vec3 surfacePoint = center + mat3(0.5 * height * lDir * (1.0 / uniformScaleFactor), radiusIncludedDisplacement*left, radiusIncludedDisplacement*up) * newPosition;\n vec3 transformed = surfacePoint;\n\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n axis.w = surfacePoint.z;\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = 
projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nconst int MAX_ITER_529295689 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_529295689; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += 
(mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = 
abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if 
(renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, 
colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(sphereNormal.xyz);\n\n float vRadius = center.w;\n float ratio = vRadius / hRadius;\n mat3 basis = mat3(U.xyz, V.xyz, sphereNormal.xyz);\n mat3 scaledBasis = mat3(ratio * U.xyz, ratio * V.xyz, sphereNormal.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, sphereNormal.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - center.xyz;\n vec3 E = diff * scaledBasis;\n vec3 D = rayDirection * scaledBasis;\n\n float a = dot(D, D);\n float b = dot(E, D);\n float c = dot(E, E) - vRadius*vRadius;\n\n // discriminant of sphere equation (factor 2 removed from b above)\n float d = b*b - a*c;\n if(d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector in local space\n normal = vec3(p - center.xyz) * basis;\n normal.z = normal.z * (hRadius / vRadius) * (hRadius / vRadius);\n // Transform into camera space\n normal = normalize(basis * normal);\n if (dot(normal, rayDirection) > 0.) 
{\n normal = -normal;\n }\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 
determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform 
mat4 inverseModelMatrix;\nuniform mat4 inverseNormalMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_center;\nattribute vec3 a_normal;\nattribute float a_horizontalRadius;\nattribute float a_verticalRadius;\nattribute float a_height;\n\nvarying float v_treeIndex;\n// We pack vRadius as w-component of center\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\n// U, V, axis represent the 3x3 sphere basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerWithOffset = mul3(modelTransformOffset, a_center).xyz;\n\n vec3 normalWithOffset = (modelTransformOffset * vec4(a_normal, 0)).xyz;\n\n vec3 lDir;\n float distanceToCenterOfSegment = a_verticalRadius - a_height * 0.5;\n vec3 centerOfSegment = centerWithOffset + normalWithOffset * distanceToCenterOfSegment;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 objectToCameraModelSpace = inverseNormalMatrix * vec3(0.0, 0.0, 1.0);\n#else\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - centerOfSegment;\n#endif\n\n vec3 newPosition = position;\n\n float bb = dot(objectToCameraModelSpace, normalWithOffset);\n if (bb < 0.0) { // direction vector looks away, flip it\n lDir = -normalWithOffset;\n } else { // direction vector 
already looks in my direction\n lDir = normalWithOffset;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_verticalRadius * (position.x + 1.0) * 0.0025 / a_height);\n#endif\n\n // Negative angle means height larger than radius,\n // so we should have full size so we can render the largest part of the ellipsoid segment\n float ratio = max(0.0, 1.0 - a_height / a_verticalRadius);\n // maxRadiusOfSegment is the radius of the circle (projected ellipsoid) when ellipsoid segment is seen from above\n float maxRadiusOfSegment = a_horizontalRadius * sqrt(1.0 - ratio * ratio);\n\n vec3 displacement = vec3(newPosition.x*a_height*0.5, maxRadiusOfSegment*newPosition.y, maxRadiusOfSegment*newPosition.z);\n vec3 surfacePoint = centerOfSegment + mat3(lDir, left, up) * displacement;\n vec3 transformed = surfacePoint;\n\n v_treeIndex = a_treeIndex;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n center.xyz = mul3(modelViewMatrix, centerWithOffset);\n center.w = a_verticalRadius; // Pack radius into w-component\n hRadius = a_horizontalRadius;\n height = a_height;\n v_color = a_color;\n\n // compute basis\n sphereNormal.xyz = normalMatrix * normalWithOffset;\n U.xyz = normalMatrix * up;\n V.xyz = normalMatrix * left;\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n sphereNormal.w = surfacePoint.z;\n\n // TODO should perhaps be a different normal?\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n'},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float 
treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat 
updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nconst int MAX_ITER_2315452051 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2315452051; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == 
RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n 
vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\n// TODO general cylinder and cone are very similar and used\n// the same shader in the old code. Consider de-duplicating\n// parts of this code\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform float dataTextureWidth;\nuniform float dataTextureHeight;\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_surfacePointY;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = vec4(normalize(cross(W.xyz, U.xyz)), v_surfacePointY);\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make 
sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n vec3 planeACenter = vec3(0.0, 0.0, v_planeA.w);\n vec3 planeANormal = v_planeA.xyz;\n vec3 planeBCenter = vec3(0.0, 0.0, v_planeB.w);\n vec3 planeBNormal = v_planeB.xyz;\n bool isInner = false;\n\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle || isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n 
float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / 
overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radius;\nattribute vec3 a_color;\n// slicing plane attributes\nattribute vec4 a_planeA;\nattribute vec4 a_planeB;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_surfacePointY;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * 
modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float leftUpScale = a_radius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n // We pack radii as w-components of v_centerB\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n float radius = length((modelToTransformOffset * vec4(a_localXAxis * a_radius, 0.0)).xyz);\n\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radius;\n\n vec4 planeA = a_planeA;\n planeA.w = length((modelToTransformOffset * vec4(planeA.xyz * planeA.w, 0.0)).xyz);\n\n vec4 planeB = a_planeB;\n planeB.w = 
length((modelToTransformOffset * vec4(planeB.xyz * planeB.w, 0.0)).xyz);\n\n v_planeA = planeA;\n v_planeB = planeB;\n v_surfacePointY = surfacePoint.y;\n v_centerB.w = radius;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nconst int MAX_ITER_529295689 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = 
float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_529295689; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), 
step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode 
== RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, 
renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n float theta = atan(v_xy.y, v_xy.x);\n vec3 normal = normalize( v_normal );\n if (theta < v_angle) {\n theta += 2.0 * PI;\n }\n if (dist > 0.25 || dist < 0.25 * v_oneMinusThicknessSqr || theta >= v_angle + v_arcAngle) {\n discard;\n }\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// TODO consider fixing Three.js so it is possible to pass a mat4:\n// see https://stackoverflow.com/questions/38853096/webgl-how-to-bind-values-to-a-mat4-attribute\n\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n column_0,\n column_1,\n column_2,\n column_3\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n 
treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 
a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_angle;\nattribute float a_arcAngle;\nattribute float a_thickness;\nattribute vec3 a_normal;\n\nvarying float v_treeIndex;\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n v_treeIndex = a_treeIndex;\n v_oneMinusThicknessSqr = (1.0 - a_thickness) * (1.0 - a_thickness);\n v_xy = vec2(position.x, position.y);\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 
hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == 
RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_2281831123 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2281831123; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n 
!renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, 
v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// TODO consider fixing Three.js so it is possible to pass a mat4:\n// see https://stackoverflow.com/questions/38853096/webgl-how-to-bind-values-to-a-mat4-attribute\n\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n column_0,\n column_1,\n column_2,\n column_3\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / 
dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_arcAngle;\nattribute float a_radius;\nattribute float a_tubeRadius;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; 
\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n // normalized theta and phi are packed into positions\n float theta = position.x * a_arcAngle;\n float phi = position.y;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n vec3 pos3 = vec3(0);\n\n pos3.x = (a_radius + a_tubeRadius*cos(phi)) * cosTheta;\n pos3.y = (a_radius + a_tubeRadius*cos(phi)) * sinTheta;\n pos3.z = a_tubeRadius*sin(phi);\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n vec3 transformed = (instanceMatrix * vec4(pos3, 1.0)).xyz;\n\n // Calculate normal vectors if we're not picking\n vec3 center = (instanceMatrix * vec4(a_radius * cosTheta, a_radius * sinTheta, 0.0, 1.0)).xyz;\n vec3 objectNormal = normalize(transformed.xyz - center);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n\n vViewPosition = modelViewPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return 
c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 
hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nconst int MAX_ITER_2315452051 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_2315452051; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n 
!renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, 
v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 
0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_vertex1;\nattribute vec3 a_vertex2;\nattribute vec3 a_vertex3;\nattribute vec3 a_vertex4;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n vec3 transformed;\n // reduce the avarage branchings\n if (position.x < 1.5) {\n transformed = position.x == 0.0 ? a_vertex1 : a_vertex2;\n } else {\n transformed = position.x == 2.0 ? 
a_vertex3 : a_vertex4;\n }\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 objectNormal = cross(a_vertex1 - a_vertex2, a_vertex1 - a_vertex3);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nconst int MAX_ITER_1604150559 = 24;\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float lsbif = float(fromLeastSignificantBitIndex);\n \n int a = int(toMostSignificantBitIndex - fromLeastSignificantBitIndex);\n\n float denominator = pow(2.0, lsbif);\n \n float outNumber = 0.0;\n for(int i = 0; i < MAX_ITER_1604150559; i++)\n {\n if(i >= a) break;\n\n float backBits = pow(2.0, lsbif + float(i));\n outNumber += (mod(inNumber, backBits * 2.0) - mod(inNumber, backBits)) / denominator;\n }\n\n return outNumber;\n}\n\nmat3 G[9];\n// hard coded matrix values!!!! 
as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45\nconst mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );\nconst mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );\nconst mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );\nconst mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );\nconst mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );\nconst mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );\nconst mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );\nconst mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );\nconst mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408 );\n\nfloat edgeDetectionFilter(sampler2D baseTexture, vec2 uv, vec2 resolution) {\n\n vec2 texel = vec2(1.0 / resolution.x, 1.0 / resolution.y);\n\n\tG[0] = g0,\n\tG[1] = g1,\n\tG[2] = g2,\n\tG[3] = g3,\n\tG[4] = g4,\n\tG[5] = g5,\n\tG[6] = g6,\n\tG[7] = g7,\n\tG[8] = g8;\n\n\tmat3 I;\n\tfloat cnv[9];\n\tvec3 neighbour;\n\n\t/* fetch the 3x3 neighbourhood and use the RGB vector\'s length as intensity value */\n\tfor (int i=0; i<3; i++) {\n\t\tfor (int j=0; j<3; j++) {\n\t\t\tneighbour = texture2D(baseTexture, uv + texel * vec2(float(i)-1.0,float(j)-1.0) ).rgb;\n\t\t\tI[i][j] = length(neighbour);\n\t\t}\n\t}\n\n\t/* calculate the convolution 
values for all the masks */\n\tfor (int i=0; i<9; i++) {\n\t\tfloat dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);\n\t\tcnv[i] = dp3 * dp3;\n\t}\n\n\tfloat M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);\n\tfloat S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);\n\n float edgeStrength = sqrt(M/S);\n\n return edgeStrength;\n}\n\n#include <packing>\n\nvarying vec2 vUv;\n\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nuniform sampler2D tFront;\nuniform sampler2D tFrontDepth;\n\nuniform sampler2D tBack;\nuniform sampler2D tBackDepth;\n\nuniform sampler2D tCustom;\nuniform sampler2D tCustomDepth;\n\nuniform sampler2D tGhost;\nuniform sampler2D tGhostDepth;\n\nuniform sampler2D tOutlineColors;\n\nuniform float cameraNear;\nuniform float cameraFar;\n\nuniform vec2 resolution;\n\nuniform float edgeStrengthMultiplier;\nuniform float edgeGrayScaleIntensity;\n\nconst float infinity = 1e20;\n\nfloat computeFloatEncodedOutlineIndex(float bitEncodedFloat){\n return floatBitsSubset(floor((bitEncodedFloat * 255.0) + 0.5), 3, 6);\n}\n\nvec4 computeNeighborOutlineIndices(sampler2D colorTexture){\n float outlineIndex0 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv0).a);\n float outlineIndex1 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv1).a);\n float outlineIndex2 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv2).a);\n float outlineIndex3 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv3).a);\n\n return vec4(outlineIndex0, outlineIndex1, outlineIndex2, outlineIndex3);\n}\n\nfloat toViewZ(float depth, float near, float far){\n float normalizedDepth = depth * 2.0 - 1.0;\n return 2.0 * near * far / (far + near - normalizedDepth * (far - near)); \n}\n\nvec4 computeNeighborAlphas(sampler2D colorTexture){\n float alpha0 = texture2D(colorTexture, vUv0).a;\n float alpha1 = texture2D(colorTexture, vUv1).a;\n float alpha2 = texture2D(colorTexture, vUv2).a;\n float alpha3 = 
texture2D(colorTexture, vUv3).a;\n\n return vec4(alpha0, alpha1, alpha2, alpha3);\n}\n\nvoid main() {\n vec4 frontAlbedo = texture2D(tFront, vUv);\n vec4 backAlbedo = texture2D(tBack, vUv);\n vec4 customAlbedo = texture2D(tCustom, vUv);\n vec4 ghostAlbedo = texture2D(tGhost, vUv);\n\n float frontDepth = texture2D(tFrontDepth, vUv).r;\n float backDepth = texture2D(tBackDepth, vUv).r; \n float customDepth = texture2D(tCustomDepth, vUv).r;\n float ghostDepth = texture2D(tGhostDepth, vUv).r;\n \n // Decompose and clamp "ghost" color\n vec4 clampedGhostAlbedo = vec4(max(ghostAlbedo.rgb, 0.5), min(ghostAlbedo.a, 0.8));\n\n float frontOutlineIndex = computeFloatEncodedOutlineIndex(frontAlbedo.a);\n vec4 frontNeighborIndices = computeNeighborOutlineIndices(tFront);\n\n // There exsists fragments of rendered objects within the edge width that should have border\n if(any(equal(frontNeighborIndices, vec4(0.0))) && frontOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(frontNeighborIndices.x, frontNeighborIndices.y), max(frontNeighborIndices.z, frontNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = frontDepth;\n#endif\n return;\n }\n\n customDepth = customDepth > 0.0 ? customDepth : infinity; \n backDepth = backDepth > 0.0 ? backDepth : infinity; \n ghostDepth = ghostDepth > 0.0 ? ghostDepth : infinity;\n frontDepth = frontDepth > 0.0 ? frontDepth : infinity; \n\n // texture has drawn fragment\n if(frontDepth < 1.0){\n float customDepthTest = step(customDepth, backDepth); // zero if back is in front\n\n float a = customDepthTest > 0.0 ? 
ceil(customAlbedo.a) * 0.5 : ceil(backAlbedo.a) * 0.5;\n\n gl_FragColor = vec4(frontAlbedo.rgb, 1.0) * (1.0 - a) + (vec4(backAlbedo.rgb, 1.0) * (1.0 - customDepthTest) + vec4(customAlbedo.rgb, 1.0) * customDepthTest) * a;\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = texture2D(tFrontDepth, vUv).r;\n#endif\n return;\n }\n\n if (customDepth >= backDepth) {\n float backOutlineIndex = computeFloatEncodedOutlineIndex(backAlbedo.a);\n vec4 backNeighborIndices = computeNeighborOutlineIndices(tBack);\n\n if( any(equal(backNeighborIndices, vec4(0.0))) && backOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(backNeighborIndices.x, backNeighborIndices.y), max(backNeighborIndices.z, backNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tBackDepth, vUv).r;\n#endif\n return;\n }\n }\n\n if (texture2D(tBackDepth, vUv).x == 1.0 && \n texture2D(tGhostDepth, vUv).x == 1.0 && \n texture2D(tCustomDepth, vUv).x == 1.0) {\n discard;\n }\n \n // Combine color from ghost, back and custom object\n vec4 color = backAlbedo;\n float depth = backDepth;\n if (customDepth < backDepth && ghostDepth == 1.0) {\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * backAlbedo.rgb, 1.0);\n depth = customDepth;\n } else if (customDepth < backDepth && ghostDepth < 1.0) {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n vec3 modelAlbedo = mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s);\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * modelAlbedo.rgb, 1.0);\n depth = customDepth;\n } else {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n color = vec4(mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s), backAlbedo.a);\n depth = mix(backDepth, ghostDepth, s);\n }\n\n float edgeStrength = 0.0;\n#if defined(EDGES)\n 
if (!any(equal(computeNeighborAlphas(tBack), vec4(0.0)))) {\n float depthEdge = toViewZ(backDepth, cameraNear, cameraFar);\n edgeStrength = (1.0 - smoothstep(10.0, 40.0, depthEdge)) * edgeDetectionFilter(tBack, vUv, resolution) * edgeStrengthMultiplier;\n }\n#endif\n \n gl_FragColor = color * (1.0 - edgeStrength) + vec4(vec3(edgeGrayScaleIntensity) * edgeStrength, 1.0);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = depth;\n#endif\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\n// selection outline\nuniform vec2 texelSize;\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nvoid main() {\n vUv = uv;\n\n // selection outline\n vUv0 = vec2(uv.x + texelSize.x, uv.y);\n vUv1 = vec2(uv.x - texelSize.x, uv.y);\n vUv2 = vec2(uv.x, uv.y + texelSize.y);\n vUv3 = vec2(uv.x, uv.y - texelSize.y);\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 v_rgbSE;\nvarying vec2 v_rgbM;\n\nuniform vec2 inverseResolution;\nuniform vec2 resolution;\nuniform sampler2D tDiffuse;\nuniform sampler2D tDepth;\n\n#ifndef FXAA_REDUCE_MIN\n #define FXAA_REDUCE_MIN (1.0/ 128.0)\n#endif\n#ifndef FXAA_REDUCE_MUL\n #define FXAA_REDUCE_MUL (1.0 / 8.0)\n#endif\n#ifndef FXAA_SPAN_MAX\n #define FXAA_SPAN_MAX 8.0\n#endif\n\nvec4 fxaa(sampler2D tex, vec2 fragCoord,\n vec2 resolution, vec2 inverseResolution,\n vec2 v_rgbNW, vec2 v_rgbNE,\n vec2 v_rgbSW, vec2 v_rgbSE,\n vec2 v_rgbM) {\n vec4 color;\n\n vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;\n vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;\n vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;\n vec3 
rgbSE = texture2D(tex, v_rgbSE).xyz;\n vec4 texColor = texture2D(tex, v_rgbM);\n vec3 rgbM = texColor.xyz;\n\n vec3 luma = vec3(0.299, 0.587, 0.114);\n float lumaNW = dot(rgbNW, luma);\n float lumaNE = dot(rgbNE, luma);\n float lumaSW = dot(rgbSW, luma);\n float lumaSE = dot(rgbSE, luma);\n float lumaM = dot(rgbM, luma);\n float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));\n float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));\n\n mediump vec2 dir;\n dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));\n dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));\n\n float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *\n (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);\n\n float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);\n dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),\n max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),\n dir * rcpDirMin));\n\n vec4 rgbA = 0.5 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (1.0 / 3.0 - 0.5))) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (2.0 / 3.0 - 0.5))));\n vec4 rgbB = rgbA * 0.5 + 0.25 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * -0.5)) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * 0.5)));\n\n float lumaB = dot(rgbB.rgb, luma);\n if ((lumaB < lumaMin) || (lumaB > lumaMax)) {\n color = rgbA;\n } else {\n color = rgbB;\n }\n return color;\n}\n\nvoid main() {\n gl_FragColor = fxaa(tDiffuse, v_fragCoord, \n resolution, inverseResolution, \n v_rgbNW, v_rgbNE, v_rgbSW, v_rgbSE, v_rgbM);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tDepth, v_uv).r;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nuniform vec2 resolution;\nuniform vec2 inverseResolution;\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying 
vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 v_rgbSE;\nvarying vec2 v_rgbM;\n\nvoid main() {\n v_fragCoord = uv * resolution;\n v_rgbNW = (v_fragCoord + vec2(-1.0, -1.0)) * inverseResolution;\n v_rgbNE = (v_fragCoord + vec2(1.0, -1.0)) * inverseResolution;\n v_rgbSW = (v_fragCoord + vec2(-1.0, 1.0)) * inverseResolution;\n v_rgbSE = (v_fragCoord + vec2(1.0, 1.0)) * inverseResolution;\n v_rgbM = vec2(v_fragCoord * inverseResolution);\n v_uv = uv;\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nuniform mat4 projMatrix;\nuniform mat4 inverseProjectionMatrix;\n\nuniform vec3 kernel[MAX_KERNEL_SIZE];\n\nuniform sampler2D tDepth;\nuniform sampler2D tNoise;\n\nuniform vec2 resolution;\n\nuniform float sampleRadius;\nuniform float bias;\n\nvec3 viewPosFromDepth(float depth, vec2 uv) {\n // Depth to clip space: [0, 1] -> [-1, 1]\n float z = depth * 2.0 - 1.0;\n\n // Fragment in clip space\n vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = inverseProjectionMatrix * clipSpacePosition;\n\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n\n return viewSpacePosition.xyz;\n}\n\nvec3 computeWorldNormalFromDepth(sampler2D depthTexture, vec2 resolution, vec2 uv, float sampleDepth){\n float dx = 1.0 / resolution.x;\n float dy = 1.0 / resolution.y;\n\n vec2 uv1 = uv + vec2(dx, 0.0); // right\n float d1 = texture2D(depthTexture, uv1).r; \n\n vec2 uv2 = uv + vec2(0.0, dy); // up\n float d2 = texture2D(depthTexture, uv2).r;\n\n vec2 uv3 = uv + vec2(-dx, 0.0); // left\n float d3 = texture2D(depthTexture, uv3).r;\n\n vec2 uv4 = uv + vec2(0.0, -dy); // down\n float d4 = texture2D(depthTexture, uv4).r;\n\n bool horizontalSampleCondition = abs(d1 - sampleDepth) < abs(d3 - sampleDepth);\n\n float horizontalSampleDepth = horizontalSampleCondition ? 
d1 : d3;\n vec2 horizontalSampleUv = horizontalSampleCondition ? uv1 : uv3;\n\n bool verticalSampleCondition = abs(d2 - sampleDepth) < abs(d4 - sampleDepth);\n\n float verticalSampleDepth = verticalSampleCondition ? d2 : d4;\n vec2 verticalSampleUv = verticalSampleCondition ? uv2 : uv4;\n\n vec3 viewPos = viewPosFromDepth(sampleDepth, vUv);\n \n vec3 viewPos1 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv) : viewPosFromDepth(verticalSampleDepth, verticalSampleUv);\n vec3 viewPos2 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(verticalSampleDepth, verticalSampleUv) : viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv);\n\n return normalize(cross(viewPos1 - viewPos, viewPos2 - viewPos));\n}\n\nvoid main(){\n float d = texture2D(tDepth, vUv).r;\n\n vec3 viewNormal = computeWorldNormalFromDepth(tDepth, resolution, vUv, d);\n\n vec3 viewPosition = viewPosFromDepth(d, vUv);\n\n vec2 noiseScale = vec2( resolution.x / 128.0, resolution.y / 128.0 );\n vec3 randomVec = normalize(texture2D(tNoise, vUv * noiseScale).xyz);\n\n vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));\n\n vec3 bitangent = cross(viewNormal, tangent);\n\n mat3 TBN = mat3(tangent, bitangent, viewNormal);\n\n float occlusion = 0.0;\n\n for (int i = 0; i < MAX_KERNEL_SIZE; i++){\n \n vec3 sampleVector = TBN * kernel[i];\n sampleVector = viewPosition + sampleVector * sampleRadius;\n\n vec4 offset = projMatrix * vec4(sampleVector, 1.0);\n offset.xyz /= offset.w;\n offset.xyz = offset.xyz * 0.5 + 0.5;\n\n float realDepth = texture2D(tDepth, offset.xy).r;\n vec3 realPos = viewPosFromDepth(realDepth, offset.xy);\n\n float rangeCheck = smoothstep(0.0, 1.0, sampleRadius / length(viewPosition - realPos));\n\n occlusion += (realPos.z >= sampleVector.z + bias ? 
1.0 : 0.0) * rangeCheck;\n }\n\n float occlusionFactor = 1.0 - clamp(occlusion / float(MAX_KERNEL_SIZE), 0.0, 1.0);\n\n gl_FragColor = vec4(vec3(occlusionFactor), 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Copyright Cognite (C) 2019 Cognite\n//\n// Efficient Gaussian blur based on technique described by Daniel Rákos in\n// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n//\n\nvarying vec2 vUv;\n\nuniform sampler2D tDiffuse;\nuniform sampler2D tAmbientOcclusion;\n\nuniform vec2 resolution;\n\nvoid main() {\n float blurredAO = 0.5 * (\n 2.0 * texture2D(tAmbientOcclusion, vUv).r * 0.2270270270 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703\n );\n\n gl_FragColor = vec4(texture2D(tDiffuse, vUv).rgb * blurredAO, 1.0);\n}\n\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) 
{\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp float v_visible;\nvarying lowp vec2 v_seed;\n\nvarying vec3 v_viewPosition;\n\nvoid main() {\n \n if(v_visible != 1.0 || isSliced(v_viewPosition)){\n discard;\n }\n\n float v = rand2d(gl_FragCoord.xy + v_seed);\n if (v >= v_coverageFactor) {\n discard;\n }\n\n gl_FragColor = vec4(v_color, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nattribute mediump float a_sectorId;\nattribute lowp vec3 a_coverageFactor;\nattribute lowp float a_visible;\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp vec2 v_seed;\nvarying lowp float v_visible;\n\nvarying vec3 v_viewPosition;\n\nvoid main()\n{\n v_visible = a_visible;\n v_color = packIntToColor(a_sectorId);\n v_coverageFactor = abs(dot(a_coverageFactor, normal));\n // A seed to ensure that two overlapping sectors A and B \n // doesn't produce the same noise pattern\n v_seed = vec2(a_sectorId / 255.0, a_sectorId / 65025.0);\n\n vec4 mvPosition = modelViewMatrix * instanceMatrix * vec4( position, 1.0 );\n\n v_viewPosition = mvPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use 
strict";n.r(t),n.d(t,"revealEnv",(function(){return r})),n.d(t,"BoundingBoxClipper",(function(){return i})),n.d(t,"Cognite3DModel",(function(){return _})),n.d(t,"Cognite3DViewer",(function(){return Rr})),n.d(t,"CognitePointCloudModel",(function(){return U})),n.d(t,"AntiAliasingMode",(function(){return ln})),n.d(t,"SsaoSampleQuality",(function(){return cn})),n.d(t,"defaultRenderOptions",(function(){return un})),n.d(t,"PotreePointShape",(function(){return Wt})),n.d(t,"PotreePointColorType",(function(){return Ht})),n.d(t,"PotreePointSizeType",(function(){return Xt})),n.d(t,"WellKnownAsprsPointClassCodes",(function(){return Zt})),n.d(t,"NotSupportedInMigrationWrapperError",(function(){return B})),n.d(t,"NodeOutlineColor",(function(){return mn})),n.d(t,"DefaultNodeAppearance",(function(){return xn})),n.d(t,"NodeCollectionBase",(function(){return ur})),n.d(t,"PropertyFilterNodeCollection",(function(){return xr})),n.d(t,"SinglePropertyFilterNodeCollection",(function(){return Ir})),n.d(t,"TreeIndexNodeCollection",(function(){return gr})),n.d(t,"AssetNodeCollection",(function(){return hr})),n.d(t,"IntersectionNodeCollection",(function(){return Tr})),n.d(t,"UnionNodeCollection",(function(){return _r})),n.d(t,"InvertedNodeCollection",(function(){return yr})),n.d(t,"NodeAppearanceProvider",(function(){return Mn})),n.d(t,"IndexSet",(function(){return Dn})),n.d(t,"NumericRange",(function(){return l})),n.d(t,"registerCustomNodeCollectionType",(function(){return Sr})),n.d(t,"THREE",(function(){return o}));
|
|
18
15
|
/*!
|
|
19
16
|
* Copyright 2021 Cognite AS
|
|
20
|
-
*/
|
|
21
|
-
const r={publicPath:""};var o=n(0);
|
|
17
|
+
*/function s(e,t){throw new Error(t||"Unexpected object: "+e)}
|
|
22
18
|
/*!
|
|
23
19
|
* Copyright 2021 Cognite AS
|
|
24
|
-
*/class
|
|
20
|
+
*/class d{constructor(e,t){if(t<0)throw new Error("Range cannot have negative number of elements");this.from=e,this.count=t,this.toInclusive=e+t-1}static createFromInterval(e,t){return new d(e,t-e+1)}*values(){for(let e=this.from;e<=this.toInclusive;++e)yield e}toArray(){return Array.from(this.values())}equal(e){return this.from===e.from&&this.count===e.count}contains(e){return e>=this.from&&e<=this.toInclusive}intersects(e){return this.from<=e.toInclusive&&this.toInclusive>=e.from}intersectsOrCoinciding(e){return this.from<=e.toInclusive+1&&this.toInclusive+1>=e.from}intersectionWith(e){return this.intersects(e)?d.createFromInterval(Math.max(this.from,e.from),Math.min(this.toInclusive,e.toInclusive)):void 0}isInside(e){return this.from>=e.from&&this.toInclusive<=e.toInclusive}union(e){return d.createFromInterval(Math.min(this.from,e.from),Math.max(this.toInclusive,e.toInclusive))}forEach(e){for(let t=this.from;t<=this.toInclusive;++t)e(t)}str(){return"("+this.from+", "+this.toInclusive+")"}}
|
|
25
21
|
/*!
|
|
26
22
|
* Copyright 2021 Cognite AS
|
|
27
|
-
*/
|
|
28
|
-
|
|
23
|
+
*/function l(e){const t=Math.max(1,u(Math.sqrt(e)));return{width:t,height:Math.max(1,u(e/t))}}const c=Math.log(2);function u(e){return Math.pow(2,Math.ceil(Math.log(e)/c))}var m=n(6),h=n.n(m);class p{constructor(e,t){this.left=e,this.right=t,this.maxSubtreeDepth=Math.max(this.left.maxSubtreeDepth,this.right.maxSubtreeDepth)+1,this.range=d.createFromInterval(this.left.range.from,this.right.range.toInclusive),this.count=this.left.count+this.right.count}static fromIndexNodesAndBalance(e,t){return e.range.from>t.range.toInclusive+1?new p(t,e).balance():e.range.toInclusive+1<t.range.from?new p(e,t).balance():void h()(!1,"Internal error in IndexSet: Overlapping nodes")}traverse(e){this.left.traverse(e),this.right.traverse(e)}contains(e){return!!this.range.contains(e)&&(this.left.contains(e)||this.right.contains(e))}addRange(e){if(!e.intersectsOrCoinciding(this.range)){if(e.from<this.range.from){const t=this.left.addRange(e);return p.fromIndexNodesAndBalance(t,this.right)}{const t=this.right.addRange(e);return p.fromIndexNodesAndBalance(this.left,t)}}const t=e.intersectsOrCoinciding(this.left.range),n=e.intersectsOrCoinciding(this.right.range);if(t&&n){const[t,n]=this.left.soak(e),[r,o]=this.right.soak(e),i=n.union(o);if(void 0===t&&void 0===r)return new f(i);if(void 0===t&&void 0!==r)return r.addRange(i);if(void 0===r&&void 0!==t)return t.addRange(i);return p.fromIndexNodesAndBalance(t,r).addRange(i)}return t?p.fromIndexNodesAndBalance(this.left.addRange(e),this.right):n?p.fromIndexNodesAndBalance(this.left,this.right.addRange(e)):this.left.maxSubtreeDepth<this.right.maxSubtreeDepth?p.fromIndexNodesAndBalance(this.left.addRange(e),this.right):p.fromIndexNodesAndBalance(this.left,this.right.addRange(e))}removeRange(e){if(!e.intersects(this.range))return this;const[t,n]=this.soak(e);let r=void 0,o=void 0;if(n.from<e.from&&(r=d.createFromInterval(n.from,e.from-1)),n.toInclusive>e.toInclusive&&(o=d.createFromInterval(e.toInclusive+1,n.toInclusive)),void 0===t)return void 
0!==r&&void 0!==o?p.fromIndexNodesAndBalance(new f(r),new f(o)):null!=r?new f(r):null!=o?new f(o):void 0;{let e=t;return void 0!==r&&(e=e.addRange(r)),void 0!==o&&(e=e.addRange(o)),e}}balance(){const e=this.left.maxSubtreeDepth,t=this.right.maxSubtreeDepth;if(t+2<=e){const e=this.left.rotateSmallerRight();return new p(e,this.right).rotateRight().balance()}if(e+2<=t){const e=this.right.rotateSmallerLeft();return new p(this.left,e).rotateLeft().balance()}return this}clone(){return p.fromIndexNodesAndBalance(this.left.clone(),this.right.clone())}hasIntersectionWith(e){return!!e.range.intersects(this.range)&&(this.range.isInside(e.range)?e.hasIntersectionWith(this):!(!this.left.range.intersects(e.range)||!this.left.hasIntersectionWith(e))||!(!this.right.range.intersects(e.range)||!this.right.hasIntersectionWith(e)))}soak(e){let[t,n]=[this.left,e],[r,o]=[this.right,e];if(this.right.range.isInside(e)&&this.left.range.isInside(e))return[void 0,e];this.left.range.intersectsOrCoinciding(e)&&([t,n]=this.left.soak(e)),this.right.range.intersectsOrCoinciding(e)&&([r,o]=this.right.soak(e));const i=n.union(o);if(null==r)return[t,i];if(null==t)return[r,i];return[p.fromIndexNodesAndBalance(t,r),i]}rotateRight(){return"right"in this.left?new p(this.left.left,new p(this.left.right,this.right)):this}rotateLeft(){return"left"in this.right?new p(new p(this.left,this.right.left),this.right.right):this}rotateSmallerLeft(){if(this.left.maxSubtreeDepth>this.right.maxSubtreeDepth){let e=this.rotateRight();return e=e.rotateSmallerLeft(),e}return this}rotateSmallerRight(){if(this.right.maxSubtreeDepth>this.left.maxSubtreeDepth){let e=this.rotateLeft();return e=e.rotateSmallerRight(),e}return this}}
|
|
24
|
+
/*!
|
|
25
|
+
* Copyright 2021 Cognite AS../NumericRange
|
|
26
|
+
*/class f{constructor(e){this.range=e,this.maxSubtreeDepth=0,this.count=e.count}static fromInterval(e,t){return new f(d.createFromInterval(e,t))}traverse(e){e(this.range)}contains(e){return this.range.contains(e)}addRange(e){return this.range.intersectsOrCoinciding(e)?new f(this.range.union(e)):p.fromIndexNodesAndBalance(this,new f(e))}removeRange(e){if(!e.intersects(this.range))return this;if(this.range.isInside(e))return;let t=void 0,n=void 0;return this.range.from<e.from&&(t=d.createFromInterval(this.range.from,e.from-1)),this.range.toInclusive>e.toInclusive&&(n=d.createFromInterval(e.toInclusive+1,this.range.toInclusive)),null!=t&&null!=n?p.fromIndexNodesAndBalance(new f(t),new f(n)):null!=t?new f(t):null!=n?new f(n):void 0}hasIntersectionWith(e){return e.range.intersects(this.range)}soak(e){return this.range.intersectsOrCoinciding(e)?[void 0,this.range.union(e)]:[this,e]}clone(){return new f(this.range)}}class v{constructor(e){if(null==e)this.rootNode=void 0;else if(e instanceof d)this.addRange(e);else for(const t of e)this.add(t)}forEachRange(e){this.rootNode&&this.rootNode.traverse(e)}add(e){const t=new d(e,1);this.addRange(t)}addRange(e){this.rootNode?this.rootNode=this.rootNode.addRange(e):this.rootNode=new f(e)}remove(e){const t=new d(e,1);this.removeRange(t)}removeRange(e){this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))}contains(e){return!!this.rootNode&&this.rootNode.contains(e)}get count(){return this.rootNode?this.rootNode.count:0}toRangeArray(){const e=[];return this.forEachRange(t=>{e.push(t)}),e}toIndexArray(){const e=[];return this.rootNode&&this.forEachRange(t=>{t.forEach(t=>{e.push(t)})}),e}toPlainSet(){const e=this.toIndexArray();return new Set(e)}invertedRanges(){const e=this.toRangeArray(),t=[];for(let n=0;n<e.length-1;n++)e[n].toInclusive+1>=e[n+1].from||t.push(d.createFromInterval(e[n].toInclusive+1,e[n+1].from));return t}unionWith(e){return 
this.rootNode?e.forEachRange(e=>{this.rootNode=this.rootNode.addRange(e)}):this.rootNode=e.rootNode,this}differenceWith(e){return this.rootNode&&e.forEachRange(e=>{var t;this.rootNode=null===(t=this.rootNode)||void 0===t?void 0:t.removeRange(e)}),this}hasIntersectionWith(e){if(e instanceof v)return void 0!==this.rootNode&&void 0!==e.rootNode&&this.rootNode.hasIntersectionWith(e.rootNode);for(const t of e)if(this.contains(t))return!0;return!1}intersectWith(e){if(this.rootNode&&e.rootNode){if(this.rootNode.range.from<e.rootNode.range.from){const t=d.createFromInterval(this.rootNode.range.from,e.rootNode.range.from-1);if(this.rootNode=this.rootNode.removeRange(t),!this.rootNode)return this}if(this.rootNode.range.toInclusive>e.rootNode.range.toInclusive){const t=d.createFromInterval(e.rootNode.range.toInclusive+1,this.rootNode.range.toInclusive);this.rootNode=this.rootNode.removeRange(t)}e.invertedRanges().forEach(e=>{this.rootNode&&(this.rootNode=this.rootNode.removeRange(e))})}else this.rootNode&&(this.rootNode=void 0);return this}clear(){this.rootNode=void 0}clone(){const e=new v;return this.rootNode&&(e.rootNode=this.rootNode.clone()),e}}
|
|
29
27
|
/*!
|
|
30
28
|
* Copyright 2021 Cognite AS
|
|
31
|
-
*/
|
|
29
|
+
*/function x(e,t,n){const r=C(e);if(0==r)return;const o=g(0,-e);let i=_(T(r));const a=r/y(i);a<1&&(i-=1),i+=127,t[n]=128*o+_(i*y(-1)),t[n+1]=128*b(i,2)+b(_(128*a),128),t[n+2]=_(b(_(a*y(15)),y(8))),t[n+3]=_(y(23)*b(a,y(-15)))}function g(e,t){return t<e?0:1}function y(e){return Math.pow(2,e)}function b(e,t){return e-t*_(e/t)}function _(e){return Math.floor(e)}function T(e){return Math.log(e)/Math.log(2)}function C(e){return Math.abs(e)}
|
|
32
30
|
/*!
|
|
33
31
|
* Copyright 2021 Cognite AS
|
|
34
|
-
*/
|
|
35
|
-
const{VERSION:m,MIXPANEL_TOKEN:p}={VERSION:"2.0.0-beta.3",WORKER_VERSION:"1.1.1",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d"};let h=!0;const f={VERSION:m,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))};function v(e,t){if(!h)return;const n={...f,...t};u.a.track(e,n)}function x(e,t){console.error(e),v("error",{message:e.message,name:e.name,stack:e.stack,...t})}
|
|
32
|
+
*/class I{constructor(e,t){this._numFilled=0,this._batchIdCounter=0,this._batchMap=new Map,this._type=t;const n=Math.pow(2,Math.ceil(Math.log2(e)));this._buffer=new t(n)}get length(){return this._numFilled}get buffer(){return this._buffer}add(e){let t=!1;if(this._numFilled+e.length>this._buffer.length){const n=Math.pow(2,Math.ceil(Math.log2(this._numFilled+e.length)));this.allocateNewBuffer(n),t=!0}this._buffer.set(e,this._numFilled);const n=this.createBatch(e);return this._numFilled+=e.length,{batchId:n,bufferIsReallocated:t}}remove(e){const t=this._batchMap.get(e);if(!t)throw new Error("batch does not exist in buffer");this._buffer.copyWithin(t.from,t.from+t.count,this.buffer.length),this._numFilled-=t.count,this._currentTail===t&&(this._currentTail=t.prev);const n=t.prev,r=t.next;n&&(n.next=r),r&&(r.prev=n);let o=r;for(;o;)o.from-=t.count,o=o.next;this._batchMap.delete(e)}createBatch(e){const t={from:this._numFilled,count:e.length,prev:this._currentTail,next:void 0};this._currentTail&&(this._currentTail.next=t),this._currentTail=t;const n=this._batchIdCounter;return this._batchIdCounter++,this._batchMap.set(n,t),n}allocateNewBuffer(e){const t=new this._type(e);t.set(this._buffer),this._buffer=t}}var w=n(0);
|
|
36
33
|
/*!
|
|
37
34
|
* Copyright 2021 Cognite AS
|
|
38
|
-
*/
|
|
39
|
-
async function g(e,t,n,r=15e3){let o=e;return new Promise(e=>{!function i(){for(let e=0;e<r&&o<=t;e++)n(o++);o<=t?setTimeout(i):e()}()})}var y=n(13);class b{constructor(e){this._client=e}async mapTreeIndicesToNodeIds(e,t,n){const r=[...b.chunkInputItems(n)].map(async n=>this.postByTreeIndicesRequest(e,t,n));return(await Promise.all(r)).flat()}async mapNodeIdsToTreeIndices(e,t,n){const r=[...b.chunkInputItems(n)].map(async n=>this.postByNodeIdsRequest(e,t,n));return(await Promise.all(r)).flat()}async postByTreeIndicesRequest(e,t,n){console.assert(n.length<=b.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/internalids/bytreeindices`,o=await this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new y.HttpError(o.status,o.data,o.headers)}async postByNodeIdsRequest(e,t,n){console.assert(n.length<=b.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/treeindices/byinternalids`,o=await this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new y.HttpError(o.status,o.data,o.headers)}static*chunkInputItems(e){let t=0;for(;t<e.length;){const n=Math.min(e.length-t,b.MaxItemsPerRequest);yield e.slice(t,t+n),t+=n}}}b.MaxItemsPerRequest=1e3;
|
|
35
|
+
*/const M=new w.BufferGeometry;class S extends w.Group{constructor(){super(...arguments),this._isDisposed=!1,this._referenceCount=0}reference(){this.ensureNotDisposed(),this._referenceCount++}dereference(){if(this.ensureNotDisposed(),0===this._referenceCount)throw new Error("No references");0==--this._referenceCount&&this.dispose()}dispose(){this.ensureNotDisposed(),this._isDisposed=!0;const e=this.children.filter(e=>e instanceof w.Mesh).map(e=>e);for(const t of e)void 0!==t.geometry&&(t.geometry.dispose(),t.geometry=M)}ensureNotDisposed(){if(this._isDisposed)throw new Error("Already disposed/dereferenced")}}
|
|
40
36
|
/*!
|
|
41
37
|
* Copyright 2021 Cognite AS
|
|
42
|
-
*/
|
|
43
|
-
class _ extends o.Object3D{constructor(e,t,n,r){super(),this.type="cad",this._styledNodeCollections=[],this.modelId=e,this.revisionId=t,this.cadModel=n.cadModelMetadata,this.client=r;const o=new b(r);this.nodeIdAndTreeIndexMaps=new d(e,t,r,o),this.cadNode=n,this.add(this.cadNode)}get nodeTransformProvider(){return this.cadNode.nodeTransformProvider}get styledNodeCollections(){return this._styledNodeCollections}setDefaultNodeAppearance(e){this.cadNode.defaultNodeAppearance=e}getDefaultNodeAppearance(){return this.cadNode.defaultNodeAppearance}assignStyledNodeCollection(e,t){this._styledNodeCollections.push({nodes:e,appearance:t}),this.cadNode.nodeAppearanceProvider.assignStyledNodeCollection(e,t)}unassignStyledNodeCollection(e){this.cadNode.nodeAppearanceProvider.unassignStyledNodeCollection(e)}removeAllStyledNodeCollections(){this.cadNode.nodeAppearanceProvider.clear()}setNodeTransform(e,t){this.nodeTransformProvider.setNodeTransform(e,t)}resetNodeTransform(e){this.nodeTransformProvider.resetNodeTransform(e)}mapFromCdfToModelCoordinates(e,t){return(t=void 0!==t?t:new o.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.modelMatrix),t}mapPositionFromModelToCdfCoordinates(e,t){return(t=void 0!==t?t:new o.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}mapBoxFromModelToCdfCoordinates(e,t){return(t=null!=t?t:new o.Box3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}dispose(){this.children=[]}async getSubtreeTreeIndices(e){return this.determineTreeIndices(e,!0)}getModelBoundingBox(e,t){const n=t?this.cadModel.scene.getBoundsOfMostGeometry():this.cadModel.scene.root.bounds;return(e=e||new o.Box3).copy(n),e.applyMatrix4(this.cadModel.modelMatrix),e}getCameraConfiguration(){return this.cadModel.cameraConfiguration}setModelTransformation(e){this.cadNode.setModelTransformation(e)}getModelTransformation(e){return this.cadNode.getModelTransformation(e)}async getBoundingBoxByNodeId(e,t){const n=await 
this.client.revisions3D.retrieve3DNodes(this.modelId,this.revisionId,[{id:e}]);if(n.length<1)throw new Error("NodeId not found");const r=n[0].boundingBox;if(void 0===r)return x(new Error(`Node ${e} doesn't have a defined bounding box, returning model bounding box`),{moduleName:"Cognite3DModel",methodName:"getBoundingBoxByNodeId"}),this.getModelBoundingBox();const i=r.min,a=r.max,s=t||new o.Box3;return s.min.set(i[0],i[1],i[2]),s.max.set(a[0],a[1],a[2]),s.applyMatrix4(this.cadModel.modelMatrix)}async getBoundingBoxByTreeIndex(e,t){const n=await this.nodeIdAndTreeIndexMaps.getNodeId(e);return this.getBoundingBoxByNodeId(n,t)}iterateNodesByTreeIndex(e){return g(0,this.cadModel.scene.maxTreeIndex,e)}get nodeCount(){return this.cadModel.scene.maxTreeIndex+1}async iterateSubtreeByTreeIndex(e,t){const n=await this.determineTreeIndices(e,!0);return g(n.from,n.toInclusive,t)}async setNodeTransformByTreeIndex(e,t,n=!0){const r=await this.determineTreeIndices(e,n);return this.nodeTransformProvider.setNodeTransform(r,t),r.count}async resetNodeTransformByTreeIndex(e,t=!0){const n=await this.determineTreeIndices(e,t);return this.nodeTransformProvider.resetNodeTransform(n),n.count}async mapNodeIdsToTreeIndices(e){return this.nodeIdAndTreeIndexMaps.getTreeIndices(e)}async mapNodeIdToTreeIndex(e){return this.nodeIdAndTreeIndexMaps.getTreeIndex(e)}async mapTreeIndicesToNodeIds(e){return this.nodeIdAndTreeIndexMaps.getNodeIds(e)}async mapTreeIndexToNodeId(e){return this.nodeIdAndTreeIndexMaps.getNodeId(e)}async determineTreeIndices(e,t){let n=1;if(t){const t=await this.nodeIdAndTreeIndexMaps.getSubtreeSize(e);n=t||1}return new l(e,n)}}var 
T,I=n(8),C=n.n(I),M=n(21),w=n.n(M),S=n(22),P=n.n(S),R=n(12);!function(e){e[e.Color=1]="Color",e[e.Normal=2]="Normal",e[e.TreeIndex=3]="TreeIndex",e[e.PackColorAndNormal=4]="PackColorAndNormal",e[e.Depth=5]="Depth",e[e.Effects=6]="Effects",e[e.Ghost=7]="Ghost",e[e.LOD=8]="LOD",e[e.DepthBufferOnly=9]="DepthBufferOnly",e[e.GeometryType=10]="GeometryType"}(T||(T={}));
|
|
38
|
+
*/const P={camPos:new w.Vector3,bounds:new w.Box3};class R extends w.Object3D{constructor(e){super(),this._activeLevel=0,this._levels=[],this.isLOD=!0,this.autoUpdate=!0,this._boundingBox=e.clone(),this.type="BoundingBoxLOD"}setBoundingBox(e){this._boundingBox.copy(e)}addLevel(e,t=0){this._levels.push({object:e,distance:Math.abs(t)}),this._levels.sort((e,t)=>t.distance-e.distance),e.visible=!1,this.add(e)}getCurrentLevel(){return this._levels.length>0?this._levels.length-this._activeLevel-1:0}update(e){this.updateCurrentLevel(e)}updateCurrentLevel(e){const t=this._levels,{camPos:n,bounds:r}=P;r.copy(this._boundingBox).applyMatrix4(this.matrixWorld);const o=e instanceof w.PerspectiveCamera?e.zoom:1;if(t.length>0){n.setFromMatrixPosition(e.matrixWorld);const i=r.distanceToPoint(n)/o;t[this._activeLevel].object.visible=!1,this._activeLevel=t.findIndex(e=>i>=e.distance),this._activeLevel=this._activeLevel>=0?this._activeLevel:t.length-1,t[this._activeLevel].object.visible=!0}}}
|
|
44
39
|
/*!
|
|
45
40
|
* Copyright 2021 Cognite AS
|
|
46
|
-
*/
|
|
47
|
-
class N{constructor(e){this._originalState={},this._renderer=e,this._originalState={}}setClearColor(e,t){this._originalState={clearColor:this._renderer.getClearColor(new o.Color),clearAlpha:this._renderer.getClearAlpha(),...this._originalState},this._renderer.setClearColor(e,t)}setSize(e,t){this._originalState={size:this._renderer.getSize(new o.Vector2),...this._originalState},this._renderer.setSize(e,t)}set localClippingEnabled(e){this._originalState={localClippingEnabled:this._renderer.localClippingEnabled,...this._originalState},this._renderer.localClippingEnabled=e}set autoClear(e){this._originalState={autoClear:this._renderer.autoClear,...this._originalState},this._renderer.autoClear=e}setRenderTarget(e){this._originalState={renderTarget:this._renderer.getRenderTarget(),...this._originalState},this._renderer.setRenderTarget(e)}resetState(){void 0!==this._originalState.autoClear&&(this._renderer.autoClear=this._originalState.autoClear),void 0!==this._originalState.clearColor&&this._renderer.setClearColor(this._originalState.clearColor,this._originalState.clearAlpha),void 0!==this._originalState.localClippingEnabled&&(this._renderer.localClippingEnabled=this._originalState.localClippingEnabled),void 0!==this._originalState.size&&this._renderer.setSize(this._originalState.size.width,this._originalState.size.height),void 0!==this._originalState.renderTarget&&this._renderer.setRenderTarget(this._originalState.renderTarget),this._originalState={}}}
|
|
41
|
+
*/function N(e){return[new w.Vector3(e.min.x,e.min.y,e.min.z),new w.Vector3(e.min.x,e.min.y,e.max.z),new w.Vector3(e.min.x,e.max.y,e.min.z),new w.Vector3(e.min.x,e.max.y,e.max.z),new w.Vector3(e.max.x,e.min.y,e.min.z),new w.Vector3(e.max.x,e.min.y,e.max.z),new w.Vector3(e.max.x,e.max.y,e.min.z),new w.Vector3(e.max.x,e.max.y,e.max.z)]}
|
|
48
42
|
/*!
|
|
49
43
|
* Copyright 2021 Cognite AS
|
|
50
|
-
*/
|
|
44
|
+
*/function D(){let e=!1;var t;return t=navigator.userAgent||navigator.vendor||window.opera,(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino|android|ipad|playbook|silk/i.test(t)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| 
)|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(t.substr(0,4)))&&(e=!0),e}
|
|
51
45
|
/*!
|
|
52
46
|
* Copyright 2021 Cognite AS
|
|
53
|
-
*/class
|
|
47
|
+
*/class z{constructor(e){this._originalState={},this._renderer=e,this._originalState={}}setClearColor(e,t){this._originalState={clearColor:this._renderer.getClearColor(new w.Color),clearAlpha:this._renderer.getClearAlpha(),...this._originalState},this._renderer.setClearColor(e,t)}setSize(e,t){this._originalState={size:this._renderer.getSize(new w.Vector2),...this._originalState},this._renderer.setSize(e,t)}set localClippingEnabled(e){this._originalState={localClippingEnabled:this._renderer.localClippingEnabled,...this._originalState},this._renderer.localClippingEnabled=e}set autoClear(e){this._originalState={autoClear:this._renderer.autoClear,...this._originalState},this._renderer.autoClear=e}setRenderTarget(e){this._originalState={renderTarget:this._renderer.getRenderTarget(),...this._originalState},this._renderer.setRenderTarget(e)}resetState(){void 0!==this._originalState.autoClear&&(this._renderer.autoClear=this._originalState.autoClear),void 0!==this._originalState.clearColor&&this._renderer.setClearColor(this._originalState.clearColor,this._originalState.clearAlpha),void 0!==this._originalState.localClippingEnabled&&(this._renderer.localClippingEnabled=this._originalState.localClippingEnabled),void 0!==this._originalState.size&&this._renderer.setSize(this._originalState.size.width,this._originalState.size.height),void 0!==this._originalState.renderTarget&&this._renderer.setRenderTarget(this._originalState.renderTarget),this._originalState={}}}var E=n(21);
|
|
54
48
|
/*!
|
|
55
49
|
* Copyright 2021 Cognite AS
|
|
56
|
-
*/
|
|
50
|
+
*/const O={publicPath:""};
|
|
57
51
|
/*!
|
|
58
52
|
* Copyright 2021 Cognite AS
|
|
59
|
-
*/
|
|
53
|
+
*/var A=n(10);
|
|
60
54
|
/*!
|
|
61
55
|
* Copyright 2021 Cognite AS
|
|
62
|
-
*/class
|
|
56
|
+
*/class F{constructor(){this.workerList=[];const e=this.determineNumberOfWorkers();for(let t=0;t<e;t++){const e={worker:Object(E.wrap)(this.createWorker()),activeJobCount:0,messageIdCounter:0};this.workerList.push(e)}(async function(e){let t;try{t=await e.getVersion()}catch(e){t="1.1.0"}const n="1.2.0",[r,o,i]=n.split(".").map(e=>parseInt(e,10)),[a,s,d]=t.split(".").map(e=>parseInt(e,10)),l=`Update your local copy of @cognite/reveal-parser-worker. Required version is ${n}. Received ${t}.`;if(r!==a)throw new Error(l);if(s<o)throw new Error(l);if(s===o&&d<i)throw new Error(l)}
|
|
63
57
|
/*!
|
|
64
58
|
* Copyright 2021 Cognite AS
|
|
65
|
-
*/
|
|
59
|
+
*/)(this.workerList[0].worker).catch(e=>A.a.error(e)),this.workerObjUrl&&URL.revokeObjectURL(this.workerObjUrl)}static get defaultPool(){return F._defaultPool=F._defaultPool||new F,F._defaultPool}createWorker(){const e=(O.publicPath||n.p)+"reveal.parser.worker.js",t={name:"reveal.parser #"+this.workerList.length};if(function(e,t=location.origin){const n=e=>!e.match(/^.*\/\//);if(n(t))throw new Error("isTheSameDomain: the second argument must be an absolute url or omitted. Received "+t);if(n(e))return!0;try{const n=[e,t].map(e=>e.startsWith("//")?"https:"+e:e).map(e=>new URL(e));return n[0].host===n[1].host}catch(n){return console.error(`can not create URLs for ${e} and ${t}`,n),!1}}(e))return new Worker(e,t);if(!this.workerObjUrl){const t=new Blob([`importScripts(${JSON.stringify(e)});`],{type:"text/javascript"});this.workerObjUrl=URL.createObjectURL(t)}return new Worker(this.workerObjUrl,t)}async postWorkToAvailable(e){const t=this.workerList.reduce((e,t)=>e.activeJobCount>t.activeJobCount?t:e,this.workerList[0]);t.activeJobCount+=1;return await(async()=>{try{return await e(t.worker)}finally{t.activeJobCount-=1}})()}determineNumberOfWorkers(){return Math.max(2,Math.min(4,window.navigator.hardwareConcurrency||2))}}class B{constructor(e){this._value=e,this._lastAccessTime=Date.now()}get value(){return this.touch(),this._value}get lastAccessTime(){return this._lastAccessTime}touch(){this._lastAccessTime=Date.now()}}class k{constructor(e=50,t,n=10){this._data=new Map,this._maxElementsInCache=e,this._defaultCleanupCount=n,this._removeCallback=t}has(e){return this._data.has(e)}forceInsert(e,t){this.isFull()&&this.cleanCache(this._defaultCleanupCount),this.insert(e,t)}insert(e,t){if(!(this._data.size<this._maxElementsInCache))throw new Error("Cache full, please clean Cache and retry adding data");this._data.set(e,new B(t))}remove(e){if(void 0!==this._removeCallback){const t=this._data.get(e);void 0!==t&&this._removeCallback(t.value)}this._data.delete(e)}get(e){const 
t=this._data.get(e);if(void 0!==t)return t.value;throw new Error(`Cache element ${e} does not exist`)}isFull(){return!(this._data.size<this._maxElementsInCache)}cleanCache(e){const t=Array.from(this._data.entries());t.sort((e,t)=>t[1].lastAccessTime-e[1].lastAccessTime);for(let n=0;n<e;n++){const e=t.pop();if(void 0===e)return;this.remove(e[0])}}clear(){if(void 0!==this._removeCallback)for(const e of this._data.values())this._removeCallback(e.value);this._data.clear()}}
|
|
60
|
+
/*!
|
|
61
|
+
* Copyright 2021 Cognite AS
|
|
62
|
+
*/class L{constructor(e,t){this._cache=new Map,this._retrieves=new Map,this._capacity=e,this._disposeCallback=t}get(e){const t=this._retrieves.get(e)||0;return this._retrieves.set(e,t+1),this._cache.get(e)}set(e,t){return this._cache.has(e)||this._capacity<this._cache.size?(this._cache.set(e,t),!0):(this._cache.set(e,t),this.ensureWithinCapacity(),this._cache.has(e))}remove(e){this._retrieves.delete(e);const t=this._cache.get(e);return void 0!==t&&(void 0!==this._disposeCallback&&this._disposeCallback(t),this._cache.delete(e),!0)}clear(){if(void 0!==this._disposeCallback)for(const e of this._cache.values())this._disposeCallback(e);this._retrieves.clear(),this._cache.clear()}ensureWithinCapacity(){if(this._capacity>=this._cache.size)return;const e=Array.from(this._cache.keys()).map(e=>({key:e,retrivalCount:this._retrieves.get(e)||0})).sort((e,t)=>e.retrivalCount-t.retrivalCount).slice(0,this._cache.size-this._capacity).map(e=>e.key);for(const t of e)this.remove(t)}}var G=n(11),U=n.n(G);
|
|
63
|
+
/*!
|
|
64
|
+
* Copyright 2021 Cognite AS
|
|
65
|
+
*/
|
|
66
|
+
const{VERSION:V,MIXPANEL_TOKEN:j}={VERSION:"2.1.2",WORKER_VERSION:"1.2.0",MIXPANEL_TOKEN:"8c900bdfe458e32b768450c20750853d",IS_DEVELOPMENT_MODE:!1};let W=!0;const q={VERSION:V,project:"unknown",application:"unknown",sessionId:"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(function(e){const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))};function H(e,t,n,r){W=e,W&&(U.a.init(j,{disable_cookie:!0,disable_persistence:!0,ip:!1,property_blacklist:["$city","$region","mp_country_code","$geo_source","$timezone","mp_lib","$lib_version","$device_id","$user_id","$current_url","$screen_width","$screen_height","$referrer","$referring_domain","$initial_referrer","$initial_referring_domain"]}),U.a.reset(),U.a.identify("reveal-single-user"),t&&(q.project=t),n&&(q.application=n),K("init",r))}function K(e,t){if(!W)return;const n={...q,...t};U.a.track(e,n)}function X(e,t){K("loadModel",{...e,modelIdentifier:t})}function Y(e,t){A.a.error(e),K("error",{message:e.message,name:e.name,stack:e.stack,...t})}function Z(e){K("cameraNavigated",e)}
|
|
67
|
+
/*!
|
|
68
|
+
* Copyright 2021 Cognite AS
|
|
69
|
+
*/function Q(){this.array=null}
|
|
70
|
+
/*!
|
|
71
|
+
* Copyright 2021 Cognite AS
|
|
72
|
+
*/},function(e,t){e.exports=require("rxjs/operators")},function(e,t){e.exports=require("glslify")},function(e,t){e.exports=require("rxjs")},function(e,t,n){"use strict";n.d(t,"b",(function(){return c})),n.d(t,"d",(function(){return s})),n.d(t,"f",(function(){return u})),n.d(t,"g",(function(){return l})),n.d(t,"a",(function(){return p})),n.d(t,"e",(function(){return r})),n.d(t,"c",(function(){return f}));var r,o=n(6),i=n.n(o),a=n(1);
|
|
66
73
|
/*!
|
|
67
74
|
* Copyright 2021 Cognite AS
|
|
68
75
|
*/
|
|
69
|
-
|
|
76
|
+
class s{constructor(e){this._changedEvent=new a.d,this._classToken=e}get classToken(){return this._classToken}on(e,t){i()("changed"===e),this._changedEvent.subscribe(t)}off(e,t){i()("changed"===e),this._changedEvent.unsubscribe(t)}notifyChanged(){this._changedEvent.fire()}}class d extends s{constructor(e,t){super(e),this._cachedCombinedIndexSet=void 0,this._nodeCollections=[],this._changedUnderlyingNodeCollectionHandler=this.makeDirty.bind(this),t&&t.forEach(e=>this.add(e))}add(e){e.on("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.push(e),this.makeDirty()}remove(e){const t=this._nodeCollections.indexOf(e);if(t<0)throw new Error("Could not find set");e.off("changed",this._changedUnderlyingNodeCollectionHandler),this._nodeCollections.splice(t,1),this.makeDirty()}clear(){this._nodeCollections.forEach(e=>e.clear())}makeDirty(){void 0!==this._cachedCombinedIndexSet&&(this._cachedCombinedIndexSet=void 0,this.notifyChanged())}getIndexSet(){var e;return this._cachedCombinedIndexSet=null!==(e=this._cachedCombinedIndexSet)&&void 0!==e?e:this.createCombinedIndexSet(),this._cachedCombinedIndexSet}get isLoading(){return this._nodeCollections.some(e=>e.isLoading)}}class l extends d{constructor(e){super(l.classToken,e)}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new a.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let t=1;t<this._nodeCollections.length;++t)e.unionWith(this._nodeCollections[t].getIndexSet());return e}}l.classToken="UnionNodeCollection";class c extends d{constructor(e){super(l.classToken,e)}serialize(){return{token:this.classToken,state:{subCollections:this._nodeCollections.map(e=>e.serialize())}}}createCombinedIndexSet(){if(0===this._nodeCollections.length)return new a.e;const e=this._nodeCollections[0].getIndexSet().clone();for(let 
t=1;t<this._nodeCollections.length;++t)e.intersectWith(this._nodeCollections[t].getIndexSet());return e}}c.classToken="IntersectionNodeCollection";
|
|
70
77
|
/*!
|
|
71
78
|
* Copyright 2021 Cognite AS
|
|
72
79
|
*/
|
|
80
|
+
class u extends s{constructor(e){super(u.classToken),e instanceof a.e?this._treeIndices=e:(a.h,this._treeIndices=new a.e(e))}updateSet(e){this._treeIndices=e,this.notifyChanged()}clear(){this._treeIndices=new a.e,this.notifyChanged()}getIndexSet(){return this._treeIndices}get isLoading(){return!1}serialize(){return{token:this.classToken,state:this._treeIndices.toRangeArray()}}}u.classToken="TreeIndexNodeCollection",function(e){e[e.NoOutline=0]="NoOutline",e[e.White=1]="White",e[e.Black=2]="Black",e[e.Cyan=3]="Cyan",e[e.Blue=4]="Blue",e[e.Green=5]="Green",e[e.Red=6]="Red",e[e.Orange=7]="Orange"}(r||(r={}));const m={visible:!0,outlineColor:r.White},h={visible:!0,renderInFront:!0},p={Default:{visible:!0,renderGhosted:!1,renderInFront:!1,outlineColor:r.NoOutline,color:[0,0,0]},Outlined:m,Hidden:{visible:!1},InFront:h,Ghosted:{visible:!0,renderGhosted:!0},Highlighted:{...h,visible:!0,color:[100,100,255],...m}};
|
|
73
81
|
/*!
|
|
74
82
|
* Copyright 2021 Cognite AS
|
|
75
83
|
*/
|
|
76
|
-
class
|
|
84
|
+
class f{constructor(){this._styledCollections=new Array,this._events={changed:new a.d,loadingStateChanged:new a.d}}on(e,t){switch(e){case"changed":this._events.changed.subscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.subscribe(t);break;default:Object(a.k)(e,`Unsupported event: '${e}'`)}}off(e,t){switch(e){case"changed":this._events.changed.unsubscribe(t);break;case"loadingStateChanged":this._events.loadingStateChanged.unsubscribe(t);break;default:Object(a.k)(e,`Unsupported event: '${e}'`)}}assignStyledNodeCollection(e,t){const n=this._styledCollections.find(t=>t.nodeCollection===e);if(void 0!==n)n.appearance=t,this.handleNodeCollectionChanged(n);else{const n={nodeCollection:e,appearance:t,handleNodeCollectionChangedListener:()=>{this.handleNodeCollectionChanged(n)}};this._styledCollections.push(n),e.on("changed",n.handleNodeCollectionChangedListener),this.notifyChanged()}}unassignStyledNodeCollection(e){const t=this._styledCollections.findIndex(t=>t.nodeCollection===e);if(-1===t)throw new Error("NodeCollection not added");const n=this._styledCollections[t];this._styledCollections.splice(t,1),e.off("changed",n.handleNodeCollectionChangedListener),this.notifyChanged()}applyStyles(e){this._styledCollections.forEach(t=>{const n=t.nodeCollection.getIndexSet();e(n,t.appearance)})}clear(){for(const e of this._styledCollections){e.nodeCollection.off("changed",e.handleNodeCollectionChangedListener)}this._styledCollections.splice(0),this.notifyChanged()}get isLoading(){return this._styledCollections.some(e=>e.nodeCollection.isLoading)}notifyChanged(){this._events.changed.fire()}notifyLoadingStateChanged(){this._lastFiredLoadingState!==this.isLoading&&(this._lastFiredLoadingState=this.isLoading,this._events.loadingStateChanged.fire(this.isLoading))}handleNodeCollectionChanged(e){this.notifyChanged(),this.notifyLoadingStateChanged()}}
|
|
77
85
|
/*!
|
|
78
86
|
* Copyright 2021 Cognite AS
|
|
79
|
-
*/function
|
|
87
|
+
*/},function(e,t){e.exports=require("assert")},function(e,t,n){"use strict";n.d(t,"e",(function(){return l})),n.d(t,"g",(function(){return ee})),n.d(t,"b",(function(){return ae})),n.d(t,"j",(function(){return le})),n.d(t,"h",(function(){return de})),n.d(t,"a",(function(){return se})),n.d(t,"f",(function(){return Ce})),n.d(t,"c",(function(){return Fe})),n.d(t,"d",(function(){return Ye})),n.d(t,"i",(function(){return lt}));var r=n(0),o=n(62);
|
|
80
88
|
/*!
|
|
81
89
|
* Copyright 2021 Cognite AS
|
|
82
90
|
*/
|
|
83
|
-
class
|
|
91
|
+
/*!
 * Copyright 2021 Cognite AS
 */

/**
 * (i) SectorNode: THREE.Group representing one sector of a CAD model. Holds the
 * sector id, path, bounds, the currently attached (reference-counted) geometry
 * group and the level-of-detail (o.a) it was loaded at.
 */
class i extends r.Group {
  constructor(e, t, n) {
    super();
    this._lod = o.a.Discarded;
    this._updatedTimestamp = Date.now();
    this.name = `Sector ${t} [id=${e}]`;
    this.sectorId = e;
    this.sectorPath = t;
    this.bounds = n;
    // Depth in the sector tree = number of '/' separators in the path, minus one.
    let separators = 0;
    for (const ch of t) {
      if (ch === "/") {
        separators++;
      }
    }
    this.depth = separators - 1;
  }

  get levelOfDetail() {
    return this._lod;
  }

  get group() {
    return this._group;
  }

  get updatedTimestamp() {
    return this._updatedTimestamp;
  }

  /** Swaps in newly loaded geometry (reference-counted) at the given LOD. */
  updateGeometry(e, t) {
    this.resetGeometry();
    this._group = e;
    if (this._group !== undefined) {
      this._group.reference();
    }
    this._lod = t;
    this._updatedTimestamp = Date.now();
    this.visible = this._lod !== o.a.Discarded;
    this.updateMatrixWorld(true);
  }

  /** Releases the current geometry group (if any) and marks the sector discarded. */
  resetGeometry() {
    if (this._group !== undefined) {
      this._group.dereference();
      this.remove(this._group);
    }
    this._group = undefined;
    this._lod = o.a.Discarded;
    this._updatedTimestamp = Date.now();
  }
}

/**
 * (a) RootSectorNode: SectorNode for the root sector. Builds the full sector
 * node tree from the scene metadata, keeps an id -> SectorNode map and owns the
 * model transformation matrix.
 */
class a extends i {
  constructor(e) {
    const t = e.scene.root.bounds.clone();
    t.applyMatrix4(e.modelMatrix);
    super(0, "/", t);
    const { scene: n, modelMatrix: r } = e;
    this.sectorNodeMap = new Map();
    // Depth-first construction of a SectorNode per sector in the metadata tree.
    const buildSubtree = (sector, parent, map, matrix) => {
      const bounds = sector.bounds.clone();
      bounds.applyMatrix4(matrix);
      const node = new i(sector.id, sector.path, bounds);
      node.name = "Sector " + sector.id;
      parent.add(node);
      node.matrixAutoUpdate = false;
      node.updateMatrixWorld(true);
      map.set(sector.id, node);
      for (const child of sector.children) {
        buildSubtree(child, node, map, matrix);
      }
    };
    buildSubtree(n.root, this, this.sectorNodeMap, r);
    this.matrixAutoUpdate = false;
    this.setModelTransformation(r);
  }

  /** Applies the given model matrix to this node and refreshes world matrices. */
  setModelTransformation(e) {
    this.matrix.copy(e);
    this.updateMatrixWorld(true);
  }

  /** Copies the current model matrix into `e` (or a fresh Matrix4) and returns it. */
  getModelTransformation(e = new r.Matrix4()) {
    return e.copy(this.matrix);
  }
}
var s = n(1);
|
|
87
95
|
/*!
|
|
88
96
|
* Copyright 2021 Cognite AS
|
|
89
|
-
*/
|
|
97
|
+
*/
|
|
90
98
|
/*!
|
|
91
99
|
* Copyright 2021 Cognite AS
|
|
92
|
-
*/
|
|
100
|
+
*/
|
|
101
|
+
// (d) Instanced-mesh manager for a CAD model. Merges per-sector instanced mesh
// payloads into shared THREE.InstancedMesh objects — one per
// (fileId, triangleOffset) pair, keyed by JSON.stringify([fileId, triangleOffset]).
// Backing attribute storage uses dynamic buffers (s.c) for tree indices, colors
// and 4x4 instance matrices that grow as sectors load.
// - addInstanceMeshes(mesh, modelIdentifier, sectorId): appends each instance's
//   attributes; when a dynamic buffer had to reallocate, the whole
//   InstancedBufferGeometry is rebuilt (recreateBufferGeometry). Sectors already
//   processed are skipped via _processedSectorMap.
// - removeSectorInstancedMeshes(sectorId): removes the batches recorded for one
//   sector from each buffer and shrinks mesh.count accordingly.
// - createInstancedBufferGeometry also returns an update callback that marks
//   the instanced attributes dirty (needsUpdate) after buffer mutation.
// NOTE(review): frustumCulled is disabled on created meshes — presumably
// because instance bounds are not tracked here; confirm upstream.
class d{constructor(e,t){this._materialManager=t,this._instancedGeometryMap=new Map,this._instancedAttributeMap=new Map,this._processedSectorMap=new Map,this._instancedMeshGroup=e}addInstanceMeshes(e,t,n){if(this._processedSectorMap.has(n))return;this._instancedGeometryMap.has(e.fileId)||this._instancedGeometryMap.set(e.fileId,{vertices:new r.Float32BufferAttribute(e.vertices.buffer,3),indices:new r.Uint32BufferAttribute(e.indices.buffer,1)});const o=this._instancedGeometryMap.get(e.fileId),i=this._materialManager.getModelMaterials(t).instancedMesh;for(const t of e.instances){const r=JSON.stringify([e.fileId,t.triangleOffset]);if(this._instancedAttributeMap.has(r)){const e=this._instancedAttributeMap.get(r),i=e.treeIndexBuffer.add(t.treeIndices),a=e.colorBuffer.add(t.colors),s=e.instanceMatrixBuffer.add(t.instanceMatrices);e.updateAttributes(),this.addBatchDescriptor(r,i.batchId,a.batchId,s.batchId,n),e.mesh.count=e.treeIndexBuffer.length,(i.bufferIsReallocated||a.bufferIsReallocated||s.bufferIsReallocated)&&this.recreateBufferGeometry(o,e,t,r)}else this.createInstance(t,o,i,r,n)}}removeSectorInstancedMeshes(e){const t=this._processedSectorMap.get(e);if(t){for(const n of t){const t=this._instancedAttributeMap.get(n.instanceIdentifier);if(void 0===t)throw new Error("Cannot resolve instance identifier for sector "+e);t.treeIndexBuffer.remove(n.treeIndicesbatchId),t.colorBuffer.remove(n.colorsBatchId),t.instanceMatrixBuffer.remove(n.instanceMatricesBatchId),t.updateAttributes(),t.mesh.count=t.treeIndexBuffer.length}this._processedSectorMap.delete(e)}}createInstancedBufferGeometry(e,t,n,o,i){const a=new r.InstancedBufferGeometry;a.setIndex(t),a.setAttribute("position",e);const s=new r.InstancedBufferAttribute(n.buffer,1);a.setAttribute("a_treeIndex",s);const d=new r.InstancedBufferAttribute(o.buffer,4,!0);a.setAttribute("a_color",d);const l=new r.InstancedInterleavedBuffer(i.buffer,16);for(let e=0;e<4;e++){const t=new 
r.InterleavedBufferAttribute(l,4,4*e);a.setAttribute("a_instanceMatrix_column_"+e,t)}return[a,()=>{s.needsUpdate=!0,d.needsUpdate=!0,l.needsUpdate=!0}]}recreateBufferGeometry(e,t,n,r){const[o,i]=this.createInstancedBufferGeometry(e.vertices,e.indices,t.treeIndexBuffer,t.colorBuffer,t.instanceMatrixBuffer);o.setDrawRange(3*n.triangleOffset,3*n.triangleCount),t.mesh.geometry.dispose(),t.mesh.geometry=o,t.mesh.count=t.treeIndexBuffer.length,this._instancedAttributeMap.set(r,{mesh:t.mesh,treeIndexBuffer:t.treeIndexBuffer,colorBuffer:t.colorBuffer,instanceMatrixBuffer:t.instanceMatrixBuffer,updateAttributes:i})}createInstance(e,t,n,o,i){const a=new s.c(e.treeIndices.length,Float32Array),d=a.add(e.treeIndices),l=new s.c(e.colors.length,Uint8Array),c=l.add(e.colors),u=new s.c(e.instanceMatrices.length,Float32Array),m=u.add(e.instanceMatrices);this.addBatchDescriptor(o,d.batchId,c.batchId,m.batchId,i);const[h,p]=this.createInstancedBufferGeometry(t.vertices,t.indices,a,l,u);h.setDrawRange(3*e.triangleOffset,3*e.triangleCount);const f=new r.InstancedMesh(h,n,a.length);f.frustumCulled=!1,this._instancedAttributeMap.set(o,{mesh:f,treeIndexBuffer:a,colorBuffer:l,instanceMatrixBuffer:u,updateAttributes:p}),this._instancedMeshGroup.add(f),f.updateMatrixWorld(!0)}addBatchDescriptor(e,t,n,r,o){const i={instanceIdentifier:e,treeIndicesbatchId:t,colorsBatchId:n,instanceMatricesBatchId:r},a=this._processedSectorMap.get(o);a?a.push(i):this._processedSectorMap.set(o,[i])}}
|
|
93
102
|
/*!
|
|
94
103
|
* Copyright 2021 Cognite AS
|
|
95
|
-
*/(
|
|
104
|
+
*/
// (l) CadNode: THREE.Object3D root for one loaded CAD model. Owns the sector
// tree root node (RootSectorNode), an InstancedMeshes group managed by the
// instanced-mesh manager (d), and delegates node appearance, transforms,
// render mode and clipping planes to the shared material manager.
// suggestCameraConfig(): derives a camera position/target by averaging sector
// bounds over the tree and offsetting along a scaled diagonal (note the
// deliberate negative x/y scaling), then maps through the model transform.
// NOTE(review): _previousCameraMatrix is initialized to an invalid matrix
// (elements[0] = Infinity) — presumably to force a first-frame update; the
// consumer of this field is not visible here.
class l extends r.Object3D{constructor(e,t){super(),this._previousCameraMatrix=new r.Matrix4,this.type="CadNode",this.name="Sector model",this._materialManager=t;const n=new r.Group;n.name="InstancedMeshes",this._instancedMeshManager=new d(n,t);const o=new a(e);o.add(n),this._cadModelMetadata=e;const{scene:i}=e;this._sectorScene=i,this._previousCameraMatrix.elements[0]=1/0,this._rootSector=o,this.add(o),this.matrixAutoUpdate=!1,this.updateMatrixWorld(),this.setModelTransformation(e.modelMatrix)}get nodeTransformProvider(){return this._materialManager.getModelNodeTransformProvider(this._cadModelMetadata.modelIdentifier)}get nodeAppearanceProvider(){return this._materialManager.getModelNodeAppearanceProvider(this._cadModelMetadata.modelIdentifier)}get defaultNodeAppearance(){return this._materialManager.getModelDefaultNodeAppearance(this._cadModelMetadata.modelIdentifier)}set defaultNodeAppearance(e){this._materialManager.setModelDefaultNodeAppearance(this._cadModelMetadata.modelIdentifier,e)}get clippingPlanes(){return this._materialManager.clippingPlanes}set clippingPlanes(e){this._materialManager.clippingPlanes=e}get cadModelMetadata(){return this._cadModelMetadata}get sectorScene(){return this._sectorScene}get rootSector(){return this._rootSector}get materialManager(){return this._materialManager}set renderMode(e){this._materialManager.setRenderMode(e)}get renderMode(){return this._materialManager.getRenderMode()}setModelTransformation(e){this._rootSector.setModelTransformation(e),this._cadModelMetadata.modelMatrix.copy(e)}getModelTransformation(e){return this._rootSector.getModelTransformation(e)}suggestCameraConfig(){const{position:e,target:t,near:n,far:o}=function(e){const t=new r.Vector3,n=new r.Vector3;let o=0;Object(s.y)(e,e=>(t.add(e.bounds.min),n.add(e.bounds.max),o+=1,!0)),t.divideScalar(o),n.divideScalar(o);const i=new r.Box3(t,n),a=i.getCenter(new r.Vector3),d=i.getSize(new r.Vector3);d.x*=-2,d.y*=-2,d.z*=2;const l=(new 
r.Vector3).addVectors(a,d);return{position:l,target:a,near:.1,far:12*l.distanceTo(a)}}(this._sectorScene.root),i=this.getModelTransformation(),a=e.clone(),d=t.clone();return a.applyMatrix4(i),d.applyMatrix4(i),{position:a,target:d,near:n,far:o}}updateInstancedMeshes(e,t,n){for(const r of e)this._instancedMeshManager.addInstanceMeshes(r,t,n)}discardInstancedMeshes(e){this._instancedMeshManager.removeSectorInstancedMeshes(e)}}
|
|
96
105
|
/*!
|
|
97
106
|
* Copyright 2021 Cognite AS
|
|
98
|
-
*/
|
|
107
|
+
*/
// Shared primitive geometry prototypes (index/position/normal attributes) and
// their bounding boxes, each built exactly once from a throw-away THREE
// geometry that is disposed in a finally block:
// - c/u: unit box;  m/h: unit quad (plane);  p/f: trapezium (4 dummy verts —
//   real vertices come from instance attributes);  v/x: cone proxy (6 verts);
// - g/y: torus proxy at three LOD tessellation levels, built by a small
//   parametric-grid generator; bounding box taken from the coarsest LOD's
//   positions;
// - b/_: nut proxy from a 6-sided cylinder rotated -90deg about X.
// T/C: webpack import of the 'assert' module (C() yields the assert function).
const{boxGeometry:c,boxGeometryBoundingBox:u}=(()=>{const e=new r.BoxBufferGeometry(1,1,1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{boxGeometry:t,boxGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{quadGeometry:m,quadGeometryBoundingBox:h}=(()=>{const e=new r.PlaneBufferGeometry(1,1,1,1);try{const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return e.computeBoundingBox(),{quadGeometry:t,quadGeometryBoundingBox:e.boundingBox}}finally{e.dispose()}})(),{trapeziumGeometry:p,trapeziumGeometryBoundingBox:f}=(()=>{const e=[0,0,0,1,1,1,2,2,2,3,3,3];return{trapeziumGeometry:{index:new r.BufferAttribute(new Uint16Array([0,1,3,0,3,2]),1),position:new r.BufferAttribute(new Float32Array(e),3)},trapeziumGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{coneGeometry:v,coneGeometryBoundingBox:x}=(()=>{const e=[];e.push(-1,1,-1),e.push(-1,-1,-1),e.push(1,1,-1),e.push(1,-1,-1),e.push(1,1,1),e.push(1,-1,1);const t=new Uint16Array([1,2,0,1,3,2,3,4,2,3,5,4]);return{coneGeometry:{index:new r.BufferAttribute(t,1),position:new r.BufferAttribute(new Float32Array(e),3)},coneGeometryBoundingBox:(new r.Box3).setFromArray(e)}})(),{torusLodGeometries:g,torusGeometryBoundingBox:y}=(()=>{const e=(e,t)=>[e,2*t*Math.PI],t=[{tubularSegments:9,radialSegments:18},{tubularSegments:5,radialSegments:12},{tubularSegments:4,radialSegments:5}].map(({tubularSegments:t,radialSegments:n})=>function(e,t,n=((e,t)=>[e,t,0])){const o=[],i=[],a=1/e,s=1/t;for(let r=0;r<=t;r++)for(let t=0;t<=e;t++){const[e,i,d]=n(t*a,r*s);o.push(e||0,i||0,d||0)}for(let n=1;n<=t;n++)for(let t=1;t<=e;t++){const r=(e+1)*n+t-1,o=(e+1)*(n-1)+t-1,a=(e+1)*(n-1)+t,s=(e+1)*n+t;i.push(r,o,s),i.push(o,a,s)}return{index:new r.Uint16BufferAttribute(i,1),position:new r.Float32BufferAttribute(o,3)}}(n,t,e));return{torusLodGeometries:t,torusGeometryBoundingBox:(new 
r.Box3).setFromArray(t[t.length-1].position.array)}})(),{nutGeometry:b,nutGeometryBoundingBox:_}=(()=>{const e=new r.CylinderBufferGeometry(.5,.5,1,6);try{e.applyMatrix4((new r.Matrix4).makeRotationX(-Math.PI/2));const t={index:e.getIndex(),position:e.getAttribute("position"),normal:e.getAttribute("normal")};return{nutGeometry:t,nutGeometryBoundingBox:(new r.Box3).setFromArray(t.position.array)}}finally{e.dispose()}})();var T=n(6),C=n.n(T);
|
|
99
108
|
/*!
|
|
100
109
|
* Copyright 2021 Cognite AS
|
|
101
|
-
*/
|
|
110
|
+
*/
|
|
111
|
+
const I={centerA:new r.Vector3,centerB:new r.Vector3,sphere:new r.Sphere,box:new r.Box3};const w={vertex1:new r.Vector3,vertex2:new r.Vector3,vertex3:new r.Vector3,vertex4:new r.Vector3};const M={instanceMatrix:new r.Matrix4};const S={center:new r.Vector3,size:new r.Vector3};
|
|
102
112
|
/*!
|
|
103
113
|
* Copyright 2021 Cognite AS
|
|
104
|
-
*/
|
|
114
|
+
*/
|
|
115
|
+
// Clip-box filtering of flat, interleaved primitive attribute buffers.
// P(byteBuffer, attributeMap, clipBox, computeBounds): generic pass — derives
// the per-element stride from the attribute map (max offset+size), iterates the
// elements, lets `computeBounds(index, stride, floatView, outBox)` fill a Box3,
// and compacts into a new Uint8Array only the elements whose bounds intersect
// clipBox.
// R: element bounds = base box transformed by the element's 'instanceMatrix'
//    (column-major floats read via the shared scratch matrix M).
// N: element bounds = union of the bounding boxes of two spheres given by
//    'centerA'/'centerB' and the two radius attributes (default radiusA/radiusB).
// D: element bounds = box spanned by attributes 'vertex1'..'vertex4'.
// z: element bounds = cube centered at 'center' sized by twice the max of the
//    two radius attributes and 'height'.
// All of R/N/D/z return the input buffer unchanged when clipBox is null, and
// assert (C()) that the required attributes exist in the map.
function P(e,t,n,o){const i=Array.from(t.values()).reduce((e,t)=>Math.max(e,t.offset+t.size),0),a=e.length/i,s=new Float32Array(e.buffer),d=new r.Box3,l=new Uint8Array(e.length);let c=0;for(let t=0;t<a;++t)if(o(t,i,s,d),n.intersectsBox(d)){const n=e.subarray(t*i,(t+1)*i);l.set(n,c*i),c++}return l.slice(0,c*i)}function R(e,t,n,r){if(null===r)return e;const o=t.get("instanceMatrix");return C()(void 0!==o),P(e,t,r,(e,t,r,i)=>{!function(e,t,n,r,o,i){const{instanceMatrix:a}=M,s=(r*n+e.offset)/t.BYTES_PER_ELEMENT;a.set(t[s+0],t[s+4],t[s+8],t[s+12],t[s+1],t[s+5],t[s+9],t[s+13],t[s+2],t[s+6],t[s+10],t[s+14],t[s+3],t[s+7],t[s+11],t[s+15]),i.copy(o).applyMatrix4(a)}(o,r,t,e,n,i)})}function N(e,t,n,r="radiusA",o="radiusB"){if(null===n)return e;const i=t.get("centerA"),a=t.get("centerB"),s=t.get(r),d=t.get(o);return C()(void 0!==i&&void 0!==a&&void 0!==s&&void 0!==d),P(e,t,n,(e,t,n,r)=>{!function(e,t,n,r,o,i,a,s){const{centerA:d,centerB:l,sphere:c,box:u}=I;function m(e,t=0){const n=(a*i+e.offset)/o.BYTES_PER_ELEMENT;return o[n+t]}d.set(m(e,0),m(e,1),m(e,2)),l.set(m(t,0),m(t,1),m(t,2));const h=m(n),p=m(r);c.set(d,h),c.getBoundingBox(s),c.set(l,p),c.getBoundingBox(u),s.expandByPoint(u.min),s.expandByPoint(u.max)}(i,a,s,d,n,t,e,r)})}function D(e,t,n){if(null===n)return e;const r=t.get("vertex1"),o=t.get("vertex2"),i=t.get("vertex3"),a=t.get("vertex4");return C()(void 0!==r&&void 0!==o&&void 0!==i&&void 0!==a),P(e,t,n,(e,t,n,s)=>{!function(e,t,n,r,o,i,a,s){const{vertex1:d,vertex2:l,vertex3:c,vertex4:u}=w;function m(e,t=0){const n=(a*i+e.offset)/o.BYTES_PER_ELEMENT;return o[n+t]}d.set(m(e,0),m(e,1),m(e,2)),l.set(m(t,0),m(t,1),m(t,2)),c.set(m(n,0),m(n,1),m(n,2)),u.set(m(r,0),m(r,1),m(r,2)),s.setFromPoints([d,l,c,u])}(r,o,i,a,n,t,e,s)})}function z(e,t,n,r="horizontalRadius",o="verticalRadius"){if(null===n)return e;const i=t.get("center"),a=t.get(r),s=t.get(o),d=t.get("height");return C()(void 0!==i&&void 0!==a&&void 0!==s&&void 
0!==d),P(e,t,n,(e,t,n,r)=>{!function(e,t,n,r,o,i,a,s){const{center:d,size:l}=S;function c(e,t=0){const n=(a*i+e.offset)/o.BYTES_PER_ELEMENT;return o[n+t]}d.set(c(e,0),c(e,1),c(e,2));const u=c(t),m=c(n),h=c(r),p=2*Math.max(u,m,h);l.set(p,p,p),s.setFromCenterAndSize(d,l)}(i,a,s,d,n,t,e,r)})}
|
|
105
116
|
/*!
|
|
106
117
|
* Copyright 2021 Cognite AS
|
|
107
|
-
*/(n.
|
|
118
|
+
*/
// E (generator): yields one renderable THREE.Mesh per non-empty primitive
// collection in the sector (boxes, circles, cones, eccentric cones, ellipsoid
// segments, general cylinders/rings, quads, spherical segments, torus
// segments, trapeziums, nuts). Each collection is first clipped against the
// optional clip box via R/N/D/z; torus segments additionally get a THREE-style
// LOD object (s.b) with three tessellation levels chosen by F(size, lod).
// Helpers:
// - O: "collection is non-empty" check.
// - A: builds the interleaved instanced attributes ('a_<name>'), splitting
//   'instanceMatrix' into four column attributes, sets instanceCount and
//   records the set of treeIndices in mesh.userData; hooks onAfterRender once
//   to release CPU-side buffers (s.n).
// - F: LOD switch distance (size * 5^lod, 0 for the finest level).
// - B: refreshes the material's inverseModelMatrix uniform before rendering.
// - k: bounding sphere from the sector bounds; G/U: bounding box/sphere
//   computed by applying every a_instanceMatrix to the base geometry bounds.
// NOTE(review): the circle branch binds "normal" to m.position and the
// sphericalSegment mesh is named "Primitives (EllipsoidSegments)" — both look
// like upstream slips but are preserved here; verify against upstream source.
function*E(e,t,n,o=null){const i=e.primitives;if(O(i.boxCollection)&&(yield function(e,t,n,o){const i=R(e,t,u,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(c.index),a.setAttribute("position",c.position),a.setAttribute("normal",c.normal),A(a,i,t,s),U(a),s.onBeforeRender=()=>B(n,s.matrixWorld),s.name="Primitives (Boxes)",s}(i.boxCollection,i.boxAttributes,t.box,o)),O(i.circleCollection)&&(yield function(e,t,n,o){const i=R(e,t,h,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(m.index),a.setAttribute("position",m.position),a.setAttribute("normal",m.position),A(a,i,t,s),U(a),s.onBeforeRender=()=>B(n,s.matrixWorld),s.name="Primitives (Circles)",s}(i.circleCollection,i.circleAttributes,t.circle,o)),O(i.coneCollection)&&(yield function(e,t,n,o,i){const a=N(e,t,i),s=new r.InstancedBufferGeometry,d=new r.Mesh(s,n);return s.setIndex(v.index),s.setAttribute("position",v.position),A(s,a,t,d),k(s,o),d.onBeforeRender=()=>B(n,d.matrixWorld),d.name="Primitives (Cones)",d}(i.coneCollection,i.coneAttributes,t.cone,n,o)),O(i.eccentricConeCollection)&&(yield function(e,t,n,o,i){const a=N(e,t,i),s=new r.InstancedBufferGeometry,d=new r.Mesh(s,n);return s.setIndex(v.index),s.setAttribute("position",v.position),A(s,a,t,d),k(s,o),d.onBeforeRender=()=>B(n,d.matrixWorld),d.name="Primitives (EccentricCones)",d}(i.eccentricConeCollection,i.eccentricConeAttributes,t.eccentricCone,n,o)),O(i.ellipsoidSegmentCollection)&&(yield function(e,t,n,o,i){const a=z(e,t,i),s=new r.InstancedBufferGeometry,d=new r.Mesh(s,n);return s.setIndex(v.index),s.setAttribute("position",v.position),A(s,a,t,d),k(s,o),d.onBeforeRender=()=>B(n,d.matrixWorld),d.name="Primitives (EllipsoidSegments)",d}(i.ellipsoidSegmentCollection,i.ellipsoidSegmentAttributes,t.ellipsoidSegment,n,o)),O(i.generalCylinderCollection)){const e=function(e,t,n,o,i){const a=N(e,t,i,"radius","radius");if(0===a.length)return null;const s=new r.InstancedBufferGeometry,d=new r.Mesh(s,n);return 
s.setIndex(v.index),s.setAttribute("position",v.position),A(s,a,t,d),k(s,o),d.onBeforeRender=()=>B(n,d.matrixWorld),d.name="Primitives (GeneralCylinders)",d}(i.generalCylinderCollection,i.generalCylinderAttributes,t.generalCylinder,n,o);e&&(yield e)}O(i.generalRingCollection)&&(yield function(e,t,n,o){const i=R(e,t,h,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(m.index),a.setAttribute("position",m.position),A(a,i,t,s),U(a),s.onBeforeRender=()=>B(n,s.matrixWorld),s.name="Primitives (GeneralRings)",s}(i.generalRingCollection,i.generalRingAttributes,t.generalRing,o)),O(i.quadCollection)&&(yield function(e,t,n,o){const i=R(e,t,h,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(m.index),a.setAttribute("position",m.position),a.setAttribute("normal",m.normal),A(a,i,t,s),U(a),s.name="Primitives (Quads)",s}(i.quadCollection,i.quadAttributes,t.quad,o)),O(i.sphericalSegmentCollection)&&(yield function(e,t,n,o,i){const a=z(e,t,i,"radius","radius"),s=new r.InstancedBufferGeometry,d=new r.Mesh(s,n);return s.setIndex(v.index),s.setAttribute("position",v.position),A(s,a,t,d),k(s,o),s.setAttribute("a_horizontalRadius",s.getAttribute("a_radius")),s.setAttribute("a_verticalRadius",s.getAttribute("a_radius")),d.onBeforeRender=()=>B(n,d.matrixWorld),d.name="Primitives (EllipsoidSegments)",d}(i.sphericalSegmentCollection,i.sphericalSegmentAttributes,t.sphericalSegment,n,o)),O(i.torusSegmentCollection)&&(yield function(e,t,n,o){const i=function(e,t){const n=Array.from(t.values()).reduce((e,t)=>e+t.size,0),r=e.length/n;let o=0;const i=new DataView(e.buffer),a=t.get("size").offset;for(let e=0;e<r;e++)o=Math.max(o,i.getFloat32(e*n+a,!0));return o}(R(e,t,y,o),t),a=new s.b(new r.Box3);a.name="Primitives (TorusSegments)";let d=null,l=null;for(const[o,s]of g.entries()){const c=new r.InstancedBufferGeometry,u=new r.Mesh(c,n);if(c.setIndex(s.index),c.setAttribute("position",s.position),A(c,e,t,u),null===d){const 
e=G(c);d=e.boundingBox,l=e.boundingSphere,a.setBoundingBox(d)}c.boundingBox=d,c.boundingSphere=l,u.name="Primitives (TorusSegments) - LOD "+o,a.addLevel(u,F(i,o)),u.onBeforeRender=()=>B(n,u.matrixWorld)}return a}(i.torusSegmentCollection,i.torusSegmentAttributes,t.torusSegment,o)),O(i.trapeziumCollection)&&(yield function(e,t,n,o){const i=D(e,t,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(p.index),a.setAttribute("position",p.position),A(a,i,t,s),function(e){const{bbox:t,p:n}=V;t.makeEmpty();const o=e.getAttribute("a_vertex1"),i=e.getAttribute("a_vertex2"),a=e.getAttribute("a_vertex3"),s=e.getAttribute("a_vertex4");C()(void 0!==o&&void 0!==i&&void 0!==a&&void 0!==s);for(let e=0;e<o.count;++e)n.set(o.getX(e),o.getY(e),o.getZ(e)),t.expandByPoint(n),n.set(i.getX(e),i.getY(e),i.getZ(e)),t.expandByPoint(n),n.set(a.getX(e),a.getY(e),a.getZ(e)),t.expandByPoint(n),n.set(s.getX(e),s.getY(e),s.getZ(e)),t.expandByPoint(n);e.boundingBox=t,e.boundingSphere=e.boundingSphere||new r.Sphere,e.boundingBox.getBoundingSphere(e.boundingSphere)}(a),s.onBeforeRender=()=>B(n,s.matrixWorld),s.name="Primitives (Trapeziums)",s}(i.trapeziumCollection,i.trapeziumAttributes,t.trapezium,o)),O(i.nutCollection)&&(yield function(e,t,n,o){const i=R(e,t,_,o),a=new r.InstancedBufferGeometry,s=new r.Mesh(a,n);return a.setIndex(b.index),a.setAttribute("position",b.position),a.setAttribute("normal",b.normal),A(a,i,t,s),U(a),s.name="Primitives (Nuts)",s}(i.nutCollection,i.nutAttributes,t.nut,o))}function O(e){return e.length>0}function A(e,t,n,o){const i=Array.from(n.values()).reduce((e,t)=>e+t.size,0);!function(e){const t=e.get("instanceMatrix");if(!t)return;const n=t.size/4;for(let r=0;r<4;r++){const o={size:n,offset:t.offset+n*r};e.set("instanceMatrix_column_"+r,o)}e.delete("instanceMatrix")}(n);const a=new r.InstancedInterleavedBuffer(t,i),d=new r.InstancedInterleavedBuffer(new Float32Array(t.buffer),i/4);for(const[t,o]of n){const 
n="color"===t,i=n?a:d,s=n?o.size:o.size/4,l=n?o.offset:o.offset/4;e.setAttribute("a_"+t,new r.InterleavedBufferAttribute(i,s,l,n))}o.onAfterRender=()=>{s.n.bind(d)(),s.n.bind(a)(),o.onAfterRender=()=>{}},e.instanceCount=t.length/i,function(){const r=new DataView(t.buffer),a=n.get("treeIndex").offset,s=new Set;for(let t=0;t<e.instanceCount;t++)s.add(r.getFloat32(t*i+a,!0));o.userData.treeIndices=s}()}function F(e,t){if(0===t)return 0;return e*5**t}function B(e,t){e.uniforms.inverseModelMatrix.value.copy(t).invert()}function k(e,t){e.boundingSphere=e.boundingSphere||new r.Sphere,t.getBoundingSphere(e.boundingSphere)}const L={baseBoundingBox:new r.Box3,instanceBoundingBox:new r.Box3,instanceMatrix:new r.Matrix4,p:new r.Vector3};function G(e){const{baseBoundingBox:t,instanceBoundingBox:n,instanceMatrix:o,p:i}=L;t.makeEmpty();const a=new r.Box3,s=e.getAttribute("position");for(let e=0;e<s.count;++e)i.set(s.getX(e),s.getY(e),s.getZ(e)),t.expandByPoint(i);const d=e.getAttribute("a_instanceMatrix_column_0"),l=e.getAttribute("a_instanceMatrix_column_1"),c=e.getAttribute("a_instanceMatrix_column_2"),u=e.getAttribute("a_instanceMatrix_column_3");C()(void 0!==d&&void 0!==l&&void 0!==c&&void 0!==u);for(let e=0;e<d.count;++e)o.set(d.getX(e),l.getX(e),c.getX(e),u.getX(e),d.getY(e),l.getY(e),c.getY(e),u.getY(e),d.getZ(e),l.getZ(e),c.getZ(e),u.getZ(e),d.getW(e),l.getW(e),c.getW(e),u.getW(e)),n.copy(t).applyMatrix4(o),a.expandByPoint(n.min),a.expandByPoint(n.max);return{boundingBox:a,boundingSphere:a.getBoundingSphere(new r.Sphere)}}function U(e){const{boundingBox:t,boundingSphere:n}=G(e);e.boundingBox=t,e.boundingSphere=n}const V={bbox:new r.Box3,p:new r.Vector3};var j=n(4),W=n(2);
|
|
108
119
|
/*!
|
|
109
120
|
* Copyright 2021 Cognite AS
|
|
110
121
|
*/
|
|
111
|
-
|
|
122
|
+
/*!
 * Copyright 2021 Cognite AS
 */

/**
 * (q) Builds one THREE.Mesh per triangle-mesh file in a detailed sector.
 * Meshes are first filtered against the optional clip box `o` (null = keep
 * everything) by computing each mesh's vertex bounds with the shared scratch
 * objects in H. Buffer attributes release their CPU copy on upload (s.n).
 */
function q(e, t, n, o) {
  const result = [];

  // True when the mesh's vertex bounding box intersects the clip box.
  const intersectsClipBox = (mesh, clipBox) => {
    const { p: point, box } = H;
    box.makeEmpty();
    for (let k = 0; k < mesh.vertices.length; k += 3) {
      point.set(mesh.vertices[k + 0], mesh.vertices[k + 1], mesh.vertices[k + 2]);
      box.expandByPoint(point);
    }
    return clipBox.intersectsBox(box);
  };

  const accepted = e.filter((mesh) => null === o || intersectsClipBox(mesh, o));
  for (const mesh of accepted) {
    const geometry = new r.BufferGeometry();
    const indices = new r.Uint32BufferAttribute(mesh.indices.buffer, 1).onUpload(s.n);
    const vertices = new r.Float32BufferAttribute(mesh.vertices.buffer, 3).onUpload(s.n);
    const colors = new r.Uint8BufferAttribute(mesh.colors.buffer, 3).onUpload(s.n);
    const treeIndices = new r.Float32BufferAttribute(mesh.treeIndices.buffer, 1).onUpload(s.n);
    geometry.setIndex(indices);
    geometry.setAttribute("color", colors);
    geometry.setAttribute("position", vertices);
    geometry.setAttribute("treeIndex", treeIndices);
    // Bounds come from the sector bounds `t`, not the mesh itself.
    geometry.boundingBox = t.clone();
    geometry.boundingSphere = new r.Sphere();
    t.getBoundingSphere(geometry.boundingSphere);
    const obj = new r.Mesh(geometry, n);
    obj.name = "Triangle mesh " + mesh.fileId;
    obj.userData.treeIndices = new Set(mesh.treeIndices);
    result.push(obj);
  }
  return result;
}
// Scratch objects for intersectsClipBox above.
const H = { p: new r.Vector3(), box: new r.Box3() };
// Unit quad (two triangles) used as base geometry for low-detail ("simple")
// sector rendering, plus its shared position attribute and bounding box.
const K = new Float32Array([
  -0.5, -0.5, 0,
  0.5, -0.5, 0,
  0.5, 0.5, 0,
  0.5, 0.5, 0,
  -0.5, 0.5, 0,
  -0.5, -0.5, 0
]);
const X = new r.Float32BufferAttribute(K.buffer, 3);
const Y = new r.Box3().setFromArray(K);
// Byte layout of one low-detail quad record (stride 92 bytes / 23 floats).
const Z = new Map([
  ["color", { offset: 0, size: 12 }],
  ["treeIndex", { offset: 12, size: 4 }],
  ["normal", { offset: 16, size: 12 }],
  ["instanceMatrix", { offset: 28, size: 64 }]
]);
|
|
115
126
|
/*!
|
|
116
127
|
* Copyright 2021 Cognite AS
|
|
117
|
-
*/
|
|
128
|
+
*/
|
|
129
|
+
const Q={p:new r.Vector3,instanceMatrix:new r.Matrix4,baseBounds:new r.Box3,instanceBounds:new r.Box3};
|
|
118
130
|
/*!
|
|
119
131
|
* Copyright 2021 Cognite AS
|
|
120
|
-
*/
|
|
132
|
+
*/
|
|
133
|
+
// J: builds the renderable for a *simple* (low-detail) sector. The sector's
// quad buffer holds 23-float records (color, treeIndex, normal, 4x4 instance
// matrix — see Z/X/Y above); records are clip-filtered via R, packed into one
// InstancedBufferGeometry and rendered as a single "Low detail geometry" mesh.
// Returns { sectorMeshes, instancedMeshes: [] }; empty buffer yields an empty
// group. CPU buffers are released after first render via s.n.
// $: builds the renderable for a *detailed* sector: primitive meshes from the
// generator E, triangle meshes from q, plus clip-filtered instance-mesh
// payloads. A clip box that fully contains the sector bounds is dropped
// (treated as "no clipping"). The inner helper clips each instance by applying
// its instance matrix to the instance-list's triangle-range bounds, compacting
// matrices/colors/treeIndices for the surviving instances only.
function J(e,t,n,o){const i=new s.a;if(i.name="Quads",0===e.buffer.byteLength)return{sectorMeshes:new s.a,instancedMeshes:[]};const a=function(e,t,n,o){const i=R(new Uint8Array(e.buffer),Z,Y,o);if(0===i.byteLength)return;const a=new Float32Array(i.buffer);if(i.byteLength%23!=0)throw new Error("Expected buffer size to be multiple of 23, but got "+a.byteLength);const d=new r.InstancedBufferGeometry,l=new r.InstancedInterleavedBuffer(a,23),c=new r.InterleavedBufferAttribute(l,3,0,!0),u=new r.InterleavedBufferAttribute(l,1,3,!1),m=new r.InterleavedBufferAttribute(l,3,4,!0),h=new r.InterleavedBufferAttribute(l,4,7,!1),p=new r.InterleavedBufferAttribute(l,4,11,!1),f=new r.InterleavedBufferAttribute(l,4,15,!1),v=new r.InterleavedBufferAttribute(l,4,19,!1);d.setAttribute("position",X),d.setAttribute("color",c),d.setAttribute("treeIndex",u),d.setAttribute("normal",m),d.setAttribute("matrix0",h),d.setAttribute("matrix1",p),d.setAttribute("matrix2",f),d.setAttribute("matrix3",v);const x=new r.Mesh(d,t.simple);return x.name="Low detail geometry",x.onAfterRender=()=>{s.n.bind(l)(),x.onAfterRender=()=>{}},x.onBeforeRender=()=>{t.simple.uniforms.inverseModelMatrix.value.copy(x.matrixWorld).invert()},function(){const t=new Set;for(let n=0;n<e.length/23;n++)t.add(e[23*n+3]);x.userData.treeIndices=t}(),x.geometry.boundingSphere=new r.Sphere,n.getBoundingSphere(x.geometry.boundingSphere),x}(e.buffer,n,t,o);return void 0!==a&&i.add(a),{sectorMeshes:i,instancedMeshes:[]}}function $(e,t,n,r){const o=t.bounds;null!==r&&r.containsBox(o)&&(r=null);const i=new s.a;for(const t of E(e,n,o,r))i.add(t);const a=q(e.triangleMeshes,o,n.triangleMesh,r);for(const e of a)i.add(e);return{sectorMeshes:i,instancedMeshes:e.instanceMeshes.map(e=>{const t=e.instances.map(t=>function(e,t,n,r){if(null===r)return n;const{p:o,instanceMatrix:i,baseBounds:a,instanceBounds:s}=Q;a.makeEmpty();for(let r=n.triangleOffset;r<n.triangleOffset+n.triangleCount;++r){const 
n=t[3*r+0],i=t[3*r+1],s=t[3*r+2];o.set(e[n+0],e[n+1],e[n+2]),a.expandByPoint(o),o.set(e[i+0],e[i+1],e[i+2]),a.expandByPoint(o),o.set(e[s+0],e[s+1],e[s+2]),a.expandByPoint(o)}let d=0;const l=n.treeIndices.length,c=new Float32Array(n.instanceMatrices.length),u=new Float32Array(l),m=new Uint8Array(4*l);for(let e=0;e<l;++e)if(h=n.instanceMatrices,p=e,i.set(h[p+0],h[p+4],h[p+8],h[p+12],h[p+1],h[p+5],h[p+9],h[p+13],h[p+2],h[p+6],h[p+10],h[p+14],h[p+3],h[p+7],h[p+11],h[p+15]),s.copy(a).applyMatrix4(i),r.intersectsBox(s)){const t=n.instanceMatrices.subarray(16*e,16*(e+1)),r=n.colors.subarray(4*e,4*(e+1)),o=n.treeIndices[e];c.set(t,16*d),m.set(r,4*d),u[d]=o,d++}var h,p;return l===d?n:{triangleCount:n.triangleCount,triangleOffset:n.triangleOffset,instanceMatrices:c.slice(0,16*d),colors:m.slice(0,4*d),treeIndices:u.slice(0,d)}}(e.vertices,e.indices,t,r)).filter(e=>e.treeIndices.length>0);return{fileId:e.fileId,vertices:e.vertices,indices:e.indices,instances:t}}).filter(e=>e.instances.length>0)}}
|
|
121
134
|
/*!
|
|
122
135
|
* Copyright 2021 Cognite AS
|
|
123
|
-
*/
|
|
136
|
+
*/
|
|
137
|
+
class ee{constructor(e){this.materialManager=e}transformSimpleSector(e,t,n,r){const o=this.materialManager.getModelMaterials(e);return C()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve(J(n,t.bounds,o,r))}transformDetailedSector(e,t,n,r){const o=this.materialManager.getModelMaterials(e);return C()(void 0!==o,"Could not find materials for model '"+e),Promise.resolve($(n,t,o,r))}}
|
|
124
138
|
/*!
|
|
125
139
|
* Copyright 2021 Cognite AS
|
|
126
|
-
*/
|
|
140
|
+
*/const te=(new r.Matrix4).identity();class ne{constructor(){this._events={changed:new s.d}}on(e,t){switch(e){case"changed":this._events.changed.subscribe(t);break;default:Object(s.k)(e,`Unsupported event: '${e}'`)}}off(e,t){switch(e){case"changed":this._events.changed.unsubscribe(t);break;default:Object(s.k)(e,`Unsupported event: '${e}'`)}}setNodeTransform(e,t){this._events.changed.fire("set",e,t)}resetNodeTransform(e){this._events.changed.fire("reset",e,te)}}
|
|
127
141
|
/*!
|
|
128
142
|
* Copyright 2021 Cognite AS
|
|
129
|
-
*/function
|
|
143
|
+
*/function*re(e){const t=new Array(e.length);for(let n=0;n<e.length;++n)t[n]={fileId:e[n],index:n};t.sort((e,t)=>e.fileId-t.fileId);let n=0;for(;n<t.length;){const e=t[n].fileId,r=oe(t,e,n,e=>e.fileId),o=new Array(r+1-n);for(let e=n;e<r+1;e++)o[e-n]=t[e].index;yield{id:e,meshIndices:o},n=r+1}}function oe(e,t,n,r){let o=n,i=e.length-1,a=e.length;for(;o<=i;){const n=Math.floor((o+i)/2),s=r(e[n]);s>t?i=n-1:(s<t||(a=n),o=n+1)}return a}
|
|
130
144
|
/*!
|
|
131
145
|
* Copyright 2021 Cognite AS
|
|
132
|
-
*/const
|
|
146
|
+
*/function ie(e){const t=new Array(e.length);return e.forEach((n,r)=>{t[r]=r>0?t[r-1]+e[r-1]:0}),t}
|
|
133
147
|
/*!
|
|
134
148
|
* Copyright 2021 Cognite AS
|
|
135
|
-
*/
|
|
136
|
-
class be{constructor(){this._sceneModelState={}}hasStateChanged(e){const t=this._sceneModelState[e.blobUrl];if(void 0!==t){const n=t[e.metadata.id];return void 0!==n?n!==e.levelOfDetail:e.levelOfDetail!==K.a.Discarded}return!0}addModel(e){ye()(void 0===this._sceneModelState[e],`Model ${e} is already added`),this._sceneModelState[e]={}}removeModel(e){ye()(void 0!==this._sceneModelState[e],`Model ${e} is not added`),delete this._sceneModelState[e]}updateState(e){ye()(void 0!==this._sceneModelState[e.blobUrl],`Received sector from model ${e.blobUrl}, but the model is not added`);const t=this._sceneModelState[e.blobUrl];e.levelOfDetail===K.a.Discarded?delete t[e.metadata.id]:t[e.metadata.id]=e.levelOfDetail}}
|
|
149
|
+
*/class ae{constructor(e,t,n){this._modelSectorProvider=e,this._modelDataParser=t,this._modelDataTransformer=n,this._consumedSectorCache=new s.f(50,e=>{void 0!==e.group&&e.group.dereference()}),this._ctmFileCache=new s.g(10)}clear(){this._consumedSectorCache.clear(),this._ctmFileCache.clear()}async loadSector(e){var t,n;const r=this.wantedSectorCacheKey(e);try{if(this._consumedSectorCache.has(r))return this._consumedSectorCache.get(r);switch(e.levelOfDetail){case o.a.Detailed:{const n=await this.loadDetailedSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,n),null===(t=null==n?void 0:n.group)||void 0===t||t.reference(),n}case o.a.Simple:{const t=await this.loadSimpleSectorFromNetwork(e);return this._consumedSectorCache.forceInsert(r,t),null===(n=null==t?void 0:t.group)||void 0===n||n.reference(),t}case o.a.Discarded:return{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:e.levelOfDetail,instancedMeshes:[],group:void 0};default:Object(s.k)(e.levelOfDetail)}}catch(e){throw this._consumedSectorCache.remove(r),Object(s.u)(e,{methodName:"loadSector",moduleName:"CachedRepository"}),e}}async loadSimpleSectorFromNetwork(e){const t=await this._modelSectorProvider.getBinaryFile(e.modelBaseUrl,e.metadata.facesFile.fileName),n=await this._modelDataParser.parseF3D(new Uint8Array(t)),r=await this._modelDataTransformer.transformSimpleSector(e.modelIdentifier,e.metadata,n,e.geometryClipBox);return{...e,group:r.sectorMeshes,instancedMeshes:r.instancedMeshes}}async loadI3DFromNetwork(e,t){const n=await this._modelSectorProvider.getBinaryFile(e,t);return this._modelDataParser.parseI3D(new Uint8Array(n))}async loadCtmsFromNetwork(e,t){const n=await Promise.all(t.map(t=>this.loadCtmFileFromNetwork(e,t)));return t.reduce((e,t,r)=>e.set(t,n[r]),new Map)}async loadDetailedSectorFromNetwork(e){const t=e.metadata.indexFile,n=this.loadI3DFromNetwork(e.modelBaseUrl,t.fileName),r=this.loadCtmsFromNetwork(e.modelBaseUrl,t.peripheralFiles),o=await n,i=await 
r,a=this.finalizeDetailed(o,i),s=await this._modelDataTransformer.transformDetailedSector(e.modelIdentifier,e.metadata,a,e.geometryClipBox);return{...e,group:s.sectorMeshes,instancedMeshes:s.instancedMeshes}}async loadCtmFileFromNetwork(e,t){const n=this.ctmFileCacheKey(e,t),r=this._ctmFileCache.get(n);if(void 0!==r)return r;const o=this._modelSectorProvider.getBinaryFile(e,t).then(e=>this._modelDataParser.parseCTM(new Uint8Array(e)));return this._ctmFileCache.set(n,o),o}finalizeDetailed(e,t){const{instanceMeshes:n,triangleMeshes:r}=e,o=(()=>{const{fileIds:e,colors:n,triangleCounts:o,treeIndices:i}=r,a=[];for(const{id:r,meshIndices:s}of re(e)){const e=s.map(e=>o[e]),d=ie(e),l=`mesh_${r}.ctm`,{indices:c,vertices:u,normals:m}=t.get(l),h=new Uint8Array(3*c.length),p=new Float32Array(c.length);for(let t=0;t<s.length;t++){const r=s[t],o=i[r],a=d[t],l=e[t],[u,m,f]=[n[4*r+0],n[4*r+1],n[4*r+2]];for(let e=a;e<a+l;e++)for(let t=0;t<3;t++){const n=c[3*e+t];p[n]=o,h[3*n]=u,h[3*n+1]=m,h[3*n+2]=f}}const f={colors:h,fileId:r,treeIndices:p,indices:c,vertices:u,normals:m};a.push(f)}return a})(),i=(()=>{const{fileIds:e,colors:r,treeIndices:o,triangleCounts:i,triangleOffsets:a,instanceMatrices:s}=n,d=[];for(const{id:n,meshIndices:l}of re(e)){const e=`mesh_${n}.ctm`,c=t.get(e),u=c.indices,m=c.vertices,h=[],p=new Float64Array(l.map(e=>a[e])),f=new Float64Array(l.map(e=>i[e]));for(const{id:e,meshIndices:t}of re(p)){const n=f[t[0]],i=new Float32Array(16*t.length),a=new Float32Array(t.length),d=new Uint8Array(4*t.length);for(let e=0;e<t.length;e++){const n=l[t[e]],c=o[n],u=s.subarray(16*n,16*n+16);i.set(u,16*e),a[e]=c;const m=r.subarray(4*n,4*n+4);d.set(m,4*e)}h.push({triangleCount:n,triangleOffset:e,instanceMatrices:i,colors:d,treeIndices:a})}const v={fileId:n,indices:u,vertices:m,instances:h};d.push(v)}return 
d})();return{treeIndexToNodeIdMap:e.treeIndexToNodeIdMap,nodeIdToTreeIndexMap:e.nodeIdToTreeIndexMap,primitives:e.primitives,instanceMeshes:i,triangleMeshes:o}}wantedSectorCacheKey(e){return e.modelIdentifier+"."+e.metadata.id+"."+e.levelOfDetail}ctmFileCacheKey(e,t){return e+"."+t}}
|
|
137
150
|
/*!
|
|
138
151
|
* Copyright 2021 Cognite AS
|
|
139
|
-
*/
|
|
152
|
+
*/var se,de;!function(e){e[e.NoAA=0]="NoAA",e[e.FXAA=1]="FXAA"}(se||(se={})),function(e){e[e.Medium=32]="Medium",e[e.High=64]="High",e[e.VeryHigh=128]="VeryHigh",e[e.None=1]="None",e[e.Default=32]="Default"}(de||(de={}));const le={antiAliasing:se.FXAA,multiSampleCountHint:1,ssaoRenderParameters:{sampleSize:de.Default,sampleRadius:1,depthCheckBias:.0125},edgeDetectionParameters:{enabled:!0}};
|
|
140
153
|
/*!
|
|
141
154
|
* Copyright 2021 Cognite AS
|
|
142
|
-
*/
|
|
155
|
+
*/class ce{}ce.Black=new r.Color("rgb(0, 0, 0)"),ce.White=new r.Color("rgb(255, 255, 255)"),ce.Cyan=new r.Color("rgb(102, 213, 234)"),ce.Blue=new r.Color("rgb(77, 106, 242)"),ce.Purple=new r.Color("rgb(186, 82, 212)"),ce.Pink=new r.Color("rgb(232, 64, 117)"),ce.Orange=new r.Color("rgb(238, 113, 53)"),ce.Yellow=new r.Color("rgb(246, 189, 65)"),ce.VeryLightGray=new r.Color("rgb(247, 246, 245)"),ce.LightGray=new r.Color("rgb(242, 241, 240)");class ue{}ue.Red=new r.Color("rgb(235,0,4)"),ue.Green=new r.Color("rgb(46,164,79)");var me=n(5),he=n(3),pe=n.n(he);
|
|
143
156
|
/*!
|
|
144
157
|
* Copyright 2021 Cognite AS
|
|
145
158
|
*/
|
|
146
|
-
|
|
159
|
+
const fe={defines:{COGNITE_COLOR_BY_TREE_INDEX:!1}},ve={simpleMesh:{fragment:pe()(n(26).default),vertex:pe()(n(27).default)},detailedMesh:{fragment:pe()(n(28).default),vertex:pe()(n(29).default)},instancedMesh:{fragment:pe()(n(30).default),vertex:pe()(n(31).default)},boxPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},circlePrimitive:{fragment:pe()(n(32).default),vertex:pe()(n(33).default)},conePrimitive:{fragment:pe()(n(34).default),vertex:pe()(n(35).default)},eccentricConePrimitive:{fragment:pe()(n(36).default),vertex:pe()(n(37).default)},ellipsoidSegmentPrimitive:{fragment:pe()(n(38).default),vertex:pe()(n(39).default)},generalCylinderPrimitive:{fragment:pe()(n(40).default),vertex:pe()(n(41).default)},generalRingPrimitive:{fragment:pe()(n(42).default),vertex:pe()(n(43).default)},nutPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},quadPrimitive:{fragment:pe()(n(16).default),vertex:pe()(n(17).default)},torusSegmentPrimitive:{fragment:pe()(n(44).default),vertex:pe()(n(45).default)},trapeziumPrimitive:{fragment:pe()(n(46).default),vertex:pe()(n(47).default)}},xe={fragment:pe()(n(48).default),vertex:pe()(n(49).default)},ge={fragment:pe()(n(50).default),vertex:pe()(n(51).default)},ye={fragment:pe()(n(52).default),vertex:pe()(n(19).default)},be={fragment:pe()(n(53).default),vertex:pe()(n(19).default)},_e={fragment:pe()(n(54).default),vertex:pe()(n(55).default)};var Te=n(63);
|
|
147
160
|
/*!
|
|
148
161
|
* Copyright 2021 Cognite AS
|
|
149
|
-
*/)(this.workerList[0].worker).catch(console.error),this.workerObjUrl&&URL.revokeObjectURL(this.workerObjUrl)}static get defaultPool(){return Pe._defaultPool=Pe._defaultPool||new Pe,Pe._defaultPool}createWorker(){const e=(r.publicPath||n.p)+"reveal.parser.worker.js",t={name:"reveal.parser #"+this.workerList.length};if(function(e,t=location.origin){const n=e=>!e.match(/^.*\/\//);if(n(t))throw new Error("isTheSameDomain: the second argument must be an absolute url or omitted. Received "+t);if(n(e))return!0;try{const n=[e,t].map(e=>e.startsWith("//")?"https:"+e:e).map(e=>new URL(e));return n[0].host===n[1].host}catch(n){return console.error(`can not create URLs for ${e} and ${t}`,n),!1}}(e))return new Worker(e,t);if(!this.workerObjUrl){const t=new Blob([`importScripts(${JSON.stringify(e)});`],{type:"text/javascript"});this.workerObjUrl=URL.createObjectURL(t)}return new Worker(this.workerObjUrl,t)}async postWorkToAvailable(e){const t=this.workerList.reduce((e,t)=>e.activeJobCount>t.activeJobCount?t:e,this.workerList[0]);t.activeJobCount+=1;return await(async()=>{try{return await e(t.worker)}finally{t.activeJobCount-=1}})()}determineNumberOfWorkers(){return Math.max(2,Math.min(4,window.navigator.hardwareConcurrency||2))}}class Re{constructor(e=Pe.defaultPool){this.workerPool=e}parseI3D(e){return this.parseDetailed(e)}parseF3D(e){return this.parseSimple(e)}parseCTM(e){return this.parseCtm(e)}async parseSimple(e){return this.workerPool.postWorkToAvailable(async t=>t.parseQuads(e))}async parseDetailed(e){return this.workerPool.postWorkToAvailable(async t=>t.parseSector(e))}async parseCtm(e){return this.workerPool.postWorkToAvailable(async t=>t.parseCtm(e))}}
|
|
162
|
+
*/class Ce{constructor(e,t,n,o){var i,a;this._lastFrameSceneState={hasBackElements:!0,hasInFrontElements:!0,hasGhostElements:!0,hasCustomObjects:!0},this._rootSectorNodeBuffer=new Set,this._outlineTexelSize=2,this._autoSetTargetSize=!1,this._uiObjects=[],this._renderer=e,this._renderOptions=o,this._materialManager=n,this._orthographicCamera=new r.OrthographicCamera(-1,1,1,-1,-1,1),this._renderTarget=null,this._originalScene=t,this._cadScene=new r.Scene,this._cadScene.autoUpdate=!1,this._normalScene=new r.Scene,this._normalScene.autoUpdate=!1,this._inFrontScene=new r.Scene,this._inFrontScene.autoUpdate=!1,this._compositionScene=new r.Scene,this._compositionScene.autoUpdate=!1,this._fxaaScene=new r.Scene,this._fxaaScene.autoUpdate=!1,this._ssaoScene=new r.Scene,this._ssaoScene.autoUpdate=!1,this._ssaoBlurScene=new r.Scene,this._ssaoBlurScene.autoUpdate=!1,this._emptyScene=new r.Scene,this._emptyScene.autoUpdate=!1;const s=e.capabilities.isWebGL2,d=this.createOutlineColorTexture();this._inFrontRenderedCadModelTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._inFrontRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._inFrontRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._inFrontRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._normalRenderedCadModelTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._normalRenderedCadModelTarget.depthTexture=new r.DepthTexture(0,0),this._normalRenderedCadModelTarget.depthTexture.format=r.DepthFormat,this._normalRenderedCadModelTarget.depthTexture.type=r.UnsignedIntType,this._ghostObjectRenderTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._ghostObjectRenderTarget.depthTexture=new 
r.DepthTexture(0,0),this._ghostObjectRenderTarget.depthTexture.format=r.DepthFormat,this._ghostObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._customObjectRenderTarget=Ie(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._customObjectRenderTarget.depthTexture=new r.DepthTexture(0,0),this._customObjectRenderTarget.depthTexture.format=r.DepthFormat,this._customObjectRenderTarget.depthTexture.type=r.UnsignedIntType,this._compositionTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._compositionTarget.depthTexture=new r.DepthTexture(0,0),this._compositionTarget.depthTexture.format=r.DepthFormat,this._compositionTarget.depthTexture.type=r.UnsignedIntType,this._ssaoTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoTarget.depthTexture.format=r.DepthFormat,this._ssaoTarget.depthTexture.type=r.UnsignedIntType,this._ssaoBlurTarget=new r.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoBlurTarget.depthTexture=new r.DepthTexture(0,0),this._ssaoBlurTarget.depthTexture.format=r.DepthFormat,this._ssaoBlurTarget.depthTexture.type=r.UnsignedIntType,this._combineOutlineDetectionMaterial=new r.ShaderMaterial({vertexShader:xe.vertex,fragmentShader:xe.fragment,uniforms:{tFront:{value:this._inFrontRenderedCadModelTarget.texture},tFrontDepth:{value:this._inFrontRenderedCadModelTarget.depthTexture},tBack:{value:this._normalRenderedCadModelTarget.texture},tBackDepth:{value:this._normalRenderedCadModelTarget.depthTexture},tCustom:{value:this._customObjectRenderTarget.texture},tCustomDepth:{value:this._customObjectRenderTarget.depthTexture},tGhost:{value:this._ghostObjectRenderTarget.texture},tGhostDepth:{value:this._ghostObjectRenderTarget.depthTexture},tOutlineColors:{value:d},resolution:{value:new r.Vector2(0,0)},texelSize:{value:new 
r.Vector2(0,0)},cameraNear:{value:.1},cameraFar:{value:1e4},edgeStrengthMultiplier:{value:2.5},edgeGrayScaleIntensity:{value:.1}},extensions:{fragDepth:!0},defines:{EDGES:null!==(a=null===(i=this._renderOptions.edgeDetectionParameters)||void 0===i?void 0:i.enabled)&&void 0!==a?a:le.edgeDetectionParameters.enabled}});const l=this.createNoiseTexture(),c=this.ssaoParameters(this._renderOptions),u=c.sampleSize,m=this.createKernel(u),h=c.sampleRadius,p=c.depthCheckBias;this._ssaoMaterial=new r.ShaderMaterial({uniforms:{tDepth:{value:this._compositionTarget.depthTexture},tNoise:{value:l},kernel:{value:m},sampleRadius:{value:h},bias:{value:p},projMatrix:{value:new r.Matrix4},inverseProjectionMatrix:{value:new r.Matrix4},resolution:{value:new r.Vector2}},defines:{MAX_KERNEL_SIZE:u},vertexShader:ye.vertex,fragmentShader:ye.fragment}),this._ssaoBlurMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:this._compositionTarget.texture},tAmbientOcclusion:{value:this._ssaoTarget.texture},resolution:{value:new r.Vector2}},vertexShader:be.vertex,fragmentShader:be.fragment});const f=this.supportsSsao(c)?this._ssaoBlurTarget.texture:this._compositionTarget.texture;this._fxaaMaterial=new r.ShaderMaterial({uniforms:{tDiffuse:{value:f},tDepth:{value:this._compositionTarget.depthTexture},resolution:{value:new r.Vector2},inverseResolution:{value:new r.Vector2}},vertexShader:ge.vertex,fragmentShader:ge.fragment,extensions:{fragDepth:!0}}),this.setupCompositionScene(),this.setupSsaoScene(),this.setupSsaoBlurCombineScene(),this.setupFxaaScene(),this._normalSceneBuilder=new Me(this._normalScene),this._inFrontSceneBuilder=new Me(this._inFrontScene)}set renderOptions(e){const t=this.ssaoParameters(e),n={...t};this.setSsaoParameters(n),this._renderOptions={...e,ssaoRenderParameters:{...t}}}addUiObject(e,t,n){this._uiObjects.push({object:e,screenPos:t,width:n.x,height:n.y})}removeUiObject(e){this._uiObjects=this._uiObjects.filter(t=>{const n=t.object;return e!==n})}ssaoParameters(e){var 
t;return null!==(t=null==e?void 0:e.ssaoRenderParameters)&&void 0!==t?t:{...le.ssaoRenderParameters}}get antiAliasingMode(){const{antiAliasing:e=le.antiAliasing}=this._renderOptions;return e}get multiSampleCountHint(){const{multiSampleCountHint:e=le.multiSampleCountHint}=this._renderOptions;return e}supportsSsao(e){return!Object(s.q)()&&(this._renderer.capabilities.isWebGL2||this._renderer.extensions.has("EXT_frag_depth"))&&e.sampleSize!==de.None}renderDetailedToDepthOnly(e){const t={renderMode:this._materialManager.getRenderMode()},n=new s.i(this._renderer);this._materialManager.setRenderMode(Te.a.DepthBufferOnly);try{n.setRenderTarget(this._renderTarget),this.setVisibilityOfSectors(o.a.Simple,!1),this.traverseForRootSectorNode(this._originalScene),this.extractCadNodes(this._originalScene),this.clearTarget(this._renderTarget);const{hasBackElements:r,hasInFrontElements:i,hasGhostElements:a}=this.splitToScenes();r&&!a?this.renderNormalCadModelsFromBaseScene(e,this._renderTarget):r&&a&&(this.renderNormalCadModels(e,this._renderTarget),this._normalSceneBuilder.restoreOriginalScene()),i&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene())}finally{this._materialManager.setRenderMode(t.renderMode),n.resetState(),this.restoreCadNodes(),this.setVisibilityOfSectors(o.a.Simple,!0)}}render(e){const t=this._renderer,n=this._originalScene,r=new 
s.i(t),o={autoClear:t.autoClear,clearAlpha:t.getClearAlpha(),renderMode:this._materialManager.getRenderMode()};t.info.autoReset=!1,t.info.reset(),r.autoClear=!1;try{r.setRenderTarget(this._renderTarget),this.updateRenderSize(t),t.info.autoReset=!1,t.info.reset(),r.autoClear=!1,this.traverseForRootSectorNode(n),this.extractCadNodes(n),this.clearTarget(this._ghostObjectRenderTarget),this.clearTarget(this._compositionTarget),this.clearTarget(this._customObjectRenderTarget),t.setClearAlpha(0),this.clearTarget(this._normalRenderedCadModelTarget),this.clearTarget(this._inFrontRenderedCadModelTarget),t.setClearAlpha(o.clearAlpha);const i={...this._lastFrameSceneState},{hasBackElements:a,hasInFrontElements:s,hasGhostElements:d}=this.splitToScenes(),l=n.children.length>0;this._lastFrameSceneState={hasBackElements:a,hasInFrontElements:s,hasGhostElements:d,hasCustomObjects:l},a&&!d?this.renderNormalCadModelsFromBaseScene(e):a&&d?(this.renderNormalCadModels(e),this._normalSceneBuilder.restoreOriginalScene(),this.renderGhostedCadModelsFromBaseScene(e)):!a&&d&&this.renderGhostedCadModelsFromBaseScene(e),s&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene()),l&&this.renderCustomObjects(n,e),t.capabilities.isWebGL2&&(!a&&i.hasBackElements&&this.explicitFlushRender(e,this._normalRenderedCadModelTarget),!d&&i.hasGhostElements&&this.explicitFlushRender(e,this._ghostObjectRenderTarget),!s&&i.hasInFrontElements&&this.explicitFlushRender(e,this._inFrontRenderedCadModelTarget),!l&&i.hasInFrontElements&&this.explicitFlushRender(e,this._customObjectRenderTarget));const c=this.supportsSsao(this.ssaoParameters(this._renderOptions));switch(this.antiAliasingMode){case se.FXAA:this.renderComposition(t,e,this._compositionTarget),r.autoClear=o.autoClear,c&&(this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._ssaoBlurTarget,this._ssaoBlurScene)),this.renderPostProcessStep(t,this._renderTarget,this._fxaaScene);break;case 
se.NoAA:t.autoClear=o.autoClear,c?(this.renderComposition(t,e,this._compositionTarget),this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._renderTarget,this._ssaoBlurScene)):this.renderComposition(t,e,this._renderTarget);break;default:throw new Error("Unsupported anti-aliasing mode: "+this.antiAliasingMode)}}finally{r.resetState(),this._materialManager.setRenderMode(o.renderMode),this.restoreCadNodes()}}restoreCadNodes(){this._rootSectorNodeBuffer.forEach(e=>{e[1].add(e[0])}),this._rootSectorNodeBuffer.clear()}extractCadNodes(e){this._rootSectorNodeBuffer.forEach(t=>{if(t[1].parent!==e&&null!==t[1].parent&&t[1].parent.parent!==e)throw new Error("CadNode must be put at scene root");this._cadScene.add(t[0])})}setRenderTarget(e){this._renderTarget=e}getRenderTarget(){return this._renderTarget}setRenderTargetAutoSize(e){this._autoSetTargetSize=e}getRenderTargetAutoSize(){return this._autoSetTargetSize}clearTarget(e){this._renderer.setRenderTarget(e),this._renderer.clear()}explicitFlushRender(e,t){this._renderer.setRenderTarget(t),this._renderer.render(this._emptyScene,e)}splitToScenes(){const e={hasBackElements:!1,hasInFrontElements:!1,hasGhostElements:!1};this._rootSectorNodeBuffer.forEach(t=>{const n=t[1],r=this._materialManager.getModelBackTreeIndices(n.cadModelMetadata.modelIdentifier),o=this._materialManager.getModelInFrontTreeIndices(n.cadModelMetadata.modelIdentifier),i=this._materialManager.getModelGhostedTreeIndices(n.cadModelMetadata.modelIdentifier),a=r.count>0,s=o.count>0,d=i.count>0;e.hasBackElements=e.hasBackElements||a,e.hasInFrontElements=e.hasInFrontElements||s,e.hasGhostElements=e.hasGhostElements||d});const{hasBackElements:t,hasInFrontElements:n,hasGhostElements:o}=e;return this._rootSectorNodeBuffer.forEach(e=>{const i=e[0],a=e[1],s=this._materialManager.getModelBackTreeIndices(a.cadModelMetadata.modelIdentifier),d=this._materialManager.getModelInFrontTreeIndices(a.cadModelMetadata.modelIdentifier),l=new 
r.Object3D;l.applyMatrix4(i.matrix),t&&o&&this._normalScene.add(l);const c=new r.Object3D;c.applyMatrix4(i.matrix),n&&this._inFrontScene.add(c);const u=[e[0]];for(;u.length>0;){const e=u.pop(),r=e.userData.treeIndices;r?(n&&d.hasIntersectionWith(r)&&this._inFrontSceneBuilder.addElement(e,c),t&&!o||o&&s.hasIntersectionWith(r)&&this._normalSceneBuilder.addElement(e,l)):u.push(...e.children)}}),e}renderNormalCadModels(e,t=this._normalRenderedCadModelTarget){this._normalSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._renderer.render(this._normalScene,e)}renderNormalCadModelsFromBaseScene(e,t=this._normalRenderedCadModelTarget){this._renderer.setRenderTarget(t),this._renderer.render(this._cadScene,e)}renderInFrontCadModels(e,t=this._inFrontRenderedCadModelTarget){this._inFrontSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._materialManager.setRenderMode(Te.a.Effects),this._renderer.render(this._inFrontScene,e)}renderGhostedCadModelsFromBaseScene(e){this._renderer.setRenderTarget(this._ghostObjectRenderTarget),this._materialManager.setRenderMode(Te.a.Ghost),this._renderer.render(this._cadScene,e)}renderCustomObjects(e,t){this._renderer.setRenderTarget(this._customObjectRenderTarget),this._renderer.render(e,t)}updateRenderSize(e){const t=new r.Vector2;return e.getSize(t),this._renderTarget&&this._autoSetTargetSize&&t.x!==this._renderTarget.width&&t.y!==this._renderTarget.height&&this._renderTarget.setSize(t.x,t.y),t.x===this._normalRenderedCadModelTarget.width&&t.y===this._normalRenderedCadModelTarget.height||(this._normalRenderedCadModelTarget.setSize(t.x,t.y),this._inFrontRenderedCadModelTarget.setSize(t.x,t.y),this._customObjectRenderTarget.setSize(t.x,t.y),this._ghostObjectRenderTarget.setSize(t.x,t.y),this._compositionTarget.setSize(t.x,t.y),this._ssaoTarget.setSize(t.x,t.y),this._ssaoBlurTarget.setSize(t.x,t.y),this._combineOutlineDetectionMaterial.uniforms.texelSize.value=new 
r.Vector2(this._outlineTexelSize/t.x,this._outlineTexelSize/t.y),this._combineOutlineDetectionMaterial.uniforms.resolution.value=t,this._ssaoMaterial.uniforms.resolution.value=t,this._ssaoBlurMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.inverseResolution.value=new r.Vector2(1/t.x,1/t.y)),t}renderComposition(e,t,n){this._combineOutlineDetectionMaterial.uniforms.cameraNear.value=t.near,this._combineOutlineDetectionMaterial.uniforms.cameraFar.value=t.far,this.renderPostProcessStep(e,n,this._compositionScene)}setSsaoParameters(e){var t;const n=le.ssaoRenderParameters;if(this._ssaoMaterial.uniforms.sampleRadius.value=e.sampleRadius,this._ssaoMaterial.uniforms.bias.value=e.depthCheckBias,e.sampleSize!==this.ssaoParameters(this._renderOptions).sampleSize){const r=null!==(t=null==e?void 0:e.sampleSize)&&void 0!==t?t:n.sampleSize,o=this.createKernel(r);this._fxaaMaterial.uniforms.tDiffuse.value=e.sampleSize!==de.None?this._ssaoBlurTarget.texture:this._compositionTarget.texture,this._ssaoMaterial.uniforms.kernel.value=o,this._ssaoMaterial.defines={MAX_KERNEL_SIZE:r},this._ssaoMaterial.needsUpdate=!0}}renderPostProcessStep(e,t,n){if(e.setRenderTarget(t),e.render(n,this._orthographicCamera),t===this._renderTarget){const t=e.getSize(new r.Vector2),n=new r.Vector2(e.domElement.clientWidth,e.domElement.clientHeight),o=new r.Vector2(t.x/n.x,t.y/n.y);e.autoClear=!1,this._uiObjects.forEach(t=>{const n=new r.Scene;n.add(t.object);const i=t.screenPos.clone().multiply(o),a=t.width*o.x,s=t.height*o.y;e.setViewport(i.x,i.y,a,s),e.clearDepth(),e.render(n,this._orthographicCamera)}),e.setViewport(0,0,t.x,t.y),e.autoClear=!0}}renderSsao(e,t,n){this._ssaoMaterial.uniforms.inverseProjectionMatrix.value=n.projectionMatrixInverse,this._ssaoMaterial.uniforms.projMatrix.value=n.projectionMatrix,this.renderPostProcessStep(e,t,this._ssaoScene)}createOutlineColorTexture(){const e=new Uint8Array(32),t=new r.DataTexture(e,8,1);return 
we(t.image.data,me.e.White,ce.White),we(t.image.data,me.e.Black,ce.Black),we(t.image.data,me.e.Cyan,ce.Cyan),we(t.image.data,me.e.Blue,ce.Blue),we(t.image.data,me.e.Green,ue.Green),we(t.image.data,me.e.Red,ue.Red),we(t.image.data,me.e.Orange,ce.Orange),t}setupCompositionScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._combineOutlineDetectionMaterial);this._compositionScene.add(t)}setupFxaaScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._fxaaMaterial);this._fxaaScene.add(t)}setupSsaoScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._ssaoMaterial);this._ssaoScene.add(t)}setupSsaoBlurCombineScene(){const e=this.createRenderTriangle(),t=new r.Mesh(e,this._ssaoBlurMaterial);this._ssaoBlurScene.add(t)}createNoiseTexture(){const e=new Float32Array(65536);for(let t=0;t<16384;t++){const n=4*t,r=2*Math.random()-1,o=2*Math.random()-1,i=2*Math.random()-1;e[n]=r,e[n+1]=o,e[n+2]=i,e[n+3]=1}const t=new r.DataTexture(e,128,128,r.RGBAFormat,r.FloatType);return t.wrapS=r.RepeatWrapping,t.wrapT=r.RepeatWrapping,t}createKernel(e){const t=[];for(let o=0;o<e;o++){const i=new r.Vector3;for(;i.length()<.5;)i.x=2*Math.random()-1,i.y=2*Math.random()-1,i.z=Math.random();i.normalize();let a=o/e;a=n(.1,1,a*a),i.multiplyScalar(a),t.push(i)}return t;function n(e,t,n){return e+(t-e)*(n=(n=n<0?0:n)>1?1:n)}}createRenderTriangle(){const e=new r.BufferGeometry,t=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),n=new Float32Array([0,0,2,0,0,2]);return e.setAttribute("position",new r.BufferAttribute(t,3)),e.setAttribute("uv",new r.BufferAttribute(n,2)),e}traverseForRootSectorNode(e){const t=[e];for(;t.length>0;){const e=t.pop();if(e instanceof a){const t=e.parent;t.visible&&this._rootSectorNodeBuffer.add([e,t])}else e instanceof r.Group||t.push(...e.children)}}setVisibilityOfSectors(e,t){this._originalScene.traverse(n=>{n instanceof i&&n.levelOfDetail===e&&(n.visible=t)})}}function Ie(e,t,n){if(e&&t>1){const e=new r.WebGLMultisampleRenderTarget(0,0,n);return 
e.samples=t,e}return new r.WebGLRenderTarget(0,0,n)}function we(e,t,n){e[4*t+0]=Math.floor(255*n.r),e[4*t+1]=Math.floor(255*n.g),e[4*t+2]=Math.floor(255*n.b),e[4*t+3]=255}class Me{constructor(e){this.buffer=[],this.temporaryScene=e}addElement(e,t){this.buffer.push({object:e,parent:e.parent,sceneParent:t})}populateTemporaryScene(){this.buffer.forEach(e=>e.sceneParent.add(e.object))}restoreOriginalScene(){this.buffer.forEach(e=>{e.parent.add(e.object)}),this.buffer.length=0,this.temporaryScene.remove(...this.temporaryScene.children)}}
|
|
150
163
|
/*!
|
|
151
164
|
* Copyright 2021 Cognite AS
|
|
152
|
-
*/class
|
|
165
|
+
*/class Se{constructor(e,t){this._defaultAppearance={},this._handleStylesChangedListener=this.handleStylesChanged.bind(this),this._needsUpdate=!0,this._allTreeIndices=new s.e,this._allTreeIndices.addRange(new s.h(0,e)),this._styleProvider=t,this._styleProvider.on("changed",this._handleStylesChangedListener);const n=function(e){const{width:t,height:n}=Object(s.m)(e),o=t*n,i=new r.DataTexture(new Uint8ClampedArray(4*o),t,n),a=new Uint8ClampedArray(3*o),d=new r.DataTexture(a,t,n,r.RGBFormat);return{overrideColorPerTreeIndexTexture:i,transformOverrideIndexTexture:d}}(e);this._overrideColorPerTreeIndexTexture=n.overrideColorPerTreeIndexTexture,this._overrideColorDefaultAppearanceRgba=new Uint8ClampedArray(this._overrideColorPerTreeIndexTexture.image.data.length),this._regularNodesTreeIndices=new s.e,this._ghostedNodesTreeIndices=new s.e,this._infrontNodesTreeIndices=new s.e,this.setDefaultAppearance(me.a.Default)}getDefaultAppearance(){return this._defaultAppearance}setDefaultAppearance(e){var t,n;
|
|
153
166
|
/*!
|
|
154
167
|
* Copyright 2021 Cognite AS
|
|
155
|
-
*/function
|
|
168
|
+
*/(t=e,n=this._defaultAppearance,JSON.stringify(t)!==JSON.stringify(n))&&(this._defaultAppearance=e,function(e,t){const[n,r,o,i]=Pe(t);for(let t=0;t<e.length;++t)e[4*t+0]=n,e[4*t+1]=r,e[4*t+2]=o,e[4*t+3]=i}(this._overrideColorDefaultAppearanceRgba,e),this._needsUpdate=!0)}get regularNodeTreeIndices(){return this._regularNodesTreeIndices}get ghostedNodeTreeIndices(){return this._ghostedNodesTreeIndices}get infrontNodeTreeIndices(){return this._infrontNodesTreeIndices}get needsUpdate(){return this._needsUpdate}get overrideColorPerTreeIndexTexture(){return this._overrideColorPerTreeIndexTexture}dispose(){this._styleProvider.off("changed",this._handleStylesChangedListener),this._overrideColorPerTreeIndexTexture.dispose()}build(){if(!this._needsUpdate)return;const e=this._overrideColorPerTreeIndexTexture.image.data;this.populateTexture(e),this.populateNodeSets(e),this._needsUpdate=!1}populateTexture(e){e.set(this._overrideColorDefaultAppearanceRgba),this._styleProvider.applyStyles((e,t)=>{const n={...this._defaultAppearance,...t};this.applyStyleToNodes(e,n)}),this._overrideColorPerTreeIndexTexture.needsUpdate=!0}populateNodeSets(e){this._regularNodesTreeIndices.clear(),this._infrontNodesTreeIndices.clear(),this._ghostedNodesTreeIndices.clear();const t={rangeStart:-1,inFront:!1,ghosted:!1},n=e=>{const n=s.h.createFromInterval(t.rangeStart,e-1);t.inFront?this._infrontNodesTreeIndices.addRange(n):t.ghosted?this._ghostedNodesTreeIndices.addRange(n):this._regularNodesTreeIndices.addRange(n)};for(let r=0;r<this._allTreeIndices.count;++r){const o=0!=(2&e[4*r+3]),i=0!=(4&e[4*r+3]);-1===t.rangeStart?(t.rangeStart=r,t.inFront=o,t.ghosted=i):t.inFront===o&&t.ghosted===i||(n(r),t.rangeStart=r,t.inFront=o,t.ghosted=i)}-1!==t.rangeStart&&n(this._allTreeIndices.count)}applyStyleToNodes(e,t){0!==e.count&&function(e,t,n){const[r,o,i,a]=Pe(n),s=void 0!==n.color?255:0,d=~s,l=r&s,c=o&s,u=i&s,m=(void 0!==n.visible?1:0)|(void 0!==n.renderInFront?2:0)|(void 0!==n.renderGhosted?4:0)|(void 
0!==n.outlineColor?56:0),h=~m,p=a&m;t.forEachRange(t=>{for(let n=t.from;n<=t.toInclusive;++n)e[4*n+0]=r&d|l,e[4*n+1]=o&d|c,e[4*n+2]=i&d|u,e[4*n+3]=e[4*n+3]&h|p})}(this._overrideColorPerTreeIndexTexture.image.data,e,t)}handleStylesChanged(){this._needsUpdate=!0}}function Pe(e){const[t,n,r]=e.color||[0,0,0];return[t,n,r,(void 0===e.visible||!!e.visible?1:0)+(!!e.renderInFront?2:0)+(!!e.renderGhosted?4:0)+((e.outlineColor?Number(e.outlineColor):0)<<3)]}class Re{constructor(e){this._textureBuffer=new Uint8Array(Re.MIN_NUMBER_OF_TREE_INDICES*Re.NUMBER_OF_ELEMENTS_PER_MATRIX*Re.BYTES_PER_FLOAT),this._dataTexture=new r.DataTexture(this._textureBuffer,Re.NUMBER_OF_ELEMENTS_PER_MATRIX,Re.MIN_NUMBER_OF_TREE_INDICES),this._onGenerateNewDataTextureCallback=e,this._unusedIndices=[...Array(Re.MIN_NUMBER_OF_TREE_INDICES).keys()].map((e,t)=>t),this._treeIndexToOverrideIndex=new Map}get dataTexture(){return this._dataTexture}get overrideIndices(){return this._treeIndexToOverrideIndex}dispose(){this._dataTexture.dispose()}addOverrideTransform(e,t){const n=t.toArray();let r;this._treeIndexToOverrideIndex.has(e)?r=this._treeIndexToOverrideIndex.get(e):(r=this._unusedIndices.pop(),void 0===r&&(this.recomputeDataTexture(),r=this._unusedIndices.pop()),this._treeIndexToOverrideIndex.set(e,r));for(let e=0;e<Re.NUMBER_OF_ELEMENTS_PER_MATRIX;e++){const t=(r*Re.NUMBER_OF_ELEMENTS_PER_MATRIX+e)*Re.BYTES_PER_FLOAT,o=n[e%4*4+Math.floor(e/4)];Object(s.r)(o,this._dataTexture.image.data,t)}return this._dataTexture.needsUpdate=!0,r}removeOverrideTransform(e){if(!this._treeIndexToOverrideIndex.has(e))return;const t=this._treeIndexToOverrideIndex.get(e);this._unusedIndices.push(t),this._treeIndexToOverrideIndex.delete(e)}recomputeDataTexture(){const e=this._textureBuffer.length,t=new Uint8Array(2*e);t.set(this._textureBuffer);const n=Object(s.m)(2*e/Re.BYTES_PER_FLOAT),o=new r.DataTexture(t,n.width,n.height),i=e/(Re.BYTES_PER_FLOAT*Re.NUMBER_OF_ELEMENTS_PER_MATRIX);for(let 
e=i;e<2*i;e++)this._unusedIndices.push(e);this._textureBuffer=t,this._dataTexture=o,this._onGenerateNewDataTextureCallback(o)}}Re.MIN_NUMBER_OF_TREE_INDICES=16,Re.NUMBER_OF_ELEMENTS_PER_MATRIX=16,Re.BYTES_PER_FLOAT=4;
|
|
156
169
|
/*!
|
|
157
170
|
* Copyright 2021 Cognite AS
|
|
158
|
-
*/
|
|
171
|
+
*/
|
|
172
|
+
class Ne{constructor(e,t){this._needsUpdate=!1,this._handleTransformChangedBound=this.handleTransformChanged.bind(this);const n=function(e){const{width:t,height:n}=Object(s.m)(e),o=new Uint8ClampedArray(t*n*3);return{transformOverrideIndexTexture:new r.DataTexture(o,t,n,r.RGBFormat)}}
|
|
159
173
|
/*!
|
|
160
174
|
* Copyright 2021 Cognite AS
|
|
161
|
-
*/class Ae{constructor(e,t){this._cache=new Map,this._retrieves=new Map,this._capacity=e,this._disposeCallback=t}get(e){const t=this._retrieves.get(e)||0;return this._retrieves.set(e,t+1),this._cache.get(e)}set(e,t){return this._cache.has(e)||this._capacity<this._cache.size?(this._cache.set(e,t),!0):(this._cache.set(e,t),this.ensureWithinCapacity(),this._cache.has(e))}remove(e){this._retrieves.delete(e);const t=this._cache.get(e);return void 0!==t&&(void 0!==this._disposeCallback&&this._disposeCallback(t),this._cache.delete(e),!0)}clear(){if(void 0!==this._disposeCallback)for(const e of this._cache.values())this._disposeCallback(e);this._retrieves.clear(),this._cache.clear()}ensureWithinCapacity(){if(this._capacity>=this._cache.size)return;const e=Array.from(this._cache.keys()).map(e=>({key:e,retrivalCount:this._retrieves.get(e)||0})).sort((e,t)=>e.retrivalCount-t.retrivalCount).slice(0,this._cache.size-this._capacity).map(e=>e.key);for(const t of e)this.remove(t)}}
|
|
175
|
+
*/(e);this._transformOverrideIndexTexture=n.transformOverrideIndexTexture,this._transformOverrideBuffer=new Re(this.handleNewTransformTexture.bind(this)),this._transformProvider=t,this._transformProvider.on("changed",this._handleTransformChangedBound)}dispose(){this._transformOverrideBuffer.dispose(),this._transformOverrideIndexTexture.dispose(),this._transformProvider.off("changed",this._handleTransformChangedBound)}get needsUpdate(){return this._needsUpdate}get overrideTransformIndexTexture(){return this._transformOverrideIndexTexture}get transformLookupTexture(){return this._transformOverrideBuffer.dataTexture}build(){this._needsUpdate=!1}setNodeTransform(e,t){const n=this._transformOverrideBuffer.addOverrideTransform(e.from,t);e.forEach(e=>this.setOverrideIndex(e,n)),this._needsUpdate=!0}resetNodeTransform(e){this._transformOverrideBuffer.removeOverrideTransform(e.from),e.forEach(e=>this.setOverrideIndex(e,-1)),this._needsUpdate=!0}setOverrideIndex(e,t){const n=this._transformOverrideIndexTexture.image.data;n[3*e+0]=t+1>>16,n[3*e+1]=t+1>>8,n[3*e+2]=t+1>>0,this._transformOverrideIndexTexture.needsUpdate=!0}handleNewTransformTexture(){this._needsUpdate=!0}handleTransformChanged(e,t,n){switch(e){case"set":this.setNodeTransform(t,n);break;case"reset":this.resetNodeTransform(t);break;default:Object(s.k)(e,`Unexpected change type '${e}'`)}}}const De=new 
Image;De.src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAABgGlDQ1BJQ0MgcHJvZmlsZQAAKJFjYGCqSCwoyGFhYGDIzSspCnJ3UoiIjFJgv8PAzcDDIMRgxSCemFxc4BgQ4MOAE3y7xsAIoi/rgsxqOqd2d+pGwehjat+yq+1cc3DrAwPulNTiZAYGRg4gOyWlODkXyAbp0UsuKCoBsucA2brlJQUg9hkgW6QI6EAg+wGInQ5hfwGxk8BsJg6wmpAgZyBbBsgWSIKwdUDsdAjbBsROzkhMAbJB/tKBuAEMuIJdFAzNDXx1HQk4nFSQm1MKswMUWjypeaHBQFoIiGUYghlcGBQYDBnMGQwYfBl0GYCWl6RWlIAUO+cXVBZlpmeUKDgCQzdVwTk/t6C0JLVIR8EzL1lPR8HIwNAApA4UbxDjPweBbWAUO48Qy5rMwGDxhoGBuQohlrKcgWGLPQODeDBCTH020EnvGRh2hBckFiXCHc/4jYUQvzjN2AjC5nFiYGC99///ZzUGBvZJDAx/J/7//3vR//9/FwPtv8PAcCAHALbUa33lfYEHAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5AobCyAEEhU0UQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAACAASURBVHjalV3bkiLXESzoC9cZdle7lixF7IMj7Ad/qn/RVliybMmyZhkGGKCBBj+s6ig7yTqNJmJjbzPN6XOpysrKqjP429/+djUzu16vNhgMbDAY2HA4tLIsbTKZWFmW1rat7fd7O5/P1ratXS4Xw5+5Xq82HA47v1+vVzOz9Hcz6/zc5XKxtm07z/Ev/34elz9vOBzaYDCwtm3tfD7b6XSy6/Vq5/M5fU9RFFbXtZVlaXVdm5mlZ1wuFyvLMn2GP78oCjMzO51Odjqd0rNxrP5VFEXnl7+nf4bP4+VySe/jn+Hj97nzMU+nUyuKwo7HY3onfGZRFDabzWyxWNjpdLLVamVN06Tvw3nHL5xb/LfL5WIlDsJ/90Vu29bKsrTr9Wpt23YWHxfocrl0Jtf/HQeE/68GpDaBP5tfwsdzPp/tcrnYcDi0qqpsNptZVVVW17UVRZEWwZ91PB7TJuHF8gnxBR0MBukQ+Lh9kx2Px84iFUVhZVlaWZbpM3Hjq7nCeTezNObL5WLn81keoLqubTqd2vl8ts1mY03TpHGphY/mFte69P/gTaBOLf7CBR0Oh9a2rQ2Hw5tF8pfljYaf65/hz8Hv8/8/nU5p0cuytNFoZKPRyKqqSiceFxInECfRTzlaFPydLZSP1Z8zHo9tNBqlzXA6naxpGjsej3Y4HNJmqOu680z/bJ7H6/WaNu3lcrHD4ZDmDdfBF9/MbL1e236/l3OFm1ptAt4MJU4Un27frfhwPvG+0GaWTheamMgMqQ2AL+QL0DSNnc/nNPnT6dTG47FVVWWXy+XmM/CXLxr/P1shXxh2P/w8f0devNFolMbcNI01TWObzSZtVLcqfDh83t1FuYXiuSrL0sbjsRVFYa+vr2lO+DRHi8//jgei4wLYLKEFwIfipsAHKlPEbsD/7j/DE3q9Xu10OtnxeEy+ej6fp5PuC+obEn0qnlb/HceMC41+GBfbD4JbFDTZuFkQE7jFGQwGaZO6RXh9fU2bt67rjoVyrDUYDOxwOEi/79akqiprmsb2+33CJYy5/J0iF8DWouMCcKH5FLA7wMlmP+0P9wnCF1bPRdxxOp3scDiYmSVTy2bUzb0vBO9uXEj8N5wsPgkM3HDh8eTi+7CP5w3sG3c6nVrTNHY4HGy321ld1+k0O25wPKHG65vEN5SDPv8+drO5Lwad1+vVSpw0BSR48ZWJ50lEk46m3jcLY4jz+Wz7/d7MzCaTiY3H405UgSi9LEuJotmPIwD033E8
yiziBLKlUT4Ux6Asqf8aj8c2Ho/tfD6nk+6Wwt0cR09u2aqqSt9zPB5vcJjCWJEVUAel7DMXKkrAk4MbQ/0M/r/7bNwg+/3ertdr8u24iH7a/d9wlzv6xs9UoSJvCPb/yl/ySeF/ZzPt74KuyDcbbtCyLG02m9nxeLTlcmnr9boTauM40d0cDoe0+BGuUqFzzhI44C5VWKJMmzrx/H3RTvOXc2DXtq3tdrt0OqbT6c3pcx/M5h9NI5rhnAnk08LjRovC40Bcge+IGwMxA4NqPiyOazzeX61WVlVVQvg4FneLDo755OM40MKyJYxcQbIA/NDIfCCCxYHyKcdFcfLIB7jf761pGiuKwubzeQJG7mv9xLMlYB/NII5fjMOo6KSrU65cCm9qhSXYwuAYGF9UVWXv3r2zpmlsuVzadrtNmGE0Gtn5fLamaTqRUUTm8EG455397yWHfAzsGDUiaFOxpm8QRpwO8Nq2tel0mtCvLzqSKMpcqZBMvVhkFpU7UBtMPVtZumizoFtSB8ldl5/4wWCQNsJ6vbbL5WJffPGFtW1rx+Mx+5loYdW7RiAZfy/ZxOMH9G0ChejdAvjv/iL7/d6KorDFYiERvZ98NGEK6KD5V0RPNFF4Chn0KbPPoFfNQy7WZvCLOMCt3uFw6LiFN2/e2MvLi/3jH/+wh4eHRA5dr1cry7IzN7zROWTNYRt0ryX7cwYZiNpVjM+cAVoBZ7aOx2MCOz7JfuJ9UdD0c6zOuABPm0LCjD944ftOewRsmRPhRVbg0RfFf7aqKptMJtY0TWfDOU56//697XY7Wy6XKWR0V6rcNLu6vq8bkolPj4rpmRLOoU8ndy6Xi+12O2vb1h4fH7t+B0I5PNFMaiDVrHw/x+/3+EF2MfckTnI4QJ0wB4Uq9h6Px50owefVreBkMkkU99PTk202G5vP5535UGPhOWGgGkUGQ4WIOeTLgUIflLNTnkXbbDZmZvbw8NAhNHzx0fw7y4exPsb+vFkQATMw5BOuACQyg8oN8IRGG94XmRlK3NA4Vmf0PBJifsPNvLOg7969s7qubbPZdDKnmLBjfKBctTqk/ueSzRxSnNGu4ZPmg3FSZ7PZWF3XNp/POz4nonHVCceQBicqOtF8Mpmq7ov1owhIAVLlShStnOjWX13faDRKC8zP8twGxvxuPXe7na3X60SJq3dCfKA4EA7fE/ZQ8TsnaCIziMSOZ8VeX19TbO8vzwvINGeExvHvjq5V7K8yYYrQ4mRQxI71bYK+UJFdElq/3W53Q/e6yXdSrCiK5PNPp1PSCaxWK5tOpwkcKmJKhZ6RBTCz3yxAn+lQdDCe+uPxaNvt1kajkU0mk87i4knHiVfiCPydQV4U4qgdz36Pn63Qfk5U0cccKs4BN3pVVck94pdbShfc8OINh0M7nU42Go3s7du39unTJ5vNZglLME7jw9PHGg45XufJZ3yA3+chXtM0tt1uE4/vQM99Gy40U7ycuUOqGK0RAy4kXSLTG0UtDB7v/cq5iQhLIFbwhA/O8WKxsMFgkEgfprI9RHRxzvv37ztJIU6JMwvIhwA35WAwsKECCYzWVU7AxRCHw8HW63VKeLgUC+N7NufI/CkWUqmJWJiCoCjiANRpxZdX38efo/QRymVE6Nx/d5CM3/P4+GiTycS2220KC3EhfQ18jtq2taIo7O3bt0kXwGAwEogo63S9Xm0YERbKr+BEOFW52WxSrtuFCxzjq8VnQQbG9Zg34J3tCFqZP/+ZSCSCAhTMp0fRzT0hJIet/Hmoe8D/H4/H9vDwYOv12g6Hww2NzVwJsozD4dDevXtnu92uIx9Dy+hzhO+KVH7CJzn5EJ9G1sVtt1ur69pGo1FSv6DpR9dRVVVnctACKHGo2tEMSO/RwanwMQJFvFlzz+bf2TqwtcS/V1Vlj4+PdjgcUrgcRV58iHy8ZVnaYrGw19fXThTmohJXL+XY3Ov1+hkDRKecdw/G+a+vr4m4iDRwatAe9+Mm
UdIuJoFyC6iUOQr98gJFGUx10nPkl+JReBPg5vcIabVadUx4n7ADuRNUS202Gzufz8nKsAVQhziNF3eof7CLL9mMuOnfbrdmZimp4+ZfWQ5W1Mznc/v48aN98cUXHTm1ynqxG1J4RAFAxQJGGkUlO+9zAyqrxtR0pGr2uVqv1wkXMKGj8hN+WPCzLpdLcr/r9TqtmzrpYRiLA2X/iYvvO8w1aXjy0VfxwB3I+NdoNLIPHz7YH/7wh5scO6pwGRiyzDuaZLWwyqzzpEesopKOq39ncKkApH/PbrfroPhc3p6BNOZO6rq26/VqDw8PNhgMkjtQqqUIGJd9oQ9ugP1+b/v9PpERTO0qMoTB0Xa7tR9//DHt4FyYxSpkxA/olljVq15YkUIKKKq6BUVQRZOMIR4mgvDnnOqNSKvISiFuSlRuWdrpdLLFYmHPz89Ja8GgV2kkzew3RVBfbYCreKqqsqqqOmlNJngY6WOat21b+/nnn+/i4BlguctAMKjSooq34CKRKNcfbQLFlnKaWlkdfj4DwkjHwGOPimNQiPL4+Jjk6FhowuPthPosjMBkA7oGlzdPp9MbAsd3Ji4UZsMGg4FVVSUFlszIKbUNF1cwsFHWR20Qj6Mj/95HG7MUS/ECnEhjidY9GUac25zcnj/XaxT8oPr7qkKT5AI4z81MHypS5/N5h+hRIQqKOB28RJOAAFBtRD5VeLr4pRTZhAQNppvVZKiSN1VIweOLijAUd8CJNrXhlb7BP8utB6eGHWedz2ebTCZJQj4ejyXNjfNTIkGhTpGHfF6Nw3G+CtkYbEVaOvWiKoOnJNwKP/wenj4KD9Umjfw8vw9L3vkU42HDwlGmw/F70b36IiMmYJbwfD7bbDZLHA1GYgpwllEs7IN+fX1NzBXLtPGlMQRyfX9RFNY0Ta+eD/16tBH59KF/yxE3OZDFG6Ev0xidzD4pNi42RywqJa3EppEa2zcBbgbXHbgMzzcHR0+yLgB9//F4tLZtUx4aK2AVJ+6boCgK+/rrr62ua/v3v/+deO5cJQ/69hz1yieFcUWfNPwegicifaJows2zqoD2PzuvojaHUjnl1Fio2/C/Y2n85XKx+Xxuz8/PxhaesVfJOx+R6uFwSFW4zuDxALlmzwd0OBxukkkKAEbmkpU8+L2cM2DSKYrFGZmzrDqXSYwKY1URLZ9YP4G4cLl6hByHgGNgsanPjeOj6XRq2+3WHh4e0uexrqLkl/IvL2FySZcvPgM+9QKXy8U+ffpkq9WqUzShkK7K0eOCKgEkAz6up1eIPIoM1AZVglgGyQpIKq0CLzjTvlEdZq5Gg78HRbIOqN0K7HY7Ox6PnSYZiKFKPAFu/p308eJM1tPhLlLNFdCSMBOYyzxGNGu0qBiKRdnLKAZWYZwSkOQKZHJ6AVUHGdG9rEvgRY4KeCMXgfPpGUcPw7mQZOgL5pkjL832ej1H/FzCzaDPQz6UfXHlkKrdizQA+GL4ghxloOBEFZDk6GEUdXJFUo4+jYpJ8d9yMi0Oadn8I7B2F9u38dSzXE42HA5vCkvTBmDT7QWbTiQoAQW/HCZ1ol2uMm7RIvVJuHmTOTuZE2gqmRZPsHJlKr/PHUmQ/FK4Rsm1ImuXqnZ/xU+oc2D8owplMNLwn314eEjtZLzeMM0xL5JnqNxkqDx8TjSCp52zd7nYnXsLsNWI2D/k2ZUKKccPRMkfLjyJ0txK4II1D4qLjwpsWEDqgLFv4VVvJnw39P++CXBDDdn0+A9wvR738olKoZSIFF0BLti9ihs+wVztg1qDvuyXeo/xeJxCXdxAqstIJJ5ly5UTpkaAlCloLiqN8hNctcz5icvlYrPZrNOEwjfBkJFq27Y2m81SzI9FnKr4k09nTnuPANF9HLdZi4Af73LvG6QYOHXKIzc2mUzsm2++sY8fP9r79+9vElzRxnTMpLALS+Aj8osl8vyeyrQr/V+fwPV8PicrwBT6EHUA3toENfuo7OU0YyS85EVQ
fLyLF1QuXrkS1NBj6OoRBkqhFL+vNoNXLj0+PtpoNLLFYtHBEpjb4OTX27dv7e3bt1KDF9U25MApWzelYWSQGPH8OF6k7Ou67qiGLpfL5w2AqNETCKpok30sPihqRKTy2MhmqdJmBk94ir755hv78ssvE17Bz0Okq1yIcguo1nVtvppkNO2TycQ+fvxoHz9+tIeHh45Qlq1AZL6VGIWbS/KpVwkrnD+2eniYL5eLjUajToOptm2tdPPvyNBjf25r1hcWqTCNY2EVEqk+A5z08e9tmsZWq5X99a9/tePxaE9PTx2dgT+PGT8mlHDCttut/fTTTzYcDlPnTbeInA7m/LoTZOwOo3K1HL5BjMRKZ5U5RGV0LoLCVjWj0cg2m42dTqfkEko8PQ6ClJ9SG4CTFUxCqMRJLn8fsX44hv/85z82Ho/t3bt39vT0ZOfzuZPt8g2Ai5ir9vGNxCdOjccnc7vd2g8//GBFUdh6vb5h5WazmRVFkbSTPJaorgC/T4leGOzhZlel6Vym3ratjUajBPRTcaj/53Q6vYnvlc9X1Tw5LNCnsuWBcg6ew6h//vOfKdvogBA3gdI15j6L2TvF6jHj6d08eB6qqrJvvvnG6rq2H3/80ZbL5Q0oVJlXThMrMQlrJllWx+6DtY9t26YsoYPoIZoJ7JEbMVY5ti0nwuTkiWK2nNRBkz2ZTGyxWNycYG+Vys/B2FmVl+XUOWhyVeZSpWN5wh2bIJnDTa9U+KiqozgyQFwRFb6oSiZ059gAMymCfGe4egcnU1GcESUZDYw7kESly6rJw/l8tj/96U+22+3su+++k/42om+jvn8Rr99X94d/5kaYPkeHw8H+9a9/2XQ6TU0vPV1clmVi5fb7fW/TB6VC4p5AUSfXKA/ifQp8zVNdgNenI9qPkjLKX7IuPurioWrxVaoVZU7ffvutDQYDm8/nlqtl5LHhaYmaODKxpXL9GAarDcWfsd/vbblcpnJvfLfRaJTK55yKVhY3IqIi6xm5VWy84e9eVVWKdkrfTd54IOpEGZUtMUGEZicqcmBLgKbVuQh/OW+Z9t1333W0hgrxRp/HYaoytRhD89hUY0iuWeBwTG3Q0+lkr6+v6fmLxcI+fPhgm83Gfv755xSJMf7hpJhqapWzIPxOvtZt237WBHJm7J48uiIrlComCoEUIsZCUew+ggWWub44qh4hkorj5Kl4H8uz2TJhroK7lUZ0uP/ZS+q8NNwrhI/Ho/3yyy/SKubo9ahcTrXDZxDZtq0N3Tep7FfUCJonCZUmyryyKe5rxcKgyReeFcecgeOxMIHD1icaC5pNVSfJERFHC6qFjurA4l1APW/Pc8RiD6y8Vhb4HlzDVmvojQc6JcPEnSsNXh9B5C/uL6nSxrmmCqpejtO5yhqoTKBqOq1MJfcGwA2FixlpFKOmknyC/Tnr9bqjl1SSfC7OURgod2VMpIN0fFWi2eXYkxdN9aTJJWFUzV8uWuC4FjckM4ZRkiWXjkZlLH6WWlgUuJ5Op5s5wGiAMcY9m2AwGNhms7GffvrJ5vO5LZdLKaHHknyl+mG2U9HFqtWvg/6SgQTGieoyBv4QlexQdWgsBlVsHy+a6nkTpWijsA+tgCJSfOH9RKCF8EnCfAEjcUzecASB4FBxCOfz2Z6enmy5XIbKKAdseEVM1KJXsYaROAf0C+VNOOSnA82OYth4QfvKnnIl3oo8Yl5A8d3qciYOz9j/4+8fPnywtm3t06dPYbNJb9TEuQDFGCr3qe4pYHenzDluOi/66NMWqjnCMfszXPMxVHIuJaDgU88Lyrl9lk0pWravSZOipZX1iEIh1WeIy8j++Mc/2rt37zob5i9/+Yu9efMmxfGYUWM6XJVzRxFErhZBgUzPVXg1MfdaUA0gmAuJtJZwZU0pmaNIY8YxMwMl3iSqzInDtIjUQD+NUYYCVaoalyMb9e9///vfb3ICP/74ox0OBxsOh/bnP//ZvvvuO1utVimdiu3yFcmk
4vGcDEyFocj6cTpYpYdz8jAFpr3DSMlgSBVH8I5V3b8UMkZRI4d47ttQXaNEoz4ZKGTEujj+xWxjdLWN/9mrnvH7PIEzHA7t+++/t6ZprK7rRON++eWX9vT0ZKvV6qZzuYPG6OJIRSQp8KzawOKBVA20ouqoTjEo0P2DweC3W8OiFit9SQYEWHxrBloJVMx4MYdvvijs5CSN6tatiis5KuCESFQMwwLO6/Vqz8/PKWxaLpf21Vdf2ddff935P3w/DN0QtEUWLFfYij6c8QBGNFHSTV31w1xI6R0mMPnDk6PoUMwaMv8eTSbHuXhd6n6/T63Trtdr6nRRlmXKq/PNWhENnLsoMkoOcRcPlGe7fM2tw7fffmur1aqzkeu67txxyFp+VgZxqjfXtj833qiLisIFLCsbDoefK4PQz6LWTlXiRMSL6mad6zjuz/nw4UNqjb5YLOzt27epIcVkMrGqquz5+TnRpNgSjYWbDNhU+baiVVWXMt/MHP7t93v74YcfOnODdyqrOgB2P6p1m7pfKMfwKc0gWjoFNPH5/n2lDz7qzRu1M3HFMJssjKXV9XH4fGfDvIs2K1mc+mT1S9QyTlX9qMZXbNX4NCnwxBskYt0U6IpOqNoErDFQtRSKEIoSUMql4zNKVI0yv85EB1fBqElFLgE3QVQfuFwuOy3QvQml3yh2PB5tt9vdtJFXyZwPHz4kda/n5r///nvJYURavQitRz2Vo43FFoIvlc718EdXzCrhnKYhCuf5Kh/8zBIJBqXaiQonUQnLDCBe8Ij6PA5z/FQ4BtlsNqkhRV3X9t///je1qOGaObYsZpawgn+/X7qUaxAZ9SpWrKfCRlFDZgav6FZyIDC3WH3C0qgRVC4LW+IV7JiocGsQ5feRo+b+OyigZI2Ain85MnAFMHbQVoWYLBnf7Xa23++TyDOq2GVwmkue5Pj9SHmj7jFWlkOF3SpiUalsdb08W6Xo4quOK+eXUehU7XjuUKGUQmx6o4nHGN7DySipoZA6jo1Lzxj5Rr4wEoIq/35P7X5UnMJzGgFCdVpz4bmy3HiAWQ/gn1MyEMKya9XESN0BHHX1iLpx5nwZRwxsrlk+heCQQafKOjIzqQQUTMsqnkQdHLWJIqAYbRw0/4zq++4GRveIa4J9gnAtzufz5zCQrxyNBAfRtaROK7JQgweh2DA+obw4yuRyiKbAmbe25WtquPBC1R2quFkBPEVDR907VPUuz4OqnVCp+JzVUdfVMW2P710q/R/XnDMAyl1IpGJgpdRljkC1heer3DHJxI2YFOePqV60ZNyePmqujIvIUvMINHI4misHj274VMmbeyMOjBpUcgnnpm3b35pFs89TDQ4RJGKygs04mxp1kll5rFgsjCjw55TPYyHlzYvSFfKRikaNQ+kf7uldFNVERh1B1FgiU88/E3USVXON71Fyu1a+sJHboirqFHco07C4ee5pdcIhFPtADFm9OJSRME8IF22qhheRwkhpJVU4qWRyUcZPAVyOTriXT1RxHc1hdKkF/l8qDkV5U0Qm5O74UbsZJx19cZ9ciSMNJn4Y8CgaV4E0Vvuo+r9IucMKKRaV8viQ94hYuYgLUPqLaIMqljJXycQ5m+v1+pkH8ELBXK99VQShysQU2ENAGCUqolIybHTI2r2ojDsq4+IcRVSmpWTZWLeg5GdcFNt3e6dqO8/AOXcJBrfr442jxoAZRD8Upep8He20qMqHVbjYPFLRsDnyRWUhWX+HJ00JKRR6jpizHMOmQF6OGIquuFPm+vemhu9RPXEndbd6qgQ/9QfwH4yuU8stvAJxDIByeepIIBFZiqjMC8eGiS2+kCqaVMVrRGPmLGOkQFI0sYpU+m4qVcomfAa/nyLGVFrYN0ip2r2xsvXmssGgxRuqilUVCw5Y8QlKIJmbVL+N0wkirgdkd6FSshwb9/nsXDvZvqIM1UcwKrfrE3qqht2RNVRVRe4KS6xcjV5E9eBVSRHuAaTQOde+K8CV
09Mz2VEUhX311Ve22+3s5eUl1DKiKeT7CCIAmMv65ZIykc+/B7UrwWxf63ylKFYVUS4yLYoiXWBdeqjG3bE4BMtJxHjCOe2p+HgGJBGAi7qH+vdVVZVu3PYaO7QyKu7nEDcqCv09JjqSzvWJXxXax0qg6OLN6Fq4SCugoqK6rq3EzlH4gGjxlXiRlSxR2ZJqO6N6/uViVwasl8vFnp6eUvrZXYK6twdZRSVAUQg9KtDM5RlyTF/OiiiNAja84LsVVBeQHPDFNUobwGvF8Yvr4SNRJWfcUMjQNwGK31d3BEcnxuldNO+uK4hy9pxhU1fZRMUhCh/0RS8qNOy70DraCEx5K7wRqaT4GS5tL8vSht4XL+nEodWp6qun/FtEVXJcmruOViUz7hVEKCTvZg41hMqn8wmLFiPXJY0roFV003dbiapm5iwq9xlSBFhUiIMXgHhziF/HPEwdI1AIijtG3UjJiD2K9bnIUXHYSrBxT8UrdtbKNV7Kxd65f0MxSw6kjkajpG7OJWByBTAqP8CAMAoh1cbmZ/lt706hpwsjfACOCvF+Geb4FVDDU6dSyhHTpvxylAlD369YQdWbUJnqKOLIMYcMYplg8hYy3jDS9YxKvxhFBSqncE+95b38AY+j0ybOY2inObFYg5tF5VDy7yE41G5lV6C6hUfAUNXvRT0M2R+r5JdyWYqS9XE0TZNKyTysvsfUR+xkFMfnXGFOBIpaSrdq3uByiD1lPUbMceqqrRmb+SjH3vcrwheKo1cNqxEdqz7B3JQ6QtC5tiyqZOt8Ptt6vU79f9R7KCwUldRFSap7Fj4ShvqzveAmrTveTOFuICpsiDaDMrNRWzmVD1d3DnCLWbUYSpiRk3vnJOARWo+IHbZOrkC+p01LBDKjjmdRoqyvJ5HKRzRNk3I1g8Hg840hTqhgtkvJm3Pp29yt2arFuco55HrxR9agj77NTaA6QUgcKUsX3R8YgVfl76PkUK4WIbpmPqdm4s3tEY/7/4QBMJTZ7Xb28PDQAYNcsKAaL0R1A4rDz5n7KL2r0tT3mMSofCxqdhHJp3MW5B5iJ9rYUeY191m5cJjdMeZo/E5h0gj+NkF1XSc3wFe4YciF+YEoXle7mJMYkUmMmlUpsibCGSqppNLXCjhFobA6ibnCE/WluPvIGuQkYQyGcyGv/7lpms4NsKk83L/cDex2O5tMJp2SJoxVc02fVLMGJpSiiWYqNgeaIq18X46dpdNqQXONnbiqKZJ5Ky5enWKlpVTRj5KeM15T7tijG7/C9+Zyb5zwtm1tPB6nDaAWgAs91WlU1Tw8aWweuQeuyoJFDFvuPh3+d59kLC9jNTKGhHi6WFiBCTPmKaJ6h5wkLHJBCnQyDxM9A83/aDS6vZ+YpVIOEA6Hw02Ha97BUf/AiJDgMrH5fJ4aJHJ+PufjVReNnFtRVCu+W45lU3oFZAlzl0UpU50Doip05HSuqrrKVWYNh0M7HA6J6r/BReyf/V6e3W4X1qlH9fY5lk0t0HQ6tTdv3nQuprynK7mimqN2qY58lSI2pwdkqlbpDFi4mqNpVYlYdFCiSuZc7iWnMt7tdlbX9U29x2Aw+CwI4YUejUbWNE26RURl9iKdWa6aGF+wbVt7eXnpXFDJNQiRHu6epBTToEhVq7Erqll1GlWtc90VcVCcmwAACrxJREFUcL2EqkTKZRBzQFCRT9GB8Dkuy9Kapkl3QWFpXcIBKtvlzRk2m83NzlftYLGtKcf8zA7iRluv1/by8pImq67rTtuZnNQqyjoq/psxB/t9ZX1UTWOuKIOtjGoBn5PC33PrKiuBVM9F7u+42WzS/UBSpqZiXu9rb/a5QyXX1qnMH19+hKlYXBQmSBCFs8o36uARsWtoPXCRlannTF/fla7chk3VCCpuoS+TmOt6nsMG7Io4GVZVVSqv9w2AUYY/o1SXGPnkTSYT2263aTPwSWFTGpV95+hf1P+rCc6FjSpe9uew9IuRvVqAqL8P9zJW
dDdrHVUxS870MyBFF8J4Symv2PWWZWmfPn36LesHFqkTskdKF9T3bzab1C2rz4z1UZJ9SFhlBXMKl6jxgqKwOcyMmLk+di66mSPi4/siDH+uK7F4kRWNzkQdvmNVVbZerztrGLahy+n9zMym06nt9/tOKzmFUrmnPi+OuiRSmWVG50p8krs9O6pEVmxlpFKK8EYkic/5dNVeNqoSqus69fCNGL7IdWC3Fr8BHq+miSKPYU7m5AObTCa2Wq06N4pHcW7OOkSMW8TIqYxWdJ2NUteoNHJkbfqijOhEq8ZPqr5PYQR8Vl3XNp1OO4DNDxsKXyJq29+nLEtbrVZWlmVqvhX1D7xer583gKqwwZZxzgp6Sze+4p2JHvbj2EOPTZfKl0eFFLk7DJX5VUxkBEpzXEaEG/pqGThSyfVVwK5gLr9TuYic7tD5G2d08eq6CD+VuZOGoGg+n9vz87ONRqMU80aFnkwv50qjWHUU+SpFJed4dSX3UpEEbiZ1KUVfZjJK5UZ8Apd3I/D2Cx2jqMK1fCyju1wuKXez3W5tNptJt6jmpezrlIUuYjKZ2HK5tPfv33dMlCpcVNfAcZ4g0ujxhsmlaCPhQy4yyYlVuNlUbnHVpsuVdedqIvF6HRVq8vO5OqooCvvll19sOp3enPpcTeJQyb7xA/Dk+HWt7mOwQTKaV9XiPIcbchp8Zaa5aVVOcRRtLKWZU9fh3KO94xI0/gzuzqVOIp78XHdTzgU4abdcLq0sS5tOp507oHNjTyAwusJN9brx8isPDVkurnLUEUDknZm701eRK4rDiKIZhQ8Uq5kDsrmMmxqzN6pi8Mr1+pEWIddxDAmf5+dnO5/PNp1ObyKjHLk1GAw+9wiKZMiqD79fcbbdbq1pmrQJItZNycmia1lytQB48nM5/z7tnGqIpU5ZjuPIJXUUJ6HqF3jTYKgcye/w5Ltia7PZJL8fSe1yX8Mo/FKCD5y42Wxmq9UqScndEmB/2wikReRNn8lVdGjU0JHj9Rxaxw2YUzf16e+jcJjzFZFiKgqVOc+CiZ7VamXT6TTUL0YbIR1s1vb1vTzKyCeTiT0/P3d680Vl11wZpPLbOc38PVU2fRKzqJomB/b6yuD6tP+sqWQ3k0uW4S88YF7csVqtbDKZhAdC3aF4g/VyA4/MOJIX4/HYPn36FE6kMl8RSufIQW08NUG/pwVLpHO8J5uYk55xJBRtXGY7VSWVqkvgQtjn52cbj8dJ5BlhJqar+d6mYdSaVeW+Fdjy27D9/pwoFOpD9VGGL7rWJaex6xOM5iYqp8LN5e9VQUjUExA3nUr2cGYUw1JefGYEo45hkcCljKpflDhCoW7v6z8YDOz5+dnevHmTyqO4x4ACWmoDclu5HE6IFiXCNUq4mdMf5OoK1bWuOYzAvRhVG1hOqfv/1XVt+/3eXl5ebDabpZOv7nlmJXbOIpWR7+U+/1wQya3NncN+eXmxh4cHq+s6AURWDLPPUyxd1FiSFwzVRMpMcx1AdIVaX/dwRZnnupcoSZjqHRxlD/HZTvFuNpvO4vdZS9WxhX+mvAeJRxODcWxRFDYajWw4HNp6vbaHh4d0zx76H54wXxyWROcqd1XpuUoCRQ2ZFXGkZNX3CDgjyTdrG6LOnbzo3PK2qirbbDa22Wzs8fGxo0Jm85+7H5FTyGkDRL6PW8FGBQ58Unx3bjYbO5/PNpvNkjqIawNY0x6d4qhrR7Qhojau0QbpSwblqn4iV6O6nEXEjkqQ+aZ+eXmxpmns8fGxk91TafHcGDEqkMkg7sHTp2Pnxccd5pLv3W5n5/PZFovFjQZfgSN1+yX7RwVMWcmjJkmZXmVdVDzdF3bmLABzIuqwqXa9g8HAVquVtW1r8/m8c/Lv6Q+g6juVdH2owj9uL9ZHKnCrWD/ps9ksXQzl5ozJEG51rkIgJELUBY2KZ8iVb0Wtb6IoQOUT+qxErjsp5wkwDewxvl9mvVgskqRbnXh+h2itIkay7CNbOGd9
j1XAD59Op3Y8Hu35+dlms5nNZrMOYo40dgzwVA6BrQbr6iNFM+sDI+4i14uP9ZN9rWf58xURVJalrddra5rGZrNZquML1TwBfZ/bqNgsItUF9PEAaNpzzYgVZ+9hYlVVtt/vrWkaWywWhjeWRrdps2nnhApLtSMBqQJeEXmUGwsveqT46esn7OJV7HjWtq09PT3ZYDCw+Xze6besOpfcE+Jx5MWHqCiKWz2A6pLdRyxwLaAqw/YXOxwOtlwubTqd2mw2kyVOqmBD5erZp6F5QyyTYxoVgRLJp7CiKCKeGIfkxJ0+9u12mzR8k8kkdWtTauWoiimX9IqIrMvl8tkF5KRPLFmKwoyopy6bYb8O1q2BcwaqVQuesoho6Us65W4fizaYAlJ9esIICPL1tNjy7XA4pBK8+XzeEXDyHCh53D2ZyQivpeqhCLhwL/uIo88lYlRxhZNGdV3b4XCw9XptVVUlgiMifBSC5jHeZLqCzmbMeEY99nJJHQaz7KZUHsRPddM09vr6mrR7KAePxDmRVcoplCOLgPihZFKGOeiI5eoTSOQ2B1LIzhhuNhsrisKm02mnjJk7fkeiFS5Nd1PN16hFt33mJGTKKkUdRqIWMd6Ea7fb2eVySQvPsvFIxaT+LZd3ENKvGzY3WYC+Xvi5+3RyjOE9HILr2cbjsZ1OJ3t9fbXdbmfT6TS5C9fKqVtKWCTCpEeuVWzU01+BwL7iFnUZtYtlmqZJal1vKslhncp9qL7DvQKP4F4mNP2dBhG5erbcxN3LDCr0GsXW4/HYRqORnc9ne319tfV6bePxONHKHisr3MGXQTGZpe7tU5MaXegUsYUM8HxDe/Vzasv+q16Pr7/Dkxm1z4k4GLbWag1yjGGHCu7jAiJFLg+QO4iwv1b4gusFPGw8nU623+/tf//7n5VlabPZrAOUONpgppEznNgGv09MosSuSrGMFul4PHbuPB4Oh+m0R00ocxRuX0kZ1yoii6i6qysLkiWC+pJDSqacawKVa7nGhEsiKn7lCxwnOIj0AhVPQPmFF03TdBZQET54jWr0/qq6B822T/7pdLLD4dBxU2VZdnw8/pzqtJbLQEYUL46Hay9YNa2ypZ0NoNQsfRyzMj0KZavFjSwJ3+zpP1NVVZpUpFAxjPJNgTeg4Wd7Qkq5O743gMeHoahvxuPxmGRaZVmmghnVERSf7xs10laouxiiu5zQEqhEUt9NJWbWZQL7Up4KKEZ3/+QAS3TTJpeFMRPpk435hsvlkjbCer3u+NeyLFPTiaIoUl0DXlLBp7pt29RV43g82vF4TKebGUH06RGhxuKPXIUx8wf3MH2clbw3UeSf+X/9B04mXw6cfAAAAABJRU5ErkJggg==";var ze=De;
|
|
162
176
|
/*!
|
|
163
177
|
* Copyright 2021 Cognite AS
|
|
164
|
-
*/
|
|
178
|
+
*/function Ee(e,t,n,o,i,a){const s=new r.Vector2(t.image.width,t.image.height),d=new r.Vector2(o.image.width,o.image.height),l=e.uniforms;e.setValues({...fe,uniforms:{...l,renderMode:{value:a},treeIndexTextureSize:{value:s},transformOverrideTextureSize:{value:d},colorDataTexture:{value:t},transformOverrideIndexTexture:{value:n},transformOverrideTexture:{value:o},matCapTexture:{value:i}}}),e.uniformsNeedUpdate=!0}var Oe=n(23),Ae=n.n(Oe);
|
|
165
179
|
/*!
|
|
166
180
|
* Copyright 2021 Cognite AS
|
|
167
|
-
*/
|
|
181
|
+
*/
|
|
182
|
+
class Fe{constructor(){this._events={materialsChanged:new s.d},this._renderMode=Te.a.Color,this.materialsMap=new Map,this._clippingPlanes=[]}get clippingPlanes(){return this._clippingPlanes}set clippingPlanes(e){this._clippingPlanes=e;for(const e of this.materialsMap.keys())this.updateClippingPlanesForModel(e);this.triggerMaterialsChanged()}on(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.subscribe(t);break;default:Object(s.k)(e,"Unexpected event '"+e)}}off(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.unsubscribe(t);break;default:Object(s.k)(e,"Unexpected event '"+e)}}addModelMaterials(e,t){const n=new me.c,o=new Se(t+1,n);o.build();const i=new ne,a=new Ne(t+1,i);a.build();const s=Ae()(()=>this.updateMaterials(e),75,{leading:!0,trailing:!0}),d=()=>this.updateTransforms(e);n.on("changed",s),i.on("changed",d);const l=function(e,t,n,o,i){const a=new r.Texture(ze);a.needsUpdate=!0;const s=new r.ShaderMaterial({name:"Primitives (Box)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:ve.boxPrimitive.vertex,fragmentShader:ve.boxPrimitive.fragment,side:r.DoubleSide,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},transparent:!1}),d=new r.ShaderMaterial({name:"Primitives (Circle)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:ve.circlePrimitive.vertex,fragmentShader:ve.circlePrimitive.fragment,side:r.DoubleSide,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},transparent:!1}),l=new r.ShaderMaterial({name:"Primitives (Nuts)",clipping:!0,clippingPlanes:t,vertexShader:ve.nutPrimitive.vertex,fragmentShader:ve.nutPrimitive.fragment,side:r.DoubleSide,transparent:!1}),c=new r.ShaderMaterial({name:"Primitives (Quads)",clipping:!0,clippingPlanes:t,vertexShader:ve.quadPrimitive.vertex,fragmentShader:ve.quadPrimitive.fragment,side:r.DoubleSide,transparent:!1}),u=new r.ShaderMaterial({name:"Primitives (General rings)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new 
r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.generalRingPrimitive.vertex,fragmentShader:ve.generalRingPrimitive.fragment,side:r.DoubleSide,transparent:!1}),m=new r.ShaderMaterial({name:"Primitives (Cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.conePrimitive.vertex,fragmentShader:ve.conePrimitive.fragment,side:r.DoubleSide,transparent:!1}),h=new r.ShaderMaterial({name:"Primitives (Eccentric cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.eccentricConePrimitive.vertex,fragmentShader:ve.eccentricConePrimitive.fragment,side:r.DoubleSide,transparent:!1}),p=new r.ShaderMaterial({name:"Primitives (Ellipsoid segments)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.ellipsoidSegmentPrimitive.vertex,fragmentShader:ve.ellipsoidSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),f=new r.ShaderMaterial({name:"Primitives (General cylinder)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.generalCylinderPrimitive.vertex,fragmentShader:ve.generalCylinderPrimitive.fragment,side:r.DoubleSide,transparent:!1}),v=new r.ShaderMaterial({name:"Primitives (Trapezium)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.trapeziumPrimitive.vertex,fragmentShader:ve.trapeziumPrimitive.fragment,side:r.DoubleSide,transparent:!1}),x=new r.ShaderMaterial({name:"Primitives (Torus segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0,derivatives:!0},vertexShader:ve.torusSegmentPrimitive.vertex,fragmentShader:ve.torusSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),g=new r.ShaderMaterial({name:"Primitives (Spherical 
segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},extensions:{fragDepth:!0},vertexShader:ve.ellipsoidSegmentPrimitive.vertex,fragmentShader:ve.ellipsoidSegmentPrimitive.fragment,side:r.DoubleSide,transparent:!1}),y=new r.ShaderMaterial({name:"Triangle meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:r.DoubleSide,fragmentShader:ve.detailedMesh.fragment,vertexShader:ve.detailedMesh.vertex,transparent:!1}),b={box:s,circle:d,nut:l,generalRing:u,quad:c,cone:m,eccentricCone:h,sphericalSegment:g,torusSegment:x,generalCylinder:f,trapezium:v,ellipsoidSegment:p,instancedMesh:new r.ShaderMaterial({name:"Instanced meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:r.DoubleSide,fragmentShader:ve.instancedMesh.fragment,vertexShader:ve.instancedMesh.vertex,transparent:!1}),triangleMesh:y,simple:new r.ShaderMaterial({name:"Low detail material",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new r.Matrix4}},side:r.FrontSide,fragmentShader:ve.simpleMesh.fragment,vertexShader:ve.simpleMesh.vertex,transparent:!1})};for(const t of Object.values(b))Ee(t,n,o,i,a,e);return{...b}}(this._renderMode,this._clippingPlanes,o.overrideColorPerTreeIndexTexture,a.overrideTransformIndexTexture,a.transformLookupTexture);this.materialsMap.set(e,{materials:l,perModelClippingPlanes:[],nodeAppearanceProvider:n,nodeTransformProvider:i,nodeAppearanceTextureBuilder:o,nodeTransformTextureBuilder:a,updateMaterialsCallback:s,updateTransformsCallback:d}),this.updateClippingPlanesForModel(e)}getModelMaterials(e){return this.getModelMaterialsWrapper(e).materials}getModelNodeAppearanceProvider(e){return this.getModelMaterialsWrapper(e).nodeAppearanceProvider}getModelNodeTransformProvider(e){return this.getModelMaterialsWrapper(e).nodeTransformProvider}getModelDefaultNodeAppearance(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.getDefaultAppearance()}setModelClippingPlanes(e,t){const 
n=this.materialsMap.get(e);if(void 0===n)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);n.perModelClippingPlanes=t,this.updateClippingPlanesForModel(e),this.triggerMaterialsChanged()}updateClippingPlanesForModel(e){const t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Materials for model ${e} has not been added, call ${this.addModelMaterials.name} first`);const n=[...t.perModelClippingPlanes,...this.clippingPlanes];Be(t.materials,e=>{e.clipping=!0,e.clipIntersection=!1,e.clippingPlanes=n})}setModelDefaultNodeAppearance(e,t){this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.setDefaultAppearance(t),this.updateMaterials(e)}getModelBackTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.regularNodeTreeIndices}getModelInFrontTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.infrontNodeTreeIndices}getModelGhostedTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.ghostedNodeTreeIndices}setRenderMode(e){this._renderMode=e;const t=e===Te.a.Ghost,n=e!==Te.a.DepthBufferOnly;this.applyToAllMaterials(r=>{r.uniforms.renderMode.value=e,r.colorWrite=n,r.transparent=t})}getRenderMode(){return this._renderMode}updateMaterials(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeAppearanceTextureBuilder.needsUpdate){const{nodeAppearanceTextureBuilder:e}=t;e.build()}this.triggerMaterialsChanged()}updateTransforms(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeTransformTextureBuilder.needsUpdate){const{nodeTransformTextureBuilder:e,materials:n}=t;e.build();const o=e.transformLookupTexture,i=new r.Vector2(o.image.width,o.image.height);Be(n,e=>{e.uniforms.transformOverrideTexture.value=o,e.uniforms.transformOverrideTextureSize.value=i})}this.triggerMaterialsChanged()}getModelMaterialsWrapper(e){const t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Model ${e} has not been added to 
MaterialManager`);return t}applyToAllMaterials(e){for(const t of this.materialsMap.values()){Be(t.materials,e)}}triggerMaterialsChanged(){this._events.materialsChanged.fire()}}function Be(e,t){t(e.box),t(e.circle),t(e.generalRing),t(e.nut),t(e.quad),t(e.cone),t(e.eccentricCone),t(e.sphericalSegment),t(e.torusSegment),t(e.generalCylinder),t(e.trapezium),t(e.ellipsoidSegment),t(e.instancedMesh),t(e.triangleMesh),t(e.simple)}
|
|
168
183
|
/*!
|
|
169
184
|
* Copyright 2021 Cognite AS
|
|
170
|
-
*/
|
|
185
|
+
*/
// Observable helper: emits `true` immediately (startWith), then `false` once
// the given delay has elapsed — a boolean "recently triggered" signal.
// NOTE(review): `j` and `W` are module-scope RxJS namespaces (rxjs and its
// operators) imported outside this chunk.
const ke=e=>Object(j.of)(!1).pipe(Object(W.delay)(e),Object(W.startWith)(!0));
|
|
171
186
|
/*!
|
|
172
187
|
* Copyright 2021 Cognite AS
|
|
173
|
-
*/
|
|
188
|
+
*/
|
|
189
|
+
/**
 * Predicate telling whether sector loading should proceed: there must be at
 * least one CAD model registered and loading must not have been explicitly
 * suspended via the loading hints.
 * @returns {boolean}
 */
function Le({ cadModelsMetadata, loadingHints }) {
  if (cadModelsMetadata.length === 0) {
    return false;
  }
  return loadingHints.suspendLoading !== true;
}
var Ge = n(24), Ue = n.n(Ge);
|
|
174
190
|
/*!
|
|
175
191
|
* Copyright 2021 Cognite AS
|
|
176
192
|
*/
|
|
177
|
-
|
|
193
|
+
/**
 * Utility for consuming a set of promises in completion order.
 * `raceUntilAllCompleted` yields each input promise as soon as it resolves,
 * so `for await … of` over the result observes values in completion order
 * rather than input order. A rejected input promise makes the generator
 * throw, leaving the remaining promises unconsumed.
 */
class Ve {
  static async *raceUntilAllCompleted(promises) {
    // Wrap each promise so the race resolves to the *promise instance* —
    // boxed in a one-element array to avoid implicit promise unwrapping —
    // letting us identify and remove the entry that completed.
    const pending = new Map(promises.map(p => [p, p.then(() => [p])]));
    while (pending.size > 0) {
      const [completed] = await Promise.race(pending.values());
      pending.delete(completed);
      yield completed;
    }
  }
}
var je = n(10);
|
|
178
194
|
/*!
|
|
179
195
|
* Copyright 2021 Cognite AS
|
|
180
|
-
*/
|
|
196
|
+
*/
// Orchestrates sector loading for CAD models: culls the wanted sectors,
// loads them in batches, and reports progress/statistics via callbacks.
class We{
// (sectorRepository, sectorCuller, modelStateHandler, collectStatisticsCallback, progressCallback)
constructor(e,t,n,r,o){this._sectorRepository=e,this._sectorCuller=t,this._modelStateHandler=n,this._collectStatisticsCallback=r,this._progressCallback=o}
// Async generator: determines sectors for the given input state, skips all
// work while the camera is in motion, filters out sectors whose state is
// unchanged, then yields loaded sectors in completion order, updating the
// model state handler for each. NOTE(review): `Ue()` appears to be lodash
// `chunk` (batches of 20) — confirm against the bundled module 24.
async*loadSectors(e){if(e.cameraInMotion)return[];const t=this._sectorCuller.determineSectors(e);this._collectStatisticsCallback(t.spentBudget);const n=this._modelStateHandler.hasStateChanged.bind(this._modelStateHandler),r=t.wantedSectors.filter(n),o=new qe(this._progressCallback);o.start(r.length);for(const t of Ue()(r,20)){const n=await this.filterSectors(e,t,o),r=this.startLoadingBatch(n,o);for await(const e of Ve.raceUntilAllCompleted(r))this._modelStateHandler.updateState(e),yield e}}
// Lets the culler drop sectors from a batch and reports the culled count.
async filterSectors(e,t,n){const r=await this._sectorCuller.filterSectorsToLoad(e,t);return n.reportNewSectorsCulled(t.length-r.length),r}
// Kicks off loading of every sector in the batch. A failed load is logged
// and degraded to a Discarded placeholder instead of failing the batch;
// progress is reported in the `finally` so failures still count as handled.
startLoadingBatch(e,t){return e.map(async e=>{try{return await this._sectorRepository.loadSector(e)}catch(t){je.a.error("Failed to load sector",e,"error:",t);return function(e){return{modelIdentifier:e.modelIdentifier,metadata:e.metadata,levelOfDetail:o.a.Discarded,group:void 0,instancedMeshes:void 0}}(e)}finally{t.reportNewSectorsLoaded(1)}})}
}
// Tracks scheduled/loaded/culled sector counts and forwards them to the
// progress callback on every change. Culled sectors are also counted as
// loaded so that progress can reach completion.
class qe{
constructor(e){this._sectorsScheduled=0,this._sectorsLoaded=0,this._sectorsCulled=0,this._progressCallback=e}
start(e){this._sectorsScheduled=e,this._sectorsLoaded=0,this._sectorsCulled=0,this.triggerCallback()}
reportNewSectorsLoaded(e){this._sectorsLoaded+=e,this.triggerCallback()}
reportNewSectorsCulled(e){this._sectorsCulled+=e,this._sectorsLoaded+=e,this.triggerCallback()}
triggerCallback(){this._progressCallback(this._sectorsLoaded,this._sectorsScheduled,this._sectorsCulled)}
}
|
|
181
197
|
/*!
|
|
182
198
|
* Copyright 2021 Cognite AS
|
|
183
|
-
*/
|
|
199
|
+
*/
// Default sector-loading budgets. `Object(s.q)()` selects the smaller budget
// (20 MiB download, 700 draw calls) over the larger one (35 MiB, 2000 draw
// calls) — presumably a low-end/mobile-device check; confirm in module `s`.
const He=Object(s.q)()?{highDetailProximityThreshold:5,geometryDownloadSizeBytes:20971520,maximumNumberOfDrawCalls:700,maximumRenderCost:1/0}:{highDetailProximityThreshold:10,geometryDownloadSizeBytes:36700160,maximumNumberOfDrawCalls:2e3,maximumRenderCost:1/0};
|
|
184
200
|
/*!
|
|
185
201
|
* Copyright 2021 Cognite AS
|
|
186
|
-
*/
|
|
187
|
-
// Module-scope preallocated three.js objects (Vector3/Matrix4/Box3).
// NOTE(review): consumers are outside this chunk — these look like reusable
// scratch values to avoid per-call allocations; not safe for concurrent use.
const yt={p:new o.Vector3,instanceMatrix:new o.Matrix4,baseBounds:new o.Box3,instanceBounds:new o.Box3};
|
|
202
|
+
*/
// Tracks the last-known level-of-detail per sector for each added model, so
// the loader can skip sectors whose state has not changed.
class Ke{
constructor(){this._sceneModelState={}}
// True when the sector's level-of-detail differs from the recorded one.
// Sectors never seen before count as changed for any LOD except Discarded.
// `C()` (presumably an invariant/assert helper — confirm) rejects sectors
// belonging to models that were never added.
hasStateChanged(e){const t=this._sceneModelState[e.modelIdentifier];C()(void 0!==t,`Model ${e.modelIdentifier} has not been added`);const n=t[e.metadata.id];return void 0!==n?n!==e.levelOfDetail:e.levelOfDetail!==o.a.Discarded}
addModel(e){C()(void 0===this._sceneModelState[e],`Model ${e} is already added`),this._sceneModelState[e]={}}
removeModel(e){C()(void 0!==this._sceneModelState[e],`Model ${e} is not added`),delete this._sceneModelState[e]}
// Records a sector's new LOD; Discarded sectors are forgotten entirely.
// Silently ignores sectors for models that are no longer registered.
updateState(e){if(void 0===this._sceneModelState[e.modelIdentifier])return;const t=this._sceneModelState[e.modelIdentifier];e.levelOfDetail===o.a.Discarded?delete t[e.metadata.id]:t[e.metadata.id]=e.levelOfDetail}
}
|
|
188
203
|
/*!
|
|
189
204
|
* Copyright 2021 Cognite AS
|
|
190
|
-
*/
|
|
191
|
-
const bt=new o.BufferGeometry;class _t extends o.Group{constructor(){super(...arguments),this._isDisposed=!1,this._referenceCount=0}reference(){this.ensureNotDisposed(),this._referenceCount++}dereference(){if(this.ensureNotDisposed(),0===this._referenceCount)throw new Error("No references");0==--this._referenceCount&&this.dispose()}dispose(){this.ensureNotDisposed(),this._isDisposed=!0;const e=this.children.filter(e=>e instanceof o.Mesh).map(e=>e);for(const t of e)void 0!==t.geometry&&(t.geometry.dispose(),t.geometry=bt)}ensureNotDisposed(){if(this._isDisposed)throw new Error("Already disposed/dereferenced")}}
|
|
205
|
+
*/const Xe={isLoading:!1,itemsLoaded:0,itemsRequested:0,itemsCulled:0};class Ye{constructor(e,t){this._cameraSubject=new j.Subject,this._clippingPlaneSubject=new j.Subject,this._loadingHintsSubject=new j.Subject,this._modelSubject=new j.Subject,this._budgetSubject=new j.Subject,this._progressSubject=new j.BehaviorSubject(Xe),this._sectorRepository=e,this._sectorCuller=t,this._modelStateHandler=new Ke,this._budget=He,this._lastSpent={downloadSize:0,drawCalls:0,renderCost:0,loadedSectorCount:0,simpleSectorCount:0,detailedSectorCount:0,forcedDetailedSectorCount:0,totalSectorCount:0,accumulatedPriority:0};const n=Object(j.combineLatest)([Object(j.combineLatest)([this._loadingHintsSubject.pipe(Object(W.startWith)({})),this._budgetSubject.pipe(Object(W.startWith)(this._budget))]).pipe(Object(W.map)(Ze)),Object(j.combineLatest)([this._cameraSubject.pipe(Object(W.auditTime)(500)),this._cameraSubject.pipe(Object(W.auditTime)(250),(r=600,Object(j.pipe)(Object(W.switchMap)(e=>ke(r)),Object(W.distinctUntilChanged)())))]).pipe(Object(W.map)(Qe)),Object(j.combineLatest)([this._clippingPlaneSubject.pipe(Object(W.startWith)([]))]).pipe(Object(W.map)(Je)),this.loadingModelObservable()]);var r;const o=new We(e,t,this._modelStateHandler,e=>{this._lastSpent=e},(e,t,n)=>{const r={isLoading:t>e,itemsRequested:t,itemsLoaded:e,itemsCulled:n};this._progressSubject.next(r)});this._updateObservable=n.pipe(Object(W.observeOn)(j.asyncScheduler),Object(W.auditTime)(250),Object(W.map)($e),Object(W.filter)(Le),Object(W.mergeMap)(async e=>async function*(e){for await(const t of o.loadSectors(e))yield t}(e)),Object(W.mergeMap)(e=>e),Object(W.finalize)(()=>{this._sectorRepository.clear()}))}dispose(){this._sectorCuller.dispose()}updateCamera(e){this._cameraSubject.next(e),this._progressSubject.next(Xe)}set clippingPlanes(e){this._clippingPlaneSubject.next(e)}get budget(){return this._budget}set budget(e){this._budget=e,this._budgetSubject.next(e)}get lastBudgetSpendage(){return 
this._lastSpent}addModel(e){this._modelStateHandler.addModel(e.cadModelMetadata.modelIdentifier),this._modelSubject.next({model:e,operation:"add"})}removeModel(e){this._modelStateHandler.removeModel(e.cadModelMetadata.modelIdentifier),this._modelSubject.next({model:e,operation:"remove"})}updateLoadingHints(e){this._loadingHintsSubject.next(e)}consumedSectorObservable(){return this._updateObservable.pipe(Object(W.share)())}getLoadingStateObserver(){return this._progressSubject}loadingModelObservable(){return this._modelSubject.pipe(Object(W.scan)((e,t)=>{const{model:n,operation:r}=t;switch(r){case"add":return e.push(n),e;case"remove":return e.filter(e=>e.cadModelMetadata.modelIdentifier!==n.cadModelMetadata.modelIdentifier);default:Object(s.k)(r)}},[]))}}function Ze([e,t]){return{loadingHints:e,budget:t}}function Qe([e,t]){return{camera:e,cameraInMotion:t}}function Je([e]){return{clippingPlanes:e}}function $e([e,t,n,r]){return{...t,...e,...n,cadModelsMetadata:r.filter(e=>e.visible).map(e=>e.cadModelMetadata)}}
|
|
192
206
|
/*!
|
|
193
207
|
* Copyright 2021 Cognite AS
|
|
194
|
-
*/
|
|
208
|
+
*/const et=new r.Quaternion;class tt{constructor(e){this.sectorIdOffset=0,this.scene=new r.Scene,this.containers=new Map,this.buffers={size:new r.Vector2,rtBuffer:new Uint8Array,sectorVisibilityBuffer:[]},this.coverageMaterial=new r.ShaderMaterial({vertexShader:_e.vertex,fragmentShader:_e.fragment,clipping:!0,side:r.DoubleSide}),this._ensureBuffersCorrectSizeVars={size:new r.Vector2},this._renderer=e.renderer,this._alreadyLoadedProvider=e.occludingGeometryProvider,this.renderTarget=new r.WebGLRenderTarget(1,1,{generateMipmaps:!1,type:r.UnsignedByteType,format:r.RGBAFormat,stencilBuffer:!1})}dispose(){this._renderer.dispose()}get renderer(){return this._renderer}createDebugCanvas(e){if(this._debugImageElement)throw new Error("createDebugCanvas() can only be called once");const t=e?e.width:this.renderTarget.width,n=e?e.height:this.renderTarget.height;return this._debugImageElement=document.createElement("img"),this._debugImageElement.style.width=t+"px",this._debugImageElement.style.height=n+"px",this._debugImageElement}setModels(e){const t=new Set;for(const n of e){const e=n.modelIdentifier;t.add(e);const r=this.containers.get(e);r?this.updateModel(r,n):this.addModel(n)}const n=new Set(this.containers.keys()),r=new Set([...n].filter(e=>!t.has(e)));for(const e of r)this.removeModel(e)}setClipping(e){this.coverageMaterial.clippingPlanes=e}cullOccludedSectors(e,t){try{this.setAllSectorsVisible(!1),this.setSectorsVisibility(t,!0);const n=this.orderSectorsByVisibility(e);return t.filter(t=>{const r=this.containers.get(t.modelIdentifier);if(void 0===r)throw new Error(`Model ${t.modelIdentifier} is not registered`);const o=function(e,t,n){const{sectorBounds:r}=nt;return r.copy(t.bounds),r.applyMatrix4(e.modelMatrix),r.containsPoint(n)}(r.model,t.metadata,e.position);return 
n.some(e=>e.model.modelIdentifier===t.modelIdentifier&&e.sectorId===t.metadata.id)||o})}finally{this.setAllSectorsVisible(!0)}}orderSectorsByVisibility(e){this._debugImageElement&&(this.renderSectors(null,e),this._debugImageElement.src=this._renderer.domElement.toDataURL()),this.ensureBuffersCorrectSize(),this.renderSectors(this.renderTarget,e),this._renderer.readRenderTargetPixels(this.renderTarget,0,0,this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer);const t=this.unpackSectorVisibility(this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer),n=t.reduce((e,t)=>t.weight+e,0);return t.filter(e=>e.weight>0).sort((e,t)=>e&&t?t.weight-e.weight:e?-1:t?1:0).map(e=>{const t=this.findSectorContainer(e.sectorIdWithOffset),r=e.sectorIdWithOffset-t.sectorIdOffset;return{model:t.model,sectorId:r,priority:e.weight/n,depth:e.distance}})}ensureBuffersCorrectSize(){const{size:e}=this._ensureBuffersCorrectSizeVars;if(this._renderer.getSize(e),!this.buffers.size.equals(e)){const t=Math.max(Math.floor(e.width*tt.CoverageRenderTargetScalingFactor),64),n=Math.max(Math.floor(e.height*tt.CoverageRenderTargetScalingFactor),64);this.renderTarget.setSize(t,n),this.buffers.rtBuffer.length<4*t*n&&(this.buffers.rtBuffer=new Uint8Array(4*t*n)),this.buffers.size.copy(e)}}renderSectors(e,t){const n=new s.i(this._renderer);try{n.localClippingEnabled=!0,n.setRenderTarget(e),n.setClearColor("#FFFFFF",1),n.autoClear=!1,n.setSize(this.buffers.size.width,this.buffers.size.height),this._renderer.clear(!0,!0),this._alreadyLoadedProvider.renderOccludingGeometry(e,t),this._renderer.render(this.scene,t)}finally{n.resetState()}}setAllSectorsVisible(e){const t=e?1:0;this.containers.forEach(e=>{for(let n=0;n<e.sectors.length;++n){const r=e.sectors[n].id,o=e.sectorIndexById[r];e.attributesValues[5*o+4]=t}e.attributesBuffer.needsUpdate=!0})}setSectorsVisibility(e,t){const n=t?1:0;e.forEach(e=>{const t=e.metadata.id,r=this.containers.get(e.modelIdentifier);if(void 0===r)throw 
new Error(`Sector ${e} is from a model not added`);const o=r.sectorIndexById[t];r.attributesValues[5*o+4]=n,r.attributesBuffer.needsUpdate=!0})}removeModel(e){const t=this.containers.get(e);if(!t)throw new Error(`Could not find model '${e}'`);t.mesh.geometry.dispose(),this.deleteSectorsFromBuffers(t.sectorIdOffset,t.lastSectorIdWithOffset),this.scene.remove(t.renderable),this.containers.delete(e)}deleteSectorsFromBuffers(e,t){const n=this.buffers.sectorVisibilityBuffer;for(let r=e;r<=t;++r)n[r]={sectorIdWithOffset:-1,weight:-1,distance:1/0}}addModel(e){const t=e.scene.getAllSectors(),[n,o,i]=this.createSectorTreeGeometry(this.sectorIdOffset,t),a=new r.Group;a.matrixAutoUpdate=!1,a.applyMatrix4(e.modelMatrix),a.updateMatrixWorld(),a.add(n);const s=t.reduce((e,t)=>Math.max(t.id,e),0),d=new Array(s);t.forEach((e,t)=>d[e.id]=t),this.containers.set(e.modelIdentifier,{model:e,sectors:t,sectorIndexById:d,sectorIdOffset:this.sectorIdOffset,lastSectorIdWithOffset:this.sectorIdOffset+s,renderable:a,mesh:n,attributesBuffer:o,attributesValues:i}),this.sectorIdOffset+=s+1,this.scene.add(a)}updateModel(e,t){e.renderable.matrix.copy(t.modelMatrix),e.renderable.updateMatrixWorld(!0)}findSectorContainer(e){for(const t of this.containers.values())if(e>=t.sectorIdOffset&&e<=t.lastSectorIdWithOffset)return t;throw new Error(`Sector ID ${e} is out of range`)}unpackSectorVisibility(e,t,n){function r(e,t){const n=e*e+t*t;return.5*(2.5-n)+Math.exp(-Math.sqrt(n))}const o=this.buffers.sectorVisibilityBuffer;!function(e){for(let t=0;t<e.length;t++){const n=e[t];n&&(n.weight=0)}}(o);const i=t/2,a=e/2;for(let s=0;s<t;s++){const d=(s-t/2)/i;for(let t=0;t<e;t++){const i=t+e*s,l=n[4*i+0],c=n[4*i+1],u=n[4*i+2],m=n[4*i+3];if(255!==l||255!==c||255!==u){const e=(s-a)/a,t=u+255*c+255*l*255,n=o[t]||{sectorIdWithOffset:t,weight:0,distance:m};n.weight+=r(e,d),n.distance=Math.min(n.distance,m),o[t]=n}}}return o}createSectorTreeGeometry(e,t){const n=new r.Vector3,o=new r.Vector3,i=t.length,a=new 
Float32Array(5*i),s=new r.BoxBufferGeometry,d=new r.InstancedMesh(s,this.coverageMaterial,i),l=new r.Vector3;t.forEach((t,i)=>{const{xy:s,xz:c,yz:u}=t.facesFile.coverageFactors;l.set(u,c,s),((t,i,s,l)=>{t.getCenter(n),t.getSize(o);const c=(new r.Matrix4).compose(n,et,o);d.setMatrixAt(i,c),a[5*i+0]=e+s,a[5*i+1]=l.x,a[5*i+2]=l.y,a[5*i+3]=l.z,a[5*i+4]=1})(t.bounds,i,t.id,l)});const c=new r.InstancedInterleavedBuffer(a,5);return s.setAttribute("a_sectorId",new r.InterleavedBufferAttribute(c,1,0)),s.setAttribute("a_coverageFactor",new r.InterleavedBufferAttribute(c,3,1)),s.setAttribute("a_visible",new r.InterleavedBufferAttribute(c,1,4)),[d,c,a]}}tt.CoverageRenderTargetScalingFactor=.5;const nt={sectorBounds:new r.Box3};function rt(e,t){e.downloadSize+=t.downloadSize,e.drawCalls+=t.drawCalls,e.renderCost+=t.renderCost}
|
|
195
209
|
/*!
|
|
196
210
|
* Copyright 2021 Cognite AS
|
|
197
211
|
*/
|
|
198
|
-
class
|
|
212
|
+
class ot{constructor(e,t){this.sectors=[],this._totalCost={downloadSize:0,drawCalls:0,renderCost:0},this.determineSectorCost=t,Object(s.y)(e,e=>(this.sectors.length=Math.max(this.sectors.length,e.id),this.sectors[e.id]={sector:e,parentIndex:-1,priority:-1,cost:{downloadSize:0,drawCalls:0,renderCost:0},lod:o.a.Discarded},!0));for(let e=0;e<this.sectors.length;++e){const t=this.sectors[e];if(void 0!==t){const n=t.sector.children.map(e=>e.id);for(const t of n)this.sectors[t].parentIndex=e}}e.facesFile.fileName&&this.setSectorLod(e.id,o.a.Simple)}get totalCost(){return this._totalCost}determineWantedSectorCount(){return this.sectors.reduce((e,t)=>e=t.lod!==o.a.Discarded?e+1:e,0)}toWantedSectors(e,t,n){return this.sectors.filter(e=>void 0!==e).map(r=>({modelIdentifier:e,modelBaseUrl:t,levelOfDetail:r.lod,metadata:r.sector,priority:r.priority,geometryClipBox:n})).sort((e,t)=>t.priority-e.priority)}markSectorDetailed(e,t){if(this.setSectorPriority(e,t),this.sectors[e].lod===o.a.Detailed)return;let n=this.sectors[e];for(;;){switch(n.lod){case o.a.Simple:this.replaceSimpleWithDetailed(n.sector.id);break;case o.a.Discarded:this.setSectorLod(n.sector.id,o.a.Detailed)}if(-1===n.parentIndex)break;n=this.sectors[n.parentIndex]}this.markAllDiscardedChildrenAsSimple(e)}replaceSimpleWithDetailed(e){it(this.sectors[e].lod===o.a.Simple,`Sector ${e} must be a Simple-sector, but got ${this.sectors[e].lod}`),this.setSectorLod(e,o.a.Detailed),this.markAllDiscardedChildrenAsSimple(e)}markAllDiscardedChildrenAsSimple(e){for(const t of this.sectors[e].sector.children)this.getSectorLod(t.id)===o.a.Discarded&&null!==t.facesFile.fileName&&this.setSectorLod(t.id,o.a.Simple)}setSectorLod(e,t){var 
n,r;it(t!==o.a.Simple||null!==this.sectors[e].sector.facesFile.fileName),this.sectors[e].lod=t,n=this._totalCost,r=this.sectors[e].cost,n.downloadSize-=r.downloadSize,n.drawCalls-=r.drawCalls,n.renderCost-=r.renderCost,this.sectors[e].cost=this.determineSectorCost(this.sectors[e].sector,t),rt(this._totalCost,this.sectors[e].cost)}setSectorPriority(e,t){this.sectors[e].priority=t}getSectorLod(e){return this.sectors[e].lod}}function it(e,t="assertion hit"){e||je.a.error("[ASSERT]",t)}
|
|
213
|
+
/*!
|
|
214
|
+
* Copyright 2021 Cognite AS
|
|
215
|
+
*/class at{constructor(e){this._takenSectorTrees=new Map,this.determineSectorCost=e}get totalCost(){const e={downloadSize:0,drawCalls:0,renderCost:0};return this._takenSectorTrees.forEach(({sectorTree:t})=>{rt(e,t.totalCost)}),e}initializeScene(e){this._takenSectorTrees.set(e.modelIdentifier,{sectorTree:new ot(e.scene.root,this.determineSectorCost),modelMetadata:e})}getWantedSectorCount(){let e=0;return this._takenSectorTrees.forEach(({sectorTree:t})=>{e+=t.determineWantedSectorCount()}),e}markSectorDetailed(e,t,n){const r=this._takenSectorTrees.get(e.modelIdentifier);!function(e,t="assertion hit"){console.assert(e,t)}(!!r,`Could not find sector tree for ${e.modelIdentifier} (have trees ${Array.from(this._takenSectorTrees.keys()).join(", ")})`);const{sectorTree:o}=r;o.markSectorDetailed(t,n)}isWithinBudget(e){return this.totalCost.downloadSize<e.geometryDownloadSizeBytes&&this.totalCost.drawCalls<e.maximumNumberOfDrawCalls&&this.totalCost.renderCost<e.maximumRenderCost}collectWantedSectors(){const e=new Array;for(const[t,{sectorTree:n,modelMetadata:r}]of this._takenSectorTrees)e.push(...n.toWantedSectors(t,r.modelBaseUrl,r.geometryClipBox));return e.sort((e,t)=>t.priority-e.priority),e}clear(){this._takenSectorTrees.clear()}}class st{constructor(e){this.options={renderer:e.renderer,determineSectorCost:e&&e.determineSectorCost?e.determineSectorCost:dt,logCallback:e&&e.logCallback?e.logCallback:()=>{},coverageUtil:e.coverageUtil},this.takenSectors=new at(this.options.determineSectorCost)}dispose(){this.options.coverageUtil.dispose()}determineSectors(e){const 
t=this.update(e.camera,e.cadModelsMetadata,e.clippingPlanes,e.budget),n=t.collectWantedSectors(),r=n.filter(e=>e.levelOfDetail!==o.a.Discarded),i=e.cadModelsMetadata.reduce((e,t)=>e+t.scene.sectorCount,0),a=r.length,s=r.filter(e=>e.levelOfDetail===o.a.Simple).length,d=r.filter(e=>!Number.isFinite(e.priority)).length,l=r.filter(e=>Number.isFinite(e.priority)&&e.priority>0).reduce((e,t)=>e+t.priority,0),c=(100*(a-s)/i).toPrecision(3),u=(100*a/i).toPrecision(3);this.log(`Scene: ${a} (${d} required, ${i} sectors, ${u}% of all sectors - ${c}% detailed)`);return{spentBudget:{drawCalls:t.totalCost.drawCalls,downloadSize:t.totalCost.downloadSize,renderCost:t.totalCost.renderCost,totalSectorCount:i,forcedDetailedSectorCount:d,loadedSectorCount:a,simpleSectorCount:s,detailedSectorCount:a-s,accumulatedPriority:l},wantedSectors:n}}filterSectorsToLoad(e,t){const n=this.options.coverageUtil.cullOccludedSectors(e.camera,t);return Promise.resolve(n)}update(e,t,n,r){const{coverageUtil:o}=this.options,i=this.takenSectors;i.clear(),t.forEach(e=>i.initializeScene(e)),o.setModels(t),o.setClipping(n);const a=o.orderSectorsByVisibility(e);this.addHighDetailsForNearSectors(e,t,r,i,n);let s=0;const d=a.length;let l=0;for(l=0;l<d&&i.isWithinBudget(r);l++){const e=a[l];i.markSectorDetailed(e.model,e.sectorId,e.priority),s+=e.priority}return this.log(`Retrieving ${l} of ${d} (last: ${a.length>0?a[l-1]:null})`),this.log(`Total scheduled: ${i.getWantedSectorCount()} of ${d} (cost: ${i.totalCost.downloadSize/1024/1024}/${r.geometryDownloadSizeBytes/1024/1024}, drawCalls: ${i.totalCost.drawCalls}/${r.maximumNumberOfDrawCalls}, priority: ${s})`),i}addHighDetailsForNearSectors(e,t,n,o,i){const a=e.clone(!0);a.far=n.highDetailProximityThreshold,a.updateProjectionMatrix();const s=a.matrixWorldInverse,d=a.projectionMatrix,l=new r.Matrix4;t.forEach(e=>{l.multiplyMatrices(s,e.modelMatrix);let 
t=e.scene.getSectorsIntersectingFrustum(d,l);null!=i&&i.length>0&&(t=this.testForClippingOcclusion(t,i,e.modelMatrix)),this.markSectorsAsDetailed(t,o,e)})}testForClippingOcclusion(e,t,n){const r=[];for(let o=0;o<e.length;o++){const i=Object(s.o)(e[o].bounds).map(e=>{const t=e.clone();return t.applyMatrix4(n),t});let a=!0;for(let e=0;e<t.length;e++){let n=!1;for(let r=0;r<i.length;r++)n=t[e].distanceToPoint(i[r])>=0||n;a=a&&n}a&&r.push(e[o])}return r}markSectorsAsDetailed(e,t,n){for(let r=0;r<e.length;r++)t.markSectorDetailed(n,e[r].id,1/0)}log(e,...t){this.options.logCallback(e,...t)}}function dt(e,t){switch(t){case o.a.Detailed:return{downloadSize:e.indexFile.downloadSize,drawCalls:e.estimatedDrawCallCount,renderCost:e.estimatedRenderCost};case o.a.Simple:return{downloadSize:e.facesFile.downloadSize,drawCalls:1,renderCost:Math.ceil(e.facesFile.downloadSize/100)};default:throw new Error("Can't compute cost for lod "+t)}}function lt(e,t){const n=new tt({renderer:e,occludingGeometryProvider:t});return new st({renderer:e,coverageUtil:n})}
|
|
216
|
+
/*!
|
|
217
|
+
* Copyright 2021 Cognite AS
|
|
218
|
+
*/},,function(e,t){e.exports=require("@cognite/potree-core")},function(e,t,n){"use strict";var r=n(22);
|
|
219
|
+
/*!
|
|
220
|
+
* Copyright 2021 Cognite AS
|
|
221
|
+
*/t.a=r},function(e,t){e.exports=require("mixpanel-browser")},function(e,t){e.exports=require("@tweenjs/tween.js")},function(e,t,n){"use strict";n.d(t,"a",(function(){return i})),n.d(t,"b",(function(){return a}));var r=n(0);
|
|
199
222
|
/*!
|
|
200
223
|
* Copyright 2021 Cognite AS
|
|
201
224
|
*/
|
|
202
|
-
const St={defines:{COGNITE_COLOR_BY_TREE_INDEX:!1}},Pt={simpleMesh:{fragment:wt()(n(29).default),vertex:wt()(n(30).default)},detailedMesh:{fragment:wt()(n(31).default),vertex:wt()(n(32).default)},instancedMesh:{fragment:wt()(n(33).default),vertex:wt()(n(34).default)},boxPrimitive:{fragment:wt()(n(17).default),vertex:wt()(n(18).default)},circlePrimitive:{fragment:wt()(n(35).default),vertex:wt()(n(36).default)},conePrimitive:{fragment:wt()(n(37).default),vertex:wt()(n(38).default)},eccentricConePrimitive:{fragment:wt()(n(39).default),vertex:wt()(n(40).default)},ellipsoidSegmentPrimitive:{fragment:wt()(n(41).default),vertex:wt()(n(42).default)},generalCylinderPrimitive:{fragment:wt()(n(43).default),vertex:wt()(n(44).default)},generalRingPrimitive:{fragment:wt()(n(45).default),vertex:wt()(n(46).default)},nutPrimitive:{fragment:wt()(n(17).default),vertex:wt()(n(18).default)},quadPrimitive:{fragment:wt()(n(17).default),vertex:wt()(n(18).default)},torusSegmentPrimitive:{fragment:wt()(n(47).default),vertex:wt()(n(48).default)},trapeziumPrimitive:{fragment:wt()(n(49).default),vertex:wt()(n(50).default)}},Rt={fragment:wt()(n(51).default),vertex:wt()(n(52).default)},Nt={fragment:wt()(n(53).default),vertex:wt()(n(54).default)},Dt={fragment:wt()(n(55).default),vertex:wt()(n(20).default)},zt={fragment:wt()(n(56).default),vertex:wt()(n(20).default)},Et={fragment:wt()(n(57).default),vertex:wt()(n(58).default)},Ot=new o.Quaternion;class At{constructor(e){this.sectorIdOffset=0,this.scene=new o.Scene,this.containers=new Map,this.buffers={size:new o.Vector2,rtBuffer:new Uint8Array,sectorVisibilityBuffer:[]},this.coverageMaterial=new o.ShaderMaterial({vertexShader:Et.vertex,fragmentShader:Et.fragment,clipping:!0,side:o.DoubleSide}),this._ensureBuffersCorrectSizeVars={size:new o.Vector2},this._renderer=e.renderer,this._alreadyLoadedProvider=e.occludingGeometryProvider,this.renderTarget=new 
o.WebGLRenderTarget(1,1,{generateMipmaps:!1,type:o.UnsignedByteType,format:o.RGBAFormat,stencilBuffer:!1})}dispose(){this._renderer.dispose()}get renderer(){return this._renderer}createDebugCanvas(e){if(this._debugImageElement)throw new Error("createDebugCanvas() can only be called once");const t=e?e.width:this.renderTarget.width,n=e?e.height:this.renderTarget.height;return this._debugImageElement=document.createElement("img"),this._debugImageElement.style.width=t+"px",this._debugImageElement.style.height=n+"px",this._debugImageElement}setModels(e){const t=new Set;for(const n of e){const e=n.blobUrl;t.add(e);const r=this.containers.get(e);r?this.updateModel(r,n):this.addModel(n)}const n=new Set(this.containers.keys()),r=new Set([...n].filter(e=>!t.has(e)));for(const e of r)this.removeModel(e)}setClipping(e,t){this.coverageMaterial.clippingPlanes=e,this.coverageMaterial.clipIntersection=t}cullOccludedSectors(e,t){try{this.setAllSectorsVisible(!1),this.setSectorsVisibility(t,!0);const n=this.orderSectorsByVisibility(e);return t.filter(t=>{const r=this.containers.get(t.blobUrl);if(void 0===r)throw new Error(`Model ${t.blobUrl} is not registered`);const o=function(e,t,n){const{sectorBounds:r}=Ft;return r.copy(t.bounds),r.applyMatrix4(e.modelMatrix),r.containsPoint(n)}(r.model,t.metadata,e.position);return n.some(e=>e.model.blobUrl===t.blobUrl&&e.sectorId===t.metadata.id)||o})}finally{this.setAllSectorsVisible(!0)}}orderSectorsByVisibility(e){this._debugImageElement&&(this.renderSectors(null,e),this._debugImageElement.src=this._renderer.domElement.toDataURL()),this.ensureBuffersCorrectSize(),this.renderSectors(this.renderTarget,e),this._renderer.readRenderTargetPixels(this.renderTarget,0,0,this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer);const t=this.unpackSectorVisibility(this.renderTarget.width,this.renderTarget.height,this.buffers.rtBuffer),n=t.reduce((e,t)=>t.weight+e,0);return 
t.filter(e=>e.weight>0).sort((e,t)=>e&&t?t.weight-e.weight:e?-1:t?1:0).map(e=>{const t=this.findSectorContainer(e.sectorIdWithOffset),r=e.sectorIdWithOffset-t.sectorIdOffset;return{model:t.model,sectorId:r,priority:e.weight/n,depth:e.distance}})}ensureBuffersCorrectSize(){const{size:e}=this._ensureBuffersCorrectSizeVars;if(this._renderer.getSize(e),!this.buffers.size.equals(e)){const t=Math.max(Math.floor(e.width*At.CoverageRenderTargetScalingFactor),64),n=Math.max(Math.floor(e.height*At.CoverageRenderTargetScalingFactor),64);this.renderTarget.setSize(t,n),this.buffers.rtBuffer.length<4*t*n&&(this.buffers.rtBuffer=new Uint8Array(4*t*n)),this.buffers.size.copy(e)}}renderSectors(e,t){const n=new N(this._renderer);try{n.localClippingEnabled=!0,n.setRenderTarget(e),n.setClearColor("#FFFFFF",1),n.autoClear=!1,n.setSize(this.buffers.size.width,this.buffers.size.height),this._renderer.clear(!0,!0),this._alreadyLoadedProvider.renderOccludingGeometry(e,t),this._renderer.render(this.scene,t)}finally{n.resetState()}}setAllSectorsVisible(e){const t=e?1:0;this.containers.forEach(e=>{for(let n=0;n<e.sectors.length;++n){const r=e.sectors[n].id,o=e.sectorIndexById[r];e.attributesValues[5*o+4]=t}e.attributesBuffer.needsUpdate=!0})}setSectorsVisibility(e,t){const n=t?1:0;e.forEach(e=>{const t=e.metadata.id,r=this.containers.get(e.blobUrl);if(void 0===r)throw new Error(`Sector ${e} is from a model not added`);const o=r.sectorIndexById[t];r.attributesValues[5*o+4]=n,r.attributesBuffer.needsUpdate=!0})}removeModel(e){const t=this.containers.get(e);if(!t)throw new Error(`Could not find model '${e}'`);t.mesh.geometry.dispose(),this.scene.remove(t.renderable),this.containers.delete(e)}addModel(e){const t=e.scene.getAllSectors(),[n,r,i]=this.createSectorTreeGeometry(this.sectorIdOffset,t),a=new o.Group;a.matrixAutoUpdate=!1,a.applyMatrix4(e.modelMatrix),a.updateMatrixWorld(),a.add(n);const s=t.reduce((e,t)=>Math.max(t.id,e),0),d=new 
Array(s);t.forEach((e,t)=>d[e.id]=t),this.containers.set(e.blobUrl,{model:e,sectors:t,sectorIndexById:d,sectorIdOffset:this.sectorIdOffset,lastSectorIdWithOffset:this.sectorIdOffset+s,renderable:a,mesh:n,attributesBuffer:r,attributesValues:i}),this.sectorIdOffset+=s,this.scene.add(a)}updateModel(e,t){e.renderable.matrix.copy(t.modelMatrix),e.renderable.updateMatrixWorld(!0)}findSectorContainer(e){for(const t of this.containers.values())if(e>=t.sectorIdOffset&&e<=t.lastSectorIdWithOffset)return t;throw new Error(`Sector ID ${e} is out of range`)}unpackSectorVisibility(e,t,n){function r(e,t){const n=e*e+t*t;return.5*(2.5-n)+Math.exp(-Math.sqrt(n))}const o=this.buffers.sectorVisibilityBuffer;!function(e){for(let t=0;t<e.length;t++){const n=e[t];n&&(n.weight=0)}}(o);const i=t/2,a=e/2;for(let s=0;s<t;s++){const d=(s-t/2)/i;for(let t=0;t<e;t++){const i=t+e*s,l=n[4*i+0],c=n[4*i+1],u=n[4*i+2],m=n[4*i+3];if(255!==l||255!==c||255!==u){const e=(s-a)/a,t=u+255*c+255*l*255,n=o[t]||{sectorIdWithOffset:t,weight:0,distance:m};n.weight+=r(e,d),n.distance=Math.min(n.distance,m),o[t]=n}}}return o}createSectorTreeGeometry(e,t){const n=new o.Vector3,r=new o.Vector3,i=t.length,a=new Float32Array(5*i),s=new o.BoxBufferGeometry,d=new o.InstancedMesh(s,this.coverageMaterial,i),l=new o.Vector3;t.forEach((t,i)=>{const{xy:s,xz:c,yz:u}=t.facesFile.coverageFactors;l.set(u,c,s),((t,i,s,l)=>{t.getCenter(n),t.getSize(r);const c=(new o.Matrix4).compose(n,Ot,r);d.setMatrixAt(i,c),a[5*i+0]=e+s,a[5*i+1]=l.x,a[5*i+2]=l.y,a[5*i+3]=l.z,a[5*i+4]=1})(t.bounds,i,t.id,l)});const c=new o.InstancedInterleavedBuffer(a,5);return s.setAttribute("a_sectorId",new o.InterleavedBufferAttribute(c,1,0)),s.setAttribute("a_coverageFactor",new o.InterleavedBufferAttribute(c,3,1)),s.setAttribute("a_visible",new o.InterleavedBufferAttribute(c,1,4)),[d,c,a]}}At.CoverageRenderTargetScalingFactor=.5;const Ft={sectorBounds:new o.Box3};function Bt(e,t){e.downloadSize+=t.downloadSize,e.drawCalls+=t.drawCalls}
|
|
225
|
+
const o={renderSize:new r.Vector2,position:new r.Vector3};function i(e,t,n,i=new r.Vector3){const{renderSize:a,position:s}=o,d=e.domElement;e.getSize(a),s.copy(n),s.project(t);const{width:l,height:c}=d.getBoundingClientRect(),u=a.width/l,m=a.height/c,h=(s.x+1)/(1*u*2),p=(1-s.y)/(1*m*2);return i.set(h,p,s.z)}function a(e,t,n,a=new r.Vector3){i(e,t,n,a);const{renderSize:s}=o;e.getSize(s);const d=e.getPixelRatio();return a.x=Math.round(a.x*s.width/d),a.y=Math.round(a.y*s.height/d),a}},function(e,t){e.exports=require("lodash/cloneDeep")},,function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include 
<packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_normal;\nvarying vec3 v_color;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * instanceMatrix * vec4(normalize(normal), 0.0)).xyz;\n //v_normal = normal;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n vViewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}"},function(e,t,n){"use strict";n.d(t,"a",(function(){return o}));var r=n(0);
|
|
203
226
|
/*!
|
|
204
227
|
* Copyright 2021 Cognite AS
|
|
205
228
|
*/
|
|
206
|
-
class kt{constructor(e,t){this.sectors=[],this._totalCost={downloadSize:0,drawCalls:0},this.determineSectorCost=t,X(e,e=>(this.sectors.length=Math.max(this.sectors.length,e.id),this.sectors[e.id]={sector:e,parentIndex:-1,priority:-1,cost:{downloadSize:0,drawCalls:0},lod:K.a.Discarded},!0));for(let e=0;e<this.sectors.length;++e){const t=this.sectors[e];if(void 0!==t){const n=t.sector.children.map(e=>e.id);for(const t of n)this.sectors[t].parentIndex=e}}e.facesFile.fileName&&this.setSectorLod(e.id,K.a.Simple)}get totalCost(){return this._totalCost}determineWantedSectorCount(){return this.sectors.reduce((e,t)=>e=t.lod!==K.a.Discarded?e+1:e,0)}toWantedSectors(e,t){return this.sectors.filter(e=>void 0!==e).map(n=>({levelOfDetail:n.lod,metadata:n.sector,priority:n.priority,blobUrl:e,geometryClipBox:t})).sort((e,t)=>t.priority-e.priority)}markSectorDetailed(e,t){if(this.setSectorPriority(e,t),this.sectors[e].lod===K.a.Detailed)return;let n=this.sectors[e];for(;;){switch(n.lod){case K.a.Simple:this.replaceSimpleWithDetailed(n.sector.id);break;case K.a.Discarded:this.setSectorLod(n.sector.id,K.a.Detailed)}if(-1===n.parentIndex)break;n=this.sectors[n.parentIndex]}this.markAllDiscardedChildrenAsSimple(e)}replaceSimpleWithDetailed(e){Ut(this.sectors[e].lod===K.a.Simple,`Sector ${e} must be a Simple-sector, but got ${this.sectors[e].lod}`),this.setSectorLod(e,K.a.Detailed),this.markAllDiscardedChildrenAsSimple(e)}markAllDiscardedChildrenAsSimple(e){for(const t of this.sectors[e].sector.children)this.getSectorLod(t.id)===K.a.Discarded&&null!==t.facesFile.fileName&&this.setSectorLod(t.id,K.a.Simple)}setSectorLod(e,t){var 
n,r;Ut(t!==K.a.Simple||null!==this.sectors[e].sector.facesFile.fileName),this.sectors[e].lod=t,n=this._totalCost,r=this.sectors[e].cost,n.downloadSize-=r.downloadSize,n.drawCalls-=r.drawCalls,this.sectors[e].cost=this.determineSectorCost(this.sectors[e].sector,t),Bt(this._totalCost,this.sectors[e].cost)}setSectorPriority(e,t){this.sectors[e].priority=t}getSectorLod(e){return this.sectors[e].lod}}function Ut(e,t="assertion hit"){console.assert(e,t)}
|
|
229
|
+
// NOTE(review): minified webpack bundle output — tokens left byte-identical; only
// comments and whitespace were added. `r` is the (minified) three.js module binding
// (r.Plane, r.Box3, r.Vector3).
// Axis-aligned clipping volume: wraps a THREE.Box3 and keeps six THREE.Plane
// instances — one per box face — in sync with the box extents. The +axis planes are
// anchored at the min face and the -axis planes at the max face, so every normal
// points into the box.
class o{
// e: optional existing Box3 to wrap; falls back to a fresh Box3. The six planes
// are created once and mutated in place on every update.
constructor(e){this._clippingPlanes=[new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane,new r.Plane],this._box=e||new r.Box3,this.updatePlanes()}
// Extent accessors: each setter mutates the wrapped box and immediately
// refreshes all six planes.
set minX(e){this._box.min.x=e,this.updatePlanes()}
get minX(){return this._box.min.x}
set minY(e){this._box.min.y=e,this.updatePlanes()}
get minY(){return this._box.min.y}
set minZ(e){this._box.min.z=e,this.updatePlanes()}
get minZ(){return this._box.min.z}
set maxX(e){this._box.max.x=e,this.updatePlanes()}
get maxX(){return this._box.max.x}
set maxY(e){this._box.max.y=e,this.updatePlanes()}
get maxY(){return this._box.max.y}
set maxZ(e){this._box.max.z=e,this.updatePlanes()}
get maxZ(){return this._box.max.z}
// Rebuilds all six planes from the current box extents (indices: 0/1 = ±X,
// 2/3 = ±Y, 4/5 = ±Z).
updatePlanes(){this._clippingPlanes[0].setFromNormalAndCoplanarPoint(new r.Vector3(1,0,0),new r.Vector3(this.minX,0,0)),this._clippingPlanes[1].setFromNormalAndCoplanarPoint(new r.Vector3(-1,0,0),new r.Vector3(this.maxX,0,0)),this._clippingPlanes[2].setFromNormalAndCoplanarPoint(new r.Vector3(0,1,0),new r.Vector3(0,this.minY,0)),this._clippingPlanes[3].setFromNormalAndCoplanarPoint(new r.Vector3(0,-1,0),new r.Vector3(0,this.maxY,0)),this._clippingPlanes[4].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,1),new r.Vector3(0,0,this.minZ)),this._clippingPlanes[5].setFromNormalAndCoplanarPoint(new r.Vector3(0,0,-1),new r.Vector3(0,0,this.maxZ))}
// The six planes; naming suggests they are intended for three.js clippingPlanes
// consumers — confirm against callers.
get clippingPlanes(){return this._clippingPlanes}}
// --- end of this webpack module; the next module exports a minimal GLSL vertex
// shader (UV passthrough) as its default export ---
},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"
// Re-export shim module (its body continues on the next line of the bundle).
},function(e,t,n){"use strict";var r=n(13);n.d(t,"d",(function(){return r.b}));var o=n(18);n.d(t,"a",(function(){return o.a}));var i=n(1);n.d(t,"c",(function(){return i.k})),n.d(t,"b",(function(){return 
i.d}))},function(e,t){e.exports=require("comlink")},function(e,t){e.exports=require("loglevel")},function(e,t){e.exports=require("lodash/throttle")},function(e,t){e.exports=require("lodash/chunk")},function(e,t){e.exports=require("lodash/range")},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || 
renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n updateFragmentColor(renderMode, color, v_treeIndex, v_normal, gl_FragCoord.z, matCapTexture, GeometryType.Quad);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = 
floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n 
matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec3 color;\nattribute float treeIndex;\nattribute vec4 matrix0;\nattribute vec4 matrix1;\nattribute vec4 matrix2;\nattribute vec4 matrix3;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n \n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_treeIndex = treeIndex;\n v_color = color;\n v_normal = normalize(normalMatrix * (inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(normal), 0.0)).xyz);\n mat4 instanceMatrix = mat4(matrix0, matrix1, matrix2, matrix3);\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d 
+ e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n 
} else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int 
RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform 
sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main()\n{\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.TriangleMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / 
dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec3 color;\nattribute float treeIndex; \n\nvarying vec3 v_color;\nvarying float v_treeIndex;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize;\nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_color = color;\n v_treeIndex = treeIndex;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n treeIndex, \n 
treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(position, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 derivateNormal(vec3 v_viewPosition) {\n vec3 fdx = vec3(dFdx(v_viewPosition.x), dFdx(v_viewPosition.y), dFdx(v_viewPosition.z));\n vec3 fdy = vec3(dFdy(v_viewPosition.x), dFdy(v_viewPosition.y), dFdy(v_viewPosition.z));\n vec3 normal = normalize(cross(fdx, fdy));\n return normal;\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int 
RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor 
= isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(v_viewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = derivateNormal(v_viewPosition);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.InstancedMesh);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 
256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; 
i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_viewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main()\n{\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n v_color = a_color;\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 modelViewPosition = viewMatrix * modelMatrix * vec4(transformed, 1.0);\n v_viewPosition = modelViewPosition.xyz;\n v_treeIndex = a_treeIndex;\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + 
e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if 
(renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, 
outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform 
sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n vec3 normal = normalize( v_normal );\n if (dist > 0.25)\n discard;\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D 
transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 
a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_normal;\n\nvarying vec2 v_xy;\nvarying vec3 v_color;\nvarying vec3 v_normal;\nvarying float v_treeIndex;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n v_xy = vec2(position.x, position.y);\n v_treeIndex = a_treeIndex;\n\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec3 normal = normalize( v_normal );\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = v_V;\n float height = length(v_centerA.xyz - v_centerB.xyz);\n float R2 = v_centerA.w;\n float dR = R2 - R1;\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n #if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n #else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n #endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR / height;\n float dRdRL2Inv = dRLInv * dRLInv;\n a -= D.z * D.z * dRdRL2Inv;\n b -= dRLInv * (E.z * D.z * dRLInv + R1 * D.z);\n c -= dRLInv * (E.z * E.z * dRLInv + 2.0 * R1 * E.z);\n }\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0) {\n discard;\n }\n\n float sqrtd = 
sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n bool isInner = false;\n\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (intersectionPoint.z <= 0.0 ||\n intersectionPoint.z > height ||\n theta > v_angle + v_arcAngle ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n #if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n if (R1 != R2)\n {\n // Find normal vector\n vec3 n = -normalize(W.xyz);\n vec3 P1 = v_centerB.xyz;\n vec3 P2 = v_centerA.xyz;\n vec3 A = cross(P1 - p, P2 - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = P1 + R1 * t;\n vec3 o2 = P2 + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n }\n else\n {\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n }\n #endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// 
https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n 
float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_color;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_centerA;\nvarying vec4 v_V;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 
modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float maxRadius = max(a_radiusA, a_radiusB);\n float leftUpScale = maxRadius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (maxRadius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0.0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n float radiusB = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusB, 0.0)).xyz);\n float radiusA = length((modelToTransformOffset * vec4(a_localXAxis * a_radiusA, 0.0)).xyz);\n\n // We pack radii as w-components of v_centerB\n v_centerB.xyz = 
mul3(modelViewMatrix, centerB);\n v_centerB.w = radiusB;\n\n v_V.xyz = -cross(v_U.xyz, v_W.xyz);\n v_V.w = surfacePoint.y;\n\n v_centerA.xyz = mul3(modelViewMatrix, centerA);\n v_centerA.w = radiusA;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) / 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float 
projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 
= 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number 
/ 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 
0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. 
\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\n\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\nvarying float height;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, 
v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n mat3 basis = mat3(U.xyz, V.xyz, axis.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, axis.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerA.xyz;\n vec3 E = diff * basis;\n float L = height;\n vec3 D = rayDirection * basis;\n\n float R1 = v_centerA.w;\n float R2 = v_centerB.w;\n float dR = R2 - R1;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy)-R1*R1;\n float L2Inv = 1.0/(L*L);\n\n if (R1 != R2) {\n // Additional terms if radii are different\n float dRLInv = dR/L;\n float dRdRL2Inv = dRLInv*dRLInv;\n a -= D.z*D.z*dRdRL2Inv;\n b -= dRLInv*(E.z*D.z*dRLInv + R1*D.z);\n c -= dRLInv*(E.z*E.z*dRLInv + 2.0*R1*E.z);\n }\n\n // Additional terms when one of the center points is displaced orthogonal to normal vector\n vec2 displacement = ((v_centerB.xyz-v_centerA.xyz)*basis).xy; // In the basis where displacement is in XY only\n float displacementLengthSquared = dot(displacement, displacement);\n a += D.z*(D.z*displacementLengthSquared - 2.0*L*dot(D.xy, displacement))*L2Inv;\n b += (D.z*E.z*displacementLengthSquared - L*(D.x*E.z*displacement.x + D.y*E.z*displacement.y + D.z*E.x*displacement.x + D.z*E.y*displacement.y))*L2Inv;\n c += E.z*(E.z*displacementLengthSquared - 2.*L*dot(E.xy, displacement))*L2Inv;\n\n // Calculate a dicriminant of the above quadratic equation (factor 2 removed from all b-terms above)\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long eccentric cone\n if (d < 0.0) {\n discard;\n }\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the 
smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n // Check the smallest root, it is closest camera. Only test if the z-component is outside the truncated eccentric cone\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n bool isInner = false;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Either intersection point is behind starting point (happens inside the cone),\n // or the intersection point is outside the end caps. This is not a valid solution.\n isInner = true;\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= 0.0 ||\n intersectionPointZ >= L ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector\n vec3 n = normalize(-axis.xyz);\n vec3 v_centerA = v_centerA.xyz;\n vec3 v_centerB = v_centerB.xyz;\n vec3 A = cross(v_centerA - p, v_centerB - p);\n\n vec3 t = normalize(cross(n, A));\n vec3 o1 = v_centerA + R1 * t;\n vec3 o2 = v_centerB + R2 * t;\n vec3 B = o2-o1;\n normal = normalize(cross(A, B));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = 
mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n 
float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radiusA;\nattribute float a_radiusB;\nattribute vec3 a_normal;\nattribute vec3 a_color;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerA;\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 axis;\nvarying float height;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 normalWithOffset = normalize((modelTransformOffset * vec4(a_normal, 0)).xyz);\n\n float uniformScaleFactor = length(mul3(modelMatrix, normalize(vec3(1.0))));\n\n height = dot(centerA - centerB, 
normalWithOffset) * uniformScaleFactor;\n\n vec3 lDir;\n vec3 center = 0.5 * (centerA + centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n // Find the coordinates of centerA and centerB projected down to the end cap plane\n vec3 maxCenterProjected = centerA - dot(centerA, normalWithOffset) * normalWithOffset;\n vec3 minCenterProjected = centerB - dot(centerB, normalWithOffset) * normalWithOffset;\n float distanceBetweenProjectedCenters = length(maxCenterProjected - minCenterProjected);\n\n lDir = normalWithOffset;\n float dirSign = 1.0;\n if (dot(objectToCameraModelSpace, lDir) < 0.0) { // direction vector looks away, flip it\n dirSign = -1.0;\n lDir *= -1.;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n // compute basis for cone\n axis.xyz = -normalWithOffset;\n U.xyz = cross(objectToCameraModelSpace, axis.xyz);\n V.xyz = cross(U.xyz, axis.xyz);\n // Transform to camera space\n axis.xyz = normalize(normalMatrix * axis.xyz);\n U.xyz = normalize(normalMatrix * U.xyz);\n V.xyz = normalize(normalMatrix * V.xyz);\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radiusA * (position.x + 1.0) * 0.0025 / height);\n#endif\n\n v_centerA.xyz = mul3(viewMatrix, mul3(modelMatrix, centerA));\n v_centerB.xyz = mul3(viewMatrix, mul3(modelMatrix, centerB));\n\n float radiusA = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusA, 0.0)).xyz);\n float radiusB = length((modelToTransformOffset * vec4(normalize(vec3(1.0)) * a_radiusB, 0.0)).xyz);\n\n // Pack radii as w components of v_centerA and v_centerB\n v_centerA.w = radiusA;\n v_centerB.w = radiusB;\n\n float radiusIncludedDisplacement = 0.5*(2.0*max(a_radiusA, a_radiusB) + 
distanceBetweenProjectedCenters);\n vec3 surfacePoint = center + mat3(0.5 * height * lDir * (1.0 / uniformScaleFactor), radiusIncludedDisplacement*left, radiusIncludedDisplacement*up) * newPosition;\n vec3 transformed = surfacePoint;\n\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n axis.w = surfacePoint.z;\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // 
Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, 
renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include 
<packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform mat4 projectionMatrix;\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(sphereNormal.xyz);\n\n float vRadius = center.w;\n float ratio = vRadius / hRadius;\n mat3 basis = mat3(U.xyz, V.xyz, sphereNormal.xyz);\n mat3 scaledBasis = mat3(ratio * U.xyz, ratio * V.xyz, sphereNormal.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, sphereNormal.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - center.xyz;\n vec3 E = diff * scaledBasis;\n vec3 D = rayDirection * scaledBasis;\n\n float a = dot(D, D);\n float b = dot(E, D);\n float c = dot(E, E) - vRadius*vRadius;\n\n // discriminant of sphere equation (factor 2 removed from b above)\n float d = b*b - a*c;\n if(d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n float intersectionPointZ = E.z + dist*D.z;\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the first 
point, check the other point\n\n dist = dist2;\n intersectionPointZ = E.z + dist*D.z;\n p = rayTarget + dist*rayDirection;\n if (intersectionPointZ <= vRadius - height ||\n intersectionPointZ > vRadius ||\n isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Find normal vector in local space\n normal = vec3(p - center.xyz) * basis;\n normal.z = normal.z * (hRadius / vRadius) * (hRadius / vRadius);\n // Transform into camera space\n normal = normalize(basis * normal);\n if (dot(normal, rayDirection) > 0.) {\n normal = -normal;\n }\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b 
* 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\nuniform mat4 inverseNormalMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_center;\nattribute vec3 a_normal;\nattribute float a_horizontalRadius;\nattribute float a_verticalRadius;\nattribute float a_height;\n\nvarying float v_treeIndex;\n// We pack vRadius as w-component of center\nvarying vec4 center;\nvarying float hRadius;\nvarying float height;\n\n// U, V, axis represent the 3x3 sphere basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 U;\nvarying vec4 V;\nvarying vec4 sphereNormal;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 
transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * modelMatrix;\n\n vec3 centerWithOffset = mul3(modelTransformOffset, a_center).xyz;\n\n vec3 normalWithOffset = (modelTransformOffset * vec4(a_normal, 0)).xyz;\n\n vec3 lDir;\n float distanceToCenterOfSegment = a_verticalRadius - a_height * 0.5;\n vec3 centerOfSegment = centerWithOffset + normalWithOffset * distanceToCenterOfSegment;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 objectToCameraModelSpace = inverseNormalMatrix * vec3(0.0, 0.0, 1.0);\n#else\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - centerOfSegment;\n#endif\n\n vec3 newPosition = position;\n\n float bb = dot(objectToCameraModelSpace, normalWithOffset);\n if (bb < 0.0) { // direction vector looks away, flip it\n lDir = -normalWithOffset;\n } else { // direction vector already looks in my direction\n lDir = normalWithOffset;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_verticalRadius * (position.x + 1.0) * 0.0025 / a_height);\n#endif\n\n // Negative angle means height larger than radius,\n // so we should have full size so we can render the largest part of the ellipsoid segment\n float ratio = max(0.0, 1.0 - a_height / a_verticalRadius);\n // maxRadiusOfSegment is the radius of the circle (projected ellipsoid) when ellipsoid segment is seen from above\n float maxRadiusOfSegment = a_horizontalRadius * sqrt(1.0 - 
ratio * ratio);\n\n vec3 displacement = vec3(newPosition.x*a_height*0.5, maxRadiusOfSegment*newPosition.y, maxRadiusOfSegment*newPosition.z);\n vec3 surfacePoint = centerOfSegment + mat3(lDir, left, up) * displacement;\n vec3 transformed = surfacePoint;\n\n v_treeIndex = a_treeIndex;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n center.xyz = mul3(modelViewMatrix, centerWithOffset);\n center.w = a_verticalRadius; // Pack radius into w-component\n hRadius = a_horizontalRadius;\n height = a_height;\n v_color = a_color;\n\n // compute basis\n sphereNormal.xyz = normalMatrix * normalWithOffset;\n U.xyz = normalMatrix * up;\n V.xyz = normalMatrix * left;\n\n // We pack surfacePoint as w-components of U, V and axis\n U.w = surfacePoint.x;\n V.w = surfacePoint.y;\n sphereNormal.w = surfacePoint.z;\n\n // TODO should perhaps be a different normal?\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n'},function(e,t,n){"use strict";n.r(t),t.default='#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\nfloat displaceScalar(vec3 point, float scalar, \n float treeIndex, vec3 cameraPosition, mat4 inverseModelMatrix) {\n\n // Displaces a scalar based on distance to camera to avoid z-fighting\n vec3 cameraPositionModelSpace = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 pointToCamera = cameraPositionModelSpace - point;\n\n // "Random" number in the range [0, 1], based on treeIndex\n float rnd = mod(treeIndex, 64.) 
/ 64.;\n // Compute distance to camera, but cap it\n float maxDistanceToCamera = 50.;\n float distanceToCamera = min(length(pointToCamera), maxDistanceToCamera);\n\n float maxDisplacement = 0.01;\n float scaleFactor = 0.01;\n float displacement = min(maxDisplacement, scaleFactor * rnd * distanceToCamera / maxDistanceToCamera);\n return scalar + displacement;\n}\n\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift 
= pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 
3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = 
rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, 
overrideColor.a);\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\n// TODO general cylinder and cone are very similar and used\n// the same shader in the old code. Consider de-duplicating\n// parts of this code\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform float dataTextureWidth;\nuniform float dataTextureHeight;\nuniform mat4 projectionMatrix;\n\nvarying vec4 v_centerB;\n\nvarying vec4 v_W;\nvarying vec4 v_U;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_surfacePointY;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform int renderMode;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize( v_normal );\n\n float R1 = v_centerB.w;\n vec4 U = v_U;\n vec4 W = v_W;\n vec4 V = vec4(normalize(cross(W.xyz, U.xyz)), v_surfacePointY);\n\n mat3 basis = mat3(U.xyz, V.xyz, W.xyz);\n vec3 surfacePoint = vec3(U.w, V.w, W.w);\n vec3 rayTarget = surfacePoint;\n\n#if defined(COGNITE_ORTHOGRAPHIC_CAMERA)\n vec3 rayDirection = vec3(0.0, 0.0, -1.0);\n#else\n vec3 rayDirection = normalize(rayTarget); // rayOrigin is (0,0,0) in camera space\n#endif\n\n vec3 diff = rayTarget - v_centerB.xyz;\n vec3 E = diff * basis;\n vec3 D = rayDirection * basis;\n\n float a = dot(D.xy, D.xy);\n float b = dot(E.xy, D.xy);\n float c = dot(E.xy, E.xy) - R1*R1;\n\n // Calculate a dicriminant of the above quadratic equation\n float d = b*b - a*c;\n\n // d < 0.0 means the ray hits outside an infinitely long cone\n if (d < 0.0)\n discard;\n\n float sqrtd = sqrt(d);\n float dist1 = (-b - sqrtd)/a;\n float dist2 = (-b + sqrtd)/a;\n\n // Make 
sure dist1 is the smaller one\n if (dist2 < dist1) {\n float tmp = dist1;\n dist1 = dist2;\n dist2 = tmp;\n }\n\n float dist = dist1;\n vec3 intersectionPoint = E + dist * D;\n float theta = atan(intersectionPoint.y, intersectionPoint.x);\n if (theta < v_angle) theta += 2.0 * PI;\n\n // Intersection point in camera space\n vec3 p = rayTarget + dist*rayDirection;\n\n vec3 planeACenter = vec3(0.0, 0.0, v_planeA.w);\n vec3 planeANormal = v_planeA.xyz;\n vec3 planeBCenter = vec3(0.0, 0.0, v_planeB.w);\n vec3 planeBNormal = v_planeB.xyz;\n bool isInner = false;\n\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle ||\n isSliced(p)\n ) {\n // Missed the first point, check the other point\n isInner = true;\n dist = dist2;\n intersectionPoint = E + dist * D;\n theta = atan(intersectionPoint.y, intersectionPoint.x);\n p = rayTarget + dist*rayDirection;\n if (theta < v_angle) theta += 2.0 * PI;\n if (dot(intersectionPoint - planeACenter, planeANormal) > 0.0 ||\n dot(intersectionPoint - planeBCenter, planeBNormal) > 0.0 ||\n theta > v_arcAngle + v_angle || isSliced(p)\n ) {\n // Missed the other point too\n discard;\n }\n }\n\n#if !defined(COGNITE_RENDER_COLOR_ID) && !defined(COGNITE_RENDER_DEPTH)\n // Regular cylinder has simpler normal vector in camera space\n vec3 p_local = p - v_centerB.xyz;\n normal = normalize(p_local - W.xyz * dot(p_local, W.xyz));\n#endif\n\n float fragDepth = updateFragmentDepth(p, projectionMatrix);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, fragDepth, matCapTexture, GeometryType.Primitive);\n}\n'},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 mul3(mat4 M, vec3 v) {\n vec4 u = M * vec4(v, 1.0);\n return u.xyz / u.w;\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n 
float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / 
overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_centerA;\nattribute vec3 a_centerB;\nattribute float a_radius;\nattribute vec3 a_color;\n// slicing plane attributes\nattribute vec4 a_planeA;\nattribute vec4 a_planeB;\n// segment attributes\nattribute vec3 a_localXAxis;\nattribute float a_angle;\nattribute float a_arcAngle;\n\nvarying float v_treeIndex;\n// We pack the radii into w-components\nvarying vec4 v_centerB;\n\n// U, V, axis represent the 3x3 cone basis.\n// They are vec4 to pack extra data into the w-component\n// since Safari on iOS only supports 8 varying vec4 registers.\nvarying vec4 v_U;\nvarying vec4 v_W;\n\nvarying vec4 v_planeA;\nvarying vec4 v_planeB;\n\nvarying float v_surfacePointY;\n\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n mat4 modelTransformOffset = inverseModelMatrix * treeIndexWorldTransform * 
modelMatrix;\n\n vec3 centerA = mul3(modelTransformOffset, a_centerA);\n vec3 centerB = mul3(modelTransformOffset, a_centerB);\n\n vec3 center = 0.5 * (centerA + centerB);\n float halfHeight = 0.5 * length(centerA - centerB);\n vec3 dir = normalize(centerA - centerB);\n vec3 newPosition = position;\n\n vec3 rayOrigin = (inverseModelMatrix * vec4(cameraPosition, 1.0)).xyz;\n vec3 objectToCameraModelSpace = rayOrigin - center;\n\n float leftUpScale = a_radius;\n\n vec3 lDir = dir;\n if (dot(objectToCameraModelSpace, dir) < 0.0) { // direction vector looks away, flip it\n lDir = -lDir;\n }\n\n vec3 left = normalize(cross(objectToCameraModelSpace, lDir));\n vec3 up = normalize(cross(left, lDir));\n\n#ifndef GL_EXT_frag_depth\n // make sure the billboard will not overlap with cap geometry (flickering effect), not important if we write to depth buffer\n newPosition.x *= 1.0 - (a_radius * (position.x + 1.0) * 0.0025 / halfHeight);\n#endif\n\n vec3 surfacePoint = center + mat3(halfHeight*lDir, leftUpScale*left, leftUpScale*up) * newPosition;\n vec3 transformed = surfacePoint;\n surfacePoint = mul3(modelViewMatrix, surfacePoint);\n\n // varying data\n v_treeIndex = a_treeIndex;\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n // compute basis for cone\n v_W.xyz = dir;\n v_U.xyz = (modelTransformOffset * vec4(a_localXAxis, 0)).xyz;\n v_W.xyz = normalize(normalMatrix * v_W.xyz);\n v_U.xyz = normalize(normalMatrix * v_U.xyz);\n // We pack surfacePoint as w-components of U and W\n v_W.w = surfacePoint.z;\n v_U.w = surfacePoint.x;\n\n // We pack radii as w-components of v_centerB\n mat4 modelToTransformOffset = modelMatrix * modelTransformOffset;\n float radius = length((modelToTransformOffset * vec4(a_localXAxis * a_radius, 0.0)).xyz);\n\n centerB = centerB - dir;\n v_centerB.xyz = mul3(modelViewMatrix, centerB);\n v_centerB.w = radius;\n\n vec4 planeA = a_planeA;\n planeA.w = length((modelToTransformOffset * vec4(planeA.xyz * planeA.w, 0.0)).xyz);\n\n vec4 planeB = 
a_planeB;\n planeB.w = length((modelToTransformOffset * vec4(planeB.xyz * planeB.w, 0.0)).xyz);\n\n v_planeA = planeA;\n v_planeB = planeB;\n v_surfacePointY = surfacePoint.y;\n v_centerB.w = radius;\n\n v_color = a_color;\n v_normal = normalMatrix * normal;\n\n vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat computeFragmentDepth(vec3 p, mat4 projectionMatrix) {\n // Anders Hafreager comments:\n // Depth value can be calculated by transforming the z-component of the intersection point to projection space.\n // The w-component is also needed to scale projection space into clip space.\n // However, the 4th column of the projection matrix is (0, 0, const, 0), so we can exploit this when computing w-value.\n float projected_intersection_z=projectionMatrix[0][2]*p.x+projectionMatrix[1][2]*p.y+projectionMatrix[2][2]*p.z+projectionMatrix[3][2];\n\n // If we want to use orthographic camera, the full w-component is found as\n float projected_intersection_w=projectionMatrix[0][3]*p.x+projectionMatrix[1][3]*p.y+projectionMatrix[2][3]*p.z+projectionMatrix[3][3];\n // float projected_intersection_w = projectionMatrix[2][3]*newPoint.z; // Optimized for perspective camera\n return ((gl_DepthRange.diff * (projected_intersection_z / projected_intersection_w)) + gl_DepthRange.near + gl_DepthRange.far) * .5;\n}\n\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n\nfloat updateFragmentDepth(vec3 p,mat4 projectionMatrix) {\n gl_FragDepthEXT = computeFragmentDepth(p, projectionMatrix);\n return gl_FragDepthEXT;\n}\n\n#else\n\nfloat updateFragmentDepth(vec3 p, mat4 projectionMatrix){\n // Extension not available - not much we can do.\n return computeFragmentDepth(p, projectionMatrix);\n}\n\n#endif\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = 
float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = 
floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int 
geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\n#define PI 3.14159265359\n#define PI2 6.28318530718\n#define PI_HALF 1.5707963267949\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if 
(any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n float dist = dot(v_xy, v_xy);\n float theta = atan(v_xy.y, v_xy.x);\n vec3 normal = normalize( v_normal );\n if (theta < v_angle) {\n theta += 2.0 * PI;\n }\n if (dist > 0.25 || dist < 0.25 * v_oneMinusThicknessSqr || theta >= v_angle + v_arcAngle) {\n discard;\n }\n\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign 
* exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = 
unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_angle;\nattribute float a_arcAngle;\nattribute float a_thickness;\nattribute vec3 a_normal;\n\nvarying float v_treeIndex;\nvarying float v_oneMinusThicknessSqr;\nvarying vec2 v_xy;\nvarying float v_angle;\nvarying float v_arcAngle;\n\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n\n v_treeIndex = a_treeIndex;\n v_oneMinusThicknessSqr = (1.0 - a_thickness) * (1.0 - a_thickness);\n v_xy = vec2(position.x, position.y);\n v_angle = a_angle;\n v_arcAngle = a_arcAngle;\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 transformed = (instanceMatrix * vec4(position, 1.0)).xyz;\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n v_color = a_color;\n\n v_normal = normalMatrix * normalize(inverseModelMatrix * 
treeIndexWorldTransform * modelMatrix * vec4(normalize(a_normal), 0.0)).xyz;\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_1 || renderMode == RenderTypeEffects_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * 
hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_1) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_1) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_1) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_1) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_1) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_0) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_0) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_0));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Not using the w-component to support using vec3 and vec4 as input\nmat4 constructMatrix(vec4 column_0, vec4 column_1, vec4 column_2, vec4 column_3) {\n return mat4(\n vec4(column_0.xyz, 0.0),\n vec4(column_1.xyz, 0.0),\n vec4(column_2.xyz, 0.0),\n vec4(column_3.xyz, 1.0)\n );\n}\n\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + 
rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n 
for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute vec4 a_instanceMatrix_column_0;\nattribute vec4 a_instanceMatrix_column_1;\nattribute vec4 a_instanceMatrix_column_2;\nattribute vec4 a_instanceMatrix_column_3;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute float a_arcAngle;\nattribute float a_radius;\nattribute float a_tubeRadius;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n mat4 instanceMatrix = constructMatrix(\n a_instanceMatrix_column_0,\n a_instanceMatrix_column_1,\n a_instanceMatrix_column_2,\n a_instanceMatrix_column_3\n );\n // normalized theta and phi are packed into positions\n float theta = position.x * a_arcAngle;\n float phi = position.y;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n vec3 pos3 = vec3(0);\n\n pos3.x = (a_radius + a_tubeRadius*cos(phi)) * cosTheta;\n pos3.y = (a_radius + a_tubeRadius*cos(phi)) * sinTheta;\n pos3.z = a_tubeRadius*sin(phi);\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n \n vec3 transformed = (instanceMatrix * vec4(pos3, 1.0)).xyz;\n\n // Calculate normal vectors if we're not picking\n vec3 center = (instanceMatrix * vec4(a_radius * cosTheta, 
a_radius * sinTheta, 0.0, 1.0)).xyz;\n vec3 objectNormal = normalize(transformed.xyz - center);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 modelViewPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4(transformed, 1.0);\n\n vViewPosition = modelViewPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvec3 rgb2hsv(vec3 c)\n{\n vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n\n float d = q.x - min(q.w, q.y);\n float e = 1.0e-10;\n return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n\nvec3 hsv2rgb(vec3 c)\n{\n vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nstruct GeometryTypeEnum {\n int Quad;\n int Primitive;\n int TriangleMesh;\n int InstancedMesh;\n};\n\nconst GeometryTypeEnum GeometryType = GeometryTypeEnum(1,2,3,4);\n\nconst int RenderTypeColor_0 = 1;\nconst int RenderTypeNormal_0 = 2;\nconst int RenderTypeTreeIndex_0 = 3;\nconst int RenderTypePackColorAndNormal_0 = 4;\nconst int RenderTypeDepth_0 = 5;\nconst int RenderTypeEffects_0 = 6;\nconst int RenderTypeGhost_0 = 7;\nconst int RenderTypeLOD = 8;\nconst int RenderTypeDepthBufferOnly = 9;\nconst int RenderTypeGeometryType = 10;\n\n#include <packing>\n\nvec3 packNormalToRgb( const in vec3 normal ) {\n 
return normalize( normal ) * 0.5 + 0.5;\n}\n\nvoid updateFragmentColor(\n int renderMode, vec4 color, float treeIndex, \n vec3 normal, float depth, sampler2D matCapTexture, \n int geometryType) {\n if (renderMode == RenderTypeColor_0 || renderMode == RenderTypeEffects_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n hsv.z = min(0.5 * hsv.z + 0.5, 1.0);\n vec3 colorRGB = hsv2rgb(hsv);\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec4 albedo = vec4(colorRGB * (0.4 + 0.6 * amplitude), 1.0);\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec4 mc = vec4(texture2D(matCapTexture, cap).rgb, 1.0);\n \n gl_FragColor = vec4(albedo.rgb * mc.rgb * 1.7, color.a);\n } else if (renderMode == RenderTypeGhost_0) {\n float amplitude = max(0.0, dot(normal, vec3(0.0, 0.0, 1.0)));\n vec3 albedo = min(vec3(0.8) * (0.4 + 0.6 * amplitude), 1.0);\n gl_FragColor = vec4(albedo, 0.2);\n } else if (renderMode == RenderTypeDepthBufferOnly) {\n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n } else if (renderMode == RenderTypePackColorAndNormal_0) {\n vec3 hsv = rgb2hsv(color.rgb);\n float a = 0.0;\n if (hsv.y > 0.01) {\n if (hsv.z > 0.5) {\n // light color\n a = (0.55 + hsv.x * 0.45);\n } else {\n // dark color\n a = (0.1 + hsv.x * 0.45);\n }\n } else {\n // no saturation - grayscale\n a = hsv.z * 0.09;\n }\n gl_FragColor = vec4(packNormalToRgb(normal.rgb), a);\n } else if (renderMode == RenderTypeNormal_0) {\n gl_FragColor = vec4(packNormalToRgb(normal), 1.0);\n } else if (renderMode == RenderTypeTreeIndex_0) {\n gl_FragColor = vec4(packIntToColor(treeIndex), 1.0);\n } else if (renderMode == RenderTypeDepth_0) {\n gl_FragColor = packDepthToRGBA(depth);\n } else if (renderMode == RenderTypeLOD) {\n bool isHighDetail = geometryType != GeometryType.Quad;\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n gl_FragColor = isHighDetail ? 
vec4(vec3(0.0, 1.0, 0.0) * mc, color.a) : vec4(vec3(1.0, 1.0, 0.0) * mc, color.a);\n } else if (renderMode == RenderTypeGeometryType) {\n vec2 cap = normal.xy * 0.5 + 0.5;\n vec3 mc = texture2D(matCapTexture, cap).rgb * 1.5;\n vec3 geometryColor = \n float(geometryType == 1) * vec3(1.0, 0.0, 0.0) + // Quads\n float(geometryType == 2) * vec3(0.0, 1.0, 0.0) + // Primitives\n float(geometryType == 3) * vec3(0.0, 0.0, 1.0) + // Triangle meshes\n float(geometryType == 4) * vec3(1.0, 1.0, 0.0); // Instance meshes\n gl_FragColor = vec4(geometryColor * mc, color.a);\n } else {\n // Unknown render mode - should not happen. \n gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);\n }\n}\n\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \n\nconst int RenderTypeColor_1 = 1;\nconst int RenderTypeNormal_1 = 2;\nconst int RenderTypeTreeIndex_1 = 3;\nconst int RenderTypePackColorAndNormal_1 = 4;\nconst int RenderTypeDepth_1 = 5;\nconst int RenderTypeEffects_1 = 6;\nconst int RenderTypeGhost_1 = 7;\n\nbool 
determineVisibility(sampler2D visibilityTexture, vec2 textureSize, float treeIndex, int renderMode) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 visible = texture2D(visibilityTexture, treeIndexUv);\n\n // Byte layout: \n // [isVisible, renderInFront, renderGhosted, outlineColor0, outlineColor1, outlineColor2, unused, unused]\n float byteUnwrapped = floor((visible.a * 255.0) + 0.5);\n \n bool isVisible = floatBitsSubset(byteUnwrapped, 0, 1) == 1.0;\n bool renderInFront = floatBitsSubset(byteUnwrapped, 1, 2) == 1.0;\n bool renderGhosted = floatBitsSubset(byteUnwrapped, 2, 3) == 1.0;\n\n return \n // In ghost mode\n ((renderMode == RenderTypeGhost_1) && isVisible && renderGhosted) ||\n // Not ghost mode\n ((renderMode != RenderTypeGhost_1) && \n !renderGhosted && isVisible && (renderInFront || renderMode != RenderTypeEffects_1));\n}\n\nvec4 determineColor(vec3 originalColor, sampler2D colorDataTexture, vec2 textureSize, float treeIndex) {\n\n treeIndex = floor(treeIndex + 0.5);\n\n float dataTextureWidth = textureSize.x;\n float dataTextureHeight = textureSize.y;\n\n float u = mod(treeIndex, dataTextureWidth);\n float v = floor(treeIndex / dataTextureWidth);\n float uCoord = (u + 0.5) / dataTextureWidth;\n float vCoord = (v + 0.5) / dataTextureHeight; // invert Y axis\n vec2 treeIndexUv = vec2(uCoord, vCoord);\n vec4 overrideColor = texture2D(colorDataTexture, treeIndexUv);\n\n if (any(greaterThan(overrideColor.rgb, vec3(0.0)))) {\n return overrideColor;\n }\n\n return vec4(originalColor.rgb, overrideColor.a);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) 
{\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) {\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nuniform sampler2D colorDataTexture;\nuniform sampler2D overrideVisibilityPerTreeIndex;\nuniform sampler2D matCapTexture;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform int renderMode;\n\nvarying vec3 vViewPosition;\n\nvoid main() {\n if (!determineVisibility(colorDataTexture, treeIndexTextureSize, v_treeIndex, renderMode)) {\n discard;\n }\n\n if (isSliced(vViewPosition)) {\n discard;\n }\n\n vec4 color = determineColor(v_color, colorDataTexture, treeIndexTextureSize, v_treeIndex);\n vec3 normal = normalize(v_normal);\n updateFragmentColor(renderMode, color, v_treeIndex, normal, gl_FragCoord.z, matCapTexture, GeometryType.Primitive);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// https://stackoverflow.com/questions/7059962/how-do-i-convert-a-vec4-rgba-value-to-a-float @ Arjan\nfloat unpackVec4ToFloat( vec4 packedFloat)\n{\n vec4 rgba = packedFloat;\n float sign = 1.0 - step(128.0, rgba.x) * 2.0;\n float exponent = 2.0 * mod(rgba.x, 128.0) + step(128.0, rgba.y) - 127.0;\n if (exponent == -127.0) return 0.0;\n float mantissa = mod(rgba.y, 128.0) * 65536.0 + rgba.z * 256.0 + rgba.w + 8388608.0;\n return sign * exp2(exponent - 23.0) * mantissa; \n}\n\nfloat unpackFloatFromRGBATexel(vec4 texel){\n float byteValueR = floor((texel.r * 255.0) + 0.5);\n float byteValueG = floor((texel.g * 255.0) + 0.5);\n float byteValueB = floor((texel.b * 
255.0) + 0.5);\n float byteValueA = floor((texel.a * 255.0) + 0.5);\n\n return unpackVec4ToFloat(vec4(byteValueR, byteValueG, byteValueB, byteValueA));\n}\n\nmat4 determineMatrixOverride(\n float treeIndex, \n vec2 treeIndexTextureSize, \n sampler2D transformOverrideIndexTexture, \n vec2 transformOverrideTextureSize, \n sampler2D transformOverrideTexture\n) {\n\n treeIndex = floor(treeIndex + 0.5);\n float dataTextureWidth = treeIndexTextureSize.x;\n float dataTextureHeight = treeIndexTextureSize.y;\n\n float xTreeIndexTextureCoord = mod(treeIndex, dataTextureWidth);\n float yTreeIndexTextureCoord = floor(treeIndex / dataTextureWidth);\n\n vec2 indexUV = vec2((xTreeIndexTextureCoord + 0.5) / dataTextureWidth, (yTreeIndexTextureCoord + 0.5) / dataTextureHeight);\n\n vec3 indexTexel = texture2D(transformOverrideIndexTexture, indexUV).rgb;\n\n float index = floor(indexTexel.r * 256.0) * 65536.0 + floor(indexTexel.g * 256.0) * 256.0 + floor(indexTexel.b * 256.0);\n \n \n if(index == 0.0){\n return mat4(\n 1.0, 0.0, 0.0, 0.0,\n 0.0, 1.0, 0.0, 0.0,\n 0.0, 0.0, 1.0, 0.0,\n 0.0, 0.0, 0.0, 1.0\n );\n }\n\n index = index - 1.0;\n float overridesPerRow = transformOverrideTextureSize.x / 16.0;\n\n float xOverrideTextureCoord = mod(index, overridesPerRow);\n float yOverrideTextureCoord = floor(index / overridesPerRow);\n\n float cellWidth = 1.0 / transformOverrideTextureSize.x;\n float cellHeight = 1.0 / transformOverrideTextureSize.y;\n\n float overrideU = (xOverrideTextureCoord / overridesPerRow) + cellWidth / 2.0;\n float overrideV = (yOverrideTextureCoord / transformOverrideTextureSize.y) + cellHeight / 2.0;\n\n vec2 overrideUV = vec2(overrideU, overrideV); \n\n float matrixElements[12];\n\n for(int i = 0; i < 12; i++){\n matrixElements[i] = unpackFloatFromRGBATexel(texture2D(transformOverrideTexture, overrideUV + vec2(float(i) * cellWidth, 0.0)));\n }\n \n return mat4(\n matrixElements[0], matrixElements[4], matrixElements[8], 0,\n matrixElements[1], matrixElements[5], 
matrixElements[9], 0,\n matrixElements[2], matrixElements[6], matrixElements[10], 0,\n matrixElements[3], matrixElements[7], matrixElements[11], 1\n );\n}\n\nuniform mat4 inverseModelMatrix;\n\nattribute float a_treeIndex;\nattribute vec3 a_color;\nattribute vec3 a_vertex1;\nattribute vec3 a_vertex2;\nattribute vec3 a_vertex3;\nattribute vec3 a_vertex4;\n\nvarying float v_treeIndex;\nvarying vec3 v_color;\nvarying vec3 v_normal;\n\nvarying vec3 vViewPosition;\n\nuniform vec2 treeIndexTextureSize;\n\nuniform sampler2D transformOverrideIndexTexture;\n\nuniform vec2 transformOverrideTextureSize; \nuniform sampler2D transformOverrideTexture;\n\nvoid main() {\n vec3 transformed;\n // reduce the avarage branchings\n if (position.x < 1.5) {\n transformed = position.x == 0.0 ? a_vertex1 : a_vertex2;\n } else {\n transformed = position.x == 2.0 ? a_vertex3 : a_vertex4;\n }\n\n mat4 treeIndexWorldTransform = determineMatrixOverride(\n a_treeIndex, \n treeIndexTextureSize, \n transformOverrideIndexTexture, \n transformOverrideTextureSize, \n transformOverrideTexture\n );\n\n vec3 objectNormal = cross(a_vertex1 - a_vertex2, a_vertex1 - a_vertex3);\n\n v_treeIndex = a_treeIndex;\n v_color = a_color;\n v_normal = normalMatrix * normalize(inverseModelMatrix * treeIndexWorldTransform * modelMatrix * vec4(objectNormal, 0.0)).xyz;\n\n vec4 mvPosition = viewMatrix * treeIndexWorldTransform * modelMatrix * vec4( transformed, 1.0 );\n vViewPosition = mvPosition.xyz;\n gl_Position = projectionMatrix * mvPosition;\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nfloat floatBitsSubset(float inNumber, int fromLeastSignificantBitIndex, int toMostSignificantBitIndex) {\n float r = float(fromLeastSignificantBitIndex);\n float l = float(toMostSignificantBitIndex);\n\n float bitShift = pow(2.0, r);\n return mod(((inNumber - mod(inNumber, bitShift)) / bitShift), pow(2.0, l - r));\n}\n\n/*\n\nExample:\n\nGet the value from bit 2 to 4 (not inclusive)\n\ninput:\ninNumber = 
173 = 1010_1101 , from=2, to=4\n\nexpected output = 1010_|11|01 = 3\n\n1) subtract any bits in the least significant bit-subset\n\n mod(inNumber=173, pow(2.0, from=2)) = mod(inNumber, 4) = 1\n \n inNumber - 1 = 172\n\n2) bitshift such that we remove the least significant bit-subset\n this is guaranteed to be devisible since we subtracted the remainder\n\n (inNumber=172) / pow(2.0, from=2) = 172 / 4 = 43 = 0010_1011\n\n3) lastly, remove the most significant bit-subset\n\n mod((inNumber=43), pow(2.0, to=4 - from=2) = mod(43, 4)\n\n mod(43, 4) = 3\n\n or in binary notation: 0000_0011 which is the expected result.\n\n*/ \nmat3 G[9];\n// hard coded matrix values!!!! as suggested in https://github.com/neilmendoza/ofxPostProcessing/blob/master/src/EdgePass.cpp#L45\nconst mat3 g0 = mat3( 0.3535533845424652, 0, -0.3535533845424652, 0.5, 0, -0.5, 0.3535533845424652, 0, -0.3535533845424652 );\nconst mat3 g1 = mat3( 0.3535533845424652, 0.5, 0.3535533845424652, 0, 0, 0, -0.3535533845424652, -0.5, -0.3535533845424652 );\nconst mat3 g2 = mat3( 0, 0.3535533845424652, -0.5, -0.3535533845424652, 0, 0.3535533845424652, 0.5, -0.3535533845424652, 0 );\nconst mat3 g3 = mat3( 0.5, -0.3535533845424652, 0, -0.3535533845424652, 0, 0.3535533845424652, 0, 0.3535533845424652, -0.5 );\nconst mat3 g4 = mat3( 0, -0.5, 0, 0.5, 0, 0.5, 0, -0.5, 0 );\nconst mat3 g5 = mat3( -0.5, 0, 0.5, 0, 0, 0, 0.5, 0, -0.5 );\nconst mat3 g6 = mat3( 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.6666666865348816, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204 );\nconst mat3 g7 = mat3( -0.3333333432674408, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, 0.6666666865348816, 0.1666666716337204, -0.3333333432674408, 0.1666666716337204, -0.3333333432674408 );\nconst mat3 g8 = mat3( 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 0.3333333432674408, 
0.3333333432674408, 0.3333333432674408 );\n\nfloat edgeDetectionFilter(sampler2D baseTexture, vec2 uv, vec2 resolution) {\n\n vec2 texel = vec2(1.0 / resolution.x, 1.0 / resolution.y);\n\n\tG[0] = g0,\n\tG[1] = g1,\n\tG[2] = g2,\n\tG[3] = g3,\n\tG[4] = g4,\n\tG[5] = g5,\n\tG[6] = g6,\n\tG[7] = g7,\n\tG[8] = g8;\n\n\tmat3 I;\n\tfloat cnv[9];\n\tvec3 neighbour;\n\n\t/* fetch the 3x3 neighbourhood and use the RGB vector's length as intensity value */\n\tfor (int i=0; i<3; i++) {\n\t\tfor (int j=0; j<3; j++) {\n\t\t\tneighbour = texture2D(baseTexture, uv + texel * vec2(float(i)-1.0,float(j)-1.0) ).rgb;\n\t\t\tI[i][j] = length(neighbour);\n\t\t}\n\t}\n\n\t/* calculate the convolution values for all the masks */\n\tfor (int i=0; i<9; i++) {\n\t\tfloat dp3 = dot(G[i][0], I[0]) + dot(G[i][1], I[1]) + dot(G[i][2], I[2]);\n\t\tcnv[i] = dp3 * dp3;\n\t}\n\n\tfloat M = (cnv[0] + cnv[1]) + (cnv[2] + cnv[3]);\n\tfloat S = (cnv[4] + cnv[5]) + (cnv[6] + cnv[7]) + (cnv[8] + M);\n\n float edgeStrength = sqrt(M/S);\n\n return edgeStrength;\n}\n\n#include <packing>\n\nvarying vec2 vUv;\n\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nuniform sampler2D tFront;\nuniform sampler2D tFrontDepth;\n\nuniform sampler2D tBack;\nuniform sampler2D tBackDepth;\n\nuniform sampler2D tCustom;\nuniform sampler2D tCustomDepth;\n\nuniform sampler2D tGhost;\nuniform sampler2D tGhostDepth;\n\nuniform sampler2D tOutlineColors;\n\nuniform float cameraNear;\nuniform float cameraFar;\n\nuniform vec2 resolution;\n\nuniform float edgeStrengthMultiplier;\nuniform float edgeGrayScaleIntensity;\n\nconst float infinity = 1e20;\n\nfloat computeFloatEncodedOutlineIndex(float bitEncodedFloat){\n return floatBitsSubset(floor((bitEncodedFloat * 255.0) + 0.5), 3, 6);\n}\n\nvec4 computeNeighborOutlineIndices(sampler2D colorTexture){\n float outlineIndex0 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv0).a);\n float outlineIndex1 = 
computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv1).a);\n float outlineIndex2 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv2).a);\n float outlineIndex3 = computeFloatEncodedOutlineIndex(texture2D(colorTexture, vUv3).a);\n\n return vec4(outlineIndex0, outlineIndex1, outlineIndex2, outlineIndex3);\n}\n\nfloat toViewZ(float depth, float near, float far){\n float normalizedDepth = depth * 2.0 - 1.0;\n return 2.0 * near * far / (far + near - normalizedDepth * (far - near)); \n}\n\nvec4 computeNeighborAlphas(sampler2D colorTexture){\n float alpha0 = texture2D(colorTexture, vUv0).a;\n float alpha1 = texture2D(colorTexture, vUv1).a;\n float alpha2 = texture2D(colorTexture, vUv2).a;\n float alpha3 = texture2D(colorTexture, vUv3).a;\n\n return vec4(alpha0, alpha1, alpha2, alpha3);\n}\n\nvoid main() {\n vec4 frontAlbedo = texture2D(tFront, vUv);\n vec4 backAlbedo = texture2D(tBack, vUv);\n vec4 customAlbedo = texture2D(tCustom, vUv);\n vec4 ghostAlbedo = texture2D(tGhost, vUv);\n\n float frontDepth = texture2D(tFrontDepth, vUv).r;\n float backDepth = texture2D(tBackDepth, vUv).r; \n float customDepth = texture2D(tCustomDepth, vUv).r;\n float ghostDepth = texture2D(tGhostDepth, vUv).r;\n\n // This is a hack to make sure that all textures are initialized\n // If a texture is unused, it will have a clear value of 0.0.\n // Without this we've seen issues with MSAA where resizing render targets\n // causes depth to cleared to either 1 or 0 depending on the device/browser\n customDepth = customDepth > 0.0 ? customDepth : 1.0; \n backDepth = backDepth > 0.0 ? backDepth : 1.0;\n ghostDepth = ghostDepth > 0.0 ? ghostDepth : 1.0;\n frontDepth = frontDepth > 0.0 ? 
frontDepth : 1.0; \n\n if(all(greaterThanEqual(vec4(backDepth, customDepth, ghostDepth, frontDepth), vec4(1.0)))){\n discard;\n }\n \n // Decompose and clamp \"ghost\" color\n vec4 clampedGhostAlbedo = vec4(max(ghostAlbedo.rgb, 0.5), min(ghostAlbedo.a, 0.8));\n\n float frontOutlineIndex = computeFloatEncodedOutlineIndex(frontAlbedo.a);\n vec4 frontNeighborIndices = computeNeighborOutlineIndices(tFront);\n\n // There exsists fragments of rendered objects within the edge width that should have border\n if(any(equal(frontNeighborIndices, vec4(0.0))) && frontOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(frontNeighborIndices.x, frontNeighborIndices.y), max(frontNeighborIndices.z, frontNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = frontDepth;\n#endif\n return;\n }\n\n // texture has drawn fragment\n if(frontDepth < 1.0){\n float customDepthTest = step(customDepth, backDepth); // zero if back is in front\n\n float a = customDepthTest > 0.0 ? 
ceil(customAlbedo.a) * 0.5 : ceil(backAlbedo.a) * 0.5;\n\n gl_FragColor = vec4(frontAlbedo.rgb, 1.0) * (1.0 - a) + (vec4(backAlbedo.rgb, 1.0) * (1.0 - customDepthTest) + vec4(customAlbedo.rgb, 1.0) * customDepthTest) * a;\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = texture2D(tFrontDepth, vUv).r;\n#endif\n return;\n }\n\n if (customDepth >= backDepth) {\n float backOutlineIndex = computeFloatEncodedOutlineIndex(backAlbedo.a);\n vec4 backNeighborIndices = computeNeighborOutlineIndices(tBack);\n\n if( any(equal(backNeighborIndices, vec4(0.0))) && backOutlineIndex > 0.0) \n { \n float borderColorIndex = max(max(backNeighborIndices.x, backNeighborIndices.y), max(backNeighborIndices.z, backNeighborIndices.w));\n gl_FragColor = texture2D(tOutlineColors, vec2(0.125 * borderColorIndex + (0.125 / 2.0), 0.5));\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tBackDepth, vUv).r;\n#endif\n return;\n }\n }\n \n float edgeStrength = 0.0;\n#if defined(EDGES)\n if (!any(equal(computeNeighborAlphas(tBack), vec4(0.0)))) {\n float depthEdge = toViewZ(backDepth, cameraNear, cameraFar);\n edgeStrength = (1.0 - smoothstep(10.0, 40.0, depthEdge)) * edgeDetectionFilter(tBack, vUv, resolution) * edgeStrengthMultiplier;\n }\n#endif\n\n // Combine color from ghost, back and custom object\n vec4 color = backAlbedo;\n float depth = backDepth;\n if (customDepth < backDepth && ghostDepth == 1.0) {\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * backAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else if (customDepth < backDepth && ghostDepth < 1.0) {\n float s = (1.0 - step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n vec3 modelAlbedo = mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s);\n color = vec4(customAlbedo.rgb * customAlbedo.a + (1.0 - customAlbedo.a) * modelAlbedo.rgb, 1.0);\n depth = customDepth;\n edgeStrength = 0.0;\n } else {\n float s = (1.0 - 
step(backDepth, ghostDepth)) * clampedGhostAlbedo.a;\n color = vec4(mix(backAlbedo.rgb, clampedGhostAlbedo.rgb, s), backAlbedo.a);\n depth = mix(backDepth, ghostDepth, s);\n }\n \n gl_FragColor = color * (1.0 - edgeStrength) + vec4(vec3(edgeGrayScaleIntensity) * edgeStrength, 1.0);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth) \n gl_FragDepthEXT = depth;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\n// selection outline\nuniform vec2 texelSize;\nvarying vec2 vUv0;\nvarying vec2 vUv1;\nvarying vec2 vUv2;\nvarying vec2 vUv3;\n\nvoid main() {\n vUv = uv;\n\n // selection outline\n vUv0 = vec2(uv.x + texelSize.x, uv.y);\n vUv1 = vec2(uv.x - texelSize.x, uv.y);\n vUv2 = vec2(uv.x, uv.y + texelSize.y);\n vUv3 = vec2(uv.x, uv.y - texelSize.y);\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 v_rgbSE;\nvarying vec2 v_rgbM;\n\nuniform vec2 inverseResolution;\nuniform vec2 resolution;\nuniform sampler2D tDiffuse;\nuniform sampler2D tDepth;\n\n#ifndef FXAA_REDUCE_MIN\n #define FXAA_REDUCE_MIN (1.0/ 128.0)\n#endif\n#ifndef FXAA_REDUCE_MUL\n #define FXAA_REDUCE_MUL (1.0 / 8.0)\n#endif\n#ifndef FXAA_SPAN_MAX\n #define FXAA_SPAN_MAX 8.0\n#endif\n\nvec4 fxaa(sampler2D tex, vec2 fragCoord,\n vec2 resolution, vec2 inverseResolution,\n vec2 v_rgbNW, vec2 v_rgbNE,\n vec2 v_rgbSW, vec2 v_rgbSE,\n vec2 v_rgbM) {\n vec4 color;\n\n vec3 rgbNW = texture2D(tex, v_rgbNW).xyz;\n vec3 rgbNE = texture2D(tex, v_rgbNE).xyz;\n vec3 rgbSW = texture2D(tex, v_rgbSW).xyz;\n vec3 rgbSE = texture2D(tex, v_rgbSE).xyz;\n vec4 texColor = texture2D(tex, v_rgbM);\n vec3 
rgbM = texColor.xyz;\n\n vec3 luma = vec3(0.299, 0.587, 0.114);\n float lumaNW = dot(rgbNW, luma);\n float lumaNE = dot(rgbNE, luma);\n float lumaSW = dot(rgbSW, luma);\n float lumaSE = dot(rgbSE, luma);\n float lumaM = dot(rgbM, luma);\n float lumaMin = min(lumaM, min(min(lumaNW, lumaNE), min(lumaSW, lumaSE)));\n float lumaMax = max(lumaM, max(max(lumaNW, lumaNE), max(lumaSW, lumaSE)));\n\n mediump vec2 dir;\n dir.x = -((lumaNW + lumaNE) - (lumaSW + lumaSE));\n dir.y = ((lumaNW + lumaSW) - (lumaNE + lumaSE));\n\n float dirReduce = max((lumaNW + lumaNE + lumaSW + lumaSE) *\n (0.25 * FXAA_REDUCE_MUL), FXAA_REDUCE_MIN);\n\n float rcpDirMin = 1.0 / (min(abs(dir.x), abs(dir.y)) + dirReduce);\n dir = min(vec2(FXAA_SPAN_MAX, FXAA_SPAN_MAX),\n max(vec2(-FXAA_SPAN_MAX, -FXAA_SPAN_MAX),\n dir * rcpDirMin));\n\n vec4 rgbA = 0.5 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (1.0 / 3.0 - 0.5))) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * (2.0 / 3.0 - 0.5))));\n vec4 rgbB = rgbA * 0.5 + 0.25 * (\n texture2D(tex, inverseResolution * (v_fragCoord + dir * -0.5)) +\n texture2D(tex, inverseResolution * (v_fragCoord + dir * 0.5)));\n\n float lumaB = dot(rgbB.rgb, luma);\n if ((lumaB < lumaMin) || (lumaB > lumaMax)) {\n color = rgbA;\n } else {\n color = rgbB;\n }\n return color;\n}\n\nvoid main() {\n gl_FragColor = fxaa(tDiffuse, v_fragCoord, \n resolution, inverseResolution, \n v_rgbNW, v_rgbNE, v_rgbSW, v_rgbSE, v_rgbM);\n#if defined(gl_FragDepthEXT) || defined(GL_EXT_frag_depth)\n gl_FragDepthEXT = texture2D(tDepth, v_uv).r;\n#endif\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/*!\n *\n * Adapted from:\n * https://github.com/mattdesl/three-shader-fxaa\n * MIT License (MIT) Copyright (c) 2014 Matt DesLauriers\n *\n */\n\nuniform vec2 resolution;\nuniform vec2 inverseResolution;\n\nvarying vec2 v_uv;\nvarying vec2 v_fragCoord;\nvarying vec2 v_rgbNW;\nvarying vec2 v_rgbNE;\nvarying vec2 v_rgbSW;\nvarying vec2 
v_rgbSE;\nvarying vec2 v_rgbM;\n\nvoid main() {\n v_fragCoord = uv * resolution;\n v_rgbNW = (v_fragCoord + vec2(-1.0, -1.0)) * inverseResolution;\n v_rgbNE = (v_fragCoord + vec2(1.0, -1.0)) * inverseResolution;\n v_rgbSW = (v_fragCoord + vec2(-1.0, 1.0)) * inverseResolution;\n v_rgbSE = (v_fragCoord + vec2(1.0, 1.0)) * inverseResolution;\n v_rgbM = vec2(v_fragCoord * inverseResolution);\n v_uv = uv;\n\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\nvarying vec2 vUv;\n\nuniform mat4 projMatrix;\nuniform mat4 inverseProjectionMatrix;\n\nuniform vec3 kernel[MAX_KERNEL_SIZE];\n\nuniform sampler2D tDepth;\nuniform sampler2D tNoise;\n\nuniform vec2 resolution;\n\nuniform float sampleRadius;\nuniform float bias;\n\nvec3 viewPosFromDepth(float depth, vec2 uv) {\n // Depth to clip space: [0, 1] -> [-1, 1]\n float z = depth * 2.0 - 1.0;\n\n // Fragment in clip space\n vec4 clipSpacePosition = vec4(uv * 2.0 - 1.0, z, 1.0);\n vec4 viewSpacePosition = inverseProjectionMatrix * clipSpacePosition;\n\n // Perspective division\n viewSpacePosition /= viewSpacePosition.w;\n\n return viewSpacePosition.xyz;\n}\n\nvec3 computeWorldNormalFromDepth(sampler2D depthTexture, vec2 resolution, vec2 uv, float sampleDepth){\n float dx = 1.0 / resolution.x;\n float dy = 1.0 / resolution.y;\n\n vec2 uv1 = uv + vec2(dx, 0.0); // right\n float d1 = texture2D(depthTexture, uv1).r; \n\n vec2 uv2 = uv + vec2(0.0, dy); // up\n float d2 = texture2D(depthTexture, uv2).r;\n\n vec2 uv3 = uv + vec2(-dx, 0.0); // left\n float d3 = texture2D(depthTexture, uv3).r;\n\n vec2 uv4 = uv + vec2(0.0, -dy); // down\n float d4 = texture2D(depthTexture, uv4).r;\n\n bool horizontalSampleCondition = abs(d1 - sampleDepth) < abs(d3 - sampleDepth);\n\n float horizontalSampleDepth = horizontalSampleCondition ? d1 : d3;\n vec2 horizontalSampleUv = horizontalSampleCondition ? 
uv1 : uv3;\n\n bool verticalSampleCondition = abs(d2 - sampleDepth) < abs(d4 - sampleDepth);\n\n float verticalSampleDepth = verticalSampleCondition ? d2 : d4;\n vec2 verticalSampleUv = verticalSampleCondition ? uv2 : uv4;\n\n vec3 viewPos = viewPosFromDepth(sampleDepth, vUv);\n \n vec3 viewPos1 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv) : viewPosFromDepth(verticalSampleDepth, verticalSampleUv);\n vec3 viewPos2 = (horizontalSampleCondition == verticalSampleCondition) ? viewPosFromDepth(verticalSampleDepth, verticalSampleUv) : viewPosFromDepth(horizontalSampleDepth, horizontalSampleUv);\n\n return normalize(cross(viewPos1 - viewPos, viewPos2 - viewPos));\n}\n\nvoid main(){\n float d = texture2D(tDepth, vUv).r;\n\n vec3 viewNormal = computeWorldNormalFromDepth(tDepth, resolution, vUv, d);\n\n vec3 viewPosition = viewPosFromDepth(d, vUv);\n\n vec2 noiseScale = vec2( resolution.x / 128.0, resolution.y / 128.0 );\n vec3 randomVec = normalize(texture2D(tNoise, vUv * noiseScale).xyz);\n\n vec3 tangent = normalize(randomVec - viewNormal * dot(randomVec, viewNormal));\n\n vec3 bitangent = cross(viewNormal, tangent);\n\n mat3 TBN = mat3(tangent, bitangent, viewNormal);\n\n float occlusion = 0.0;\n\n for (int i = 0; i < MAX_KERNEL_SIZE; i++){\n \n vec3 sampleVector = TBN * kernel[i];\n sampleVector = viewPosition + sampleVector * sampleRadius;\n\n vec4 offset = projMatrix * vec4(sampleVector, 1.0);\n offset.xyz /= offset.w;\n offset.xyz = offset.xyz * 0.5 + 0.5;\n\n float realDepth = texture2D(tDepth, offset.xy).r;\n vec3 realPos = viewPosFromDepth(realDepth, offset.xy);\n\n float rangeCheck = smoothstep(0.0, 1.0, sampleRadius / length(viewPosition - realPos));\n\n occlusion += (realPos.z >= sampleVector.z + bias ? 
1.0 : 0.0) * rangeCheck;\n }\n\n float occlusionFactor = 1.0 - clamp(occlusion / float(MAX_KERNEL_SIZE), 0.0, 1.0);\n\n gl_FragColor = vec4(vec3(occlusionFactor), 1.0);\n}"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// Copyright Cognite (C) 2019 Cognite\n//\n// Efficient Gaussian blur based on technique described by Daniel Rákos in\n// http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n//\n\nvarying vec2 vUv;\n\nuniform sampler2D tDiffuse;\nuniform sampler2D tAmbientOcclusion;\n\nuniform vec2 resolution;\n\nvoid main() {\n float blurredAO = 0.5 * (\n 2.0 * texture2D(tAmbientOcclusion, vUv).r * 0.2270270270 +\n texture2D(tAmbientOcclusion, vUv + vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(1.3746153846, 0.0) / resolution.x).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(3.2307692308, 0.0) / resolution.x).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv + vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 1.3746153846) / resolution.y).r * 0.3162162162 +\n texture2D(tAmbientOcclusion, vUv - vec2(0.0, 3.2307692308) / resolution.y).r * 0.0702702703\n );\n\n gl_FragColor = vec4(texture2D(tDiffuse, vUv).rgb * blurredAO, 1.0);\n}\n\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n// From http://www.science-and-fiction.org/rendering/noise.html\nfloat rand2d(in vec2 co){\n return fract(sin(dot(co.xy, vec2(12.9898,78.233))) * 43758.5453);\n}\n\n#if NUM_CLIPPING_PLANES > 0\nuniform vec4 clippingPlanes[NUM_CLIPPING_PLANES];\n#endif\n\nbool isSliced(vec3 point) {\n#if NUM_CLIPPING_PLANES > 0\n vec3 pointFlipped = -point;\n vec4 plane;\n\n for (int i = 0; i < UNION_CLIPPING_PLANES; i++) 
{\n plane = clippingPlanes[i];\n if (dot(pointFlipped, plane.xyz) > plane.w) return true;\n }\n#if UNION_CLIPPING_PLANES < NUM_CLIPPING_PLANES\n bool clipped = true;\n for ( int i = UNION_CLIPPING_PLANES; i < NUM_CLIPPING_PLANES; i ++ ) {\n plane = clippingPlanes[ i ];\n clipped = (dot(pointFlipped, plane.xyz) > plane.w) && clipped;\n }\n if ( clipped ) return true;\n#endif\n#endif\n return false;\n}\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp float v_visible;\nvarying lowp vec2 v_seed;\n\nvarying vec3 v_viewPosition;\n\nvoid main() {\n \n if(v_visible != 1.0 || isSliced(v_viewPosition)){\n discard;\n }\n\n float v = rand2d(gl_FragCoord.xy + v_seed);\n if (v >= v_coverageFactor) {\n discard;\n }\n\n gl_FragColor = vec4(v_color, 1.0);\n}\n"},function(e,t,n){"use strict";n.r(t),t.default="#define GLSLIFY 1\n/**\n * Packs the integer part of the number given to a RGB color.\n */\nvec3 packIntToColor(float number) {\n float r = floor(number / (255.0 * 255.0)) / 255.0;\n float g = mod(floor(number / 255.0), 255.0) / 255.0;\n float b = mod(number, 255.0) / 255.0;\n return vec3(r, g, b);\n}\n\nattribute mediump float a_sectorId;\nattribute lowp vec3 a_coverageFactor;\nattribute lowp float a_visible;\n\nvarying mediump vec3 v_color;\nvarying lowp float v_coverageFactor;\nvarying lowp vec2 v_seed;\nvarying lowp float v_visible;\n\nvarying vec3 v_viewPosition;\n\nvoid main()\n{\n v_visible = a_visible;\n v_color = packIntToColor(a_sectorId);\n v_coverageFactor = abs(dot(a_coverageFactor, normal));\n // A seed to ensure that two overlapping sectors A and B \n // doesn't produce the same noise pattern\n v_seed = vec2(a_sectorId / 255.0, a_sectorId / 65025.0);\n\n vec4 mvPosition = modelViewMatrix * instanceMatrix * vec4( position, 1.0 );\n\n v_viewPosition = mvPosition.xyz;\n\n gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 
1.0);\n}\n"},function(e,t){e.exports=require("@cognite/sdk")},function(e,t){e.exports=require("lodash/debounce")},function(e,t){e.exports=require("lodash/omit")},function(e,t){e.exports=require("skmeans")},function(e,t){e.exports=require("@cognite/sdk-core")},,function(e,t,n){"use strict";
|
|
207
230
|
/*!
|
|
208
231
|
* Copyright 2021 Cognite AS
|
|
209
|
-
*/
|
|
232
|
+
*/
|
|
233
|
+
var r;n.d(t,"a",(function(){return r})),function(e){e[e.Discarded=0]="Discarded",e[e.Simple=1]="Simple",e[e.Detailed=2]="Detailed"}(r||(r={}))},function(e,t,n){"use strict";
|
|
210
234
|
/*!
|
|
211
235
|
* Copyright 2021 Cognite AS
|
|
212
236
|
*/
|
|
213
|
-
|
|
237
|
+
var r;n.d(t,"a",(function(){return r})),function(e){e[e.Color=1]="Color",e[e.Normal=2]="Normal",e[e.TreeIndex=3]="TreeIndex",e[e.PackColorAndNormal=4]="PackColorAndNormal",e[e.Depth=5]="Depth",e[e.Effects=6]="Effects",e[e.Ghost=7]="Ghost",e[e.LOD=8]="LOD",e[e.DepthBufferOnly=9]="DepthBufferOnly",e[e.GeometryType=10]="GeometryType"}(r||(r={}))},function(e,t,n){"use strict";n.r(t),n.d(t,"NodeAppearanceProvider",(function(){return r.c})),n.d(t,"NodeOutlineColor",(function(){return r.e})),n.d(t,"NodeCollectionBase",(function(){return r.d})),n.d(t,"TreeIndexNodeCollection",(function(){return r.f})),n.d(t,"IntersectionNodeCollection",(function(){return r.b})),n.d(t,"UnionNodeCollection",(function(){return r.g})),n.d(t,"revealEnv",(function(){return o.s})),n.d(t,"IndexSet",(function(){return o.e})),n.d(t,"NumericRange",(function(){return o.h})),n.d(t,"BoundingBoxClipper",(function(){return ot.a})),n.d(t,"Cognite3DModel",(function(){return P})),n.d(t,"Cognite3DViewer",(function(){return nt})),n.d(t,"CognitePointCloudModel",(function(){return R})),n.d(t,"PotreePointShape",(function(){return Ie})),n.d(t,"PotreePointColorType",(function(){return Me})),n.d(t,"PotreePointSizeType",(function(){return Pe})),n.d(t,"WellKnownAsprsPointClassCodes",(function(){return Ne})),n.d(t,"NotSupportedInMigrationWrapperError",(function(){return T})),n.d(t,"PropertyFilterNodeCollection",(function(){return j})),n.d(t,"SinglePropertyFilterNodeCollection",(function(){return q})),n.d(t,"AssetNodeCollection",(function(){return G})),n.d(t,"InvertedNodeCollection",(function(){return W})),n.d(t,"registerCustomNodeCollectionType",(function(){return Y})),n.d(t,"THREE",(function(){return i})),n.d(t,"DefaultNodeAppearance",(function(){return r.a}));var r=n(5),o=n(1),i=n(0),a=n(12),s=n.n(a),d=n(57),l=n.n(d),c=n(58),u=n.n(c),m=n(4),h=n(7),p=n(13),f=n(63);
|
|
214
238
|
/*!
|
|
215
239
|
* Copyright 2021 Cognite AS
|
|
216
|
-
*/
|
|
240
|
+
*/
|
|
241
|
+
const v=new i.Color("black");function x(e,t){const{camera:n,normalizedCoords:r,renderer:o,domElement:a}=t,s=new i.Scene,d=e.parent;s.add(e);try{const t={normalizedCoords:r,camera:n,renderer:o,domElement:a,scene:s,cadNode:e},m=function(e){const{cadNode:t}=e,n=t.renderMode;let r;t.renderMode=f.a.TreeIndex;try{r=_(e,v,0)}finally{t.renderMode=n}if(0===r[3])return;return 255*r[0]*255+255*r[1]+r[2]}(t);if(void 0===m)return;const h=function(e){const{cadNode:t}=e,n=t.renderMode;t.renderMode=f.a.Depth;const r=_(e,v,0);t.renderMode=n;return o=r,g.fromArray(o).multiplyScalar(1/255).dot(y);var o}(t),p=function(e,t){const{camera:n,normalizedCoords:r}=e,o=new i.Vector3;return o.set(r.x,r.y,.5).applyMatrix4(n.projectionMatrixInverse),o.multiplyScalar(t/o.z),o.applyMatrix4(n.matrixWorld),o}(t,(l=h,c=n.near,u=n.far,c*u/((u-c)*l-u)));return{distance:(new i.Vector3).subVectors(p,n.position).length(),point:p,treeIndex:m,object:e,cadNode:e}}finally{d&&d.add(e)}var l,c,u}const g=new i.Vector4,y=new i.Vector4(255/256/16777216,255/256/65536,255/256/256,255/256);const b={renderTarget:new i.WebGLRenderTarget(1,1),pixelBuffer:new Uint8Array(4)};function _(e,t,n){const{renderTarget:r,pixelBuffer:a}=b,{scene:s,camera:d,normalizedCoords:l,renderer:c,domElement:u}=e,m=d.clone(),h={x:(l.x+1)/2*u.clientWidth,y:(1-l.y)/2*u.clientHeight};m.setViewOffset(u.clientWidth,u.clientHeight,h.x,h.y,1,1);const p=new o.i(c);try{const{width:e,height:o}=c.getSize(new i.Vector2);r.setSize(e,o),p.setRenderTarget(r),p.setClearColor(t,n),c.clearColor(),c.render(s,m),c.readRenderTargetPixels(r,0,0,1,1,a)}finally{p.resetState()}return a}
|
|
217
242
|
/*!
|
|
218
243
|
* Copyright 2021 Cognite AS
|
|
219
|
-
*/class
|
|
244
|
+
*/class T extends Error{constructor(e){super(e)}}
|
|
220
245
|
/*!
|
|
221
246
|
* Copyright 2021 Cognite AS
|
|
222
|
-
*/
|
|
247
|
+
*/class C{constructor(e){this._camera=e,this._needsRedraw=!0,this._lastCameraPosition=new i.Vector3,this._lastCameraRotation=new i.Euler,this._lastCameraZoom=0,window.addEventListener("focus",()=>{this.redraw()})}get needsRedraw(){return this._needsRedraw}update(){const{_camera:e,_lastCameraPosition:t,_lastCameraRotation:n,_lastCameraZoom:r}=this,{position:o,rotation:i,zoom:a}=e,s=!o.equals(t)||!i.equals(n)||a!==r;t.copy(o),n.copy(i),this._lastCameraZoom=a,s&&(this._needsRedraw=!0)}clearNeedsRedraw(){this._needsRedraw=!1}redraw(){this._needsRedraw=!0}}var I=n(2);
|
|
223
248
|
/*!
|
|
224
249
|
* Copyright 2021 Cognite AS
|
|
225
|
-
*/class
|
|
250
|
+
*/class w{constructor(e,t,n){this.modelId=e,this.revisionId=t,this.nodesApiClient=n,this.nodeIdToTreeIndexMap=new Map,this.treeIndexToNodeIdMap=new Map,this.treeIndexSubTreeSizeMap=new Map,this.nodeIdRequestObservable=new m.Subject,this.nodeIdResponse=this.nodeIdRequestObservable.pipe(Object(I.bufferTime)(50),Object(I.filter)(e=>e.length>0),Object(I.mergeMap)(async e=>{const t=await this.nodesApiClient.mapNodeIdsToTreeIndices(this.modelId,this.revisionId,e);return e.map((e,n)=>({nodeId:e,treeIndex:t[n]}))}),Object(I.mergeAll)(),Object(I.tap)(e=>{this.nodeIdToTreeIndexMap.set(e.nodeId,e.treeIndex),this.treeIndexToNodeIdMap.set(e.treeIndex,e.nodeId)}),Object(I.share)()),this.treeIndexRequestObservable=new m.Subject,this.treeIndexResponse=this.treeIndexRequestObservable.pipe(Object(I.bufferTime)(50),Object(I.filter)(e=>e.length>0),Object(I.mergeMap)(async e=>{const t=await this.nodesApiClient.mapTreeIndicesToNodeIds(this.modelId,this.revisionId,e);return e.map((e,n)=>({nodeId:t[n],treeIndex:e}))}),Object(I.mergeAll)(),Object(I.tap)(e=>{this.nodeIdToTreeIndexMap.set(e.nodeId,e.treeIndex),this.treeIndexToNodeIdMap.set(e.treeIndex,e.nodeId)}),Object(I.share)()),this.subtreeSizeObservable=new m.Subject,this.subtreeSizeResponse=this.subtreeSizeObservable.pipe(Object(I.bufferTime)(50),Object(I.filter)(e=>e.length>0),Object(I.mergeMap)(async e=>this.nodesApiClient.determineTreeIndexAndSubtreeSizesByNodeIds(this.modelId,this.revisionId,e)),Object(I.mergeAll)(),Object(I.tap)(e=>{this.treeIndexSubTreeSizeMap.set(e.treeIndex,e.subtreeSize)}),Object(I.share)())}async getTreeIndex(e){const t=this.nodeIdToTreeIndexMap.get(e);if(void 0!==t)return t;const n=this.nodeIdResponse.pipe(Object(I.first)(t=>t.nodeId===e),Object(I.map)(e=>e.treeIndex)).toPromise();return this.nodeIdRequestObservable.next(e),await n}async getNodeId(e){const t=this.treeIndexToNodeIdMap.get(e);if(void 0!==t)return t;const 
n=this.treeIndexResponse.pipe(Object(I.first)(t=>t.treeIndex===e),Object(I.map)(e=>e.nodeId)).toPromise();return this.treeIndexRequestObservable.next(e),await n}async getSubtreeSize(e){const t=this.treeIndexSubTreeSizeMap.get(e);if(t)return t;const n=await this.getNodeId(e),r=this.subtreeSizeResponse.pipe(Object(I.first)(t=>t.treeIndex===e),Object(I.map)(e=>e.subtreeSize)).toPromise();return this.subtreeSizeObservable.next(n),await r}async getTreeIndices(e){const t=e.map(e=>this.nodeIdToTreeIndexMap.get(e)||-1),n=e.filter((e,n)=>-1===t[n]);if(0===n.length)return t;const r=new Map(e.map((e,t)=>[e,t])),o=await this.nodesApiClient.mapNodeIdsToTreeIndices(this.modelId,this.revisionId,n);console.assert(n.length===o.length);for(let e=0;e<o.length;e++){const i=n[e],a=o[e],s=r.get(i);t[s]=a,this.add(i,a)}return t}async getNodeIds(e){const t=e.map(e=>this.treeIndexToNodeIdMap.get(e)||-1),n=e.filter((e,n)=>-1===t[n]);if(0===n.length)return t;const r=new Map(e.map((e,t)=>[e,t])),o=await this.nodesApiClient.mapTreeIndicesToNodeIds(this.modelId,this.revisionId,n);console.assert(o.length===n.length);for(let e=0;e<o.length;e++){const i=o[e],a=n[e],s=r.get(a);t[s]=i,this.add(i,a)}return t}add(e,t){this.nodeIdToTreeIndexMap.set(e,t),this.treeIndexToNodeIdMap.set(t,e)}}
|
|
226
251
|
/*!
|
|
227
252
|
* Copyright 2021 Cognite AS
|
|
228
|
-
*/
|
|
253
|
+
*/async function M(e,t,n,r=15e3){let o=e;return new Promise(e=>{!function i(){for(let e=0;e<r&&o<=t;e++)n(o++);o<=t?setTimeout(i):e()}()})}
|
|
229
254
|
/*!
|
|
230
255
|
* Copyright 2021 Cognite AS
|
|
231
|
-
*/
|
|
256
|
+
*/const S=new Map([["Meters",1],["Centimeters",.01],["Millimeters",.001],["Micrometers",1e-6],["Kilometers",1e3],["Feet",.3048],["Inches",.0254],["Yards",.9144],["Miles",1609.34],["Mils",254e-7],["Microinches",2.54e-8]]);
|
|
232
257
|
/*!
|
|
233
258
|
* Copyright 2021 Cognite AS
|
|
234
|
-
*/class
|
|
259
|
+
*/class P extends i.Object3D{constructor(e,t,n,r){super(),this.type="cad",this._styledNodeCollections=[],this.modelId=e,this.revisionId=t,this.cadModel=n.cadModelMetadata,this.nodesApiClient=r,this.nodeIdAndTreeIndexMaps=new w(e,t,this.nodesApiClient),this.cadNode=n,this.add(this.cadNode),Object.defineProperty(this,"visible",{get:()=>this.cadNode.visible,set:e=>{this.cadNode.visible=e}})}get nodeTransformProvider(){return this.cadNode.nodeTransformProvider}get styledNodeCollections(){return this._styledNodeCollections}get modelUnit(){return this.cadNode.cadModelMetadata.scene.unit}get modelUnitToMetersFactor(){return S.get(this.modelUnit)}setDefaultNodeAppearance(e){this.cadNode.defaultNodeAppearance=e}getDefaultNodeAppearance(){return this.cadNode.defaultNodeAppearance}assignStyledNodeCollection(e,t){this._styledNodeCollections.push({nodes:e,appearance:t}),this.cadNode.nodeAppearanceProvider.assignStyledNodeCollection(e,t)}unassignStyledNodeCollection(e){const t=this._styledNodeCollections.findIndex(t=>t.nodes===e);-1!==t&&this._styledNodeCollections.splice(t,1),this.cadNode.nodeAppearanceProvider.unassignStyledNodeCollection(e)}removeAllStyledNodeCollections(){this.cadNode.nodeAppearanceProvider.clear()}setNodeTransform(e,t){this.nodeTransformProvider.setNodeTransform(e,t)}resetNodeTransform(e){this.nodeTransformProvider.resetNodeTransform(e)}mapFromCdfToModelCoordinates(e,t){return(t=void 0!==t?t:new i.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.modelMatrix),t}mapPositionFromModelToCdfCoordinates(e,t){return(t=void 0!==t?t:new i.Vector3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}mapBoxFromModelToCdfCoordinates(e,t){return(t=null!=t?t:new i.Box3)!==e&&t.copy(e),t.applyMatrix4(this.cadModel.inverseModelMatrix),t}dispose(){this.children=[]}async getSubtreeTreeIndices(e){return this.determineTreeIndices(e,!0)}async getAncestorTreeIndices(e,t){const n=await this.mapTreeIndexToNodeId(e),r=await 
this.nodesApiClient.determineNodeAncestorsByNodeId(this.modelId,this.revisionId,n,t);return new o.h(r.treeIndex,r.subtreeSize)}getModelBoundingBox(e,t){const n=t?this.cadModel.scene.getBoundsOfMostGeometry():this.cadModel.scene.root.bounds;return(e=e||new i.Box3).copy(n),e.applyMatrix4(this.cadModel.modelMatrix),e}getCameraConfiguration(){return this.cadModel.cameraConfiguration}setModelTransformation(e){this.cadNode.setModelTransformation(e)}getModelTransformation(e){return this.cadNode.getModelTransformation(e)}async getBoundingBoxByNodeId(e,t){try{return(t=await this.nodesApiClient.getBoundingBoxByNodeId(this.modelId,this.revisionId,e,t)).applyMatrix4(this.cadModel.modelMatrix),t}catch(e){throw Object(o.u)(e,{moduleName:"Cognite3DModel",methodName:"getBoundingBoxByNodeId"}),e}}async getBoundingBoxByTreeIndex(e,t){const n=await this.nodeIdAndTreeIndexMaps.getNodeId(e);return this.getBoundingBoxByNodeId(n,t)}iterateNodesByTreeIndex(e){return M(0,this.cadModel.scene.maxTreeIndex,e)}get nodeCount(){return this.cadModel.scene.maxTreeIndex+1}async iterateSubtreeByTreeIndex(e,t){const n=await this.determineTreeIndices(e,!0);return M(n.from,n.toInclusive,t)}async setNodeTransformByTreeIndex(e,t,n=!0){const r=await this.determineTreeIndices(e,n);return this.nodeTransformProvider.setNodeTransform(r,t),r.count}async resetNodeTransformByTreeIndex(e,t=!0){const n=await this.determineTreeIndices(e,t);return this.nodeTransformProvider.resetNodeTransform(n),n.count}async mapNodeIdsToTreeIndices(e){return this.nodeIdAndTreeIndexMaps.getTreeIndices(e)}async mapNodeIdToTreeIndex(e){return this.nodeIdAndTreeIndexMaps.getTreeIndex(e)}async mapTreeIndicesToNodeIds(e){return this.nodeIdAndTreeIndexMaps.getNodeIds(e)}async mapTreeIndexToNodeId(e){return this.nodeIdAndTreeIndexMaps.getNodeId(e)}async determineTreeIndices(e,t){let n=1;if(t){const t=await this.nodeIdAndTreeIndexMaps.getSubtreeSize(e);n=t||1}return new o.h(e,n)}}
|
|
235
260
|
/*!
|
|
236
261
|
* Copyright 2021 Cognite AS
|
|
237
|
-
*/
|
|
262
|
+
*/class R extends i.Object3D{constructor(e,t,n){super(),this.type="pointcloud",this.modelId=e,this.revisionId=t,this.pointCloudNode=n,this.add(n)}dispose(){this.children=[]}getModelBoundingBox(e){return this.pointCloudNode.getBoundingBox(e)}getCameraConfiguration(){return this.pointCloudNode.cameraConfiguration}setModelTransformation(e){this.pointCloudNode.setModelTransformation(e)}getModelTransformation(e){return this.pointCloudNode.getModelTransformation(e)}setClassVisible(e,t){this.pointCloudNode.setClassVisible(e,t)}isClassVisible(e){return this.pointCloudNode.isClassVisible(e)}hasClass(e){return this.pointCloudNode.hasClass(e)}getClasses(){return this.pointCloudNode.getClasses()}get visiblePointCount(){return this.pointCloudNode.visiblePointCount}get pointColorType(){return this.pointCloudNode.pointColorType}set pointColorType(e){this.pointCloudNode.pointColorType=e}get pointSize(){return this.pointCloudNode.pointSize}set pointSize(e){this.pointCloudNode.pointSize=e}get pointShape(){return this.pointCloudNode.pointShape}set pointShape(e){this.pointCloudNode.pointShape=e}}
|
|
238
263
|
/*!
|
|
239
264
|
* Copyright 2021 Cognite AS
|
|
240
265
|
*/
|
|
266
|
+
class N{constructor(e){this._loading=!1,N.loadStyles(),this.el=document.createElement("div"),this.el.title=N.titles.idle,this.el.className=N.classnames.base,this.el.innerHTML='<svg fill="none" xmlns="http://www.w3.org/2000/svg" width="64" height="38" viewBox="0 0 64 38">\n <path id="reveal-spinner-top-1" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-28.2275 5.9674-28.7378 6.0661-29.2355 6.1624h-.0001c-1.3944.2698-2.6891.5203-3.9772.7036v-6.3583c-.0652-1.475-1.2799-2.6372-2.7563-2.6372s-2.6911 1.1622-2.7563 2.6372v7.1072z"/>\n <path id="reveal-spinner-top-2" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-16.8452 3.375-17.0189 3.4183-17.1711 3.506-1.7276.3694-3.3435.7389-4.9594 1.1083l-.0571.013V7.6106c-.0465-1.48876-1.2668-2.67155-2.7563-2.67155S22.8005 6.12184 22.754 7.6106v13.5866z"/>\n <path id="reveal-spinner-top-3" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288c-7.274 1.2593-9.1537 1.6344-11.0347 2.0095V2.74337c-.0338-1.54541-1.3116-2.7721267-2.8571-2.74285692h-.0134C35.0327.00556379 33.8015 1.2272 33.7846 2.74337V18.7098z"/>\n <path id="reveal-spinner-top-4" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288v-3.2274c0-1.5594-1.2642-2.82358-2.8236-2.82358s-2.8235 1.26418-2.8235 2.82358v4.4z"/>\n <path id="reveal-spinner-center" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M50.4542 15.3288C5.46324 23.6926 1.83164 23.4439 1.45248 22.3172c-.773329-1.6548 3.63593-3.8942 5.79662-4.9915.28489-.1447.53068-.2695.72036-.3706v-.1237h-.12505c-1.62823.6226-8.646718 3.4958-7.768735 5.9832.875294 2.7456 11.653125 3.4958 34.086725-1.9939 18.7926-4.6145 27.4393-5.4925 28.3173-3.7446.6266 1.4952-4.0121 4.3644-8.5217 6.3597-.0966.0984-.1427.2357-.125.3724l.002.002c.1243.1244.2474.2475.3745.1231 1.6268-.6266 10.1499-4.238 9.7721-7.2323-.3738-1.9935-4.8842-2.6175-13.5274-1.3722z"/>\n <path id="reveal-spinner-bot-1" fill-rule="evenodd" 
clip-rule="evenodd" fill="currentColor" d="M11.7557 30.5551v-2.0894c1.8291-.1048 3.6511-.3108 5.4575-.6171v2.7065c-.033 1.4835-1.245 2.6689-2.7288 2.6689s-2.6958-1.1854-2.7287-2.6689z"/>\n <path id="reveal-spinner-bot-2" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M22.7715 26.9222v7.602c-.0132 1.496 1.1804 2.7238 2.6762 2.7529 1.4957.0291 2.7362-1.1513 2.7813-2.6467v-8.8309c-1.7953.4821-3.6177.857-5.4575 1.1227z"/>\n <path id="reveal-spinner-bot-3" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M33.87 24.7193h-.3764v5.3419c.066 1.5151 1.3136 2.7095 2.8302 2.7095s2.7642-1.1944 2.8303-2.7095v-6.5883c-.3876.0886-.7821.1771-1.1838.2671l-.0007.0002c-1.2926.2897-2.6585.5959-4.0996.9764v.0027z"/>\n <path id="reveal-spinner-bot-4" fill-rule="evenodd" clip-rule="evenodd" fill="currentColor" d="M44.5094 22.2377c2.1383-.3726 4.0242-.7438 5.6578-1.1137v4.3375c-.0249 1.5094-1.2534 2.7212-2.763 2.7254h-.0162c-.7432.0206-1.4641-.255-2.004-.7662-.54-.5112-.8546-1.216-.8746-1.9592v-3.2238z"/>\n</svg>\n',e.style.position="relative",e.appendChild(this.el)}static loadStyles(){if(document.getElementById(N.stylesId))return;const e=document.createElement("style");e.id=N.stylesId,e.appendChild(document.createTextNode(".reveal-viewer-spinner {\n position: absolute;\n top: 10px;\n left: 10px;\n color: white;\n}\n.reveal-viewer-spinner--dark {\n color: black;\n}\n\n.reveal-viewer-spinner--loading #reveal-spinner-bot-1,\n.reveal-viewer-spinner--loading #reveal-spinner-top-1 {\n animation: reveal-loading-opacity 0.8s ease-out infinite alternate;\n}\n.reveal-viewer-spinner--loading #reveal-spinner-bot-2,\n.reveal-viewer-spinner--loading #reveal-spinner-top-2 {\n animation: reveal-loading-opacity 0.8s ease-out 0.2s infinite alternate;\n}\n.reveal-viewer-spinner--loading #reveal-spinner-bot-3,\n.reveal-viewer-spinner--loading #reveal-spinner-top-3 {\n animation: reveal-loading-opacity 0.8s ease-out 0.4s infinite alternate;\n}\n.reveal-viewer-spinner--loading 
#reveal-spinner-bot-4,\n.reveal-viewer-spinner--loading #reveal-spinner-top-4 {\n animation: reveal-loading-opacity 0.8s ease-out 0.6s infinite alternate;\n}\n\n.reveal-viewer-spinner--loading #center {\n transform-origin: center;\n animation: reveal-loading-scale 1.6s ease-out infinite 0.8s alternate,\n reveal-loading-opacity 1.6s ease-out infinite alternate;\n}\n\n@keyframes reveal-loading-opacity {\n 0% {\n opacity: 1;\n }\n 100% {\n opacity: 0.2;\n }\n}\n\n@keyframes reveal-loading-scale {\n 100% {\n transform: scaleY(1.2) translateY(0.6px);\n }\n}\n")),document.head.appendChild(e)}get loading(){return this._loading}set loading(e){this._loading=e,e?(this.el.classList.add(N.classnames.loading),this.el.title=N.titles.loading):(this.el.classList.remove(N.classnames.loading),this.el.title=N.titles.idle)}updateBackgroundColor(e){const{l:t}=e.getHSL({h:0,s:0,l:0});t>.5?this.el.classList.add(N.classnames.dark):this.el.classList.remove(N.classnames.dark)}dispose(){this.el.remove();const e=document.getElementById(N.stylesId);e&&e.remove()}}N.stylesId="reveal-viewer-spinner-styles",N.classnames={base:"reveal-viewer-spinner",loading:"reveal-viewer-spinner--loading",dark:"reveal-viewer-spinner--dark"},N.titles={idle:"2.1.2",loading:"2.1.2 Loading..."};var D=n(9);
|
|
241
267
|
/*!
|
|
242
268
|
* Copyright 2021 Cognite AS
|
|
243
|
-
*/
|
|
269
|
+
*/const z=new i.Vector2,E=new i.Raycaster;function O(e,t,n=.05){const{normalizedCoords:r,camera:o}=t;z.set(r.x,r.y),E.setFromCamera(r,o),E.params.Points={threshold:n};return E.intersectObjects(e,!0).filter(e=>function(e,t){let n=!0;for(let r=0;n&&r<t.length;++r)n=t[r].distanceToPoint(e)>=0;return n}(e.point,t.clippingPlanes)).map(t=>{const n=function(e,t){for(;"Points"===e.type&&null!==e.parent;)e=e.parent;if(e instanceof D.PointCloudOctree){const n=e.root;return t.find(e=>n.pointcloud===e.potreeNode.octtree)||null}return null}(t.object,e);if(null===n)throw new Error("Could not find PointCloudNode for intersected point");return{distance:t.distance,point:t.point,pointIndex:t.index,pointCloudNode:n,object:t.object}})}var A=n(6),F=n.n(A);
|
|
244
270
|
/*!
|
|
245
271
|
* Copyright 2021 Cognite AS
|
|
246
|
-
*/
|
|
272
|
+
*/
|
|
273
|
+
class B{constructor(e,t){this._ongoingOperations=0,this._interrupted=!1,this._itemToTreeIndexRangeCallback=e,this._notifyChangedCallback=t}interrupt(){this._interrupted=!0}get isLoading(){return!this._interrupted&&this._ongoingOperations>0}async pageResults(e,t){const n=this._itemToTreeIndexRangeCallback,r=this._notifyChangedCallback;this._ongoingOperations++;try{let o=await t;for(;!this._interrupted;){const t=o.next?o.next():void 0;if(o.items.forEach(t=>{const r=n(t);e.addRange(r)}),r(),!t)break;o=await t}return!this._interrupted}finally{this._ongoingOperations--}}}var k=n(14),L=n.n(k);
|
|
247
274
|
/*!
|
|
248
275
|
* Copyright 2021 Cognite AS
|
|
249
|
-
|
|
276
|
+
*/
|
|
277
|
+
class G extends r.d{constructor(e,t){super(G.classToken),this._indexSet=new o.e,this._client=e,this._model=t,this._fetchResultHelper=void 0}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=this._model;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const n=new B(e=>new o.h(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=n;const r={assetId:e.assetId,intersectsBoundingBox:function(e){if(void 0===e)return;const n=(new i.Box3).copy(e);return t.mapBoxFromModelToCdfCoordinates(n,n),{min:[n.min.x,n.min.y,n.min.z],max:[n.max.x,n.max.y,n.max.z]}}(e.boundingBox),limit:1e3},a=new o.e;this._indexSet=a,this._filter=e;const s=this._client.assetMappings3D.list(t.modelId,t.revisionId,r);await n.pageResults(a,s)&&(this._fetchResultHelper=void 0)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:L()(this._filter)}}}G.classToken="AssetNodeCollection";var U=n(25),V=n.n(U);
|
|
250
278
|
/*!
|
|
251
279
|
* Copyright 2021 Cognite AS
|
|
252
280
|
*/
|
|
253
|
-
class gn{constructor(e,t,n,r){var i,a;this._lastFrameSceneState={hasBackElements:!0,hasInFrontElements:!0,hasGhostElements:!0,hasCustomObjects:!0},this._rootSectorNodeBuffer=new Set,this._outlineTexelSize=2,this._autoSetTargetSize=!1,this._uiObjects=[],this._renderer=e,this._renderOptions=r,this._materialManager=n,this._orthographicCamera=new o.OrthographicCamera(-1,1,1,-1,-1,1),this._renderTarget=null,this._originalScene=t,this._cadScene=new o.Scene,this._cadScene.autoUpdate=!1,this._normalScene=new o.Scene,this._normalScene.autoUpdate=!1,this._inFrontScene=new o.Scene,this._inFrontScene.autoUpdate=!1,this._compositionScene=new o.Scene,this._compositionScene.autoUpdate=!1,this._fxaaScene=new o.Scene,this._fxaaScene.autoUpdate=!1,this._ssaoScene=new o.Scene,this._ssaoScene.autoUpdate=!1,this._ssaoBlurScene=new o.Scene,this._ssaoBlurScene.autoUpdate=!1,this._emptyScene=new o.Scene,this._emptyScene.autoUpdate=!1;const s=e.capabilities.isWebGL2,d=this.createOutlineColorTexture();this._inFrontRenderedCadModelTarget=yn(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._inFrontRenderedCadModelTarget.depthTexture=new o.DepthTexture(0,0),this._inFrontRenderedCadModelTarget.depthTexture.format=o.DepthFormat,this._inFrontRenderedCadModelTarget.depthTexture.type=o.UnsignedIntType,this._normalRenderedCadModelTarget=yn(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._normalRenderedCadModelTarget.depthTexture=new o.DepthTexture(0,0),this._normalRenderedCadModelTarget.depthTexture.format=o.DepthFormat,this._normalRenderedCadModelTarget.depthTexture.type=o.UnsignedIntType,this._ghostObjectRenderTarget=yn(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._ghostObjectRenderTarget.depthTexture=new o.DepthTexture(0,0),this._ghostObjectRenderTarget.depthTexture.format=o.DepthFormat,this._ghostObjectRenderTarget.depthTexture.type=o.UnsignedIntType,this._customObjectRenderTarget=yn(s,this.multiSampleCountHint,{stencilBuffer:!1}),this._customObjectRenderTarget.depthTexture=new 
o.DepthTexture(0,0),this._customObjectRenderTarget.depthTexture.format=o.DepthFormat,this._customObjectRenderTarget.depthTexture.type=o.UnsignedIntType,this._compositionTarget=new o.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._compositionTarget.depthTexture=new o.DepthTexture(0,0),this._compositionTarget.depthTexture.format=o.DepthFormat,this._compositionTarget.depthTexture.type=o.UnsignedIntType,this._ssaoTarget=new o.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoTarget.depthTexture=new o.DepthTexture(0,0),this._ssaoTarget.depthTexture.format=o.DepthFormat,this._ssaoTarget.depthTexture.type=o.UnsignedIntType,this._ssaoBlurTarget=new o.WebGLRenderTarget(0,0,{stencilBuffer:!1}),this._ssaoBlurTarget.depthTexture=new o.DepthTexture(0,0),this._ssaoBlurTarget.depthTexture.format=o.DepthFormat,this._ssaoBlurTarget.depthTexture.type=o.UnsignedIntType,this._combineOutlineDetectionMaterial=new o.ShaderMaterial({vertexShader:Rt.vertex,fragmentShader:Rt.fragment,uniforms:{tFront:{value:this._inFrontRenderedCadModelTarget.texture},tFrontDepth:{value:this._inFrontRenderedCadModelTarget.depthTexture},tBack:{value:this._normalRenderedCadModelTarget.texture},tBackDepth:{value:this._normalRenderedCadModelTarget.depthTexture},tCustom:{value:this._customObjectRenderTarget.texture},tCustomDepth:{value:this._customObjectRenderTarget.depthTexture},tGhost:{value:this._ghostObjectRenderTarget.texture},tGhostDepth:{value:this._ghostObjectRenderTarget.depthTexture},tOutlineColors:{value:d},resolution:{value:new o.Vector2(0,0)},texelSize:{value:new o.Vector2(0,0)},cameraNear:{value:.1},cameraFar:{value:1e4},edgeStrengthMultiplier:{value:2.5},edgeGrayScaleIntensity:{value:.1}},extensions:{fragDepth:!0},defines:{EDGES:null!==(a=null===(i=this._renderOptions.edgeDetectionParameters)||void 0===i?void 0:i.enabled)&&void 0!==a?a:un.edgeDetectionParameters.enabled}});const 
l=this.createNoiseTexture(),c=this.ssaoParameters(this._renderOptions),u=c.sampleSize,m=this.createKernel(u),p=c.sampleRadius,h=c.depthCheckBias;this._ssaoMaterial=new o.ShaderMaterial({uniforms:{tDepth:{value:this._compositionTarget.depthTexture},tNoise:{value:l},kernel:{value:m},sampleRadius:{value:p},bias:{value:h},projMatrix:{value:new o.Matrix4},inverseProjectionMatrix:{value:new o.Matrix4},resolution:{value:new o.Vector2}},defines:{MAX_KERNEL_SIZE:u},vertexShader:Dt.vertex,fragmentShader:Dt.fragment}),this._ssaoBlurMaterial=new o.ShaderMaterial({uniforms:{tDiffuse:{value:this._compositionTarget.texture},tAmbientOcclusion:{value:this._ssaoTarget.texture},resolution:{value:new o.Vector2}},vertexShader:zt.vertex,fragmentShader:zt.fragment});const f=this.supportsSsao(c)?this._ssaoBlurTarget.texture:this._compositionTarget.texture;this._fxaaMaterial=new o.ShaderMaterial({uniforms:{tDiffuse:{value:f},tDepth:{value:this._compositionTarget.depthTexture},resolution:{value:new o.Vector2},inverseResolution:{value:new o.Vector2}},vertexShader:Nt.vertex,fragmentShader:Nt.fragment,extensions:{fragDepth:!0}}),this.setupCompositionScene(),this.setupSsaoScene(),this.setupSsaoBlurCombineScene(),this.setupFxaaScene(),this._normalSceneBuilder=new _n(this._normalScene),this._inFrontSceneBuilder=new _n(this._inFrontScene)}set renderOptions(e){const t=this.ssaoParameters(e),n={...t};this.setSsaoParameters(n),this._renderOptions={...e,ssaoRenderParameters:{...t}}}addUiObject(e,t,n){this._uiObjects.push({object:e,screenPos:t,width:n.x,height:n.y})}removeUiObject(e){this._uiObjects=this._uiObjects.filter(t=>{const n=t.object;return e!==n})}ssaoParameters(e){var t;return null!==(t=null==e?void 0:e.ssaoRenderParameters)&&void 0!==t?t:{...un.ssaoRenderParameters}}get antiAliasingMode(){const{antiAliasing:e=un.antiAliasing}=this._renderOptions;return e}get multiSampleCountHint(){const{multiSampleCountHint:e=un.multiSampleCountHint}=this._renderOptions;return 
e}supportsSsao(e){return!ve()&&(this._renderer.capabilities.isWebGL2||this._renderer.extensions.has("EXT_frag_depth"))&&e.sampleSize!==cn.None}renderDetailedToDepthOnly(e){const t={renderMode:this._materialManager.getRenderMode()},n=new N(this._renderer);this._materialManager.setRenderMode(T.DepthBufferOnly);try{n.setRenderTarget(this._renderTarget),this.setVisibilityOfSectors(K.a.Simple,!1),this.traverseForRootSectorNode(this._originalScene),this.extractCadNodes(this._originalScene),this.clearTarget(this._renderTarget);const{hasBackElements:r,hasInFrontElements:o,hasGhostElements:i}=this.splitToScenes();r&&!i?this.renderNormalCadModelsFromBaseScene(e,this._renderTarget):r&&i&&(this.renderNormalCadModels(e,this._renderTarget),this._normalSceneBuilder.restoreOriginalScene()),o&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene())}finally{this._materialManager.setRenderMode(t.renderMode),n.resetState(),this.restoreCadNodes(),this.setVisibilityOfSectors(K.a.Simple,!0)}}render(e){const t=this._renderer,n=this._originalScene,r=new N(t),o={autoClear:t.autoClear,clearAlpha:t.getClearAlpha(),renderMode:this._materialManager.getRenderMode()};t.info.autoReset=!1,t.info.reset(),r.autoClear=!1;try{r.setRenderTarget(this._renderTarget),this.updateRenderSize(t),t.info.autoReset=!1,t.info.reset(),r.autoClear=!1,this.traverseForRootSectorNode(n),this.extractCadNodes(n),this.clearTarget(this._ghostObjectRenderTarget),this.clearTarget(this._compositionTarget),this.clearTarget(this._customObjectRenderTarget),t.setClearAlpha(0),this.clearTarget(this._normalRenderedCadModelTarget),this.clearTarget(this._inFrontRenderedCadModelTarget),t.setClearAlpha(o.clearAlpha);const 
i={...this._lastFrameSceneState},{hasBackElements:a,hasInFrontElements:s,hasGhostElements:d}=this.splitToScenes(),l=n.children.length>0;this._lastFrameSceneState={hasBackElements:a,hasInFrontElements:s,hasGhostElements:d,hasCustomObjects:l},a&&!d?this.renderNormalCadModelsFromBaseScene(e):a&&d?(this.renderNormalCadModels(e),this._normalSceneBuilder.restoreOriginalScene(),this.renderGhostedCadModelsFromBaseScene(e)):!a&&d&&this.renderGhostedCadModelsFromBaseScene(e),s&&(this.renderInFrontCadModels(e),this._inFrontSceneBuilder.restoreOriginalScene()),l&&this.renderCustomObjects(n,e),t.capabilities.isWebGL2&&(!a&&i.hasBackElements&&this.explicitFlushRender(e,this._normalRenderedCadModelTarget),!d&&i.hasGhostElements&&this.explicitFlushRender(e,this._ghostObjectRenderTarget),!s&&i.hasInFrontElements&&this.explicitFlushRender(e,this._inFrontRenderedCadModelTarget),!l&&i.hasInFrontElements&&this.explicitFlushRender(e,this._customObjectRenderTarget));const c=this.supportsSsao(this.ssaoParameters(this._renderOptions));switch(this.antiAliasingMode){case ln.FXAA:this.renderComposition(t,e,this._compositionTarget),r.autoClear=o.autoClear,c&&(this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._ssaoBlurTarget,this._ssaoBlurScene)),this.renderPostProcessStep(t,this._renderTarget,this._fxaaScene);break;case ln.NoAA:t.autoClear=o.autoClear,c?(this.renderComposition(t,e,this._compositionTarget),this.renderSsao(t,this._ssaoTarget,e),this.renderPostProcessStep(t,this._renderTarget,this._ssaoBlurScene)):this.renderComposition(t,e,this._renderTarget);break;default:throw new Error("Unsupported anti-aliasing mode: "+this.antiAliasingMode)}}finally{r.resetState(),this._materialManager.setRenderMode(o.renderMode),this.restoreCadNodes()}}restoreCadNodes(){this._rootSectorNodeBuffer.forEach(e=>{e[1].add(e[0])}),this._rootSectorNodeBuffer.clear()}extractCadNodes(e){this._rootSectorNodeBuffer.forEach(t=>{if(t[1].parent!==e&&!(t[1].parent instanceof _))throw new 
Error("CadNode must be put at scene root");this._cadScene.add(t[0])})}setRenderTarget(e){this._renderTarget=e}getRenderTarget(){return this._renderTarget}setRenderTargetAutoSize(e){this._autoSetTargetSize=e}getRenderTargetAutoSize(){return this._autoSetTargetSize}clearTarget(e){this._renderer.setRenderTarget(e),this._renderer.clear()}explicitFlushRender(e,t){this._renderer.setRenderTarget(t),this._renderer.render(this._emptyScene,e)}splitToScenes(){const e={hasBackElements:!1,hasInFrontElements:!1,hasGhostElements:!1};return this._rootSectorNodeBuffer.forEach(t=>{const n=t[1],r=t[0],i=this._materialManager.getModelBackTreeIndices(n.cadModelMetadata.blobUrl),a=this._materialManager.getModelInFrontTreeIndices(n.cadModelMetadata.blobUrl),s=this._materialManager.getModelGhostedTreeIndices(n.cadModelMetadata.blobUrl),d=i.count>0,l=a.count>0,c=s.count>0;e.hasBackElements=e.hasBackElements||d,e.hasInFrontElements=e.hasInFrontElements||l,e.hasGhostElements=e.hasGhostElements||c;const u=new o.Object3D;u.applyMatrix4(r.matrix),d&&c&&this._normalScene.add(u);const m=new o.Object3D;m.applyMatrix4(r.matrix),l&&this._inFrontScene.add(m);const p=[t[0]];for(;p.length>0;){const 
e=p.pop(),t=e.userData.treeIndices;t?(l&&a.hasIntersectionWith(t)&&this._inFrontSceneBuilder.addElement(e,m),d&&!c||c&&i.hasIntersectionWith(t)&&this._normalSceneBuilder.addElement(e,u)):p.push(...e.children)}}),e}renderNormalCadModels(e,t=this._normalRenderedCadModelTarget){this._normalSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._renderer.render(this._normalScene,e)}renderNormalCadModelsFromBaseScene(e,t=this._normalRenderedCadModelTarget){this._renderer.setRenderTarget(t),this._renderer.render(this._cadScene,e)}renderInFrontCadModels(e,t=this._inFrontRenderedCadModelTarget){this._inFrontSceneBuilder.populateTemporaryScene(),this._renderer.setRenderTarget(t),this._materialManager.setRenderMode(T.Effects),this._renderer.render(this._inFrontScene,e)}renderGhostedCadModelsFromBaseScene(e){this._renderer.setRenderTarget(this._ghostObjectRenderTarget),this._materialManager.setRenderMode(T.Ghost),this._renderer.render(this._cadScene,e)}renderCustomObjects(e,t){this._renderer.setRenderTarget(this._customObjectRenderTarget),this._renderer.render(e,t)}updateRenderSize(e){const t=new o.Vector2;return e.getSize(t),this._renderTarget&&this._autoSetTargetSize&&t.x!==this._renderTarget.width&&t.y!==this._renderTarget.height&&this._renderTarget.setSize(t.x,t.y),t.x===this._normalRenderedCadModelTarget.width&&t.y===this._normalRenderedCadModelTarget.height||(this._normalRenderedCadModelTarget.setSize(t.x,t.y),this._inFrontRenderedCadModelTarget.setSize(t.x,t.y),this._customObjectRenderTarget.setSize(t.x,t.y),this._ghostObjectRenderTarget.setSize(t.x,t.y),this._compositionTarget.setSize(t.x,t.y),this._ssaoTarget.setSize(t.x,t.y),this._ssaoBlurTarget.setSize(t.x,t.y),this._combineOutlineDetectionMaterial.uniforms.texelSize.value=new 
o.Vector2(this._outlineTexelSize/t.x,this._outlineTexelSize/t.y),this._combineOutlineDetectionMaterial.uniforms.resolution.value=t,this._ssaoMaterial.uniforms.resolution.value=t,this._ssaoBlurMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.resolution.value=t,this._fxaaMaterial.uniforms.inverseResolution.value=new o.Vector2(1/t.x,1/t.y)),t}renderComposition(e,t,n){this._combineOutlineDetectionMaterial.uniforms.cameraNear.value=t.near,this._combineOutlineDetectionMaterial.uniforms.cameraFar.value=t.far,this.renderPostProcessStep(e,n,this._compositionScene)}setSsaoParameters(e){var t;const n=un.ssaoRenderParameters;if(this._ssaoMaterial.uniforms.sampleRadius.value=e.sampleRadius,this._ssaoMaterial.uniforms.bias.value=e.depthCheckBias,e.sampleSize!==this.ssaoParameters(this._renderOptions).sampleSize){const r=null!==(t=null==e?void 0:e.sampleSize)&&void 0!==t?t:n.sampleSize,o=this.createKernel(r);this._fxaaMaterial.uniforms.tDiffuse.value=e.sampleSize!==cn.None?this._ssaoBlurTarget.texture:this._compositionTarget.texture,this._ssaoMaterial.uniforms.kernel.value=o,this._ssaoMaterial.defines={MAX_KERNEL_SIZE:r},this._ssaoMaterial.needsUpdate=!0}}renderPostProcessStep(e,t,n){if(e.setRenderTarget(t),e.render(n,this._orthographicCamera),t===this._renderTarget){const t=e.getSize(new o.Vector2),n=new o.Vector2(e.domElement.clientWidth,e.domElement.clientHeight),r=new o.Vector2(t.x/n.x,t.y/n.y);e.autoClear=!1,this._uiObjects.forEach(t=>{const n=new o.Scene;n.add(t.object);const i=t.screenPos.clone().multiply(r),a=t.width*r.x,s=t.height*r.y;e.setViewport(i.x,i.y,a,s),e.clearDepth(),e.render(n,this._orthographicCamera)}),e.setViewport(0,0,t.x,t.y),e.autoClear=!0}}renderSsao(e,t,n){this._ssaoMaterial.uniforms.inverseProjectionMatrix.value=n.projectionMatrixInverse,this._ssaoMaterial.uniforms.projMatrix.value=n.projectionMatrix,this.renderPostProcessStep(e,t,this._ssaoScene)}createOutlineColorTexture(){const e=new Uint8Array(32),t=new o.DataTexture(e,8,1);return 
bn(t.image.data,mn.White,V.White),bn(t.image.data,mn.Black,V.Black),bn(t.image.data,mn.Cyan,V.Cyan),bn(t.image.data,mn.Blue,V.Blue),bn(t.image.data,mn.Green,j.Green),bn(t.image.data,mn.Red,j.Red),bn(t.image.data,mn.Orange,V.Orange),t}setupCompositionScene(){const e=this.createRenderTriangle(),t=new o.Mesh(e,this._combineOutlineDetectionMaterial);this._compositionScene.add(t)}setupFxaaScene(){const e=this.createRenderTriangle(),t=new o.Mesh(e,this._fxaaMaterial);this._fxaaScene.add(t)}setupSsaoScene(){const e=this.createRenderTriangle(),t=new o.Mesh(e,this._ssaoMaterial);this._ssaoScene.add(t)}setupSsaoBlurCombineScene(){const e=this.createRenderTriangle(),t=new o.Mesh(e,this._ssaoBlurMaterial);this._ssaoBlurScene.add(t)}createNoiseTexture(){const e=new Float32Array(65536);for(let t=0;t<16384;t++){const n=4*t,r=2*Math.random()-1,o=2*Math.random()-1,i=2*Math.random()-1;e[n]=r,e[n+1]=o,e[n+2]=i,e[n+3]=1}const t=new o.DataTexture(e,128,128,o.RGBAFormat,o.FloatType);return t.wrapS=o.RepeatWrapping,t.wrapT=o.RepeatWrapping,t}createKernel(e){const t=[];for(let r=0;r<e;r++){const i=new o.Vector3;for(;i.length()<.5;)i.x=2*Math.random()-1,i.y=2*Math.random()-1,i.z=Math.random();i.normalize();let a=r/e;a=n(.1,1,a*a),i.multiplyScalar(a),t.push(i)}return t;function n(e,t,n){return e+(t-e)*(n=(n=n<0?0:n)>1?1:n)}}createRenderTriangle(){const e=new o.BufferGeometry,t=new Float32Array([-1,-1,0,3,-1,0,-1,3,0]),n=new Float32Array([0,0,2,0,0,2]);return e.setAttribute("position",new o.BufferAttribute(t,3)),e.setAttribute("uv",new o.BufferAttribute(n,2)),e}traverseForRootSectorNode(e){const t=[e];for(;t.length>0;){const e=t.pop();e instanceof de?this._rootSectorNodeBuffer.add([e,e.parent]):e instanceof o.Group||t.push(...e.children)}}setVisibilityOfSectors(e,t){this._originalScene.traverse(n=>{n instanceof se&&n.levelOfDetail===e&&(n.visible=t)})}}function yn(e,t,n){if(e&&t>1){const e=new o.WebGLMultisampleRenderTarget(0,0,n);return e.samples=t,e}return new 
o.WebGLRenderTarget(0,0,n)}function bn(e,t,n){e[4*t+0]=Math.floor(255*n.r),e[4*t+1]=Math.floor(255*n.g),e[4*t+2]=Math.floor(255*n.b),e[4*t+3]=255}class _n{constructor(e){this.buffer=[],this.temporaryScene=e}addElement(e,t){this.buffer.push({object:e,parent:e.parent,sceneParent:t})}populateTemporaryScene(){this.buffer.forEach(e=>e.sceneParent.add(e.object))}restoreOriginalScene(){this.buffer.forEach(e=>{e.parent.add(e.object)}),this.buffer.length=0,this.temporaryScene.remove(...this.temporaryScene.children)}}
|
|
281
|
+
class j extends r.d{constructor(e,t,n={}){super(j.classToken),this._indexSet=new o.e,this._filter={},this._client=e,this._modelId=t.modelId,this._revisionId=t.revisionId,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=new o.e,{requestPartitions:n}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const r=new B(e=>new o.h(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=r,this._indexSet=t;const i=V()(1,n+1).map(async o=>{const i=this._client.revisions3D.list3DNodes(this._modelId,this._revisionId,{properties:e,limit:1e3,sortByNodeId:!0,partition:`${o}/${n}`});return r.pageResults(t,i)});this._filter=e,this.notifyChanged(),await Promise.all(i)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:L()(this._filter),options:{...this._options}}}}j.classToken="PropertyFilterNodeCollection";class W extends r.d{constructor(e,t){super(W.classToken),this._innerCollection=t,this._innerCollection.on("changed",()=>{this._cachedIndexSet=void 0,this.notifyChanged()}),this._allTreeIndicesRange=new o.h(0,e.nodeCount)}get isLoading(){return this._innerCollection.isLoading}getIndexSet(){if(void 0===this._cachedIndexSet){const e=this._innerCollection.getIndexSet(),t=new o.e;t.addRange(this._allTreeIndicesRange),t.differenceWith(e),this._cachedIndexSet=t}return this._cachedIndexSet}serialize(){return{token:this.classToken,state:{innerCollection:this._innerCollection.serialize()}}}clear(){throw new Error("clear() is not supported")}}W.classToken="InvertedNodeCollection";
|
|
254
282
|
/*!
|
|
255
283
|
* Copyright 2021 Cognite AS
|
|
256
|
-
*/const Tn=new Image;Tn.src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAABgGlDQ1BJQ0MgcHJvZmlsZQAAKJFjYGCqSCwoyGFhYGDIzSspCnJ3UoiIjFJgv8PAzcDDIMRgxSCemFxc4BgQ4MOAE3y7xsAIoi/rgsxqOqd2d+pGwehjat+yq+1cc3DrAwPulNTiZAYGRg4gOyWlODkXyAbp0UsuKCoBsucA2brlJQUg9hkgW6QI6EAg+wGInQ5hfwGxk8BsJg6wmpAgZyBbBsgWSIKwdUDsdAjbBsROzkhMAbJB/tKBuAEMuIJdFAzNDXx1HQk4nFSQm1MKswMUWjypeaHBQFoIiGUYghlcGBQYDBnMGQwYfBl0GYCWl6RWlIAUO+cXVBZlpmeUKDgCQzdVwTk/t6C0JLVIR8EzL1lPR8HIwNAApA4UbxDjPweBbWAUO48Qy5rMwGDxhoGBuQohlrKcgWGLPQODeDBCTH020EnvGRh2hBckFiXCHc/4jYUQvzjN2AjC5nFiYGC99///ZzUGBvZJDAx/J/7//3vR//9/FwPtv8PAcCAHALbUa33lfYEHAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5AobCyAEEhU0UQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAACAASURBVHjalV3bkiLXESzoC9cZdle7lixF7IMj7Ad/qn/RVliybMmyZhkGGKCBBj+s6ig7yTqNJmJjbzPN6XOpysrKqjP429/+djUzu16vNhgMbDAY2HA4tLIsbTKZWFmW1rat7fd7O5/P1ratXS4Xw5+5Xq82HA47v1+vVzOz9Hcz6/zc5XKxtm07z/Ev/34elz9vOBzaYDCwtm3tfD7b6XSy6/Vq5/M5fU9RFFbXtZVlaXVdm5mlZ1wuFyvLMn2GP78oCjMzO51Odjqd0rNxrP5VFEXnl7+nf4bP4+VySe/jn+Hj97nzMU+nUyuKwo7HY3onfGZRFDabzWyxWNjpdLLVamVN06Tvw3nHL5xb/LfL5WIlDsJ/90Vu29bKsrTr9Wpt23YWHxfocrl0Jtf/HQeE/68GpDaBP5tfwsdzPp/tcrnYcDi0qqpsNptZVVVW17UVRZEWwZ91PB7TJuHF8gnxBR0MBukQ+Lh9kx2Px84iFUVhZVlaWZbpM3Hjq7nCeTezNObL5WLn81keoLqubTqd2vl8ts1mY03TpHGphY/mFte69P/gTaBOLf7CBR0Oh9a2rQ2Hw5tF8pfljYaf65/hz8Hv8/8/nU5p0cuytNFoZKPRyKqqSiceFxInECfRTzlaFPydLZSP1Z8zHo9tNBqlzXA6naxpGjsej3Y4HNJmqOu680z/bJ7H6/WaNu3lcrHD4ZDmDdfBF9/MbL1e236/l3OFm1ptAt4MJU4Un27frfhwPvG+0GaWTheamMgMqQ2AL+QL0DSNnc/nNPnT6dTG47FVVWWXy+XmM/CXLxr/P1shXxh2P/w8f0devNFolMbcNI01TWObzSZtVLcqfDh83t1FuYXiuSrL0sbjsRVFYa+vr2lO+DRHi8//jgei4wLYLKEFwIfipsAHKlPEbsD/7j/DE3q9Xu10OtnxeEy+ej6fp5PuC+obEn0qnlb/HceMC41+GBfbD4JbFDTZuFkQE7jFGQwGaZO6RXh9fU2bt67rjoVyrDUYDOxwOEi/79akqiprmsb2+33CJYy5/J0iF8DWouMCcKH5FLA7wMlmP+0P9wnCF1bPRdxxOp3scDiYmSVTy2bUzb0vBO9uXEj8N5wsPgkM3HDh8eTi+7CP5w3sG3c6nVrTNHY4HGy321ld1+k0O25wPKHG65vEN5SDPv8+drO5Lwad1+vVSpw0BSR48ZWJ50lEk46m3jcLY4jz+Wz7/d7MzCaTiY3H405UgSi9L
EuJotmPIwD033E8yiziBLKlUT4Ux6Asqf8aj8c2Ho/tfD6nk+6Wwt0cR09u2aqqSt9zPB5vcJjCWJEVUAel7DMXKkrAk4MbQ/0M/r/7bNwg+/3ertdr8u24iH7a/d9wlzv6xs9UoSJvCPb/yl/ySeF/ZzPt74KuyDcbbtCyLG02m9nxeLTlcmnr9boTauM40d0cDoe0+BGuUqFzzhI44C5VWKJMmzrx/H3RTvOXc2DXtq3tdrt0OqbT6c3pcx/M5h9NI5rhnAnk08LjRovC40Bcge+IGwMxA4NqPiyOazzeX61WVlVVQvg4FneLDo755OM40MKyJYxcQbIA/NDIfCCCxYHyKcdFcfLIB7jf761pGiuKwubzeQJG7mv9xLMlYB/NII5fjMOo6KSrU65cCm9qhSXYwuAYGF9UVWXv3r2zpmlsuVzadrtNmGE0Gtn5fLamaTqRUUTm8EG455397yWHfAzsGDUiaFOxpm8QRpwO8Nq2tel0mtCvLzqSKMpcqZBMvVhkFpU7UBtMPVtZumizoFtSB8ldl5/4wWCQNsJ6vbbL5WJffPGFtW1rx+Mx+5loYdW7RiAZfy/ZxOMH9G0ChejdAvjv/iL7/d6KorDFYiERvZ98NGEK6KD5V0RPNFF4Chn0KbPPoFfNQy7WZvCLOMCt3uFw6LiFN2/e2MvLi/3jH/+wh4eHRA5dr1cry7IzN7zROWTNYRt0ryX7cwYZiNpVjM+cAVoBZ7aOx2MCOz7JfuJ9UdD0c6zOuABPm0LCjD944ftOewRsmRPhRVbg0RfFf7aqKptMJtY0TWfDOU56//697XY7Wy6XKWR0V6rcNLu6vq8bkolPj4rpmRLOoU8ndy6Xi+12O2vb1h4fH7t+B0I5PNFMaiDVrHw/x+/3+EF2MfckTnI4QJ0wB4Uq9h6Px50owefVreBkMkkU99PTk202G5vP5535UGPhOWGgGkUGQ4WIOeTLgUIflLNTnkXbbDZmZvbw8NAhNHzx0fw7y4exPsb+vFkQATMw5BOuACQyg8oN8IRGG94XmRlK3NA4Vmf0PBJifsPNvLOg7969s7qubbPZdDKnmLBjfKBctTqk/ueSzRxSnNGu4ZPmg3FSZ7PZWF3XNp/POz4nonHVCceQBicqOtF8Mpmq7ov1owhIAVLlShStnOjWX13faDRKC8zP8twGxvxuPXe7na3X60SJq3dCfKA4EA7fE/ZQ8TsnaCIziMSOZ8VeX19TbO8vzwvINGeExvHvjq5V7K8yYYrQ4mRQxI71bYK+UJFdElq/3W53Q/e6yXdSrCiK5PNPp1PSCaxWK5tOpwkcKmJKhZ6RBTCz3yxAn+lQdDCe+uPxaNvt1kajkU0mk87i4knHiVfiCPydQV4U4qgdz36Pn63Qfk5U0cccKs4BN3pVVck94pdbShfc8OINh0M7nU42Go3s7du39unTJ5vNZglLME7jw9PHGg45XufJZ3yA3+chXtM0tt1uE4/vQM99Gy40U7ycuUOqGK0RAy4kXSLTG0UtDB7v/cq5iQhLIFbwhA/O8WKxsMFgkEgfprI9RHRxzvv37ztJIU6JMwvIhwA35WAwsKECCYzWVU7AxRCHw8HW63VKeLgUC+N7NufI/CkWUqmJWJiCoCjiANRpxZdX38efo/QRymVE6Nx/d5CM3/P4+GiTycS2220KC3EhfQ18jtq2taIo7O3bt0kXwGAwEogo63S9Xm0YERbKr+BEOFW52WxSrtuFCxzjq8VnQQbG9Zg34J3tCFqZP/+ZSCSCAhTMp0fRzT0hJIet/Hmoe8D/H4/H9vDwYOv12g6Hww2NzVwJsozD4dDevXtnu92uIx9Dy+hzhO+KVH7CJzn5EJ9G1sVtt1ur69pGo1FSv6DpR9dRVVVnctACKHGo2tEMSO/RwanwMQJFvFlzz+bf2TqwtcS/V1Vlj4+PdjgcUrgcRV58iHy8ZVnaYrGw19fXThTmohJXL+XY3Ov1+hkDRKecdw/G+a+vr
4m4iDRwatAe9+MmUdIuJoFyC6iUOQr98gJFGUx10nPkl+JReBPg5vcIabVadUx4n7ADuRNUS202Gzufz8nKsAVQhziNF3eof7CLL9mMuOnfbrdmZimp4+ZfWQ5W1Mznc/v48aN98cUXHTm1ynqxG1J4RAFAxQJGGkUlO+9zAyqrxtR0pGr2uVqv1wkXMKGj8hN+WPCzLpdLcr/r9TqtmzrpYRiLA2X/iYvvO8w1aXjy0VfxwB3I+NdoNLIPHz7YH/7wh5scO6pwGRiyzDuaZLWwyqzzpEesopKOq39ncKkApH/PbrfroPhc3p6BNOZO6rq26/VqDw8PNhgMkjtQqqUIGJd9oQ9ugP1+b/v9PpERTO0qMoTB0Xa7tR9//DHt4FyYxSpkxA/olljVq15YkUIKKKq6BUVQRZOMIR4mgvDnnOqNSKvISiFuSlRuWdrpdLLFYmHPz89Ja8GgV2kkzew3RVBfbYCreKqqsqqqOmlNJngY6WOat21b+/nnn+/i4BlguctAMKjSooq34CKRKNcfbQLFlnKaWlkdfj4DwkjHwGOPimNQiPL4+Jjk6FhowuPthPosjMBkA7oGlzdPp9MbAsd3Ji4UZsMGg4FVVSUFlszIKbUNF1cwsFHWR20Qj6Mj/95HG7MUS/ECnEhjidY9GUac25zcnj/XaxT8oPr7qkKT5AI4z81MHypS5/N5h+hRIQqKOB28RJOAAFBtRD5VeLr4pRTZhAQNppvVZKiSN1VIweOLijAUd8CJNrXhlb7BP8utB6eGHWedz2ebTCZJQj4ejyXNjfNTIkGhTpGHfF6Nw3G+CtkYbEVaOvWiKoOnJNwKP/wenj4KD9Umjfw8vw9L3vkU42HDwlGmw/F70b36IiMmYJbwfD7bbDZLHA1GYgpwllEs7IN+fX1NzBXLtPGlMQRyfX9RFNY0Ta+eD/16tBH59KF/yxE3OZDFG6Ev0xidzD4pNi42RywqJa3EppEa2zcBbgbXHbgMzzcHR0+yLgB9//F4tLZtUx4aK2AVJ+6boCgK+/rrr62ua/v3v/+deO5cJQ/69hz1yieFcUWfNPwegicifaJows2zqoD2PzuvojaHUjnl1Fio2/C/Y2n85XKx+Xxuz8/PxhaesVfJOx+R6uFwSFW4zuDxALlmzwd0OBxukkkKAEbmkpU8+L2cM2DSKYrFGZmzrDqXSYwKY1URLZ9YP4G4cLl6hByHgGNgsanPjeOj6XRq2+3WHh4e0uexrqLkl/IvL2FySZcvPgM+9QKXy8U+ffpkq9WqUzShkK7K0eOCKgEkAz6up1eIPIoM1AZVglgGyQpIKq0CLzjTvlEdZq5Gg78HRbIOqN0K7HY7Ox6PnSYZiKFKPAFu/p308eJM1tPhLlLNFdCSMBOYyzxGNGu0qBiKRdnLKAZWYZwSkOQKZHJ6AVUHGdG9rEvgRY4KeCMXgfPpGUcPw7mQZOgL5pkjL832ej1H/FzCzaDPQz6UfXHlkKrdizQA+GL4ghxloOBEFZDk6GEUdXJFUo4+jYpJ8d9yMi0Oadn8I7B2F9u38dSzXE42HA5vCkvTBmDT7QWbTiQoAQW/HCZ1ol2uMm7RIvVJuHmTOTuZE2gqmRZPsHJlKr/PHUmQ/FK4Rsm1ImuXqnZ/xU+oc2D8owplMNLwn314eEjtZLzeMM0xL5JnqNxkqDx8TjSCp52zd7nYnXsLsNWI2D/k2ZUKKccPRMkfLjyJ0txK4II1D4qLjwpsWEDqgLFv4VVvJnw39P++CXBDDdn0+A9wvR738olKoZSIFF0BLti9ihs+wVztg1qDvuyXeo/xeJxCXdxAqstIJJ5ly5UTpkaAlCloLiqN8hNctcz5icvlYrPZrNOEwjfBkJFq27Y2m81SzI9FnKr4k09nTnuPANF9HLdZi4Af73LvG6QYOHXKIzc2mUzsm2++sY8fP9r79+9vElzRxnTMpLALS+Aj8osl8vyeyrQr/V+fwPV8PicrwBT6EHUA3toEN
fuo7OU0YyS85EVQfLyLF1QuXrkS1NBj6OoRBkqhFL+vNoNXLj0+PtpoNLLFYtHBEpjb4OTX27dv7e3bt1KDF9U25MApWzelYWSQGPH8OF6k7Ou67qiGLpfL5w2AqNETCKpok30sPihqRKTy2MhmqdJmBk94ir755hv78ssvE17Bz0Okq1yIcguo1nVtvppkNO2TycQ+fvxoHz9+tIeHh45Qlq1AZL6VGIWbS/KpVwkrnD+2eniYL5eLjUajToOptm2tdPPvyNBjf25r1hcWqTCNY2EVEqk+A5z08e9tmsZWq5X99a9/tePxaE9PTx2dgT+PGT8mlHDCttut/fTTTzYcDlPnTbeInA7m/LoTZOwOo3K1HL5BjMRKZ5U5RGV0LoLCVjWj0cg2m42dTqfkEko8PQ6ClJ9SG4CTFUxCqMRJLn8fsX44hv/85z82Ho/t3bt39vT0ZOfzuZPt8g2Ai5ir9vGNxCdOjccnc7vd2g8//GBFUdh6vb5h5WazmRVFkbSTPJaorgC/T4leGOzhZlel6Vym3ratjUajBPRTcaj/53Q6vYnvlc9X1Tw5LNCnsuWBcg6ew6h//vOfKdvogBA3gdI15j6L2TvF6jHj6d08eB6qqrJvvvnG6rq2H3/80ZbL5Q0oVJlXThMrMQlrJllWx+6DtY9t26YsoYPoIZoJ7JEbMVY5ti0nwuTkiWK2nNRBkz2ZTGyxWNycYG+Vys/B2FmVl+XUOWhyVeZSpWN5wh2bIJnDTa9U+KiqozgyQFwRFb6oSiZ059gAMymCfGe4egcnU1GcESUZDYw7kESly6rJw/l8tj/96U+22+3su+++k/42om+jvn8Rr99X94d/5kaYPkeHw8H+9a9/2XQ6TU0vPV1clmVi5fb7fW/TB6VC4p5AUSfXKA/ifQp8zVNdgNenI9qPkjLKX7IuPurioWrxVaoVZU7ffvutDQYDm8/nlqtl5LHhaYmaODKxpXL9GAarDcWfsd/vbblcpnJvfLfRaJTK55yKVhY3IqIi6xm5VWy84e9eVVWKdkrfTd54IOpEGZUtMUGEZicqcmBLgKbVuQh/OW+Z9t1333W0hgrxRp/HYaoytRhD89hUY0iuWeBwTG3Q0+lkr6+v6fmLxcI+fPhgm83Gfv755xSJMf7hpJhqapWzIPxOvtZt237WBHJm7J48uiIrlComCoEUIsZCUew+ggWWub44qh4hkorj5Kl4H8uz2TJhroK7lUZ0uP/ZS+q8NNwrhI/Ho/3yyy/SKubo9ahcTrXDZxDZtq0N3Tep7FfUCJonCZUmyryyKe5rxcKgyReeFcecgeOxMIHD1icaC5pNVSfJERFHC6qFjurA4l1APW/Pc8RiD6y8Vhb4HlzDVmvojQc6JcPEnSsNXh9B5C/uL6nSxrmmCqpejtO5yhqoTKBqOq1MJfcGwA2FixlpFKOmknyC/Tnr9bqjl1SSfC7OURgod2VMpIN0fFWi2eXYkxdN9aTJJWFUzV8uWuC4FjckM4ZRkiWXjkZlLH6WWlgUuJ5Op5s5wGiAMcY9m2AwGNhms7GffvrJ5vO5LZdLKaHHknyl+mG2U9HFqtWvg/6SgQTGieoyBv4QlexQdWgsBlVsHy+a6nkTpWijsA+tgCJSfOH9RKCF8EnCfAEjcUzecASB4FBxCOfz2Z6enmy5XIbKKAdseEVM1KJXsYaROAf0C+VNOOSnA82OYth4QfvKnnIl3oo8Yl5A8d3qciYOz9j/4+8fPnywtm3t06dPYbNJb9TEuQDFGCr3qe4pYHenzDluOi/66NMWqjnCMfszXPMxVHIuJaDgU88Lyrl9lk0pWravSZOipZX1iEIh1WeIy8j++Mc/2rt37zob5i9/+Yu9efMmxfGYUWM6XJVzRxFErhZBgUzPVXg1MfdaUA0gmAuJtJZwZU0pmaNIY8YxMwMl3iSqzInDtIjUQD+NUYYCVaoalyMb9e9///vfb3ICP/74ox0OBxsOh/bnP//ZvvvuO
1utVimdiu3yFcmk4vGcDEyFocj6cTpYpYdz8jAFpr3DSMlgSBVH8I5V3b8UMkZRI4d47ttQXaNEoz4ZKGTEujj+xWxjdLWN/9mrnvH7PIEzHA7t+++/t6ZprK7rRON++eWX9vT0ZKvV6qZzuYPG6OJIRSQp8KzawOKBVA20ouqoTjEo0P2DweC3W8OiFit9SQYEWHxrBloJVMx4MYdvvijs5CSN6tatiis5KuCESFQMwwLO6/Vqz8/PKWxaLpf21Vdf2ddff935P3w/DN0QtEUWLFfYij6c8QBGNFHSTV31w1xI6R0mMPnDk6PoUMwaMv8eTSbHuXhd6n6/T63Trtdr6nRRlmXKq/PNWhENnLsoMkoOcRcPlGe7fM2tw7fffmur1aqzkeu67txxyFp+VgZxqjfXtj833qiLisIFLCsbDoefK4PQz6LWTlXiRMSL6mad6zjuz/nw4UNqjb5YLOzt27epIcVkMrGqquz5+TnRpNgSjYWbDNhU+baiVVWXMt/MHP7t93v74YcfOnODdyqrOgB2P6p1m7pfKMfwKc0gWjoFNPH5/n2lDz7qzRu1M3HFMJssjKXV9XH4fGfDvIs2K1mc+mT1S9QyTlX9qMZXbNX4NCnwxBskYt0U6IpOqNoErDFQtRSKEIoSUMql4zNKVI0yv85EB1fBqElFLgE3QVQfuFwuOy3QvQml3yh2PB5tt9vdtJFXyZwPHz4kda/n5r///nvJYURavQitRz2Vo43FFoIvlc718EdXzCrhnKYhCuf5Kh/8zBIJBqXaiQonUQnLDCBe8Ij6PA5z/FQ4BtlsNqkhRV3X9t///je1qOGaObYsZpawgn+/X7qUaxAZ9SpWrKfCRlFDZgav6FZyIDC3WH3C0qgRVC4LW+IV7JiocGsQ5feRo+b+OyigZI2Ain85MnAFMHbQVoWYLBnf7Xa23++TyDOq2GVwmkue5Pj9SHmj7jFWlkOF3SpiUalsdb08W6Xo4quOK+eXUehU7XjuUKGUQmx6o4nHGN7DySipoZA6jo1Lzxj5Rr4wEoIq/35P7X5UnMJzGgFCdVpz4bmy3HiAWQ/gn1MyEMKya9XESN0BHHX1iLpx5nwZRwxsrlk+heCQQafKOjIzqQQUTMsqnkQdHLWJIqAYbRw0/4zq++4GRveIa4J9gnAtzufz5zCQrxyNBAfRtaROK7JQgweh2DA+obw4yuRyiKbAmbe25WtquPBC1R2quFkBPEVDR907VPUuz4OqnVCp+JzVUdfVMW2P710q/R/XnDMAyl1IpGJgpdRljkC1heer3DHJxI2YFOePqV60ZNyePmqujIvIUvMINHI4misHj274VMmbeyMOjBpUcgnnpm3b35pFs89TDQ4RJGKygs04mxp1kll5rFgsjCjw55TPYyHlzYvSFfKRikaNQ+kf7uldFNVERh1B1FgiU88/E3USVXON71Fyu1a+sJHboirqFHco07C4ee5pdcIhFPtADFm9OJSRME8IF22qhheRwkhpJVU4qWRyUcZPAVyOTriXT1RxHc1hdKkF/l8qDkV5U0Qm5O74UbsZJx19cZ9ciSMNJn4Y8CgaV4E0Vvuo+r9IucMKKRaV8viQ94hYuYgLUPqLaIMqljJXycQ5m+v1+pkH8ELBXK99VQShysQU2ENAGCUqolIybHTI2r2ojDsq4+IcRVSmpWTZWLeg5GdcFNt3e6dqO8/AOXcJBrfr442jxoAZRD8Upep8He20qMqHVbjYPFLRsDnyRWUhWX+HJ00JKRR6jpizHMOmQF6OGIquuFPm+vemhu9RPXEndbd6qgQ/9QfwH4yuU8stvAJxDIByeepIIBFZiqjMC8eGiS2+kCqaVMVrRGPmLGOkQFI0sYpU+m4qVcomfAa/nyLGVFrYN0ip2r2xsvXmssGgxRuqilUVCw5Y8QlKIJmbVL+N0wkirgdkd6FSshwb9/nsXDvZvqIM1UcwKrfrE3qqht2RNVRVRe4KS6xcjV5E9eBVS
RHuAaTQOde+K8CV09Mz2VEUhX311Ve22+3s5eUl1DKiKeT7CCIAmMv65ZIykc+/B7UrwWxf63ylKFYVUS4yLYoiXWBdeqjG3bE4BMtJxHjCOe2p+HgGJBGAi7qH+vdVVZVu3PYaO7QyKu7nEDcqCv09JjqSzvWJXxXax0qg6OLN6Fq4SCugoqK6rq3EzlH4gGjxlXiRlSxR2ZJqO6N6/uViVwasl8vFnp6eUvrZXYK6twdZRSVAUQg9KtDM5RlyTF/OiiiNAja84LsVVBeQHPDFNUobwGvF8Yvr4SNRJWfcUMjQNwGK31d3BEcnxuldNO+uK4hy9pxhU1fZRMUhCh/0RS8qNOy70DraCEx5K7wRqaT4GS5tL8vSht4XL+nEodWp6qun/FtEVXJcmruOViUz7hVEKCTvZg41hMqn8wmLFiPXJY0roFV003dbiapm5iwq9xlSBFhUiIMXgHhziF/HPEwdI1AIijtG3UjJiD2K9bnIUXHYSrBxT8UrdtbKNV7Kxd65f0MxSw6kjkajpG7OJWByBTAqP8CAMAoh1cbmZ/lt706hpwsjfACOCvF+Geb4FVDDU6dSyhHTpvxylAlD369YQdWbUJnqKOLIMYcMYplg8hYy3jDS9YxKvxhFBSqncE+95b38AY+j0ybOY2inObFYg5tF5VDy7yE41G5lV6C6hUfAUNXvRT0M2R+r5JdyWYqS9XE0TZNKyTysvsfUR+xkFMfnXGFOBIpaSrdq3uByiD1lPUbMceqqrRmb+SjH3vcrwheKo1cNqxEdqz7B3JQ6QtC5tiyqZOt8Ptt6vU79f9R7KCwUldRFSap7Fj4ShvqzveAmrTveTOFuICpsiDaDMrNRWzmVD1d3DnCLWbUYSpiRk3vnJOARWo+IHbZOrkC+p01LBDKjjmdRoqyvJ5HKRzRNk3I1g8Hg840hTqhgtkvJm3Pp29yt2arFuco55HrxR9agj77NTaA6QUgcKUsX3R8YgVfl76PkUK4WIbpmPqdm4s3tEY/7/4QBMJTZ7Xb28PDQAYNcsKAaL0R1A4rDz5n7KL2r0tT3mMSofCxqdhHJp3MW5B5iJ9rYUeY191m5cJjdMeZo/E5h0gj+NkF1XSc3wFe4YciF+YEoXle7mJMYkUmMmlUpsibCGSqppNLXCjhFobA6ibnCE/WluPvIGuQkYQyGcyGv/7lpms4NsKk83L/cDex2O5tMJp2SJoxVc02fVLMGJpSiiWYqNgeaIq18X46dpdNqQXONnbiqKZJ5Ky5enWKlpVTRj5KeM15T7tijG7/C9+Zyb5zwtm1tPB6nDaAWgAs91WlU1Tw8aWweuQeuyoJFDFvuPh3+d59kLC9jNTKGhHi6WFiBCTPmKaJ6h5wkLHJBCnQyDxM9A83/aDS6vZ+YpVIOEA6Hw02Ha97BUf/AiJDgMrH5fJ4aJHJ+PufjVReNnFtRVCu+W45lU3oFZAlzl0UpU50Doip05HSuqrrKVWYNh0M7HA6J6r/BReyf/V6e3W4X1qlH9fY5lk0t0HQ6tTdv3nQuprynK7mimqN2qY58lSI2pwdkqlbpDFi4mqNpVYlYdFCiSuZc7iWnMt7tdlbX9U29x2Aw+CwI4YUejUbWNE26RURl9iKdWa6aGF+wbVt7eXnpXFDJNQiRHu6epBTToEhVq7Erqll1GlWtc90VcVCcmwAACrxJREFUcL2EqkTKZRBzQFCRT9GB8Dkuy9Kapkl3QWFpXcIBKtvlzRk2m83NzlftYLGtKcf8zA7iRluv1/by8pImq67rTtuZnNQqyjoq/psxB/t9ZX1UTWOuKIOtjGoBn5PC33PrKiuBVM9F7u+42WzS/UBSpqZiXu9rb/a5QyXX1qnMH19+hKlYXBQmSBCFs8o36uARsWtoPXCRlannTF/fla7chk3VCCpuoS+TmOt6nsMG7Io4GVZVVSqv9w2AUYY/o1SXGPnkTSYT2263aTPwSWFTGpV95+hf1P+rCc6FjSpe9uew9
IuRvVqAqL8P9zJWdDdrHVUxS870MyBFF8J4Symv2PWWZWmfPn36LesHFqkTskdKF9T3bzab1C2rz4z1UZJ9SFhlBXMKl6jxgqKwOcyMmLk+di66mSPi4/siDH+uK7F4kRWNzkQdvmNVVbZerztrGLahy+n9zMym06nt9/tOKzmFUrmnPi+OuiRSmWVG50p8krs9O6pEVmxlpFKK8EYkic/5dNVeNqoSqus69fCNGL7IdWC3Fr8BHq+miSKPYU7m5AObTCa2Wq06N4pHcW7OOkSMW8TIqYxWdJ2NUteoNHJkbfqijOhEq8ZPqr5PYQR8Vl3XNp1OO4DNDxsKXyJq29+nLEtbrVZWlmVqvhX1D7xer583gKqwwZZxzgp6Sze+4p2JHvbj2EOPTZfKl0eFFLk7DJX5VUxkBEpzXEaEG/pqGThSyfVVwK5gLr9TuYic7tD5G2d08eq6CD+VuZOGoGg+n9vz87ONRqMU80aFnkwv50qjWHUU+SpFJed4dSX3UpEEbiZ1KUVfZjJK5UZ8Apd3I/D2Cx2jqMK1fCyju1wuKXez3W5tNptJt6jmpezrlIUuYjKZ2HK5tPfv33dMlCpcVNfAcZ4g0ujxhsmlaCPhQy4yyYlVuNlUbnHVpsuVdedqIvF6HRVq8vO5OqooCvvll19sOp3enPpcTeJQyb7xA/Dk+HWt7mOwQTKaV9XiPIcbchp8Zaa5aVVOcRRtLKWZU9fh3KO94xI0/gzuzqVOIp78XHdTzgU4abdcLq0sS5tOp507oHNjTyAwusJN9brx8isPDVkurnLUEUDknZm701eRK4rDiKIZhQ8Uq5kDsrmMmxqzN6pi8Mr1+pEWIddxDAmf5+dnO5/PNp1ObyKjHLk1GAw+9wiKZMiqD79fcbbdbq1pmrQJItZNycmia1lytQB48nM5/z7tnGqIpU5ZjuPIJXUUJ6HqF3jTYKgcye/w5Ltia7PZJL8fSe1yX8Mo/FKCD5y42Wxmq9UqScndEmB/2wikReRNn8lVdGjU0JHj9Rxaxw2YUzf16e+jcJjzFZFiKgqVOc+CiZ7VamXT6TTUL0YbIR1s1vb1vTzKyCeTiT0/P3d680Vl11wZpPLbOc38PVU2fRKzqJomB/b6yuD6tP+sqWQ3k0uW4S88YF7csVqtbDKZhAdC3aF4g/VyA4/MOJIX4/HYPn36FE6kMl8RSufIQW08NUG/pwVLpHO8J5uYk55xJBRtXGY7VSWVqkvgQtjn52cbj8dJ5BlhJqar+d6mYdSaVeW+Fdjy27D9/pwoFOpD9VGGL7rWJaex6xOM5iYqp8LN5e9VQUjUExA3nUr2cGYUw1JefGYEo45hkcCljKpflDhCoW7v6z8YDOz5+dnevHmTyqO4x4ACWmoDclu5HE6IFiXCNUq4mdMf5OoK1bWuOYzAvRhVG1hOqfv/1XVt+/3eXl5ebDabpZOv7nlmJXbOIpWR7+U+/1wQya3NncN+eXmxh4cHq+s6AURWDLPPUyxd1FiSFwzVRMpMcx1AdIVaX/dwRZnnupcoSZjqHRxlD/HZTvFuNpvO4vdZS9WxhX+mvAeJRxODcWxRFDYajWw4HNp6vbaHh4d0zx76H54wXxyWROcqd1XpuUoCRQ2ZFXGkZNX3CDgjyTdrG6LOnbzo3PK2qirbbDa22Wzs8fGxo0Jm85+7H5FTyGkDRL6PW8FGBQ58Unx3bjYbO5/PNpvNkjqIawNY0x6d4qhrR7Qhojau0QbpSwblqn4iV6O6nEXEjkqQ+aZ+eXmxpmns8fGxk91TafHcGDEqkMkg7sHTp2Pnxccd5pLv3W5n5/PZFovFjQZfgSN1+yX7RwVMWcmjJkmZXmVdVDzdF3bmLABzIuqwqXa9g8HAVquVtW1r8/m8c/Lv6Q+g6juVdH2owj9uL9ZHKnCrWD/ps9ksXQzl5ozJEG51rkIgJELUBY2KZ8iVb0Wtb6IoQOUT+qxErjsp5wkwDewxvl9mvVgskqRbnXh+h
2itIkay7CNbOGd9j1XAD59Op3Y8Hu35+dlms5nNZrMOYo40dgzwVA6BrQbr6iNFM+sDI+4i14uP9ZN9rWf58xURVJalrddra5rGZrNZquML1TwBfZ/bqNgsItUF9PEAaNpzzYgVZ+9hYlVVtt/vrWkaWywWhjeWRrdps2nnhApLtSMBqQJeEXmUGwsveqT46esn7OJV7HjWtq09PT3ZYDCw+Xze6besOpfcE+Jx5MWHqCiKWz2A6pLdRyxwLaAqw/YXOxwOtlwubTqd2mw2kyVOqmBD5erZp6F5QyyTYxoVgRLJp7CiKCKeGIfkxJ0+9u12mzR8k8kkdWtTauWoiimX9IqIrMvl8tkF5KRPLFmKwoyopy6bYb8O1q2BcwaqVQuesoho6Us65W4fizaYAlJ9esIICPL1tNjy7XA4pBK8+XzeEXDyHCh53D2ZyQivpeqhCLhwL/uIo88lYlRxhZNGdV3b4XCw9XptVVUlgiMifBSC5jHeZLqCzmbMeEY99nJJHQaz7KZUHsRPddM09vr6mrR7KAePxDmRVcoplCOLgPihZFKGOeiI5eoTSOQ2B1LIzhhuNhsrisKm02mnjJk7fkeiFS5Nd1PN16hFt33mJGTKKkUdRqIWMd6Ea7fb2eVySQvPsvFIxaT+LZd3ENKvGzY3WYC+Xvi5+3RyjOE9HILr2cbjsZ1OJ3t9fbXdbmfT6TS5C9fKqVtKWCTCpEeuVWzU01+BwL7iFnUZtYtlmqZJal1vKslhncp9qL7DvQKP4F4mNP2dBhG5erbcxN3LDCr0GsXW4/HYRqORnc9ne319tfV6bePxONHKHisr3MGXQTGZpe7tU5MaXegUsYUM8HxDe/Vzasv+q16Pr7/Dkxm1z4k4GLbWag1yjGGHCu7jAiJFLg+QO4iwv1b4gusFPGw8nU623+/tf//7n5VlabPZrAOUONpgppEznNgGv09MosSuSrGMFul4PHbuPB4Oh+m0R00ocxRuX0kZ1yoii6i6qysLkiWC+pJDSqacawKVa7nGhEsiKn7lCxwnOIj0AhVPQPmFF03TdBZQET54jWr0/qq6B822T/7pdLLD4dBxU2VZdnw8/pzqtJbLQEYUL46Hay9YNa2ypZ0NoNQsfRyzMj0KZavFjSwJ3+zpP1NVVZpUpFAxjPJNgTeg4Wd7Qkq5O743gMeHoahvxuPxmGRaZVmmghnVERSf7xs10laouxiiu5zQEqhEUt9NJWbWZQL7Up4KKEZ3/+QAS3TTJpeFMRPpk435hsvlkjbCer3u+NeyLFPTiaIoUl0DXlLBp7pt29RV43g82vF4TKebGUH06RGhxuKPXIUx8wf3MH2clbw3UeSf+X/9B04mXw6cfAAAAABJRU5ErkJggg==";var In=Tn;
|
|
284
|
+
*/
|
|
285
|
+
class q extends r.d{constructor(e,t,n={}){super(q.classToken),this._indexSet=new o.e,this._filter={propertyCategory:"",propertyKey:"",propertyValues:new Array},this._client=e,this._modelId=t.modelId,this._revisionId=t.revisionId,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e,t,n){const r=new o.e,{requestPartitions:i}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const a=new B(e=>new o.h(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=a,this._indexSet=r;const s=this.buildUrl(),d=Array.from(function*(e){for(let t=0;t<e.length;t+=1e3){const n=e.slice(t,Math.min(e.length,t+1e3));yield n}}(n)).flatMap(n=>{const o={properties:{[""+e]:{[""+t]:n}}};return V()(1,i+1).map(async e=>{const t=async function(e,t,n){const r=await K(e,t,n);return new H(e,t,n,r)}(this._client,s,{data:{filter:o,limit:1e3,partition:`${e}/${i}`}});return a.pageResults(r,t)})});this.notifyChanged(),await Promise.all(d)}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}buildUrl(){return`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${this._modelId}/revisions/${this._revisionId}/nodes/list`}serialize(){return{token:this.classToken,state:L()(this._filter),options:{...this._options}}}}q.classToken="SinglePropertyNodeCollection";class H{constructor(e,t,n,r){this.items=r.items,this.nextCursor=r.nextCursor,void 0!==this.nextCursor&&(this.next=async()=>{const r={...n,data:{...n.data,cursor:this.nextCursor}},o=await K(e,t,r);return new H(e,t,n,o)})}}async function K(e,t,n){const r=await e.post(t,n);if(200===r.status)return r.data;throw new Error(`Unexpected status from server while POST ${t}: ${r.status} (body: ${r.data})`)}
|
|
257
286
|
/*!
|
|
258
287
|
* Copyright 2021 Cognite AS
|
|
259
|
-
*/
|
|
288
|
+
*/class X{constructor(){this._types=new Map,this._types=new Map,this.registerWellKnownNodeCollectionTypes()}registerNodeCollectionType(e,t){this._types.set(e,{deserializer:(e,n)=>t(e,n)})}async deserialize(e,t,n){const r={client:e,model:t};return this.getDeserializer(n.token)(n,r)}getDeserializer(e){const t=this._types.get(e);return F()(void 0!==t),t.deserializer}registerWellKnownNodeCollectionTypes(){this.registerNodeCollectionType(G.classToken,async(e,t)=>{const n=new G(t.client,t.model);return await n.executeFilter(e.state),n}),this.registerNodeCollectionType(j.classToken,async(e,t)=>{const n=new j(t.client,t.model,e.options);return await n.executeFilter(e.state),n}),this.registerNodeCollectionType(q.classToken,async(e,t)=>{const n=new q(t.client,t.model,e.options),{propertyCategory:r,propertyKey:o,propertyValues:i}=e.state;return await n.executeFilter(r,o,i),n}),this.registerNodeCollectionType(r.f.classToken,e=>{const t=new o.e;e.state.forEach(e=>t.addRange(new o.h(e.from,e.count)));const n=new r.f(t);return Promise.resolve(n)}),this.registerNodeCollectionType(r.b.classToken,async(e,t)=>{const n=await Promise.all(e.state.subCollections.map(e=>this.deserialize(t.client,t.model,e)));return new r.b(n)}),this.registerNodeCollectionType(r.g.classToken,async(e,t)=>{const n=await Promise.all(e.state.subCollections.map(e=>this.deserialize(t.client,t.model,e)));return new r.g(n)}),this.registerNodeCollectionType(W.classToken,async(e,t)=>{const n=await this.deserialize(t.client,t.model,e.state.innerSet);return new W(t.model,n)})}}function Y(e,t){X.Instance.registerNodeCollectionType(e,t)}
|
|
260
289
|
/*!
|
|
261
290
|
* Copyright 2021 Cognite AS
|
|
262
|
-
*/class
|
|
291
|
+
*/X.Instance=new X;class Z{constructor(e,t){this._viewer=e,this._client=t,this._cameraControls=e.cameraControls}getCurrentState(){const e=this._cameraControls.getState(),t=this._viewer.models.filter(e=>e instanceof P).map(e=>e).map(e=>({defaultNodeAppearance:e.getDefaultNodeAppearance(),modelId:e.modelId,revisionId:e.revisionId,styledSets:e.styledNodeCollections.map(e=>{const{nodes:t,appearance:n}=e;return{...t.serialize(),appearance:n}})}));return{camera:{position:e.position,target:e.target},models:t}}async setState(e){const t=new i.Vector3(e.camera.position.x,e.camera.position.y,e.camera.position.z),n=new i.Vector3(e.camera.target.x,e.camera.target.y,e.camera.target.z);this._cameraControls.setState(t,n);const r=this._viewer.models.filter(e=>e instanceof P).map(e=>e);await Promise.all(e.models.map(e=>{const t=r.find(t=>t.modelId==e.modelId&&t.revisionId==e.revisionId);if(void 0===t)throw new Error(`Cannot apply model state. Model (modelId: ${e.modelId}, revisionId: ${e.revisionId}) has not been added to viewer.`);return{model:t,state:e}}).map(async e=>{const{model:t,state:n}=e;t.setDefaultNodeAppearance(n.defaultNodeAppearance),await Promise.all(n.styledSets.map(async e=>{const n=await X.Instance.deserialize(this._client,t,{token:e.token,state:e.state,options:e.options});t.assignStyledNodeCollection(n,e.appearance)}))}))}}var Q=n(56);
|
|
263
292
|
/*!
|
|
264
293
|
* Copyright 2021 Cognite AS
|
|
265
|
-
*/
|
|
294
|
+
*/class J{constructor(e){this._client=e}async mapTreeIndicesToNodeIds(e,t,n){const r=[...$(n,J.MaxItemsPerRequest)].map(async n=>this.postByTreeIndicesRequest(e,t,n));return(await Promise.all(r)).flat()}async mapNodeIdsToTreeIndices(e,t,n){const r=[...$(n,J.MaxItemsPerRequest)].map(async n=>this.postByNodeIdsRequest(e,t,n));return(await Promise.all(r)).flat()}async determineTreeIndexAndSubtreeSizesByNodeIds(e,t,n){const r=n.map(e=>({id:e}));return(await this._client.revisions3D.retrieve3DNodes(e,t,r)).map(e=>({treeIndex:e.treeIndex,subtreeSize:e.subtreeSize}))}async determineNodeAncestorsByNodeId(e,t,n,r){const o=await this._client.revisions3D.list3DNodeAncestors(e,t,n,{limit:1e3}),i=o.items.find(e=>e.id===n);F()(void 0!==i,"Could not find ancestor for node with nodeId "+n),r=Math.min(i.depth,r);const a=o.items.find(e=>e.depth===i.depth-r);return F()(void 0!==i,`Could not find ancestor for node with nodeId ${n} at 'generation' ${r}`),{treeIndex:a.treeIndex,subtreeSize:a.subtreeSize}}async getBoundingBoxByNodeId(e,t,n,r){const o=await this._client.revisions3D.retrieve3DNodes(e,t,[{id:n}]);if(o.length<1)throw new Error("NodeId not found");const a=o[0].boundingBox;if(void 0===a)throw new Error(`Node ${n} doesn't have a defined bounding box`);const s=a.min,d=a.max,l=r||new i.Box3;return l.min.set(s[0],s[1],s[2]),l.max.set(d[0],d[1],d[2]),l}async postByTreeIndicesRequest(e,t,n){console.assert(n.length<=J.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/internalids/bytreeindices`,o=await this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new Q.HttpError(o.status,o.data,o.headers)}async postByNodeIdsRequest(e,t,n){console.assert(n.length<=J.MaxItemsPerRequest);const r=`${this._client.getBaseUrl()}/api/v1/projects/${this._client.project}/3d/models/${e}/revisions/${t}/nodes/treeindices/byinternalids`,o=await 
this._client.post(r,{data:{items:n}});if(200===o.status)return o.data.items;throw new Q.HttpError(o.status,o.data,o.headers)}}function*$(e,t){let n=0;for(;n<e.length;){const r=Math.min(e.length-n,t);yield e.slice(n,n+r),n+=r}}
|
|
266
295
|
/*!
|
|
267
|
-
* Copyright 2021 Cognite AS
|
|
268
|
-
*/class
|
|
296
|
+
* Copyright 2021 Cognite AS
|
|
297
|
+
*/J.MaxItemsPerRequest=1e3;class ee{constructor(e){}mapTreeIndicesToNodeIds(e,t,n){return Promise.resolve(n)}mapNodeIdsToTreeIndices(e,t,n){return Promise.resolve(n)}determineTreeIndexAndSubtreeSizesByNodeIds(e,t,n){throw new Error("Not supported for local models")}determineNodeAncestorsByNodeId(e,t,n,r){throw new Error("Not supported for local models")}getBoundingBoxByNodeId(e,t,n,r){throw new Error("Not supported for local models")}}var te,ne=n(59),re=n.n(ne);
|
|
269
298
|
/*!
|
|
270
299
|
* Copyright 2021 Cognite AS
|
|
271
|
-
*/
|
|
300
|
+
*/
|
|
301
|
+
class oe{constructor(e,t,n,r,o){this.version=e,this.maxTreeIndex=t,this.root=r,this.sectors=o,this.unit=n}get sectorCount(){return this.sectors.size}getSectorById(e){return this.sectors.get(e)}getAllSectors(){return[...this.sectors.values()]}getSectorsContainingPoint(e){const t=[];return Object(o.y)(this.root,n=>!!n.bounds.containsPoint(e)&&(t.push(n),!0)),t}getSectorsIntersectingBox(e){const t=[];return Object(o.y)(this.root,n=>!!n.bounds.intersectsBox(e)&&(t.push(n),!0)),t}getBoundsOfMostGeometry(){if(0===this.root.children.length)return this.root.bounds;const e=[],t=[];Object(o.y)(this.root,n=>(0===n.children.length&&(t.push(n.bounds.min.toArray(),n.bounds.max.toArray()),e.push(n.bounds,n.bounds)),!0));const n=Math.min(t.length,4),r=re()(t,n,"kmpp",10),a=new Array(r.idxs.length).fill(0),s=a.map(e=>new i.Box3);r.idxs.map(e=>a[e]++);const d=a.reduce((e,t,n)=>(t>e.count&&(e.count=t,e.idx=n),e),{count:0,idx:-1}).idx;r.idxs.forEach((t,n)=>{a[t]++,s[t].expandByPoint(e[n].min),s[t].expandByPoint(e[n].max)});const l=s.filter((e,t)=>!(t===d||!e.intersectsBox(s[d])));if(l.length>0){const e=s[d].clone();return l.forEach(t=>{e.expandByPoint(t.min),e.expandByPoint(t.max)}),e}return s[d]}getSectorsIntersectingFrustum(e,t){const n=(new i.Matrix4).multiplyMatrices(e,t),r=(new i.Frustum).setFromProjectionMatrix(n),a=[];return Object(o.y)(this.root,e=>!!r.intersectsBox(e.bounds)&&(a.push(e),!0)),a}}
|
|
272
302
|
/*!
|
|
273
303
|
* Copyright 2021 Cognite AS
|
|
274
|
-
*/
|
|
304
|
+
*/function ie(e){const t=new Map,n=[];e.sectors.forEach(e=>{const r=function(e){const t=function(e){if(!e.facesFile)return{quadSize:-1,coverageFactors:{xy:-1,yz:-1,xz:-1},recursiveCoverageFactors:{xy:-1,yz:-1,xz:-1},fileName:null,downloadSize:e.indexFile.downloadSize};return{...e.facesFile,recursiveCoverageFactors:e.facesFile.recursiveCoverageFactors||e.facesFile.coverageFactors}}(e),n=e.boundingBox,r=n.min.x,o=n.min.y,a=n.min.z,s=n.max.x,d=n.max.y,l=n.max.z;return{id:e.id,path:e.path,depth:e.depth,bounds:new i.Box3(new i.Vector3(r,o,a),new i.Vector3(s,d,l)),estimatedDrawCallCount:e.estimatedDrawCallCount,estimatedRenderCost:e.estimatedTriangleCount||0,indexFile:{...e.indexFile},facesFile:t,children:[]}}(e);t.set(e.id,r),n[e.id]=e.parentId});for(const e of t.values()){const r=n[e.id];if(-1===r)continue;t.get(r).children.push(e)}const r=t.get(0);if(!r)throw new Error("Root sector not found, must have ID 0");!function e(t,n){!function(e){return-1===e.facesFile.coverageFactors.xy}(t)?t.children.forEach(n=>e(n,t.facesFile)):(t.facesFile.coverageFactors.xy=n.recursiveCoverageFactors.xy,t.facesFile.coverageFactors.yz=n.recursiveCoverageFactors.yz,t.facesFile.coverageFactors.xz=n.recursiveCoverageFactors.xz,t.facesFile.recursiveCoverageFactors.xy=n.recursiveCoverageFactors.xy,t.facesFile.recursiveCoverageFactors.yz=n.recursiveCoverageFactors.yz,t.facesFile.recursiveCoverageFactors.xz=n.recursiveCoverageFactors.xz,t.children.forEach(t=>e(t,n)))}
|
|
275
305
|
/*!
|
|
276
306
|
* Copyright 2021 Cognite AS
|
|
277
|
-
*/
|
|
307
|
+
*/(r,r.facesFile);const o=null!==e.unit?e.unit:"Meters";return new oe(e.version,e.maxTreeIndex,o,r,t)}class ae{parse(e){const t=e.version;switch(t){case 8:return ie(e);case void 0:throw new Error('Metadata must contain a "version"-field');default:throw new Error(`Version ${t} is not supported`)}}}class se{constructor(e){this.client=e}get headers(){return this.client.getDefaultRequestHeaders()}async getBinaryFile(e,t){const n=`${e}/${t}`,r={...this.client.getDefaultRequestHeaders(),Accept:"*/*"};return(await async function(e,t,n=3){let r;for(let o=0;o<n;o++)try{return await fetch(e,t)}catch(e){void 0!==r&&(r=e)}throw r}(n,{headers:r,method:"GET"})).arrayBuffer()}async getJsonFile(e,t){return(await this.client.get(`${e}/${t}`)).data}getApplicationIdentifier(){return void 0===this.appId&&(this.appId=this.client.getDefaultRequestHeaders()["x-cdp-app"]||"unknown"),this.appId}}!function(e){e.EptPointCloud="ept-pointcloud",e.RevealCadModel="reveal-directory",e.AnyFormat="all-outputs"}(te||(te={}));
|
|
278
308
|
/*!
|
|
279
309
|
* Copyright 2021 Cognite AS
|
|
280
310
|
*/
|
|
281
|
-
|
|
311
|
+
const de=(new i.Matrix4).set(1,0,0,0,0,0,1,0,0,-1,0,0,0,0,0,1);function le(e,t){switch(t){case te.RevealCadModel:e.premultiply(de);break;case te.EptPointCloud:break;default:throw new Error("Unknown model format '"+t)}}
|
|
282
312
|
/*!
|
|
283
313
|
* Copyright 2021 Cognite AS
|
|
284
|
-
*/
|
|
314
|
+
*/class ce{constructor(e,t,n){this.modelId=e,this.revisionId=t,this.outputs=n}findMostRecentOutput(e,t){const n=this.outputs.filter(n=>n.format===e&&(!t||-1!==t.indexOf(n.version)));return n.length>0?n.reduce((e,t)=>t.version>e.version?t:e):void 0}}
|
|
285
315
|
/*!
|
|
286
316
|
* Copyright 2021 Cognite AS
|
|
287
|
-
*/
|
|
288
|
-
class Kn{constructor(){this._events={materialsChanged:new pn.a},this._renderMode=T.Color,this.materialsMap=new Map,this._clippingPlanes=[],this._clipIntersection=!1}get clippingPlanes(){return this._clippingPlanes}set clippingPlanes(e){this._clippingPlanes=e,this.applyToAllMaterials(t=>{t.clippingPlanes=e}),this.triggerMaterialsChanged()}get clipIntersection(){return this._clipIntersection}set clipIntersection(e){this._clipIntersection=e,this.applyToAllMaterials(t=>{t.clipIntersection=e}),this.triggerMaterialsChanged()}on(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.subscribe(t);break;default:Object(he.a)(e,"Unexpected event '"+e)}}off(e,t){switch(e){case"materialsChanged":this._events.materialsChanged.unsubscribe(t);break;default:Object(he.a)(e,"Unexpected event '"+e)}}addModelMaterials(e,t){const n=new Mn,r=new zn(t+1,n);r.build();const i=new Wn,a=new Vn(t+1,i);a.build();const s=Hn()(()=>this.updateMaterials(e),75,{leading:!0,trailing:!0}),d=()=>this.updateTransforms(e);n.on("changed",s),i.on("changed",d);const l=function(e,t,n,r,i){const a=new o.Texture(In);a.needsUpdate=!0;const s=new o.ShaderMaterial({name:"Primitives (Box)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:Pt.boxPrimitive.vertex,fragmentShader:Pt.boxPrimitive.fragment,side:o.DoubleSide,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},transparent:!1}),d=new o.ShaderMaterial({name:"Primitives (Circle)",clipping:!0,clippingPlanes:t,extensions:{fragDepth:!0},vertexShader:Pt.circlePrimitive.vertex,fragmentShader:Pt.circlePrimitive.fragment,side:o.DoubleSide,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},transparent:!1}),l=new o.ShaderMaterial({name:"Primitives (Nuts)",clipping:!0,clippingPlanes:t,vertexShader:Pt.nutPrimitive.vertex,fragmentShader:Pt.nutPrimitive.fragment,side:o.DoubleSide,transparent:!1}),c=new o.ShaderMaterial({name:"Primitives 
(Quads)",clipping:!0,clippingPlanes:t,vertexShader:Pt.quadPrimitive.vertex,fragmentShader:Pt.quadPrimitive.fragment,side:o.DoubleSide,transparent:!1}),u=new o.ShaderMaterial({name:"Primitives (General rings)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.generalRingPrimitive.vertex,fragmentShader:Pt.generalRingPrimitive.fragment,side:o.DoubleSide,transparent:!1}),m=new o.ShaderMaterial({name:"Primitives (Cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.conePrimitive.vertex,fragmentShader:Pt.conePrimitive.fragment,side:o.DoubleSide,transparent:!1}),p=new o.ShaderMaterial({name:"Primitives (Eccentric cone)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.eccentricConePrimitive.vertex,fragmentShader:Pt.eccentricConePrimitive.fragment,side:o.DoubleSide,transparent:!1}),h=new o.ShaderMaterial({name:"Primitives (Ellipsoid segments)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.ellipsoidSegmentPrimitive.vertex,fragmentShader:Pt.ellipsoidSegmentPrimitive.fragment,side:o.DoubleSide,transparent:!1}),f=new o.ShaderMaterial({name:"Primitives (General cylinder)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.generalCylinderPrimitive.vertex,fragmentShader:Pt.generalCylinderPrimitive.fragment,side:o.DoubleSide,transparent:!1}),v=new o.ShaderMaterial({name:"Primitives (Trapezium)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.trapeziumPrimitive.vertex,fragmentShader:Pt.trapeziumPrimitive.fragment,side:o.DoubleSide,transparent:!1}),x=new o.ShaderMaterial({name:"Primitives (Torus 
segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0,derivatives:!0},vertexShader:Pt.torusSegmentPrimitive.vertex,fragmentShader:Pt.torusSegmentPrimitive.fragment,side:o.DoubleSide,transparent:!1}),g=new o.ShaderMaterial({name:"Primitives (Spherical segment)",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},extensions:{fragDepth:!0},vertexShader:Pt.ellipsoidSegmentPrimitive.vertex,fragmentShader:Pt.ellipsoidSegmentPrimitive.fragment,side:o.DoubleSide,transparent:!1}),y=new o.ShaderMaterial({name:"Triangle meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:o.DoubleSide,fragmentShader:Pt.detailedMesh.fragment,vertexShader:Pt.detailedMesh.vertex,transparent:!1}),b={box:s,circle:d,nut:l,generalRing:u,quad:c,cone:m,eccentricCone:p,sphericalSegment:g,torusSegment:x,generalCylinder:f,trapezium:v,ellipsoidSegment:h,instancedMesh:new o.ShaderMaterial({name:"Instanced meshes",clipping:!0,clippingPlanes:t,extensions:{derivatives:!0},side:o.DoubleSide,fragmentShader:Pt.instancedMesh.fragment,vertexShader:Pt.instancedMesh.vertex,transparent:!1}),triangleMesh:y,simple:new o.ShaderMaterial({name:"Low detail material",clipping:!0,clippingPlanes:t,uniforms:{inverseModelMatrix:{value:new o.Matrix4}},side:o.FrontSide,fragmentShader:Pt.simpleMesh.fragment,vertexShader:Pt.simpleMesh.vertex,transparent:!1})};for(const t of Object.values(b))Cn(t,n,r,i,a,e);return{...b}}(this._renderMode,this._clippingPlanes,r.overrideColorPerTreeIndexTexture,a.overrideTransformIndexTexture,a.transformLookupTexture);this.materialsMap.set(e,{materials:l,nodeAppearanceProvider:n,nodeTransformProvider:i,nodeAppearanceTextureBuilder:r,nodeTransformTextureBuilder:a,updateMaterialsCallback:s,updateTransformsCallback:d})}getModelMaterials(e){return this.getModelMaterialsWrapper(e).materials}getModelNodeAppearanceProvider(e){return 
this.getModelMaterialsWrapper(e).nodeAppearanceProvider}getModelNodeTransformProvider(e){return this.getModelMaterialsWrapper(e).nodeTransformProvider}getModelDefaultNodeAppearance(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.getDefaultAppearance()}setModelDefaultNodeAppearance(e,t){this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.setDefaultAppearance(t),this.updateMaterials(e)}getModelBackTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.regularNodeTreeIndices}getModelInFrontTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.infrontNodeTreeIndices}getModelGhostedTreeIndices(e){return this.getModelMaterialsWrapper(e).nodeAppearanceTextureBuilder.ghostedNodeTreeIndices}setRenderMode(e){this._renderMode=e;const t=e===T.Ghost,n=e!==T.DepthBufferOnly;this.applyToAllMaterials(r=>{r.uniforms.renderMode.value=e,r.colorWrite=n,r.transparent=t}),this.triggerMaterialsChanged()}getRenderMode(){return this._renderMode}updateMaterials(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeAppearanceTextureBuilder.needsUpdate){const{nodeAppearanceTextureBuilder:e}=t;e.build()}this.triggerMaterialsChanged()}updateTransforms(e){const t=this.getModelMaterialsWrapper(e);if(t.nodeTransformTextureBuilder.needsUpdate){const{nodeTransformTextureBuilder:e,materials:n}=t;e.build();const r=e.transformLookupTexture,i=new o.Vector2(r.image.width,r.image.height);Xn(n,e=>{e.uniforms.transformOverrideTexture.value=r,e.uniforms.transformOverrideTextureSize.value=i})}this.triggerMaterialsChanged()}getModelMaterialsWrapper(e){const t=this.materialsMap.get(e);if(void 0===t)throw new Error(`Model ${e} has not been added to MaterialManager`);return t}applyToAllMaterials(e){for(const t of this.materialsMap.values()){Xn(t.materials,e)}}triggerMaterialsChanged(){this._events.materialsChanged.fire()}}function 
Xn(e,t){t(e.box),t(e.circle),t(e.generalRing),t(e.nut),t(e.quad),t(e.cone),t(e.eccentricCone),t(e.sphericalSegment),t(e.torusSegment),t(e.generalCylinder),t(e.trapezium),t(e.ellipsoidSegment),t(e.instancedMesh),t(e.triangleMesh),t(e.simple)}
|
|
317
|
+
*/class ue{constructor(e){this._client=e}async getModelMatrix(e){const{modelId:t,revisionId:n,format:r}=e,o=await this._client.revisions3D.retrieve(t,n),a=new i.Matrix4;return o.rotation&&a.makeRotationFromEuler(new i.Euler(...o.rotation)),le(a,r),a}async getModelCamera(e){const{modelId:t,revisionId:n}=e,r=await this._client.revisions3D.retrieve(t,n);if(r.camera&&r.camera.position&&r.camera.target){const{position:e,target:t}=r.camera;return{position:new i.Vector3(e[0],e[1],e[2]),target:new i.Vector3(t[0],t[1],t[2])}}}async getModelUri(e){const{modelId:t,revisionId:n,format:r}=e,o=(await this.getOutputs({modelId:t,revisionId:n,format:r})).findMostRecentOutput(r);if(!o)throw new Error(`Model '${t}/${n}' is not compatible with this version of Reveal, because no outputs for format '(${r})' was found. If this model works with a previous version of Reveal it must be reprocessed to support this version.`);const i=o.blobId;return`${this._client.getBaseUrl()}${this.getRequestPath(i)}`}async getOutputs(e){const{modelId:t,revisionId:n,format:r}=e,o=`/api/v1/projects/${this._client.project}/3d/models/${t}/revisions/${n}/outputs`,i=void 0!==r?{params:{format:r}}:void 0,a=await this._client.get(o,i);if(200===a.status)return new ce(t,n,a.data.items);throw new Error(`Unexpected response ${a.status} (payload: '${a.data})`)}getRequestPath(e){return`/api/v1/projects/${this._client.project}/3d/files/${e}`}}var me=n(60);
|
|
289
318
|
/*!
|
|
290
319
|
* Copyright 2021 Cognite AS
|
|
291
|
-
*/
|
|
320
|
+
*/async function he(e){const t=await fetch(e);if(!t.ok){const e={};throw t.headers.forEach((t,n)=>{e[t]=n}),new me.HttpError(t.status,t.body,e)}return t}
|
|
292
321
|
/*!
|
|
293
322
|
* Copyright 2021 Cognite AS
|
|
294
|
-
*/
|
|
295
|
-
|
|
323
|
+
*/class pe{get headers(){return{}}async getBinaryFile(e,t){return(await he(`${e}/${t}`)).arrayBuffer()}async getJsonFile(e,t){return(await he(`${e}/${t}`)).json()}getApplicationIdentifier(){return"LocalClient"}}
|
|
324
|
+
/*!
|
|
325
|
+
* Copyright 2021 Cognite AS
|
|
326
|
+
*/class fe{getModelUri(e){return Promise.resolve(`${location.origin}/${e.fileName}`)}async getModelMatrix(e){const t=new i.Matrix4;return le(t,te.RevealCadModel),t}getModelCamera(e){return Promise.resolve(void 0)}}
|
|
296
327
|
/*!
|
|
297
328
|
* Copyright 2021 Cognite AS
|
|
298
329
|
*/
|
|
299
|
-
function Jn(e,t){const n=t.getBoundingClientRect();if(e instanceof MouseEvent)return{offsetX:e.clientX-n.left,offsetY:e.clientY-n.top};if(e.changedTouches.length>0){const t=e.changedTouches[0];return{offsetX:t.clientX-n.left,offsetY:t.clientY-n.top}}return{offsetX:-1,offsetY:-1}}
|
|
300
330
|
/*!
|
|
301
331
|
* Copyright 2021 Cognite AS
|
|
302
|
-
*/
|
|
332
|
+
*/class ve{constructor(e,t,n,r="scene.json"){this._currentModelIdentifier=0,this._modelMetadataProvider=e,this._modelDataClient=t,this._cadSceneParser=n,this._blobFileName=r}async loadData(e){const t={format:te.RevealCadModel,...e},n=this._modelMetadataProvider.getModelUri(t),r=this._modelMetadataProvider.getModelMatrix(t),a=this._modelMetadataProvider.getModelCamera(t),s=await n,d=await this._modelDataClient.getJsonFile(s,this._blobFileName),l=""+this._currentModelIdentifier++,c=this._cadSceneParser.parse(d),u=function(e,t){const n=S.get(e);if(void 0===n)throw new Error(`Unknown model unit '${e}'`);return(new i.Matrix4).makeScale(n,n,n).multiply(t)}
|
|
303
333
|
/*!
|
|
304
334
|
* Copyright 2021 Cognite AS
|
|
305
|
-
*/(
|
|
335
|
+
*/(c.unit,await r),m=(new i.Matrix4).copy(u).invert(),h=await a;return{modelIdentifier:l,modelBaseUrl:s,geometryClipBox:null,modelMatrix:u,inverseModelMatrix:m,cameraConfiguration:Object(o.x)(h,u),scene:c}}}class xe{constructor(e=o.j.defaultPool){this.workerPool=e}parseI3D(e){return this.parseDetailed(e)}parseF3D(e){return this.parseSimple(e)}parseCTM(e){return this.parseCtm(e)}async parseSimple(e){return this.workerPool.postWorkToAvailable(async t=>t.parseQuads(e))}async parseDetailed(e){return this.workerPool.postWorkToAvailable(async t=>t.parseSector(e))}async parseCtm(e){return this.workerPool.postWorkToAvailable(async t=>t.parseCtm(e))}}var ge=n(62);
|
|
306
336
|
/*!
|
|
307
337
|
* Copyright 2021 Cognite AS
|
|
308
|
-
*/const or=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");function ir(e,t,n){return new o.Vector2(t-e.offsetLeft,n-e.offsetTop)}function ar(e,t){if(2!==t.length)throw new Error("getPinchInfo only works if touches.length === 2");const n=[t[0],t[1]].map(({clientX:t,clientY:n})=>ir(e,t,n));return{center:n[0].clone().add(n[1]).multiplyScalar(.5),distance:n[0].distanceTo(n[1]),offsets:n}}const sr=Math.PI/360,dr=10*sr;class lr extends o.EventDispatcher{constructor(e,t){super(),this.enabled=!0,this.enableDamping=!0,this.dampingFactor=.2,this.dynamicTarget=!0,this.minDistance=1,this.maxDistance=1/0,this.dollyFactor=.98,this.minPolarAngle=0,this.maxPolarAngle=Math.PI,this.minAzimuthAngle=-1/0,this.maxAzimuthAngle=1/0,this.panDollyMinDistanceFactor=10,this.firstPersonRotationFactor=.4,this.pointerRotationSpeedAzimuth=sr,this.pointerRotationSpeedPolar=sr,this.enableKeyboardNavigation=!0,this.keyboardRotationSpeedAzimuth=dr,this.keyboardRotationSpeedPolar=dr,this.mouseFirstPersonRotationSpeed=2*sr,this.keyboardDollySpeed=2,this.keyboardPanSpeed=10,this.keyboardSpeedFactor=3,this.pinchEpsilon=2,this.pinchPanSpeed=1,this.EPSILON=.001,this.minZoom=0,this.maxZoom=1/0,this.orthographicCameraDollyFactor=.3,this.temporarilyDisableDamping=!1,this.firstPersonMode=!1,this.reusableVector3=new o.Vector3,this._accumulatedMouseMove=new o.Vector2,this.target=new o.Vector3,this.targetEnd=new o.Vector3,this.spherical=new o.Spherical,this.sphericalEnd=new o.Spherical,this.deltaTarget=new o.Vector3,this.keyboard=new rr,this.offsetVector=new o.Vector3,this.panVector=new o.Vector3,this.raycaster=new o.Raycaster,this.targetFPS=30,this.targetFPSOverActualFPS=1,this.isFocused=!1,this.update=e=>{const{camera:t,target:n,targetEnd:r,spherical:o,sphericalEnd:i,deltaTarget:a,handleKeyboard:s,enableDamping:d,dampingFactor:l,EPSILON:c,targetFPS:u,enabled:m}=this;if(!m)return!1;const 
p=Math.min(1/e,u);this.targetFPSOverActualFPS=u/p,s(),this._accumulatedMouseMove.lengthSq()>0&&(this.rotate(this._accumulatedMouseMove.x,this._accumulatedMouseMove.y),this._accumulatedMouseMove.set(0,0)),i.theta=Math.sign(i.theta)*Math.min(Math.abs(i.theta),2*Math.PI);let h=i.theta-o.theta;Math.abs(h)>Math.PI&&(h-=2*Math.PI*Math.sign(h));const f=i.phi-o.phi,v=i.radius-o.radius;a.subVectors(r,n);let x=!1;const g=d&&!this.temporarilyDisableDamping?Math.min(l*this.targetFPSOverActualFPS,1):1;return this.temporarilyDisableDamping=!1,Math.abs(h)>c||Math.abs(f)>c||Math.abs(v)>c||Math.abs(a.x)>c||Math.abs(a.y)>c||Math.abs(a.z)>c?(o.set(o.radius+v*g,o.phi+f*g,o.theta+h*g),o.theta=o.theta%(2*Math.PI),n.add(a.multiplyScalar(g)),x=!0):(o.copy(i),n.copy(r)),o.makeSafe(),t.position.setFromSpherical(o).add(n),t.lookAt(n),x&&this.triggerCameraChangeEvent(),x},this.getState=()=>{const{target:e,camera:t}=this;return{target:e.clone(),position:t.position.clone()}},this.setState=(e,t)=>{const n=e.clone().sub(t);this.targetEnd.copy(t),this.sphericalEnd.setFromVector3(n),this.target.copy(this.targetEnd),this.spherical.copy(this.sphericalEnd),this.update(1e3/this.targetFPS),this.triggerCameraChangeEvent()},this.triggerCameraChangeEvent=()=>{const{camera:e,target:t}=this;this.dispatchEvent({type:"cameraChange",camera:{position:e.position,target:t}})},this.onMouseDown=e=>{if(this.enabled)switch(e.button){case o.MOUSE.LEFT:this.startMouseRotation(e);break;case o.MOUSE.RIGHT:e.preventDefault(),this.startMousePan(e)}},this.onMouseUp=e=>{this._accumulatedMouseMove.set(0,0)},this.onMouseWheel=e=>{if(!this.enabled)return;e.preventDefault();let t=0;if(e.wheelDelta)t=-e.wheelDelta/40;else if(e.detail)t=e.detail;else if(e.deltaY){const n=or?1:40;t=e.deltaY/n}const{domElement:n}=this;let r=e.offsetX,o=e.offsetY;r=r/n.clientWidth*2-1,o=o/n.clientHeight*-2+1;const 
i=t<0,a=this.camera.isPerspectiveCamera?this.getDollyDeltaDistance(i,Math.abs(t)):Math.sign(t)*this.orthographicCameraDollyFactor;this.dolly(r,o,a)},this.onTouchStart=e=>{if(this.enabled)switch(e.preventDefault(),e.touches.length){case 1:this.startTouchRotation(e);break;case 2:this.startTouchPinch(e)}},this.onFocusChanged=e=>{this.isFocused="blur"!==e.type&&(e.target===this.domElement||document.activeElement===this.domElement),this.keyboard.disabled=!this.isFocused},this.onContextMenu=e=>{this.enabled&&e.preventDefault()},this.rotate=(e,t)=>{if(0===e&&0===t)return;const n=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedAzimuth)*e,r=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedPolar)*t;this.firstPersonMode?(this.temporarilyDisableDamping=!0,this.rotateFirstPersonMode(n,r)):this.rotateSpherical(n,r)},this.startMouseRotation=e=>{let t=ir(this.domElement,e.clientX,e.clientY);const n=e=>{const n=ir(this.domElement,e.clientX,e.clientY),r=t.clone().sub(n);this._accumulatedMouseMove.add(r),t=n},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startMousePan=e=>{let t=ir(this.domElement,e.clientX,e.clientY);const n=e=>{const n=ir(this.domElement,e.clientX,e.clientY),r=n.x-t.x,o=n.y-t.y;t=n,this.pan(r,o)},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startTouchRotation=e=>{const{domElement:t}=this;let n=ir(t,e.touches[0].clientX,e.touches[0].clientY);const r=e=>{if(1!==e.touches.length)return;const 
r=ir(t,e.touches[0].clientX,e.touches[0].clientY);this.rotate(n.x-r.x,n.y-r.y),n=r},o=e=>{1!==e.touches.length&&a()},i=()=>{a()},a=()=>{document.removeEventListener("touchstart",o),document.removeEventListener("touchmove",r),document.removeEventListener("touchend",i)};document.addEventListener("touchstart",o),document.addEventListener("touchmove",r,{passive:!1}),document.addEventListener("touchend",i,{passive:!1})},this.startTouchPinch=e=>{const{domElement:t}=this;let n=ar(t,e.touches);const r=ar(t,e.touches),o=this.spherical.radius,i=e=>{if(2!==e.touches.length)return;const i=ar(t,e.touches),a=r.distance/i.distance;this.sphericalEnd.radius=Math.max(a*o,this.minDistance/5);const s=i.center.clone().sub(n.center);s.length()>this.pinchEpsilon&&(s.multiplyScalar(this.pinchPanSpeed),this.pan(s.x,s.y)),n=i},a=e=>{2!==e.touches.length&&d()},s=()=>{d()},d=()=>{document.removeEventListener("touchstart",a),document.removeEventListener("touchmove",i),document.removeEventListener("touchend",s)};document.addEventListener("touchstart",a),document.addEventListener("touchmove",i),document.addEventListener("touchend",s)},this.handleKeyboard=()=>{if(!this.enabled||!this.enableKeyboardNavigation||!this.isFocused)return;const{keyboard:e,keyboardDollySpeed:t,keyboardPanSpeed:n,keyboardSpeedFactor:r}=this,o=this.keyboardRotationSpeedAzimuth*(Number(e.isPressed("left"))-Number(e.isPressed("right")));let i=this.keyboardRotationSpeedPolar*(Number(e.isPressed("up"))-Number(e.isPressed("down")));if(0!==o||0!==i){const{sphericalEnd:e}=this,t=e.phi;e.phi+=i,e.makeSafe(),i=e.phi-t,e.phi=t,this.rotateFirstPersonMode(o,i)}this.firstPersonMode=!1;const a=e.isPressed("shift")?r:1,s=!!e.isPressed("w")||!e.isPressed("s")&&void 0;void 0!==s&&(this.dolly(0,0,this.getDollyDeltaDistance(s,t*a)),this.firstPersonMode=!0);const 
d=Number(e.isPressed("a"))-Number(e.isPressed("d")),l=Number(e.isPressed("e"))-Number(e.isPressed("q"));0===d&&0===l||(this.pan(a*n*d,a*n*l),this.firstPersonMode=!0)},this.rotateSpherical=(e,t)=>{const{sphericalEnd:n}=this,r=o.MathUtils.clamp(n.theta+e,this.minAzimuthAngle,this.maxAzimuthAngle),i=o.MathUtils.clamp(n.phi+t,this.minPolarAngle,this.maxPolarAngle);n.theta=r,n.phi=i,n.makeSafe()},this.rotateFirstPersonMode=(e,t)=>{const{firstPersonRotationFactor:n,reusableCamera:r,reusableVector3:o,sphericalEnd:i,targetEnd:a}=this;r.position.setFromSpherical(i).add(a),r.lookAt(a),r.rotateX(n*t),r.rotateY(n*e);const s=a.distanceTo(r.position);r.getWorldDirection(o),a.addVectors(r.position,o.multiplyScalar(s)),i.setFromVector3(o.subVectors(r.position,a)),i.makeSafe()},this.pan=(e,t)=>{const{domElement:n,camera:r,offsetVector:o,target:i}=this;o.copy(r.position).sub(i);let a=o.length();a=Math.max(a,this.panDollyMinDistanceFactor*this.minDistance),r.isPerspectiveCamera&&(a*=Math.tan(r.fov/2*Math.PI/180)),this.panLeft(2*e*a/n.clientHeight),this.panUp(2*t*a/n.clientHeight)},this.dollyOrthographicCamera=(e,t,n)=>{const r=this.camera;r.zoom*=1-n,r.zoom=o.MathUtils.clamp(r.zoom,this.minZoom,this.maxZoom),r.updateProjectionMatrix()},this.dollyPerspectiveCamera=(e,t,n)=>{const{dynamicTarget:r,minDistance:i,raycaster:a,reusableVector3:s,sphericalEnd:d,targetEnd:l,camera:c,reusableCamera:u}=this,m=Math.tan(o.MathUtils.degToRad(90-.5*c.fov)),p=Math.sqrt(m*m+e*e+t*t)/m,h=s.setFromSpherical(d).length();u.copy(c),u.position.setFromSpherical(d).add(l),u.lookAt(l),a.setFromCamera({x:e,y:t},u);const f=s;let v=h+n;v<i&&(v=i,r?(u.getWorldDirection(f),l.add(f.normalize().multiplyScalar(Math.abs(n)))):n=h-v);const x=-n*p;d.radius=v,u.getWorldDirection(f),f.normalize().multiplyScalar(n);const 
g=a.ray.direction.normalize().multiplyScalar(x).add(f);l.add(g)},this.dolly=(e,t,n)=>{const{camera:r}=this;r.isOrthographicCamera?this.dollyOrthographicCamera(e,t,n):r.isPerspectiveCamera&&this.dollyPerspectiveCamera(e,t,n)},this.getDollyDeltaDistance=(e,t=1)=>{const{sphericalEnd:n,dollyFactor:r}=this,o=r**t,i=e?o:1/o;return Math.max(n.radius,this.panDollyMinDistanceFactor*this.minDistance)*(i-1)},this.panLeft=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,0),r.multiplyScalar(-e),n.add(r)},this.panUp=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,1),r.multiplyScalar(e),n.add(r)},this.camera=e,this.reusableCamera=e.clone(),this.domElement=t,this.spherical.setFromVector3(e.position),this.sphericalEnd.copy(this.spherical),t.addEventListener("mousedown",this.onMouseDown),t.addEventListener("touchstart",this.onTouchStart),t.addEventListener("wheel",this.onMouseWheel),t.addEventListener("contextmenu",this.onContextMenu),t.addEventListener("focus",this.onFocusChanged),t.addEventListener("blur",this.onFocusChanged),window.addEventListener("mouseup",this.onMouseUp),window.addEventListener("mousedown",this.onFocusChanged),window.addEventListener("touchstart",this.onFocusChanged),this.dispose=()=>{t.removeEventListener("mousedown",this.onMouseDown),t.removeEventListener("wheel",this.onMouseWheel),t.removeEventListener("touchstart",this.onTouchStart),t.removeEventListener("contextmenu",this.onContextMenu),t.removeEventListener("focus",this.onFocusChanged),t.removeEventListener("blur",this.onFocusChanged),window.removeEventListener("mouseup",this.onMouseUp),window.removeEventListener("mousedown",this.onFocusChanged),window.removeEventListener("touchstart",this.onFocusChanged)}}}
|
|
338
|
+
*/class ye{createSectorScene(e,t,n,r){F()(8===e,"Only version 8 is currently supported");const i=new Map;return Object(o.y)(r,e=>(i.set(e.id,e),!0)),new oe(e,t,n,r,i)}}
|
|
309
339
|
/*!
|
|
310
340
|
* Copyright 2021 Cognite AS
|
|
311
|
-
*/class
|
|
341
|
+
*/class be{constructor(e){this._geometryClipBox=e}createClippedModel(e){const t=function e(t,n){const r=t.bounds,o=t.bounds.clone();if(o.intersect(n),o.isEmpty())return;{const i=[];for(let r=0;r<t.children.length;r++){const o=e(t.children[r],n);void 0!==o&&i.push(o)}const a=Te(o)/Te(r),s=Math.min(1,1-1/(1+10*a));return{...t,children:i,estimatedDrawCallCount:Math.ceil(s*t.estimatedDrawCallCount),estimatedRenderCost:Math.ceil(s*t.estimatedRenderCost),bounds:o}}}(e.scene.root,this._geometryClipBox);if(void 0===t)throw new Error("No sectors inside provided geometry clip box");const n=new Map;Object(o.y)(t,e=>(n.set(e.id,e),!0));const r=(new ye).createSectorScene(e.scene.version,e.scene.maxTreeIndex,e.scene.unit,t);return{...e,scene:r,geometryClipBox:this._geometryClipBox.clone()}}}const _e={size:new i.Vector3};function Te(e){const{size:t}=_e;return e.getSize(t),t.x*t.y*t.z}
|
|
312
342
|
/*!
|
|
313
343
|
* Copyright 2021 Cognite AS
|
|
314
|
-
*/class
|
|
344
|
+
*/class Ce{constructor(e,t,n,r){this._cadModelMap=new Map,this._subscription=new m.Subscription,this._needsRedraw=!1,this._markNeedsRedrawBound=this.markNeedsRedraw.bind(this),this._materialsChangedListener=this.handleMaterialsChanged.bind(this),this._materialManager=e,this._cadModelMetadataRepository=t,this._cadModelFactory=n,this._cadModelUpdateHandler=r,this._materialManager.on("materialsChanged",this._materialsChangedListener);this._subscription.add(this._cadModelUpdateHandler.consumedSectorObservable().subscribe({next:e=>{const t=this._cadModelMap.get(e.modelIdentifier);if(!t)return;e.instancedMeshes&&e.levelOfDetail===ge.a.Detailed?t.updateInstancedMeshes(e.instancedMeshes,e.modelIdentifier,e.metadata.id):e.levelOfDetail!==ge.a.Simple&&e.levelOfDetail!==ge.a.Discarded||t.discardInstancedMeshes(e.metadata.id);const n=t.rootSector.sectorNodeMap.get(e.metadata.id);if(!n)throw new Error(`Could not find 3D node for sector ${e.metadata.id} - invalid id?`);e.group&&n.add(e.group),n.updateGeometry(e.group,e.levelOfDetail),this.markNeedsRedraw()},error:e=>{Object(o.u)(e,{moduleName:"CadManager",methodName:"constructor"})}}))}get materialManager(){return this._materialManager}get budget(){return this._cadModelUpdateHandler.budget}set budget(e){this._cadModelUpdateHandler.budget=e}get loadedStatistics(){return this._cadModelUpdateHandler.lastBudgetSpendage}dispose(){this._cadModelUpdateHandler.dispose(),this._subscription.unsubscribe(),this._materialManager.off("materialsChanged",this._materialsChangedListener)}requestRedraw(){this._needsRedraw=!0}resetRedraw(){this._needsRedraw=!1}get needsRedraw(){return this._needsRedraw}updateCamera(e){this._cadModelUpdateHandler.updateCamera(e),this._needsRedraw=!0}get clippingPlanes(){return this._materialManager.clippingPlanes}set clippingPlanes(e){this._materialManager.clippingPlanes=e,this._cadModelUpdateHandler.clippingPlanes=e,this._needsRedraw=!0}get renderMode(){return this._materialManager.getRenderMode()}set 
renderMode(e){this._materialManager.setRenderMode(e)}async addModel(e,t){const n=await this._cadModelMetadataRepository.loadData(e);if(this._cadModelMap.has(n.modelIdentifier))throw new Error(`Model ${e} has already been added`);const r=function(e,t){if(null===t)return e;return new be(t).createClippedModel(e)}(n,function(e,t){if(void 0===e||void 0===e.boundingBox)return null;if(!e.isBoundingBoxInModelCoordinates)return e.boundingBox;const n=e.boundingBox.clone();return n.applyMatrix4(t.inverseModelMatrix),n}(t,n)),o=this._cadModelFactory.createModel(r);return o.addEventListener("update",this._markNeedsRedrawBound),this._cadModelMap.set(n.modelIdentifier,o),this._cadModelUpdateHandler.addModel(o),o}removeModel(e){const t=e.cadModelMetadata;if(!this._cadModelMap.delete(t.modelIdentifier))throw new Error(`Could not remove model ${t.modelIdentifier} because it's not added`);e.removeEventListener("update",this._markNeedsRedrawBound),this._cadModelUpdateHandler.removeModel(e)}getLoadingStateObserver(){return this._cadModelUpdateHandler.getLoadingStateObserver()}markNeedsRedraw(){this._needsRedraw=!0}handleMaterialsChanged(){this.requestRedraw()}}var Ie,we,Me,Se,Pe,Re,Ne,De=n(18);
|
|
315
345
|
/*!
|
|
316
346
|
* Copyright 2021 Cognite AS
|
|
317
|
-
*/
|
|
318
|
-
class hr extends ur{constructor(e,t){super(hr.classToken),this._indexSet=new Dn,this._client=e,this._model=t,this._fetchResultHelper=void 0}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=this._model;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const n=new cr(e=>new l(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=n;const r={assetId:e.assetId,intersectsBoundingBox:function(e){if(void 0===e)return;const n=(new o.Box3).copy(e);return t.mapBoxFromModelToCdfCoordinates(n,n),{min:[n.min.x,n.min.y,n.min.z],max:[n.max.x,n.max.y,n.max.z]}}(e.boundingBox),limit:1e3},i=new Dn;this._indexSet=i,this._filter=e;const a=this._client.assetMappings3D.list(t.modelId,t.revisionId,r);await n.pageResults(i,a)&&(this._fetchResultHelper=void 0)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:pr()(this._filter)}}}hr.classToken="AssetNodeCollection";var fr=n(15),vr=n.n(fr);
|
|
347
|
+
*/class ze{constructor(e){this._materialManager=e}createModel(e){const{modelIdentifier:t,scene:n}=e,r=new h.e(e,this._materialManager);if(this._materialManager.addModelMaterials(t,n.maxTreeIndex),null!==e.geometryClipBox){const t=function(e,t){const n=e.min.clone().applyMatrix4(t.modelMatrix),r=e.max.clone().applyMatrix4(t.modelMatrix);return(new i.Box3).setFromPoints([n,r])}
|
|
319
348
|
/*!
|
|
320
349
|
* Copyright 2021 Cognite AS
|
|
321
|
-
*/
|
|
322
|
-
class xr extends ur{constructor(e,t,n={}){super(xr.classToken),this._indexSet=new Dn,this._filter={},this._client=e,this._modelId=t.modelId,this._revisionId=t.revisionId,this._options={requestPartitions:1,...n}}get isLoading(){return void 0!==this._fetchResultHelper&&this._fetchResultHelper.isLoading}async executeFilter(e){const t=new Dn,{requestPartitions:n}=this._options;void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt();const r=new cr(e=>new l(e.treeIndex,e.subtreeSize),()=>this.notifyChanged());this._fetchResultHelper=r,this._indexSet=t;const o=vr()(1,n+1).map(async o=>{const i=this._client.revisions3D.list3DNodes(this._modelId,this._revisionId,{properties:e,limit:1e3,sortByNodeId:!0,partition:`${o}/${n}`});return r.pageResults(t,i)});this._filter=e,this.notifyChanged(),await Promise.all(o)}getFilter(){return this._filter}clear(){void 0!==this._fetchResultHelper&&this._fetchResultHelper.interrupt(),this._indexSet.clear(),this.notifyChanged()}getIndexSet(){return this._indexSet}serialize(){return{token:this.classToken,state:pr()(this._filter),options:{...this._options}}}}xr.classToken="PropertyFilterNodeCollection";
|
|
350
|
+
*/(e.geometryClipBox,e),n=new De.a(t).clippingPlanes;this._materialManager.setModelClippingPlanes(e.modelIdentifier,n)}return r}}function Ee(e,t,n,r,o,i){const a=new ae,s=new ve(e,t,a),d=new ze(r),l=new xe,c=new h.g(r),u=new h.b(t,l,c),{internal:m}=i,p=m&&m.sectorCuller?m.sectorCuller:Object(h.i)(n,o),f=new h.d(u,p);return new Ce(r,s,d,f)}
|
|
323
351
|
/*!
|
|
324
352
|
* Copyright 2021 Cognite AS
|
|
325
|
-
*/
|
|
326
|
-
|
|
353
|
+
*/class Oe extends i.Object3D{constructor(e=200){super(),this._needsRedraw=!1,this._lastDrawPointBuffersHash=0,this._forceLoadingSubject=new m.Subject,this.nodes=[],this.numNodesLoadingAfterLastRedraw=0,this.numChildrenAfterLastRedraw=0,this.potreeGroup=new D.Group,this.potreeGroup.name="Potree.Group",this.name="Potree point cloud wrapper",this.add(this.potreeGroup);const t=new i.Mesh(new i.BufferGeometry);t.name="onAfterRender trigger (no geometry)",t.frustumCulled=!1,t.onAfterRender=()=>{this.resetRedraw(),this._lastDrawPointBuffersHash=this.pointBuffersHash},this.add(t),this._loadingObservable=this.createLoadingStateObservable(e),this._lastDrawPointBuffersHash=this.pointBuffersHash,this.pointBudget=2e6}get needsRedraw(){return this._needsRedraw||this._lastDrawPointBuffersHash!==this.pointBuffersHash||D.Global.numNodesLoading!==this.numNodesLoadingAfterLastRedraw||this.numChildrenAfterLastRedraw!==this.potreeGroup.children.length||this.nodes.some(e=>e.needsRedraw)}get pointBudget(){return this.potreeGroup.pointBudget}set pointBudget(e){this.potreeGroup.pointBudget=e}getLoadingStateObserver(){return this._loadingObservable}addPointCloud(e){this.potreeGroup.add(e.octtree),this.nodes.push(e),this._forceLoadingSubject.next(),this.requestRedraw()}removePointCloud(e){const t=this.nodes.indexOf(e);if(-1===t)throw new Error("Point cloud is not added - cannot remove it");this.potreeGroup.remove(e.octtree),this.nodes.splice(t,1)}requestRedraw(){this._needsRedraw=!0}resetRedraw(){this._needsRedraw=!1,this.numNodesLoadingAfterLastRedraw=D.Global.numNodesLoading,this.numChildrenAfterLastRedraw=this.potreeGroup.children.length,this.nodes.forEach(e=>e.resetRedraw())}createLoadingStateObservable(e){const t=this._forceLoadingSubject.pipe((n=5*e,Object(m.pipe)(Object(I.switchMap)(()=>Object(m.of)(!1).pipe(Object(I.delay)(n),Object(I.startWith)(!0))),Object(I.distinctUntilChanged)())));var n;return 
Object(m.combineLatest)([Object(m.interval)(e).pipe(Object(I.map)(Ae),Object(I.distinctUntilChanged)((e,t)=>e.isLoading===t.isLoading&&e.itemsLoaded===t.itemsLoaded&&e.itemsRequested===t.itemsRequested)),t]).pipe(Object(I.map)(e=>{const[t,n]=e;return n&&!t.isLoading?{isLoading:!0,itemsLoaded:0,itemsRequested:1,itemsCulled:0}:t}),Object(I.startWith)({isLoading:!1,itemsLoaded:0,itemsRequested:0,itemsCulled:0}),Object(I.distinctUntilChanged)(),Object(I.share)())}get pointBuffersHash(){const e=this.potreeGroup.pointclouds;let t=3131961357;for(const n of e)n.traverseVisible(e=>{if(e.isPoints){const n=e.geometry;t^=n.getAttribute("position").count}}),t^=n.id;return t}}function Ae(){return{isLoading:D.Global.numNodesLoading>0,itemsLoaded:0,itemsRequested:D.Global.numNodesLoading,itemsCulled:0}}
|
|
354
|
+
/*!
|
|
355
|
+
* Copyright 2021 Cognite AS
|
|
356
|
+
*/(we=Ie||(Ie={}))[we.Circle=D.PointShape.CIRCLE]="Circle",we[we.Square=D.PointShape.SQUARE]="Square",(Se=Me||(Me={}))[Se.Rgb=D.PointColorType.RGB]="Rgb",Se[Se.Depth=D.PointColorType.DEPTH]="Depth",Se[Se.Height=D.PointColorType.HEIGHT]="Height",Se[Se.PointIndex=D.PointColorType.POINT_INDEX]="PointIndex",Se[Se.LevelOfDetail=D.PointColorType.LOD]="LevelOfDetail",Se[Se.Classification=D.PointColorType.CLASSIFICATION]="Classification",Se[Se.Intensity=D.PointColorType.INTENSITY]="Intensity",(Re=Pe||(Pe={}))[Re.Adaptive=D.PointSizeType.ADAPTIVE]="Adaptive",Re[Re.Fixed=D.PointSizeType.FIXED]="Fixed",function(e){e[e.Default=-1]="Default",e[e.Created=0]="Created",e[e.Unclassified=1]="Unclassified",e[e.Ground=2]="Ground",e[e.LowVegetation=3]="LowVegetation",e[e.MedVegetation=4]="MedVegetation",e[e.HighVegetation=5]="HighVegetation",e[e.Building=6]="Building",e[e.LowPoint=7]="LowPoint",e[e.ReservedOrHighPoint=8]="ReservedOrHighPoint",e[e.Water=9]="Water",e[e.Rail=10]="Rail",e[e.RoadSurface=11]="RoadSurface",e[e.ReservedOrBridgeDeck=12]="ReservedOrBridgeDeck",e[e.WireGuard=13]="WireGuard",e[e.WireConductor=14]="WireConductor",e[e.TransmissionTower=15]="TransmissionTower",e[e.WireStructureConnector=16]="WireStructureConnector",e[e.BridgeDeck=17]="BridgeDeck",e[e.HighNoise=18]="HighNoise",e[e.OverheadStructure=19]="OverheadStructure",e[e.IgnoredGround=20]="IgnoredGround",e[e.Snow=21]="Snow",e[e.TemporalExclusion=22]="TemporalExclusion",e[e.UserDefinableOffset=64]="UserDefinableOffset"}(Ne||(Ne={}));class Fe extends i.Group{constructor(e,t,n){super(),this.name="PointCloudNode",this._potreeGroup=e,this._potreeNode=t,this._cameraConfiguration=n,this.add(this._potreeGroup),this.matrixAutoUpdate=!1}get potreeGroup(){return this._potreeGroup}get potreeNode(){return this._potreeNode}get hasCameraConfiguration(){return void 0!==this._cameraConfiguration}get cameraConfiguration(){return this._cameraConfiguration}get needsRedraw(){return 
this._potreeGroup.needsRedraw}requestRedraw(){this._potreeGroup.requestRedraw()}get pointSize(){return this._potreeNode.pointSize}set pointSize(e){this._potreeNode.pointSize=e}get pointSizeType(){return this._potreeNode.pointSizeType}set pointSizeType(e){this._potreeNode.pointSizeType=e}get pointBudget(){return this._potreeNode.pointBudget}set pointBudget(e){this._potreeNode.pointBudget=e}get visiblePointCount(){return this._potreeNode.visiblePointCount}get pointColorType(){return this._potreeNode.pointColorType}set pointColorType(e){this._potreeNode.pointColorType=e}get pointShape(){return this._potreeNode.pointShape}set pointShape(e){this._potreeNode.pointShape=e}setClassVisible(e,t){if(!this.hasClass(e))throw new Error("Point cloud model doesn't have class "+e);const n=Be(e);this._potreeNode.classification[n].w=t?1:0,this._potreeNode.recomputeClassification()}isClassVisible(e){if(!this.hasClass(e))throw new Error("Point cloud model doesn't have class "+e);const t=Be(e);return 0!==this._potreeNode.classification[t].w}hasClass(e){const t=Be(e);return void 0!==this._potreeNode.classification[t]}getClasses(){return Object.keys(this._potreeNode.classification).map(e=>"DEFAULT"===e?-1:parseInt(e,10)).sort((e,t)=>e-t)}getBoundingBox(e){return(e=e||new i.Box3).copy(this._potreeNode.boundingBox),e.applyMatrix4(this.matrixWorld),e}setModelTransformation(e){this.matrix.copy(e),this.updateMatrixWorld(!0)}getModelTransformation(e=new i.Matrix4){return e.copy(this.matrix)}}function Be(e){return e===Ne.Default?"DEFAULT":e}
|
|
357
|
+
/*!
|
|
358
|
+
* Copyright 2021 Cognite AS
|
|
359
|
+
*/class ke{constructor(e,t){this._pointCloudMetadataRepository=e,this._pointCloudFactory=t,this._pointCloudGroupWrapper=new Oe}requestRedraw(){this._pointCloudGroupWrapper.requestRedraw()}resetRedraw(){this._pointCloudGroupWrapper.resetRedraw()}get pointBudget(){return this._pointCloudGroupWrapper.pointBudget}set pointBudget(e){this._pointCloudGroupWrapper.pointBudget=e}get needsRedraw(){return this._pointCloudGroupWrapper.needsRedraw}set clippingPlanes(e){this._pointCloudGroupWrapper.traverse(t=>{if(t.material){const n=t.material;n.clipping=!0,n.clipIntersection=!1,n.clippingPlanes=e}})}getLoadingStateObserver(){return this._pointCloudGroupWrapper.getLoadingStateObserver()}updateCamera(e){}async addModel(e){const t=await this._pointCloudMetadataRepository.loadData(e),n=this._pointCloudFactory.createModel(t);this._pointCloudGroupWrapper.addPointCloud(n);const r=new Fe(this._pointCloudGroupWrapper,n,t.cameraConfiguration);return r.setModelTransformation(t.modelMatrix),r}removeModel(e){this._pointCloudGroupWrapper.removePointCloud(e.potreeNode)}}
|
|
327
360
|
/*!
|
|
328
361
|
* Copyright 2021 Cognite AS
|
|
329
|
-
*/
|
|
362
|
+
*/const Le=(new i.Matrix4).identity();class Ge{constructor(e,t,n="ept.json"){this._modelMetadataProvider=e,this._modelDataClient=t,this._blobFileName=n}async loadData(e){const t={format:te.EptPointCloud,...e},n=this._modelMetadataProvider.getModelUri(t),r=this._modelMetadataProvider.getModelMatrix(t),i=this._modelMetadataProvider.getModelCamera(t),a=await n,s=await r,d=await this._modelDataClient.getJsonFile(a,this._blobFileName),l=await i;return{modelBaseUrl:a,modelMatrix:s,cameraConfiguration:Object(o.x)(l,Le),scene:d}}}
|
|
363
|
+
/*!
|
|
364
|
+
* Copyright 2021 Cognite AS
|
|
365
|
+
*/class Ue{constructor(e){this._needsRedraw=!1,this.octtree=e,this.pointSize=2,this.pointSizeType=Pe.Adaptive,this.pointColorType=Me.Rgb,this.pointShape=Ie.Circle,this.pointBudget=1/0}get needsRedraw(){return this._needsRedraw}get pointSize(){return this.octtree.material.size}set pointSize(e){this.octtree.material.size=e,this._needsRedraw=!0}get pointSizeType(){return this.octtree.material.pointSizeType}set pointSizeType(e){this.octtree.material.pointSizeType=e,this._needsRedraw=!0}get pointBudget(){return this.octtree.pointBudget}set pointBudget(e){this.octtree.pointBudget=e,this._needsRedraw=!0}get visiblePointCount(){return this.octtree.numVisiblePoints||0}get boundingBox(){const e=this.octtree.pcoGeometry.tightBoundingBox||this.octtree.pcoGeometry.boundingBox||this.octtree.boundingBox,t=new i.Vector3(e.min.x,e.min.z,-e.min.y),n=new i.Vector3(e.max.x,e.max.z,-e.max.y);return(new i.Box3).setFromPoints([t,n])}get pointColorType(){return this.octtree.material.pointColorType}set pointColorType(e){this.octtree.material.pointColorType=e,this._needsRedraw=!0}get pointShape(){return this.octtree.material.shape}set pointShape(e){this.octtree.material.shape=e,this._needsRedraw=!0}get classification(){return this.octtree.material.classification}recomputeClassification(){this.octtree.material.recomputeClassification()}resetRedraw(){this._needsRedraw=!1}}
|
|
366
|
+
/*!
|
|
367
|
+
* Copyright 2021 Cognite AS
|
|
368
|
+
*/class Ve{constructor(e){this._httpHeadersProvider=e}createModel(e){this.initializePointCloudXhrRequestHeaders();const{modelBaseUrl:t,scene:n}=e,r=new D.PointCloudEptGeometry(t+"/",n),o=r.offset.x,i=r.offset.y,a=r.offset.z,s=new D.PointCloudEptGeometryNode(r,r.boundingBox,0,o,i,a);r.root=s,r.root.load();const d=new D.PointCloudOctree(r);d.name="PointCloudOctree: "+t;return new Ue(d)}initializePointCloudXhrRequestHeaders(){const e=this._httpHeadersProvider.headers;let t=D.XHRFactory.config.customHeaders;for(const[n,r]of Object.entries(e))t=t.filter(e=>e.header!==n),t.push({header:n,value:r});D.XHRFactory.config.customHeaders=t.filter(e=>e.header)}}
|
|
369
|
+
/*!
|
|
370
|
+
* Copyright 2021 Cognite AS
|
|
371
|
+
*/function je(e,t){const n=new Ge(e,t),r=new Ve(t);return new ke(n,r)}
|
|
372
|
+
/*!
|
|
373
|
+
* Copyright 2021 Cognite AS
|
|
374
|
+
*/class We{constructor(e,t,n){this._lastCamera={position:new i.Vector3(NaN,NaN,NaN),quaternion:new i.Quaternion(NaN,NaN,NaN,NaN),zoom:NaN},this._isDisposed=!1,this._subscriptions=new m.Subscription,this._events={loadingStateChanged:new o.d},this._effectRenderManager=t,this._cadManager=e,this._pointCloudManager=n,this.initLoadingStateObserver(this._cadManager,this._pointCloudManager),this._updateSubject=new m.Subject,this._updateSubject.pipe(Object(I.auditTime)(5e3),Object(I.tap)(()=>{Object(o.t)({moduleName:"RevealManager",methodName:"update"})})).subscribe()}dispose(){this._isDisposed||(this._cadManager.dispose(),this._subscriptions.unsubscribe(),this._isDisposed=!0)}requestRedraw(){this._cadManager.requestRedraw(),this._pointCloudManager.requestRedraw()}resetRedraw(){this._cadManager.resetRedraw(),this._pointCloudManager.resetRedraw()}get renderOptions(){return this._effectRenderManager.renderOptions}set renderOptions(e){this._effectRenderManager.renderOptions=null!=e?e:h.j}get needsRedraw(){return this._cadManager.needsRedraw||this._pointCloudManager.needsRedraw}update(e){(this._lastCamera.zoom!==e.zoom||!this._lastCamera.position.equals(e.position)||!this._lastCamera.quaternion.equals(e.quaternion))&&(this._lastCamera.position.copy(e.position),this._lastCamera.quaternion.copy(e.quaternion),this._lastCamera.zoom=e.zoom,this._cadManager.updateCamera(e),this._updateSubject.next())}get cadBudget(){return this._cadManager.budget}set cadBudget(e){this._cadManager.budget=e}get cadLoadedStatistics(){return this._cadManager.loadedStatistics}get cadRenderMode(){return this._cadManager.renderMode}set cadRenderMode(e){this._cadManager.renderMode=e}get pointCloudBudget(){return{numberOfPoints:this._pointCloudManager.pointBudget}}set pointCloudBudget(e){this._pointCloudManager.pointBudget=e.numberOfPoints}set clippingPlanes(e){this._cadManager.clippingPlanes=e,this._pointCloudManager.clippingPlanes=e}get clippingPlanes(){return 
this._cadManager.clippingPlanes}on(e,t){switch(e){case"loadingStateChanged":this._events.loadingStateChanged.subscribe(t);break;default:throw new Error(`Unsupported event '${e}'`)}}off(e,t){switch(e){case"loadingStateChanged":this._events.loadingStateChanged.unsubscribe(t);break;default:throw new Error(`Unsupported event '${e}'`)}}render(e){this._effectRenderManager.render(e),this.resetRedraw()}setRenderTarget(e,t=!0){this._effectRenderManager.setRenderTarget(e),this._effectRenderManager.setRenderTargetAutoSize(t)}async addModel(e,t,n){switch(Object(o.w)({moduleName:"RevealManager",methodName:"addModel",type:e,options:n},t),e){case"cad":return this._cadManager.addModel(t,null==n?void 0:n.geometryFilter);case"pointcloud":return this._pointCloudManager.addModel(t);default:throw new Error(`Model type '${e}' is not supported`)}}removeModel(e,t){switch(e){case"cad":this._cadManager.removeModel(t);break;case"pointcloud":this._pointCloudManager.removeModel(t);break;default:Object(o.k)(e)}}addUiObject(e,t,n){this._effectRenderManager.addUiObject(e,t,n),this.requestRedraw()}removeUiObject(e){this._effectRenderManager.removeUiObject(e),this.requestRedraw()}notifyLoadingStateChanged(e){this._events.loadingStateChanged.fire(e)}initLoadingStateObserver(e,t){this._subscriptions.add(Object(m.combineLatest)([e.getLoadingStateObserver(),t.getLoadingStateObserver()]).pipe(Object(I.observeOn)(m.asyncScheduler),Object(I.subscribeOn)(m.asyncScheduler),Object(I.map)(([e,t])=>({isLoading:e.isLoading||t.isLoading,itemsLoaded:e.itemsLoaded+t.itemsLoaded,itemsRequested:e.itemsRequested+t.itemsRequested,itemsCulled:e.itemsCulled+t.itemsCulled})),Object(I.distinctUntilChanged)((e,t)=>e.itemsLoaded===t.itemsLoaded&&e.itemsRequested===t.itemsRequested)).subscribe(this.notifyLoadingStateChanged.bind(this),e=>Object(o.u)(e,{moduleName:"RevealManager",methodName:"constructor"})))}}
|
|
375
|
+
/*!
|
|
376
|
+
* Copyright 2021 Cognite AS
|
|
377
|
+
*/class qe{constructor(e){this._renderManager=e}renderOccludingGeometry(e,t){const n={renderTarget:this._renderManager.getRenderTarget(),autoSize:this._renderManager.getRenderTargetAutoSize()};try{this._renderManager.setRenderTarget(e),this._renderManager.renderDetailedToDepthOnly(t)}finally{this._renderManager.setRenderTarget(n.renderTarget),this._renderManager.setRenderTargetAutoSize(n.autoSize)}}}function He(e,t,n,r,i,a={}){const s=n.getApplicationIdentifier();Object(o.p)(!1!==a.logMetrics,e,s,{moduleName:"createRevealManager",methodName:"createRevealManager",constructorOptions:a});const d=a.renderOptions||{},l=new h.c,c=new h.f(r,i,l,d),u=Ee(t,n,r,l,new qe(c),a),m=je(t,n);return new We(u,c,m)}class Ke{constructor(e,t){switch(e){case"cdf":{const e=t;this.addCadModel=t=>Ke.addCdfCadModel(t,e),this.addPointCloudModel=t=>Ke.addCdfPointCloudModel(t,e),this._revealManager=e}break;case"local":{const e=t;this.addCadModel=t=>Ke.addLocalCadModel(t,e),this.addPointCloudModel=()=>{throw new Error("Local point cloud models are not supported")},this._revealManager=e}break;default:Object(o.k)(e)}}static createLocalHelper(e,t,n){const r=function(e,t,n={}){return He("local",new fe,new pe,e,t,n)}(e,t,n);return new Ke("local",r)}static createCdfHelper(e,t,n,r){const o=function(e,t,n,r={}){const o=new ue(e),i=new se(e);return He(e.project,o,i,t,n,r)}(r,e,t,n);return new Ke("cdf",o)}get revealManager(){return this._revealManager}static addLocalCadModel(e,t){if(void 0===e.localPath)throw new Error("addLocalCadModel only works with local models");return t.addModel("cad",{fileName:e.localPath},{geometryFilter:e.geometryFilter})}static addCdfCadModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfCadModel only works with local models");return t.addModel("cad",{modelId:e.modelId,revisionId:e.revisionId},{geometryFilter:e.geometryFilter})}static addCdfPointCloudModel(e,t){if(-1===e.modelId||-1===e.revisionId)throw new Error("addCdfPointCloudModel only works with local 
models");return t.addModel("pointcloud",{modelId:e.modelId,revisionId:e.revisionId})}}
|
|
378
|
+
/*!
|
|
379
|
+
* Copyright 2021 Cognite AS
|
|
380
|
+
*/const Xe={16:"shift",17:"ctrl",18:"alt",27:"escape",32:"space",37:"left",38:"up",39:"right",40:"down",65:"a",68:"d",69:"e",81:"q",83:"s",87:"w"};class Ye{constructor(){this.keys={},this._disabled=!1,this.isPressed=e=>this.keys[e]>=1,this.comsumePressed=e=>{const t=2===this.keys[e];return t&&(this.keys[e]=1),t},this.addEventListeners=()=>{this.clearPressedKeys(),window.addEventListener("keydown",this.onKeydown),window.addEventListener("keyup",this.onKeyup),window.addEventListener("blur",this.clearPressedKeys)},this.removeEventListeners=()=>{window.removeEventListener("keydown",this.onKeydown),window.removeEventListener("keyup",this.onKeyup),window.removeEventListener("blur",this.clearPressedKeys)},this.onKeydown=e=>{e.metaKey||e.altKey||e.ctrlKey||e.keyCode in Xe&&(0===this.keys[Xe[e.keyCode]]&&(this.keys[Xe[e.keyCode]]=2),e.preventDefault())},this.onKeyup=e=>{e.keyCode in Xe&&(this.keys[Xe[e.keyCode]]=0)},this.clearPressedKeys=()=>{Object.keys(Xe).forEach(e=>{this.keys[Xe[e]]=0})},this.addEventListeners()}get disabled(){return this._disabled}set disabled(e){this._disabled=e,e?this.removeEventListeners():this.addEventListeners()}}
|
|
381
|
+
/*!
|
|
382
|
+
* Copyright 2021 Cognite AS
|
|
383
|
+
*/const Ze=-1!==navigator.userAgent.toLowerCase().indexOf("firefox");function Qe(e,t,n){return new i.Vector2(t-e.offsetLeft,n-e.offsetTop)}function Je(e,t){if(2!==t.length)throw new Error("getPinchInfo only works if touches.length === 2");const n=[t[0],t[1]].map(({clientX:t,clientY:n})=>Qe(e,t,n));return{center:n[0].clone().add(n[1]).multiplyScalar(.5),distance:n[0].distanceTo(n[1]),offsets:n}}const $e=Math.PI/360,et=10*$e;class tt extends i.EventDispatcher{constructor(e,t){super(),this.enabled=!0,this.enableDamping=!0,this.dampingFactor=.2,this.dynamicTarget=!0,this.minDistance=1,this.maxDistance=1/0,this.dollyFactor=.98,this.minPolarAngle=0,this.maxPolarAngle=Math.PI,this.minAzimuthAngle=-1/0,this.maxAzimuthAngle=1/0,this.panDollyMinDistanceFactor=10,this.firstPersonRotationFactor=.4,this.pointerRotationSpeedAzimuth=$e,this.pointerRotationSpeedPolar=$e,this.enableKeyboardNavigation=!0,this.keyboardRotationSpeedAzimuth=et,this.keyboardRotationSpeedPolar=et,this.mouseFirstPersonRotationSpeed=2*$e,this.keyboardDollySpeed=2,this.keyboardPanSpeed=10,this.keyboardSpeedFactor=3,this.pinchEpsilon=2,this.pinchPanSpeed=1,this.EPSILON=.001,this.minZoom=0,this.maxZoom=1/0,this.orthographicCameraDollyFactor=.3,this.temporarilyDisableDamping=!1,this.firstPersonMode=!1,this.reusableVector3=new i.Vector3,this._accumulatedMouseMove=new i.Vector2,this.target=new i.Vector3,this.targetEnd=new i.Vector3,this.spherical=new i.Spherical,this.sphericalEnd=new i.Spherical,this.deltaTarget=new i.Vector3,this.keyboard=new Ye,this.offsetVector=new i.Vector3,this.panVector=new i.Vector3,this.raycaster=new i.Raycaster,this.targetFPS=30,this.targetFPSOverActualFPS=1,this.isFocused=!1,this.update=e=>{const{camera:t,target:n,targetEnd:r,spherical:o,sphericalEnd:i,deltaTarget:a,handleKeyboard:s,enableDamping:d,dampingFactor:l,EPSILON:c,targetFPS:u,enabled:m}=this;if(!m)return!1;const 
/* ---------------------------------------------------------------------------
 * Minified webpack output — comments added for review only; do not hand-edit
 * the logic. `i` is presumably the three.js namespace (i.Vector3, i.Spherical,
 * i.EventDispatcher, i.Raycaster) and `Ye` a keyboard-state helper defined
 * earlier in the bundle — confirm against the unminified source.
 * Above: Ze = Firefox user-agent sniff (used to scale wheel deltaY);
 * Qe(el, x, y) -> pointer position relative to el as a Vector2;
 * Je(el, touches) -> pinch info {center, distance, offsets} — throws unless
 * exactly 2 touches; $e/et = per-pixel / per-keypress rotation speeds (rad).
 * class tt = combined orbit / first-person camera controls.
 * update(dt) continues below: damped interpolation of spherical/target toward
 * sphericalEnd/targetEnd, clamped by EPSILON; returns true iff the camera
 * actually moved (and then fires "cameraChange").
 * ------------------------------------------------------------------------- */
h=Math.min(1/e,u);this.targetFPSOverActualFPS=u/h,s(),this._accumulatedMouseMove.lengthSq()>0&&(this.rotate(this._accumulatedMouseMove.x,this._accumulatedMouseMove.y),this._accumulatedMouseMove.set(0,0)),i.theta=Math.sign(i.theta)*Math.min(Math.abs(i.theta),2*Math.PI);let p=i.theta-o.theta;Math.abs(p)>Math.PI&&(p-=2*Math.PI*Math.sign(p));const f=i.phi-o.phi,v=i.radius-o.radius;a.subVectors(r,n);let x=!1;const g=d&&!this.temporarilyDisableDamping?Math.min(l*this.targetFPSOverActualFPS,1):1;return this.temporarilyDisableDamping=!1,Math.abs(p)>c||Math.abs(f)>c||Math.abs(v)>c||Math.abs(a.x)>c||Math.abs(a.y)>c||Math.abs(a.z)>c?(o.set(o.radius+v*g,o.phi+f*g,o.theta+p*g),o.theta=o.theta%(2*Math.PI),n.add(a.multiplyScalar(g)),x=!0):(o.copy(i),n.copy(r)),o.makeSafe(),t.position.setFromSpherical(o).add(n),t.lookAt(n),x&&this.triggerCameraChangeEvent(),x},this.getState=()=>{const{target:e,camera:t}=this;return{target:e.clone(),position:t.position.clone()}},this.setState=(e,t)=>{const n=e.clone().sub(t);this.targetEnd.copy(t),this.sphericalEnd.setFromVector3(n),this.target.copy(this.targetEnd),this.spherical.copy(this.sphericalEnd),this.update(1e3/this.targetFPS),this.triggerCameraChangeEvent()},this.triggerCameraChangeEvent=()=>{const{camera:e,target:t}=this;this.dispatchEvent({type:"cameraChange",camera:{position:e.position,target:t}})},this.onMouseDown=e=>{if(this.enabled)switch(e.button){case i.MOUSE.LEFT:this.startMouseRotation(e);break;case i.MOUSE.RIGHT:e.preventDefault(),this.startMousePan(e)}},this.onMouseUp=e=>{this._accumulatedMouseMove.set(0,0)},this.onMouseWheel=e=>{if(!this.enabled)return;e.preventDefault();let t=0;if(e.wheelDelta)t=-e.wheelDelta/40;else if(e.detail)t=e.detail;else if(e.deltaY){const n=Ze?1:40;t=e.deltaY/n}const{domElement:n}=this;let r=e.offsetX,o=e.offsetY;r=r/n.clientWidth*2-1,o=o/n.clientHeight*-2+1;const 
/* onMouseWheel continues: offsetX/Y converted to NDC [-1,1]; Firefox (Ze)
 * uses deltaY raw, other browsers divide by 40. Perspective cameras dolly by
 * a distance, orthographic by a fixed zoom factor. */
i=t<0,a=this.camera.isPerspectiveCamera?this.getDollyDeltaDistance(i,Math.abs(t)):Math.sign(t)*this.orthographicCameraDollyFactor;this.dolly(r,o,a)},this.onTouchStart=e=>{if(this.enabled)switch(e.preventDefault(),e.touches.length){case 1:this.startTouchRotation(e);break;case 2:this.startTouchPinch(e)}},this.onFocusChanged=e=>{this.isFocused="blur"!==e.type&&(e.target===this.domElement||document.activeElement===this.domElement),this.keyboard.disabled=!this.isFocused},this.onContextMenu=e=>{this.enabled&&e.preventDefault()},this.rotate=(e,t)=>{if(0===e&&0===t)return;const n=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedAzimuth)*e,r=(this.firstPersonMode?this.mouseFirstPersonRotationSpeed:this.pointerRotationSpeedPolar)*t;this.firstPersonMode?(this.temporarilyDisableDamping=!0,this.rotateFirstPersonMode(n,r)):this.rotateSpherical(n,r)},this.startMouseRotation=e=>{let t=Qe(this.domElement,e.clientX,e.clientY);const n=e=>{const n=Qe(this.domElement,e.clientX,e.clientY),r=t.clone().sub(n);this._accumulatedMouseMove.add(r),t=n},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startMousePan=e=>{let t=Qe(this.domElement,e.clientX,e.clientY);const n=e=>{const n=Qe(this.domElement,e.clientX,e.clientY),r=n.x-t.x,o=n.y-t.y;t=n,this.pan(r,o)},r=()=>{window.removeEventListener("mousemove",n),window.removeEventListener("mouseup",r)};window.addEventListener("mousemove",n,{passive:!1}),window.addEventListener("mouseup",r,{passive:!1})},this.startTouchRotation=e=>{const{domElement:t}=this;let n=Qe(t,e.touches[0].clientX,e.touches[0].clientY);const r=e=>{if(1!==e.touches.length)return;const 
/* startTouchRotation continues: single-finger drag rotates; listeners detach
 * once touch count changes. Below: two-finger pinch (dolly + pan when pinch
 * center moves more than pinchEpsilon), then keyboard handling. */
r=Qe(t,e.touches[0].clientX,e.touches[0].clientY);this.rotate(n.x-r.x,n.y-r.y),n=r},o=e=>{1!==e.touches.length&&a()},i=()=>{a()},a=()=>{document.removeEventListener("touchstart",o),document.removeEventListener("touchmove",r),document.removeEventListener("touchend",i)};document.addEventListener("touchstart",o),document.addEventListener("touchmove",r,{passive:!1}),document.addEventListener("touchend",i,{passive:!1})},this.startTouchPinch=e=>{const{domElement:t}=this;let n=Je(t,e.touches);const r=Je(t,e.touches),o=this.spherical.radius,i=e=>{if(2!==e.touches.length)return;const i=Je(t,e.touches),a=r.distance/i.distance;this.sphericalEnd.radius=Math.max(a*o,this.minDistance/5);const s=i.center.clone().sub(n.center);s.length()>this.pinchEpsilon&&(s.multiplyScalar(this.pinchPanSpeed),this.pan(s.x,s.y)),n=i},a=e=>{2!==e.touches.length&&d()},s=()=>{d()},d=()=>{document.removeEventListener("touchstart",a),document.removeEventListener("touchmove",i),document.removeEventListener("touchend",s)};document.addEventListener("touchstart",a),document.addEventListener("touchmove",i),document.addEventListener("touchend",s)},this.handleKeyboard=()=>{if(!this.enabled||!this.enableKeyboardNavigation||!this.isFocused)return;const{keyboard:e,keyboardDollySpeed:t,keyboardPanSpeed:n,keyboardSpeedFactor:r}=this,o=this.keyboardRotationSpeedAzimuth*(Number(e.isPressed("left"))-Number(e.isPressed("right")));let i=this.keyboardRotationSpeedPolar*(Number(e.isPressed("up"))-Number(e.isPressed("down")));if(0!==o||0!==i){const{sphericalEnd:e}=this,t=e.phi;e.phi+=i,e.makeSafe(),i=e.phi-t,e.phi=t,this.rotateFirstPersonMode(o,i)}this.firstPersonMode=!1;const a=e.isPressed("shift")?r:1,s=!!e.isPressed("w")||!e.isPressed("s")&&void 0;void 0!==s&&(this.dolly(0,0,this.getDollyDeltaDistance(s,t*a)),this.firstPersonMode=!0);const 
/* handleKeyboard continues: `s` is true (w), false (s), or undefined (neither)
 * — the `!!w || !s && void 0` idiom; a/d pans horizontally, e/q vertically;
 * shift multiplies speeds by keyboardSpeedFactor. */
d=Number(e.isPressed("a"))-Number(e.isPressed("d")),l=Number(e.isPressed("e"))-Number(e.isPressed("q"));0===d&&0===l||(this.pan(a*n*d,a*n*l),this.firstPersonMode=!0)},this.rotateSpherical=(e,t)=>{const{sphericalEnd:n}=this,r=i.MathUtils.clamp(n.theta+e,this.minAzimuthAngle,this.maxAzimuthAngle),o=i.MathUtils.clamp(n.phi+t,this.minPolarAngle,this.maxPolarAngle);n.theta=r,n.phi=o,n.makeSafe()},this.rotateFirstPersonMode=(e,t)=>{const{firstPersonRotationFactor:n,reusableCamera:r,reusableVector3:o,sphericalEnd:i,targetEnd:a}=this;r.position.setFromSpherical(i).add(a),r.lookAt(a),r.rotateX(n*t),r.rotateY(n*e);const s=a.distanceTo(r.position);r.getWorldDirection(o),a.addVectors(r.position,o.multiplyScalar(s)),i.setFromVector3(o.subVectors(r.position,a)),i.makeSafe()},this.pan=(e,t)=>{const{domElement:n,camera:r,offsetVector:o,target:i}=this;o.copy(r.position).sub(i);let a=o.length();a=Math.max(a,this.panDollyMinDistanceFactor*this.minDistance),r.isPerspectiveCamera&&(a*=Math.tan(r.fov/2*Math.PI/180)),this.panLeft(2*e*a/n.clientHeight),this.panUp(2*t*a/n.clientHeight)},this.dollyOrthographicCamera=(e,t,n)=>{const r=this.camera;r.zoom*=1-n,r.zoom=i.MathUtils.clamp(r.zoom,this.minZoom,this.maxZoom),r.updateProjectionMatrix()},this.dollyPerspectiveCamera=(e,t,n)=>{const{dynamicTarget:r,minDistance:o,raycaster:a,reusableVector3:s,sphericalEnd:d,targetEnd:l,camera:c,reusableCamera:u}=this,m=Math.tan(i.MathUtils.degToRad(90-.5*c.fov)),h=Math.sqrt(m*m+e*e+t*t)/m,p=s.setFromSpherical(d).length();u.copy(c),u.position.setFromSpherical(d).add(l),u.lookAt(l),a.setFromCamera({x:e,y:t},u);const f=s;let v=p+n;v<o&&(v=o,r?(u.getWorldDirection(f),l.add(f.normalize().multiplyScalar(Math.abs(n)))):n=p-v);const x=-n*h;d.radius=v,u.getWorldDirection(f),f.normalize().multiplyScalar(n);const 
/* dollyPerspectiveCamera continues: dollies toward the cursor ray; when
 * clamped at minDistance and dynamicTarget is set, the target itself is
 * pushed forward. Then: dolly dispatch, pan helpers (camera matrix columns
 * 0/1), and constructor tail wiring DOM/window listeners plus dispose(). */
g=a.ray.direction.normalize().multiplyScalar(x).add(f);l.add(g)},this.dolly=(e,t,n)=>{const{camera:r}=this;r.isOrthographicCamera?this.dollyOrthographicCamera(e,t,n):r.isPerspectiveCamera&&this.dollyPerspectiveCamera(e,t,n)},this.getDollyDeltaDistance=(e,t=1)=>{const{sphericalEnd:n,dollyFactor:r}=this,o=r**t,i=e?o:1/o;return Math.max(n.radius,this.panDollyMinDistanceFactor*this.minDistance)*(i-1)},this.panLeft=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,0),r.multiplyScalar(-e),n.add(r)},this.panUp=e=>{const{camera:t,targetEnd:n,panVector:r}=this;r.setFromMatrixColumn(t.matrix,1),r.multiplyScalar(e),n.add(r)},this.camera=e,this.reusableCamera=e.clone(),this.domElement=t,this.spherical.setFromVector3(e.position),this.sphericalEnd.copy(this.spherical),t.addEventListener("mousedown",this.onMouseDown),t.addEventListener("touchstart",this.onTouchStart),t.addEventListener("wheel",this.onMouseWheel),t.addEventListener("contextmenu",this.onContextMenu),t.addEventListener("focus",this.onFocusChanged),t.addEventListener("blur",this.onFocusChanged),window.addEventListener("mouseup",this.onMouseUp),window.addEventListener("mousedown",this.onFocusChanged),window.addEventListener("touchstart",this.onFocusChanged),this.dispose=()=>{t.removeEventListener("mousedown",this.onMouseDown),t.removeEventListener("wheel",this.onMouseWheel),t.removeEventListener("touchstart",this.onTouchStart),t.removeEventListener("contextmenu",this.onContextMenu),t.removeEventListener("focus",this.onFocusChanged),t.removeEventListener("blur",this.onFocusChanged),window.removeEventListener("mouseup",this.onMouseUp),window.removeEventListener("mousedown",this.onFocusChanged),window.removeEventListener("touchstart",this.onFocusChanged)}}}
|
|
330
384
|
/*!
|
|
331
385
|
* Copyright 2021 Cognite AS
|
|
332
386
|
*/
|
|
333
|
-
|
|
387
|
+
/*!
|
|
388
|
+
* Copyright 2021 Cognite AS
|
|
389
|
+
*/class nt{constructor(e){var t,n;this._subscription=new m.Subscription,this._boundAnimate=this.animate.bind(this),this._events={cameraChange:new o.d,click:new o.d,hover:new o.d,sceneRendered:new o.d,disposed:new o.d},this._models=[],this._extraObjects=[],this.isDisposed=!1,this.latestRequestId=-1,this.clock=new i.Clock,this._clippingNeedsUpdate=!1,this._updateNearAndFarPlaneBuffers={combinedBbox:new i.Box3,bbox:new i.Box3,cameraPosition:new i.Vector3,cameraDirection:new i.Vector3,nearPlaneCoplanarPoint:new i.Vector3,nearPlane:new i.Plane,corners:new Array(new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3)},this.startPointerEventListeners=()=>{const e=this.canvas;let t=!1,n=0,r=!1;const i=l()(t=>{this._events.hover.fire(Object(o.l)(t,e))},100),a=n=>{const{offsetX:i,offsetY:a}=Object(o.l)(n,e),{offsetX:s,offsetY:d}=Object(o.l)(n,e);t&&r&&Math.abs(i-s)+Math.abs(a-d)>4&&(r=!1)},s=d=>{const l=d.timeStamp-n;t&&r&&l<250&&this._events.click.fire(Object(o.l)(d,e)),t=!1,r=!1,e.removeEventListener("mousemove",a),e.removeEventListener("touchmove",a),e.removeEventListener("mouseup",s),e.removeEventListener("touchend",s),e.addEventListener("mousemove",i)},d=o=>{event=o,t=!0,r=!0,n=o.timeStamp,e.addEventListener("mousemove",a),e.addEventListener("touchmove",a),e.addEventListener("mouseup",s),e.addEventListener("touchend",s),e.removeEventListener("mousemove",i)};e.addEventListener("mousedown",d),e.addEventListener("touchstart",d),e.addEventListener("mousemove",i)},this._renderer=e.renderer||new i.WebGLRenderer,this._renderer.localClippingEnabled=!0,this._automaticNearFarPlane=void 0===e.automaticCameraNearFar||e.automaticCameraNearFar,this._automaticControlsSensitivity=void 
/* ---------------------------------------------------------------------------
 * class nt = Cognite3DViewer (minified; getVersion() returns "2.1.2").
 * Owns renderer, scene, camera, ComboControls (tt), the RevealManager helper,
 * loaded models and the event hub (cameraChange/click/hover/sceneRendered/
 * disposed). `i` is presumably three.js; `m` presumably rxjs — confirm
 * against the unminified source.
 * NOTE(review): in startPointerEventListeners above, the pointer-down handler
 * begins with `event=o` — an assignment to an undeclared identifier (implicit
 * global, shadowing/clobbering window.event; would throw in strict mode).
 * Looks like leftover debug code — confirm and remove upstream.
 * NOTE(review): the "click" detection in `a`/`s` compares two reads of the
 * same event, so the >4-pixel move check appears to always compare equal
 * values — verify intent upstream.
 * Constructor continues: default for _automaticControlsSensitivity, canvas
 * sizing, DOM mounting, spinner, camera/scene/controls setup, render-option
 * parsing (anti-aliasing / SSAO / edges), reveal-manager creation, loading
 * spinner subscription, and the initial animate(0) kick-off.
 * ------------------------------------------------------------------------- */
0===e.automaticControlsSensitivity||e.automaticControlsSensitivity,this.canvas.style.width="640px",this.canvas.style.height="480px",this.canvas.style.minWidth="100%",this.canvas.style.minHeight="100%",this.canvas.style.maxWidth="100%",this.canvas.style.maxHeight="100%",this._domElement=e.domElement||function(){const e=document.createElementNS("http://www.w3.org/1999/xhtml","div");return e.style.width="100%",e.style.height="100%",e}(),this._domElement.appendChild(this.canvas),this.spinner=new N(this.domElement),this.camera=new i.PerspectiveCamera(60,void 0,.1,1e4),this.camera.position.x=30,this.camera.position.y=10,this.camera.position.z=50,this.camera.lookAt(new i.Vector3),this.scene=new i.Scene,this.scene.autoUpdate=!1,this.controls=new tt(this.camera,this.canvas),this.controls.dollyFactor=.992,this.controls.minDistance=1,this.controls.maxDistance=100,this.controls.addEventListener("cameraChange",e=>{const{position:t,target:n}=e.camera;this._events.cameraChange.fire(t.clone(),n.clone())}),this.sdkClient=e.sdk,this.renderController=new C(this.camera),this._viewStateHelper=new Z(this,this.sdkClient);const r=function(e){var t;const n={internal:{}};n.internal={sectorCuller:e._sectorCuller};const{antiAliasing:r,multiSampleCount:i}=function(e){switch(e=e||"fxaa"){case"disabled":return{antiAliasing:h.a.NoAA,multiSampleCount:1};case"fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:1};case"msaa2":return{antiAliasing:h.a.NoAA,multiSampleCount:2};case"msaa4":return{antiAliasing:h.a.NoAA,multiSampleCount:4};case"msaa8":return{antiAliasing:h.a.NoAA,multiSampleCount:8};case"msaa16":return{antiAliasing:h.a.NoAA,multiSampleCount:16};case"msaa2+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:2};case"msaa4+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:4};case"msaa8+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:8};case"msaa16+fxaa":return{antiAliasing:h.a.FXAA,multiSampleCount:16};default:Object(o.k)(e,"Unsupported anti-aliasing mode: 
"+e)}}(e.antiAliasingHint),a=function(e){const t={...h.j.ssaoRenderParameters};switch(e){case void 0:break;case"medium":t.sampleSize=h.h.Medium;break;case"high":t.sampleSize=h.h.High;break;case"veryhigh":t.sampleSize=h.h.VeryHigh;break;case"disabled":t.sampleSize=h.h.None;break;default:Object(o.k)(e,`Unexpected SSAO quality mode: '${e}'`)}return t}(e.ssaoQualityHint),s={enabled:null!==(t=e.enableEdges)&&void 0!==t?t:h.j.edgeDetectionParameters.enabled};return n.renderOptions={antiAliasing:r,multiSampleCountHint:i,ssaoRenderParameters:a,edgeDetectionParameters:s},n}(e);!0===e._localModels?this._revealManagerHelper=Ke.createLocalHelper(this._renderer,this.scene,r):this._revealManagerHelper=Ke.createCdfHelper(this._renderer,this.scene,r,this.sdkClient),this.startPointerEventListeners(),this.revealManager.setRenderTarget((null===(t=e.renderTargetOptions)||void 0===t?void 0:t.target)||null,null===(n=e.renderTargetOptions)||void 0===n?void 0:n.autoSetSize),this._subscription.add(Object(m.fromEventPattern)(e=>this.revealManager.on("loadingStateChanged",e),e=>this.revealManager.off("loadingStateChanged",e)).subscribe(t=>{this.spinner.loading=t.itemsLoaded!=t.itemsRequested,e.onLoading&&e.onLoading(t.itemsLoaded,t.itemsRequested,t.itemsCulled)},e=>Object(o.u)(e,{moduleName:"Cognite3DViewer",methodName:"constructor"}))),this.animate(0),Object(o.v)("construct3dViewer",{moduleName:"Cognite3DViewer",methodName:"constructor",constructorOptions:u()(e,["sdk","domElement","renderer","_sectorCuller"])})}get canvas(){return this.renderer.domElement}static isBrowserSupported(){return!0}get domElement(){return this._domElement}get renderer(){return this._renderer}get revealManager(){return this._revealManagerHelper.revealManager}get cadBudget(){return this.revealManager.cadBudget}set cadBudget(e){this.revealManager.cadBudget=e}get pointCloudBudget(){return this.revealManager.pointCloudBudget}set pointCloudBudget(e){this.revealManager.pointCloudBudget=e}get models(){return 
this._models.slice()}get cadLoadedStatistics(){return this.revealManager.cadLoadedStatistics}getVersion(){return"2.1.2"}dispose(){if(!this.isDisposed){this.isDisposed=!0,void 0!==this.latestRequestId&&cancelAnimationFrame(this.latestRequestId),this._subscription.unsubscribe(),this.revealManager.dispose(),this.domElement.removeChild(this.canvas),this.renderer.dispose();for(const e of this._models.values())e.dispose();this._models.splice(0),this.spinner.dispose(),this._events.disposed.fire()}}on(e,t){switch(e){case"click":this._events.click.subscribe(t);break;case"hover":this._events.hover.subscribe(t);break;case"cameraChange":this._events.cameraChange.subscribe(t);break;case"sceneRendered":this._events.sceneRendered.subscribe(t);break;case"disposed":this._events.disposed.subscribe(t);break;default:Object(o.k)(e)}}off(e,t){switch(e){case"click":this._events.click.unsubscribe(t);break;case"hover":this._events.hover.unsubscribe(t);break;case"cameraChange":this._events.cameraChange.unsubscribe(t);break;case"sceneRendered":this._events.sceneRendered.unsubscribe(t);break;case"disposed":this._events.disposed.unsubscribe(t);break;default:Object(o.k)(e)}}getViewState(){return this._viewStateHelper.getCurrentState()}setViewState(e){return this.models.filter(e=>e instanceof P).map(e=>e).forEach(e=>{e.styledNodeCollections.forEach(t=>e.unassignStyledNodeCollection(t.nodes)),e.styledNodeCollections.splice(0)}),this._viewStateHelper.setState(e)}async addModel(e){if(void 0!==e.localPath)throw new Error("addModel() only supports CDF hosted models. 
Use addCadModel() and addPointCloudModel() to use self-hosted models");switch(await this.determineModelType(e.modelId,e.revisionId)){case"cad":return this.addCadModel(e);case"pointcloud":return this.addPointCloudModel(e);default:throw new Error("Model is not supported")}}async addCadModel(e){let t;t=e.localPath?new ee(e.localPath):new J(this.sdkClient);const{modelId:n,revisionId:r}=e,o=await this._revealManagerHelper.addCadModel(e),i=new P(n,r,o,t);return this._models.push(i),this.scene.add(i),i}async addPointCloudModel(e){if(e.localPath)throw new T("localPath is not supported");if(e.geometryFilter)throw new T("geometryFilter is not supported for point clouds");const{modelId:t,revisionId:n}=e,r=await this._revealManagerHelper.addPointCloudModel(e),o=new R(t,n,r);return this._models.push(o),this.scene.add(o),o}removeModel(e){const t=this._models.indexOf(e);if(-1===t)throw new Error("Model is not added to viewer");switch(this._models.splice(t,1),this.scene.remove(e),this.renderController.redraw(),e.type){case"cad":const t=e;return void this.revealManager.removeModel(e.type,t.cadNode);case"pointcloud":const n=e;return void this.revealManager.removeModel(e.type,n.pointCloudNode);default:Object(o.k)(e.type,`Model type ${e.type} cannot be removed`)}}async determineModelType(e,t){const n=new ue(this.sdkClient),r=await n.getOutputs({modelId:e,revisionId:t,format:te.AnyFormat});return void 0!==r.findMostRecentOutput(te.RevealCadModel)?"cad":void 0!==r.findMostRecentOutput(te.EptPointCloud)?"pointcloud":""}addObject3D(e){this.isDisposed||(this.scene.add(e),e.updateMatrixWorld(!0),this._extraObjects.push(e),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera))}removeObject3D(e){if(this.isDisposed)return;this.scene.remove(e);const 
/* removeObject3D continues: drop from _extraObjects, redraw, refit near/far.
 * NOTE(review): determineModelType above returns "" (empty string) for an
 * unknown format rather than throwing — callers must handle the falsy case. */
t=this._extraObjects.indexOf(e);t>=0&&this._extraObjects.splice(t,1),this.renderController.redraw(),this.updateCameraNearAndFar(this.camera)}addUiObject(e,t,n){this.isDisposed||this.revealManager.addUiObject(e,t,n)}removeUiObject(e){this.isDisposed||this.revealManager.removeUiObject(e)}setBackgroundColor(e){this.isDisposed||(this.renderer.setClearColor(e),this.spinner.updateBackgroundColor(e),this.requestRedraw())}setClippingPlanes(e){this.revealManager.clippingPlanes=e,this._clippingNeedsUpdate=!0}setSlicingPlanes(e){this.setClippingPlanes(e)}getClippingPlanes(){return this.revealManager.clippingPlanes}getCamera(){return this.camera}getScene(){return this.scene}getCameraPosition(){return this.isDisposed?new i.Vector3(-1/0,-1/0,-1/0):this.controls.getState().position.clone()}getCameraTarget(){return this.isDisposed?new i.Vector3(-1/0,-1/0,-1/0):this.controls.getState().target.clone()}setCameraPosition(e){this.isDisposed||this.controls.setState(e,this.getCameraTarget())}setCameraTarget(e){this.isDisposed||this.controls.setState(this.getCameraPosition(),e)}get cameraControls(){return this.controls}get cameraControlsEnabled(){return this.controls.enabled}set cameraControlsEnabled(e){this.controls.enabled=e}loadCameraFromModel(e){const t=e.getCameraConfiguration();t?this.controls.setState(t.position,t.target):this.fitCameraToModel(e,0)}fitCameraToModel(e,t){const n=e.getModelBoundingBox(new i.Box3,!0);this.fitCameraToBoundingBox(n,t)}fitCameraToBoundingBox(e,t,n=2){const r=(new i.Vector3).lerpVectors(e.min,e.max,.5),o=.5*(new i.Vector3).subVectors(e.max,e.min).length(),a=new i.Sphere(r,o),s=a.center,d=a.radius*n,l=new i.Vector3(0,0,-1);l.applyQuaternion(this.camera.quaternion);const c=new 
/* fitCameraToBoundingBox continues: place camera n (default 2) bounding-sphere
 * radii back along the current view direction, then animate via moveCameraTo. */
i.Vector3;c.copy(l).multiplyScalar(-d).add(s),this.moveCameraTo(c,s,t)}requestRedraw(){this.revealManager.requestRedraw()}enableKeyboardNavigation(){this.controls.enableKeyboardNavigation=!0}disableKeyboardNavigation(){this.controls.enableKeyboardNavigation=!1}worldToScreen(e,t){this.camera.updateMatrixWorld();const n=new i.Vector3;return t?Object(p.a)(this.renderer,this.camera,e,n):Object(p.b)(this.renderer,this.camera,e,n),n.x<0||n.x>1||n.y<0||n.y>1||n.z<0||n.z>1?null:new i.Vector2(n.x,n.y)}async getScreenshot(e=this.canvas.width,t=this.canvas.height){if(this.isDisposed)throw new Error("Viewer is disposed");const{width:n,height:r}=this.canvas,o=this.camera.clone();rt(o,e,t),this.renderer.setSize(e,t),this.renderer.render(this.scene,o),this.revealManager.render(o);const i=this.renderer.domElement.toDataURL();return this.renderer.setSize(n,r),this.renderer.render(this.scene,this.camera),this.requestRedraw(),i}async getIntersectionFromPixel(e,t,n){const r=this.getModels("cad"),o=this.getModels("pointcloud"),i=r.map(e=>e.cadNode),a=o.map(e=>e.pointCloudNode),s={normalizedCoords:{x:e/this.renderer.domElement.clientWidth*2-1,y:t/this.renderer.domElement.clientHeight*-2+1},camera:this.camera,renderer:this.renderer,clippingPlanes:this.getClippingPlanes(),domElement:this.renderer.domElement},d=function(e,t){const n=[];for(const r of e){const e=x(r,t);e&&n.push(e)}return n.sort((e,t)=>e.distance-t.distance)}(i,s),l=O(a,s,null==n?void 0:n.pointIntersectionThreshold),c=[];if(l.length>0){const e=l[0];for(const t of o)if(t.pointCloudNode===e.pointCloudNode){const n={type:"pointcloud",model:t,point:e.point,pointIndex:e.pointIndex,distanceToCamera:e.distance};c.push(n);break}}if(d.length>0){const e=d[0];for(const t of r)if(t.cadNode===e.cadNode){const n={type:"cad",model:t,treeIndex:e.treeIndex,point:e.point,distanceToCamera:e.distance};c.push(n)}}return c.sort((e,t)=>e.distanceToCamera-t.distanceToCamera),c.length>0?c[0]:null}getModels(e){return 
this._models.filter(t=>t.type===e)}moveCameraTo(e,t,n){if(this.isDisposed)return;const{camera:r}=this;if(null==n){n=125*e.distanceTo(r.position),n=Math.min(Math.max(n,600),2500)}const o=new i.Raycaster;o.setFromCamera(new i.Vector2(0,0),r);const a=t.distanceTo(r.position),d=o.ray.direction.clone().multiplyScalar(a),l=o.ray.origin.clone().add(d),c={x:r.position.x,y:r.position.y,z:r.position.z,targetX:l.x,targetY:l.y,targetZ:l.z},u={x:e.x,y:e.y,z:e.z,targetX:t.x,targetY:t.y,targetZ:t.z},m=new s.a.Tween(c),h=e=>{if(this.isDisposed)return document.removeEventListener("keydown",h),void m.stop();("keydown"!==e.type||this.controls.enableKeyboardNavigation)&&(m.stop(),this.canvas.removeEventListener("pointerdown",h),this.canvas.removeEventListener("wheel",h),document.removeEventListener("keydown",h))};this.canvas.addEventListener("pointerdown",h),this.canvas.addEventListener("wheel",h),document.addEventListener("keydown",h);const p=new i.Vector3,f=new i.Vector3;m.to(u,n).easing(e=>s.a.Easing.Circular.Out(e)).onUpdate(()=>{this.isDisposed||(f.set(c.x,c.y,c.z),p.set(c.targetX,c.targetY,c.targetZ),this.camera&&(this.setCameraPosition(f),this.setCameraTarget(p)))}).onComplete(()=>{this.isDisposed||this.canvas.removeEventListener("pointerdown",h)}).start(s.a.now()).update(s.a.now())}async animate(e){if(this.isDisposed)return;this.latestRequestId=requestAnimationFrame(this._boundAnimate);const{display:t,visibility:n}=window.getComputedStyle(this.canvas);if("visible"===n&&"none"!==t){const{renderController:t}=this;s.a.update(e);if(this.resizeIfNecessary()&&this.requestRedraw(),this.controls.update(this.clock.getDelta()),t.update(),this.revealManager.update(this.camera),t.needsRedraw||this.revealManager.needsRedraw||this._clippingNeedsUpdate){const e=this.renderer.info.render.frame,n=Date.now();this.updateCameraNearAndFar(this.camera),this.revealManager.render(this.camera),t.clearNeedsRedraw(),this.revealManager.resetRedraw(),this._clippingNeedsUpdate=!1;const 
/* animate continues: fires "sceneRendered" with the frame number and the
 * render duration in ms. Below: near/far plane fitting from the combined
 * bounding box of models + extra objects (8 corners tested against the
 * candidate near plane), then controls sensitivity derivation. */
r=Date.now()-n;this._events.sceneRendered.fire({frameNumber:e,renderTime:r,renderer:this.renderer,camera:this.camera})}}}updateCameraNearAndFar(e){if(this.isDisposed)return;if(!this._automaticControlsSensitivity&&!this._automaticNearFarPlane)return;const{combinedBbox:t,bbox:n,cameraPosition:r,cameraDirection:o,corners:a,nearPlane:s,nearPlaneCoplanarPoint:d}=this._updateNearAndFarPlaneBuffers;t.makeEmpty(),this._models.forEach(e=>{e.getModelBoundingBox(n),n.isEmpty()||(t.expandByPoint(n.min),t.expandByPoint(n.max))}),this._extraObjects.forEach(e=>{n.setFromObject(e),n.isEmpty()||(t.expandByPoint(n.min),t.expandByPoint(n.max))}),function(e,t){if(8!==(t=t||[new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3,new i.Vector3]).length)throw new Error(`outBuffer must hold exactly 8 elements, but holds ${t.length} elemnents`);const n=e.min,r=e.max;t[0].set(n.x,n.y,n.z),t[1].set(r.x,n.y,n.z),t[2].set(n.x,r.y,n.z),t[3].set(n.x,n.y,r.z),t[4].set(r.x,r.y,n.z),t[5].set(r.x,r.y,r.z),t[6].set(r.x,n.y,r.z),t[7].set(n.x,r.y,r.z)}(t,a),e.getWorldPosition(r),e.getWorldDirection(o);let l=t.distanceToPoint(r);l/=Math.sqrt(1+Math.tan(e.fov/180*Math.PI/2)**2*(e.aspect**2+1)),l=Math.max(.1,l),d.copy(r).addScaledVector(o,l),s.setFromNormalAndCoplanarPoint(o,d);let c=-1/0;for(let e=0;e<8;++e)if(s.distanceToPoint(a[e])>=0){const t=a[e].distanceTo(r);c=Math.max(c,t)}c=Math.max(2*l,c);const u=t.min.distanceTo(t.max);t.containsPoint(r)&&(l=Math.min(.1,c/1e3)),this._automaticNearFarPlane&&(e.near=l,e.far=c,e.updateProjectionMatrix()),this._automaticControlsSensitivity&&(this.controls.minDistance=Math.min(Math.max(.02*u,.1*l),10))}resizeIfNecessary(){if(this.isDisposed)return!1;const e=this.renderer.getSize(new 
/* resizeIfNecessary continues: tracks the host element's client size at the
 * device pixel ratio, capped to ~1.4 megapixels (downscaled by sqrt); rt()
 * below refits a perspective camera's aspect or an orthographic frustum. */
i.Vector2),t=e.width,n=e.height,r=0!==this.domElement.clientWidth?this.domElement.clientWidth:this.canvas.clientWidth,o=0!==this.domElement.clientHeight?this.domElement.clientHeight:this.canvas.clientHeight,a=this.renderer.getPixelRatio()*r,s=this.renderer.getPixelRatio()*o,d=a*s,l=d>14e5?Math.sqrt(14e5/d):1,c=a*l,u=s*l;return!(Math.abs(t-c)<.1&&Math.abs(n-u)<.1)&&(this.renderer.setSize(c,u),rt(this.camera,c,u),this.camera instanceof i.OrthographicCamera&&(this.controls.orthographicCameraDollyFactor=20/c,this.controls.keyboardDollySpeed=2/c),!0)}}function rt(e,t,n){e instanceof i.PerspectiveCamera?(e.aspect=t/n,e.updateProjectionMatrix()):e instanceof i.OrthographicCamera&&(e.left=-t,e.right=t,e.top=n,e.bottom=-n)}var ot=n(20);
|
|
334
390
|
/*!
|
|
335
391
|
* Copyright 2021 Cognite AS
|
|
336
392
|
*/
|
|
337
|
-
/**
 * SinglePropertyNodeCollection (minified bundle names: `ur` = node-collection
 * base class, `Dn` = IndexSet, `cr` = paged-response index-set helper,
 * `l` = tree-index range ctor, `vr()` = integer-range factory, `pr()` =
 * deep-clone — all resolved by webpack; confirm against unminified source).
 * Filters 3D nodes whose given property matches one of a set of values,
 * fetched from the CDF playground `nodes/list` endpoint, optionally split
 * over several request partitions.
 */
class Ir extends ur {
  /**
   * @param e CogniteClient used for the HTTP requests.
   * @param t Model identifier carrying `modelId` and `revisionId`.
   * @param n Options; `requestPartitions` (default 1) controls parallelism.
   */
  constructor(e, t, n = {}) {
    super(Ir.classToken);
    this._indexSet = new Dn();
    // Placeholder filter; executeFilter() builds the actual request payload.
    this._filter = { propertyCategory: '', propertyKey: '', propertyValues: new Array() };
    this._client = e;
    this._modelId = t.modelId;
    this._revisionId = t.revisionId;
    this._options = { requestPartitions: 1, ...n };
  }

  /** True while a previously started executeFilter() is still fetching pages. */
  get isLoading() {
    return this._fetchResultHelper !== undefined && this._fetchResultHelper.isLoading;
  }

  /**
   * Fetches all nodes where property `e` (category) / `t` (key) has one of
   * the values in `n`, populating the index set incrementally as pages
   * arrive. Interrupts any fetch already in flight.
   */
  async executeFilter(e, t, n) {
    const freshIndexSet = new Dn();
    const { requestPartitions } = this._options;
    if (this._fetchResultHelper !== undefined) {
      this._fetchResultHelper.interrupt();
    }
    const helper = new cr(
      (node) => new l(node.treeIndex, node.subtreeSize),
      () => this.notifyChanged()
    );
    this._fetchResultHelper = helper;
    this._indexSet = freshIndexSet;
    const url = this.buildUrl();
    // Chunk the value list into groups of at most 1000 (server-side limit)...
    const chunks = Array.from(
      (function* (values) {
        for (let offset = 0; offset < values.length; offset += 1e3) {
          yield values.slice(offset, Math.min(values.length, offset + 1e3));
        }
      })(n)
    );
    // ...then fan each chunk out over the configured number of partitions.
    const pending = chunks.flatMap((values) => {
      const filter = { ['' + e]: { ['' + t]: values } };
      return vr()(1, requestPartitions + 1).map(async (partition) => {
        const firstPage = (async (client, requestUrl, request) => {
          const response = await Mr(client, requestUrl, request);
          return new Cr(client, requestUrl, request, response);
        })(this._client, url, {
          params: { partition: `${partition}/${requestPartitions}`, limit: 1e3 },
          data: { filter }
        });
        return helper.pageResults(freshIndexSet, firstPage);
      });
    });
    this.notifyChanged();
    await Promise.all(pending);
  }

  /** Aborts any in-flight fetch and empties the index set. */
  clear() {
    if (this._fetchResultHelper !== undefined) {
      this._fetchResultHelper.interrupt();
    }
    this._indexSet.clear();
    this.notifyChanged();
  }

  getIndexSet() {
    return this._indexSet;
  }

  /** Playground nodes/list endpoint for the configured model revision. */
  buildUrl() {
    return `${this._client.getBaseUrl()}/api/playground/projects/${this._client.project}/3d/v2/models/${this._modelId}/revisions/${this._revisionId}/nodes/list`;
  }

  serialize() {
    return { token: this.classToken, state: pr()(this._filter), options: { ...this._options } };
  }
}
Ir.classToken = "SinglePropertyNodeCollection";

/**
 * One page of a paged POST response. `next` is only defined while the server
 * returned a cursor; calling it fetches and wraps the following page.
 */
class Cr {
  constructor(e, t, n, r) {
    this.items = r.items;
    this.nextCursor = r.nextCursor;
    if (this.nextCursor !== undefined) {
      this.next = async () => {
        const request = { ...n, cursor: this.nextCursor };
        const response = await Mr(e, t, request);
        return new Cr(e, t, n, response);
      };
    }
  }
}

/**
 * POST helper: resolves with the response body on HTTP 200 and throws for
 * any other status.
 */
async function Mr(e, t, n) {
  const r = await e.post(t, n);
  if (200 === r.status) return r.data;
  throw new Error(`Unexpected status from server while POST ${t}: ${r.status} (body: ${r.data})`);
}
|
|
338
393
|
/*!
|
|
339
394
|
* Copyright 2021 Cognite AS
|
|
340
|
-
*/
|
|
395
|
+
*/
|
|
341
396
|
/*!
|
|
342
397
|
* Copyright 2021 Cognite AS
|
|
343
|
-
*/
|
|
398
|
+
*/
|
|
344
399
|
/*!
|
|
345
400
|
* Copyright 2021 Cognite AS
|
|
346
|
-
*/
|
|
401
|
+
*/
|
|
347
402
|
/*!
|
|
348
403
|
* Copyright 2021 Cognite AS
|
|
349
|
-
*/
|
|
404
|
+
*/}])}));
|
|
350
405
|
//# sourceMappingURL=index.map
|