itowns 2.43.2-next.0 → 2.43.2-next.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/debug.js +1 -1
- package/dist/debug.js.map +1 -1
- package/dist/itowns.js +1 -1
- package/dist/itowns.js.map +1 -1
- package/dist/itowns_widgets.js +1 -1
- package/dist/itowns_widgets.js.map +1 -1
- package/examples/3dtiles_25d.html +1 -1
- package/examples/3dtiles_basic.html +2 -2
- package/examples/3dtiles_batch_table.html +1 -3
- package/examples/3dtiles_pointcloud.html +9 -15
- package/examples/config.json +2 -1
- package/examples/copc_simple_loader.html +128 -0
- package/examples/entwine_3d_loader.html +1 -1
- package/examples/entwine_simple_loader.html +1 -1
- package/examples/js/plugins/COGParser.js +84 -50
- package/examples/js/plugins/COGSource.js +7 -4
- package/examples/layers/JSONLayers/OPENSM.json +1 -1
- package/examples/potree_25d_map.html +1 -1
- package/examples/potree_3d_map.html +1 -1
- package/examples/source_file_cog.html +22 -5
- package/lib/Controls/FirstPersonControls.js +0 -1
- package/lib/Controls/FlyControls.js +0 -1
- package/lib/Converter/Feature2Mesh.js +2 -4
- package/lib/Converter/textureConverter.js +1 -1
- package/lib/Core/3DTiles/C3DTBatchTable.js +1 -1
- package/lib/Core/3DTiles/C3DTFeature.js +0 -1
- package/lib/Core/CopcNode.js +174 -0
- package/lib/Core/Feature.js +1 -2
- package/lib/Core/Geographic/CoordStars.js +0 -1
- package/lib/Core/Label.js +0 -1
- package/lib/Core/MainLoop.js +0 -1
- package/lib/Core/Prefab/Globe/Atmosphere.js +0 -4
- package/lib/Core/Prefab/Globe/GlobeLayer.js +3 -3
- package/lib/Core/Style.js +2 -4
- package/lib/Core/View.js +2 -4
- package/lib/Layer/C3DTilesLayer.js +3 -1
- package/lib/Layer/CopcLayer.js +59 -0
- package/lib/Layer/ElevationLayer.js +2 -3
- package/lib/Layer/GeoidLayer.js +1 -2
- package/lib/Layer/LabelLayer.js +8 -17
- package/lib/Layer/Layer.js +4 -2
- package/lib/Layer/PointCloudLayer.js +4 -7
- package/lib/Layer/ReferencingLayerProperties.js +3 -3
- package/lib/Layer/TiledGeometryLayer.js +2 -3
- package/lib/Main.js +2 -0
- package/lib/Parser/GeoJsonParser.js +2 -3
- package/lib/Parser/LASLoader.js +45 -1
- package/lib/Parser/LASParser.js +57 -25
- package/lib/Parser/deprecated/LegacyGLTFLoader.js +1 -2
- package/lib/Process/FeatureProcessing.js +1 -2
- package/lib/Process/LayeredMaterialNodeProcessing.js +3 -9
- package/lib/Process/ObjectRemovalHelper.js +1 -2
- package/lib/Provider/3dTilesProvider.js +1 -0
- package/lib/Renderer/ColorLayersOrdering.js +1 -2
- package/lib/Renderer/Label2DRenderer.js +1 -4
- package/lib/Renderer/PointsMaterial.js +14 -9
- package/lib/Renderer/RenderMode.js +0 -1
- package/lib/Source/CopcSource.js +118 -0
- package/lib/Source/Source.js +3 -1
- package/lib/ThreeExtended/loaders/DDSLoader.js +11 -1
- package/lib/ThreeExtended/loaders/DRACOLoader.js +0 -1
- package/lib/ThreeExtended/loaders/GLTFLoader.js +1 -0
- package/lib/Utils/DEMUtils.js +2 -2
- package/lib/Utils/OrientationUtils.js +0 -1
- package/lib/Utils/gui/Searchbar.js +1 -2
- package/package.json +8 -7
package/lib/Parser/LASLoader.js
CHANGED
@@ -15,6 +15,10 @@ import { Las } from 'copc';
  * xOffset, zOffset]`) added to the scaled X, Y, Z point record values.
  */

+function defaultColorEncoding(header) {
+    return header.majorVersion === 1 && header.minorVersion <= 2 ? 8 : 16;
+}
+
 /**
  * @classdesc
  * Loader for LAS and LAZ (LASZip) point clouds. It uses the copc.js library and
@@ -117,6 +121,46 @@ class LASLoader {
         this._wasmPromise = null;
     }

+    /**
+     * Parses a LAS or LAZ (LASZip) chunk. Note that this function is
+     * **CPU-bound** and shall be parallelised in a dedicated worker.
+     * @param {Uint8Array} data - File chunk data.
+     * @param {Object} options - Parsing options.
+     * @param {Header} options.header - Partial LAS header.
+     * @param {number} options.pointCount - Number of points encoded in this
+     * data chunk.
+     * @param {Las.ExtraBytes[]} [options.eb] - Extra bytes LAS VLRs
+     * headers.
+     * @param {8 | 16} [options.colorDepth] - Color depth encoding (in bits).
+     * Either 8 or 16 bits. Defaults to 8 bits for LAS 1.2 and 16 bits for later
+     * versions (as mandatory by the specification).
+     */
+    async parseChunk(data, options) {
+        const {
+            header,
+            eb,
+            pointCount
+        } = options;
+        const {
+            pointDataRecordFormat,
+            pointDataRecordLength
+        } = header;
+        const colorDepth = options.colorDepth ?? defaultColorEncoding(header);
+        const bytes = new Uint8Array(data);
+        const pointData = await Las.PointData.decompressChunk(bytes, {
+            pointCount,
+            pointDataRecordFormat,
+            pointDataRecordLength
+        }, this._initDecoder());
+        const view = Las.View.create(pointData, header, eb);
+        const attributes = this._parseView(view, {
+            colorDepth
+        });
+        return {
+            attributes
+        };
+    }
+
     /**
      * Parses a LAS or LAZ (LASZip) file. Note that this function is
      * **CPU-bound** and shall be parallelised in a dedicated worker.
@@ -131,7 +175,7 @@ class LASLoader {
         const bytes = new Uint8Array(data);
         const pointData = await Las.PointData.decompressFile(bytes, this._initDecoder());
         const header = Las.Header.parse(bytes);
-        const colorDepth = options.colorDepth ?? (header
+        const colorDepth = options.colorDepth ?? defaultColorEncoding(header);
         const getter = async (begin, end) => bytes.slice(begin, end);
         const vlrs = await Las.Vlr.walk(getter, header);
         const ebVlr = Las.Vlr.find(vlrs, 'LASF_Spec', 4);
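The new `LASLoader#parseChunk` complements the existing `parseFile`: it decodes a single chunk of compressed point records (what a COPC octree node delivers) and resolves with the decoded typed-array attributes. Since the JSDoc above stresses that the call is CPU-bound and meant to be parallelised, here is a minimal worker sketch; the deep import path and the message shape (`chunk`, `header`, `pointCount`, `eb`, `colorDepth`) are assumptions for illustration, not part of this diff:

    // las-chunk.worker.js (illustrative, not part of the package)
    import LASLoader from 'itowns/lib/Parser/LASLoader';  // assumed deep-import path

    const loader = new LASLoader();

    self.onmessage = async (event) => {
        const { chunk, header, pointCount, eb, colorDepth } = event.data;
        // parseChunk returns { attributes } with position, intensity,
        // classification, returnNumber, ... as typed arrays.
        const { attributes } = await loader.parseChunk(chunk, { header, pointCount, eb, colorDepth });
        // Transfer the underlying buffers back to the main thread without copying.
        const transfer = Object.values(attributes)
            .filter(ArrayBuffer.isView)
            .map(view => view.buffer);
        self.postMessage(attributes, transfer);
    };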
package/lib/Parser/LASParser.js
CHANGED
@@ -1,6 +1,29 @@
 import * as THREE from 'three';
 import LASLoader from "./LASLoader.js";
 const lasLoader = new LASLoader();
+function buildBufferGeometry(attributes) {
+    const geometry = new THREE.BufferGeometry();
+    const positionBuffer = new THREE.BufferAttribute(attributes.position, 3);
+    geometry.setAttribute('position', positionBuffer);
+    const intensityBuffer = new THREE.BufferAttribute(attributes.intensity, 1);
+    geometry.setAttribute('intensity', intensityBuffer);
+    const returnNumber = new THREE.BufferAttribute(attributes.returnNumber, 1);
+    geometry.setAttribute('returnNumber', returnNumber);
+    const numberOfReturns = new THREE.BufferAttribute(attributes.numberOfReturns, 1);
+    geometry.setAttribute('numberOfReturns', numberOfReturns);
+    const classBuffer = new THREE.BufferAttribute(attributes.classification, 1);
+    geometry.setAttribute('classification', classBuffer);
+    const pointSourceID = new THREE.BufferAttribute(attributes.pointSourceID, 1);
+    geometry.setAttribute('pointSourceID', pointSourceID);
+    if (attributes.color) {
+        const colorBuffer = new THREE.BufferAttribute(attributes.color, 4, true);
+        geometry.setAttribute('color', colorBuffer);
+    }
+    const scanAngle = new THREE.BufferAttribute(attributes.scanAngle, 1);
+    geometry.setAttribute('scanAngle', scanAngle);
+    geometry.userData.origin = new THREE.Vector3().fromArray(attributes.origin);
+    return geometry;
+}

 /** The LASParser module provides a [parse]{@link
  * module:LASParser.parse} method that takes a LAS or LAZ (LASZip) file in, and
@@ -21,6 +44,36 @@ export default {
         }
         lasLoader.lazPerf = path;
     },
+    /**
+     * Parses a chunk of a LAS or LAZ (LASZip) and returns the corresponding
+     * `THREE.BufferGeometry`.
+     *
+     * @param {ArrayBuffer} data - The file content to parse.
+     * @param {Object} options
+     * @param {Object} options.in - Options to give to the parser.
+     * @param {number} options.in.pointCount - Number of points encoded in this
+     * data chunk.
+     * @param {Object} options.in.header - Partial LAS file header.
+     * @param {number} options.in.header.pointDataRecordFormat - Type of Point
+     * Data Record contained in the LAS file.
+     * @param {number} options.in.header.pointDataRecordLength - Size (in bytes)
+     * of the Point Data Record.
+     * @param {Object} [options.eb] - Extra bytes LAS VLRs headers.
+     * @param { 8 | 16 } [options.in.colorDepth] - Color depth (in bits).
+     * Defaults to 8 bits for LAS 1.2 and 16 bits for later versions
+     * (as mandatory by the specification)
+     *
+     * @return {Promise<THREE.BufferGeometry>} A promise resolving with a
+     * `THREE.BufferGeometry`.
+     */
+    parseChunk(data) {
+        let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+        return lasLoader.parseChunk(data, options.in).then(parsedData => {
+            const geometry = buildBufferGeometry(parsedData.attributes);
+            geometry.computeBoundingBox();
+            return geometry;
+        });
+    },
     /**
      * Parses a LAS file or a LAZ (LASZip) file and return the corresponding
      * `THREE.BufferGeometry`.
@@ -36,37 +89,16 @@ export default {
      * header of the file is contained in `userData`.
      */
     parse(data) {
-        var _options$out, _options$in;
         let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-        if (
+        if (options.out?.skip) {
             console.warn("Warning: options 'skip' not supported anymore");
         }
         return lasLoader.parseFile(data, {
-            colorDepth:
+            colorDepth: options.in?.colorDepth
        }).then(parsedData => {
-            const geometry =
-
-            geometry.userData = parsedData.header;
-            const positionBuffer = new THREE.BufferAttribute(attributes.position, 3);
-            geometry.setAttribute('position', positionBuffer);
-            const intensityBuffer = new THREE.BufferAttribute(attributes.intensity, 1);
-            geometry.setAttribute('intensity', intensityBuffer);
-            const returnNumber = new THREE.BufferAttribute(attributes.returnNumber, 1);
-            geometry.setAttribute('returnNumber', returnNumber);
-            const numberOfReturns = new THREE.BufferAttribute(attributes.numberOfReturns, 1);
-            geometry.setAttribute('numberOfReturns', numberOfReturns);
-            const classBuffer = new THREE.BufferAttribute(attributes.classification, 1);
-            geometry.setAttribute('classification', classBuffer);
-            const pointSourceID = new THREE.BufferAttribute(attributes.pointSourceID, 1);
-            geometry.setAttribute('pointSourceID', pointSourceID);
-            if (attributes.color) {
-                const colorBuffer = new THREE.BufferAttribute(attributes.color, 4, true);
-                geometry.setAttribute('color', colorBuffer);
-            }
-            const scanAngle = new THREE.BufferAttribute(attributes.scanAngle, 1);
-            geometry.setAttribute('scanAngle', scanAngle);
+            const geometry = buildBufferGeometry(parsedData.attributes);
+            geometry.userData.header = parsedData.header;
             geometry.computeBoundingBox();
-            geometry.userData.origin = new THREE.Vector3().fromArray(attributes.origin);
             return geometry;
         });
     }
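At the module level, the new `LASParser.parseChunk` wraps the loader call above and turns the decoded attributes into a `THREE.BufferGeometry` through the factored-out `buildBufferGeometry` helper (now shared with `parse`). A hedged usage sketch, assuming `LASParser` is exposed on the itowns entry point as in previous releases and that the chunk's partial header, point count and extra-bytes descriptors were read beforehand (for COPC data the new `CopcSource` below does exactly that):

    import { LASParser } from 'itowns';

    async function chunkToGeometry(buffer, header, pointCount, eb) {
        const geometry = await LASParser.parseChunk(buffer, {
            in: {
                header,      // must expose pointDataRecordFormat and pointDataRecordLength
                pointCount,  // number of points encoded in this chunk
                eb,          // extra-bytes VLR headers, may be omitted
            },
        });
        // The geometry carries position, intensity, returnNumber, numberOfReturns,
        // classification, pointSourceID, scanAngle and, when present, color
        // attributes; geometry.userData.origin holds the chunk origin.
        return geometry;
    }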
package/lib/Parser/deprecated/LegacyGLTFLoader.js
CHANGED
@@ -321,7 +321,6 @@ threeExamples.LegacyGLTFLoader = function () {
     1029: THREE.FrontSide // Culling back
     //1032: THREE.NoSide // Culling front and back, what to do?
   };
-
   var WEBGL_DEPTH_FUNCS = {
     512: THREE.NeverDepth,
     513: THREE.LessDepth,
@@ -355,7 +354,6 @@ threeExamples.LegacyGLTFLoader = function () {
     //32771: CONSTANT_ALPHA,
     //32772: ONE_MINUS_CONSTANT_COLOR
   };
-
   var WEBGL_TYPE_SIZES = {
     'SCALAR': 1,
     'VEC2': 2,
@@ -1116,6 +1114,7 @@ threeExamples.LegacyGLTFLoader = function () {

       // According to COLLADA spec...
       // aspectRatio = xfov / yfov
+
       var _camera = new THREE.PerspectiveCamera(THREE.MathUtils.radToDeg(yfov * aspectRatio), aspectRatio, camera.perspective.znear || 1, camera.perspective.zfar || 2e6);
       if (camera.name !== undefined) _camera.name = camera.name;
       if (camera.extras) _camera.userData = camera.extras;
package/lib/Process/FeatureProcessing.js
CHANGED
@@ -17,9 +17,8 @@ export default {
     if (node.layerUpdateState[layer.id] === undefined) {
       node.layerUpdateState[layer.id] = new LayerUpdateState();
     } else if (!node.layerUpdateState[layer.id].canTryUpdate()) {
-      var _node$link$layer$id;
       // toggle visibility features
-
+      node.link[layer.id]?.forEach(f => {
         f.layer.object3d.add(f);
         f.meshes.position.z = geoidLayerIsVisible(layer.parent) ? node.geoidHeight : 0;
         f.meshes.updateMatrixWorld();
package/lib/Process/LayeredMaterialNodeProcessing.js
CHANGED
@@ -1,8 +1,6 @@
 import { chooseNextLevelToFetch } from "../Layer/LayerUpdateStrategy.js";
 import LayerUpdateState from "../Layer/LayerUpdateState.js";
 import handlingError from "./handlerNodeError.js";
-export const SIZE_TEXTURE_TILE = 256;
-export const SIZE_DIAGONAL_TEXTURE = (2 * (SIZE_TEXTURE_TILE * SIZE_TEXTURE_TILE)) ** 0.5;
 function materialCommandQueuePriorityFunction(material) {
   // We know that 'node' is visible because commands can only be
   // issued for visible nodes.
@@ -62,14 +60,12 @@ export function updateLayeredMaterialNodeImagery(context, layer, node, parent) {
       return;
     } // ok, we're going to inherit our parent's texture
   }
-
   if (!nodeLayer) {
-    var _parent$material;
     // Create new raster node
     nodeLayer = layer.setupRasterNode(node);

     // Init the node by parent
-    const parentLayer =
+    const parentLayer = parent.material?.getLayer(layer.id);
     nodeLayer.initFromParent(parentLayer, extentsDestination);
   }

@@ -158,9 +154,8 @@ export function updateLayeredMaterialNodeElevation(context, layer, node, parent)
     nodeLayer = layer.setupRasterNode(node);
   }
   if (node.layerUpdateState[layer.id] === undefined) {
-    var _parent$material2;
     node.layerUpdateState[layer.id] = new LayerUpdateState();
-    const parentLayer =
+    const parentLayer = parent.material?.getLayer(layer.id);
     nodeLayer.initFromParent(parentLayer, extentsDestination);
     if (nodeLayer.level >= layer.source.zoom.min) {
       context.view.notifyChange(node, false);
@@ -210,8 +205,7 @@ export function removeLayeredMaterialNodeLayer(layerId) {
    * @param {TileMesh} node - The node to udpate.
    */
   return function (node) {
-
-    if ((_node$material = node.material) !== null && _node$material !== void 0 && _node$material.removeLayer) {
+    if (node.material?.removeLayer) {
      if (node.material.elevationLayerIds.indexOf(layerId) > -1) {
        node.setBBoxZ({
          min: 0,
package/lib/Process/ObjectRemovalHelper.js
CHANGED
@@ -28,7 +28,6 @@ export default {
       // see https://github.com/iTowns/itowns/issues/869
       // obj.geometry = null;
     }
-
     if (obj.material) {
       if (Array.isArray(obj.material)) {
         for (const material of obj.material) {
@@ -82,7 +81,7 @@ export default {
     // of the objects which have their own removal logic
     let toRemove = obj.children.filter(c => c.layer && c.layer.id === layer.id);
     const linked = obj.link && obj.link[layer.id];
-    if (linked
+    if (linked?.children.length) {
       toRemove = toRemove.concat(linked.children);
       delete obj.link[layer.id];
     }
package/lib/Provider/3dTilesProvider.js
CHANGED
@@ -114,6 +114,7 @@ function executeCommand(command) {
   } else if (magic == 'b3dm') {
     func = supportedFormats.b3dm;
   } else if (magic == 'pnts') {
+    layer.hasPnts = true;
     func = supportedFormats.pnts;
   } else if (magic == 'glTF') {
     func = supportedFormats.gltf;
package/lib/Renderer/ColorLayersOrdering.js
CHANGED
@@ -2,8 +2,7 @@ import { ImageryLayers } from "../Layer/Layer.js";
 function updateLayersOrdering(geometryLayer, imageryLayers) {
   const sequence = ImageryLayers.getColorLayersIdOrderedBySequence(imageryLayers);
   const cO = function (object) {
-
-    if ((_object$material = object.material) !== null && _object$material !== void 0 && _object$material.setSequence) {
+    if (object.material?.setSequence) {
       object.material.setSequence(sequence);
     }
   };
package/lib/Renderer/Label2DRenderer.js
CHANGED
@@ -157,10 +157,7 @@ class Label2DRenderer {
       }
     });
     labelLayers.forEach(labelLayer => {
-      labelLayer.toHide.children.forEach(labelsNode =>
-        var _labelsNode$domElemen;
-        return (_labelsNode$domElemen = labelsNode.domElements) === null || _labelsNode$domElemen === void 0 ? void 0 : _labelsNode$domElemen.labels.hide();
-      });
+      labelLayer.toHide.children.forEach(labelsNode => labelsNode.domElements?.labels.hide());
       labelLayer.toHide.clear();
     });
   }
package/lib/Renderer/PointsMaterial.js
CHANGED
@@ -1,6 +1,6 @@
 import * as THREE from 'three';
 /* babel-plugin-inline-import './Shader/PointsVS.glsl' */
-const PointsVS = "#include <itowns/precision_qualifier>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_vertex>\n#endif\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\n#define NB_CLASS 8.\n\nuniform float size;\nuniform float scale;\n\nuniform bool picking;\nuniform int mode;\nuniform float opacity;\nuniform vec4 overlayColor;\n\nuniform vec2 elevationRange;\nuniform vec2 intensityRange;\nuniform vec2 angleRange;\n\nuniform
+const PointsVS = "#include <itowns/precision_qualifier>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_vertex>\n#endif\n#include <common>\n#include <logdepthbuf_pars_vertex>\n\n#define NB_CLASS 8.\n\nuniform float size;\nuniform float scale;\n\nuniform bool picking;\nuniform int mode;\nuniform float opacity;\nuniform vec4 overlayColor;\n\nuniform vec2 elevationRange;\nuniform vec2 intensityRange;\nuniform vec2 angleRange;\n\nuniform sampler2D classificationTexture;\nuniform sampler2D discreteTexture;\nuniform sampler2D gradientTexture;\nuniform int sizeMode;\nuniform float minAttenuatedSize;\nuniform float maxAttenuatedSize;\n\nattribute vec3 color;\nattribute vec2 range;\nattribute vec4 unique_id;\nattribute float intensity;\nattribute float classification;\nattribute float pointSourceID;\n\nattribute float returnNumber;\nattribute float numberOfReturns;\nattribute float scanAngle;\n\n#if defined(NORMAL_OCT16)\nattribute vec2 oct16Normal;\n#elif defined(NORMAL_SPHEREMAPPED)\nattribute vec2 sphereMappedNormal;\n#endif\n\nvarying vec4 vColor;\n\n// see https://web.archive.org/web/20150303053317/http://lgdv.cs.fau.de/get/1602\n// and implementation in PotreeConverter (BINPointReader.cpp) and potree (BinaryDecoderWorker.js)\n#if defined(NORMAL_OCT16)\nvec3 decodeOct16Normal(vec2 encodedNormal) {\n vec2 nNorm = 2. * (encodedNormal / 255.) - 1.;\n vec3 n;\n n.z = 1. - abs(nNorm.x) - abs(nNorm.y);\n if (n.z >= 0.) {\n n.x = nNorm.x;\n n.y = nNorm.y;\n } else {\n n.x = sign(nNorm.x) - sign(nNorm.x) * sign(nNorm.y) * nNorm.y;\n n.y = sign(nNorm.y) - sign(nNorm.y) * sign(nNorm.x) * nNorm.x;\n }\n return normalize(n);\n}\n#elif defined(NORMAL_SPHEREMAPPED)\n// see http://aras-p.info/texts/CompactNormalStorage.html method #4\n// or see potree's implementation in BINPointReader.cpp\nvec3 decodeSphereMappedNormal(vec2 encodedNormal) {\n vec2 fenc = 2. * encodedNormal / 255. - 1.;\n float f = dot(fenc,fenc);\n float g = 2. * sqrt(1. - f);\n vec3 n;\n n.xy = fenc * g;\n n.z = 1. - 2. * f;\n return n;\n}\n#endif\n\nvoid main() {\n\n#if defined(NORMAL_OCT16)\n vec3 normal = decodeOct16Normal(oct16Normal);\n#elif defined(NORMAL_SPHEREMAPPED)\n vec3 normal = decodeSphereMappedNormal(sphereMappedNormal);\n#elif defined(NORMAL)\n // nothing to do\n#else\n // default to color\n vec3 normal = color;\n#endif\n\n if (picking) {\n vColor = unique_id;\n } else {\n vColor.a = 1.0;\n if (mode == PNTS_MODE_CLASSIFICATION) {\n vec2 uv = vec2(classification/255., 0.5);\n vColor = texture2D(classificationTexture, uv);\n } else if (mode == PNTS_MODE_NORMAL) {\n vColor.rgb = abs(normal);\n } else if (mode == PNTS_MODE_COLOR) {\n // default to color mode\n vColor.rgb = mix(color, overlayColor.rgb, overlayColor.a);\n } else if (mode == PNTS_MODE_RETURN_NUMBER) {\n vec2 uv = vec2(returnNumber/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_TYPE) {\n float returnType;\n if (returnNumber > numberOfReturns) {\n returnType = 4.;\n } else if (returnNumber == 1.) {\n // single\n returnType = 0.;\n } else {\n // first\n returnType = 1.;\n }\n } else {\n if (returnNumber == numberOfReturns) {\n // last\n returnType = 3.;\n } else {\n // intermediate\n returnType = 2.;\n }\n }\n vec2 uv = vec2(returnType/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_RETURN_COUNT) {\n vec2 uv = vec2(numberOfReturns/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_POINT_SOURCE_ID) {\n vec2 uv = vec2(mod(pointSourceID, NB_CLASS)/255., 0.5);\n vColor = texture2D(discreteTexture, uv);\n } else if (mode == PNTS_MODE_SCAN_ANGLE) {\n float i = (scanAngle - angleRange.x) / (angleRange.y - angleRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_INTENSITY) {\n float i = (intensity - intensityRange.x) / (intensityRange.y - intensityRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n } else if (mode == PNTS_MODE_ELEVATION) {\n float i = (position.z - elevationRange.x) / (elevationRange.y - elevationRange.x);\n vec2 uv = vec2(i, (1. - i));\n vColor = texture2D(gradientTexture, uv);\n }\n\n vColor.a *= opacity;\n }\n\n #include <begin_vertex>\n #include <project_vertex>\n\n gl_PointSize = size;\n\n if (sizeMode == PNTS_SIZE_MODE_ATTENUATED) {\n bool isPerspective = isPerspectiveMatrix(projectionMatrix);\n\n if (isPerspective) {\n gl_PointSize *= scale / -mvPosition.z;\n gl_PointSize = clamp(gl_PointSize, minAttenuatedSize, maxAttenuatedSize);\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n #include <itowns/projective_texturing_vertex>\n#endif\n #include <logdepthbuf_vertex>\n}\n";
 /* babel-plugin-inline-import './Shader/PointsFS.glsl' */
 const PointsFS = "#include <itowns/precision_qualifier>\n#include <logdepthbuf_pars_fragment>\n#if defined(USE_TEXTURES_PROJECTIVE)\n#include <itowns/projective_texturing_pars_fragment>\n#endif\n\nvarying vec4 vColor;\nuniform bool picking;\nuniform int shape;\n\nvoid main() {\n #include <logdepthbuf_fragment>\n //square shape does not require any change.\n if (shape == PNTS_SHAPE_CIRCLE) {\n //circular rendering in glsl\n if ((length(gl_PointCoord - 0.5) > 0.5) || (vColor.a == 0.0)) {\n discard;\n }\n }\n\n#if defined(USE_TEXTURES_PROJECTIVE)\n vec4 color = vColor;\n if (!picking) {\n #pragma unroll_loop\n for (int i = 0; i < ORIENTED_IMAGES_COUNT; i++) {\n color = projectiveTextureColor(projectiveTextureCoords[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTextureDistortion[ ORIENTED_IMAGES_COUNT - 1 - i ], projectiveTexture[ ORIENTED_IMAGES_COUNT - 1 - i ], mask[ORIENTED_IMAGES_COUNT - 1 - i], color);\n }\n gl_FragColor = vec4(color.rgb, color.a * opacity);\n } else {\n gl_FragColor = color;\n }\n#else\n gl_FragColor = vColor;\n#endif\n}\n";
 import ShaderUtils from "./Shader/ShaderUtils.js";
@@ -127,7 +127,7 @@ export const ClassificationScheme = {
     visible: true,
     name: 'default',
     color: new THREE.Color(0.3, 0.6, 0.6),
-    opacity: 0
+    opacity: 1.0
   }
  }
 };
@@ -185,7 +185,7 @@ const DiscreteScheme = {
     visible: true,
     name: 'default',
     color: white,
-    opacity: 0
+    opacity: 1.0
   }
  }
 };
@@ -220,6 +220,7 @@ function generateGradientTexture(gradient) {
   return texture;
 }
 function recomputeTexture(scheme, texture, nbClass) {
+  let needTransparency;
   const data = texture.image.data;
   const width = texture.image.width;
   if (!nbClass) {
@@ -250,8 +251,10 @@ function recomputeTexture(scheme, texture, nbClass) {
     data[j + 1] = parseInt(255 * color.g, 10);
     data[j + 2] = parseInt(255 * color.b, 10);
     data[j + 3] = visible ? parseInt(255 * opacity, 10) : 0;
+    needTransparency = needTransparency || opacity < 1;
   }
   texture.needsUpdate = true;
+  return needTransparency;
 }
 class PointsMaterial extends THREE.ShaderMaterial {
   /**
@@ -264,7 +267,6 @@ class PointsMaterial extends THREE.ShaderMaterial {
    * @param {THREE.Vector2} [options.intensityRange=new THREE.Vector2(1, 65536)] intensity range.
    * @param {THREE.Vector2} [options.elevationRange=new THREE.Vector2(0, 1000)] elevation range.
    * @param {THREE.Vector2} [options.angleRange=new THREE.Vector2(-90, 90)] scan angle range.
-   * @param {boolean} [options.applyOpacityClassication=false] apply opacity classification on all display mode.
    * @param {Scheme} [options.classification] LUT for point classification colorization.
    * @param {Scheme} [options.discreteScheme] LUT for other discret point values colorization.
    * @param {string} [options.gradient] Descrition of the gradient to use for continuous point values.
@@ -293,7 +295,6 @@ class PointsMaterial extends THREE.ShaderMaterial {
     const oiMaterial = options.orientedImageMaterial;
     const classificationScheme = options.classification || ClassificationScheme.DEFAULT;
     const discreteScheme = options.discreteScheme || DiscreteScheme.DEFAULT;
-    const applyOpacityClassication = options.applyOpacityClassication == undefined ? false : options.applyOpacityClassication;
     const size = options.size || 0;
     const mode = options.mode || PNTS_MODE.COLOR;
     const shape = options.shape || PNTS_SHAPE.CIRCLE;
@@ -313,7 +314,6 @@ class PointsMaterial extends THREE.ShaderMaterial {
     delete options.orientedImageMaterial;
     delete options.classification;
     delete options.discreteScheme;
-    delete options.applyOpacityClassication;
     delete options.size;
     delete options.mode;
     delete options.shape;
@@ -322,6 +322,7 @@ class PointsMaterial extends THREE.ShaderMaterial {
     delete options.maxAttenuatedSize;
     delete options.gradient;
     super(options);
+    this.userData.needTransparency = {};
     this.gradients = gradients;
     this.gradientTexture = new THREE.CanvasTexture();
     this.vertexShader = PointsVS;
@@ -339,7 +340,6 @@ class PointsMaterial extends THREE.ShaderMaterial {
     CommonMaterial.setUniformProperty(this, 'intensityRange', intensityRange);
     CommonMaterial.setUniformProperty(this, 'elevationRange', elevationRange);
     CommonMaterial.setUniformProperty(this, 'angleRange', angleRange);
-    CommonMaterial.setUniformProperty(this, 'applyOpacityClassication', applyOpacityClassication);
     CommonMaterial.setUniformProperty(this, 'sizeMode', sizeMode);
     CommonMaterial.setUniformProperty(this, 'scale', scale);
     CommonMaterial.setUniformProperty(this, 'minAttenuatedSize', minAttenuatedSize);
@@ -390,14 +390,19 @@ class PointsMaterial extends THREE.ShaderMaterial {
     }
   }
   recomputeClassification() {
-    recomputeTexture(this.classificationScheme, this.classificationTexture,
+    const needTransparency = recomputeTexture(this.classificationScheme, this.classificationTexture, 256);
+    this.userData.needTransparency[PNTS_MODE.CLASSIFICATION] = needTransparency;
     this.dispatchEvent({
       type: 'material_property_changed',
       target: this.uniforms
     });
   }
   recomputeDiscreteTexture() {
-    recomputeTexture(this.discreteScheme, this.discreteTexture);
+    const needTransparency = recomputeTexture(this.discreteScheme, this.discreteTexture);
+    this.userData.needTransparency[PNTS_MODE.RETURN_NUMBER] = needTransparency;
+    this.userData.needTransparency[PNTS_MODE.RETURN_TYPE] = needTransparency;
+    this.userData.needTransparency[PNTS_MODE.RETURN_COUNT] = needTransparency;
+    this.userData.needTransparency[PNTS_MODE.POINT_SOURCE_ID] = needTransparency;
     this.dispatchEvent({
       type: 'material_property_changed',
       target: this.uniforms
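With `applyOpacityClassication` removed and the default scheme opacities switched from `0` to `1.0`, transparency is now derived from the colour look-up tables themselves: `recomputeTexture` reports whether any visible entry is semi-transparent, and the material records that verdict per display mode in `userData.needTransparency`. A minimal consumption sketch, assuming the material exposes its current display mode as `material.mode` (like its other uniform-backed properties) and that the schemes were edited beforehand; the exact wiring inside `PointCloudLayer` is not shown in this section:

    // After editing layer.material.classificationScheme or discreteScheme
    // (e.g. from a GUI), rebuild the LUT textures...
    layer.material.recomputeClassification();
    layer.material.recomputeDiscreteTexture();

    // ...then enable alpha blending only if the active display mode needs it.
    const mode = layer.material.mode;
    layer.material.transparent = !!layer.material.userData.needTransparency[mode];
    layer.material.needsUpdate = true;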
package/lib/Source/CopcSource.js
ADDED
@@ -0,0 +1,118 @@
+import { Binary, Info, Las } from 'copc';
+import Extent from "../Core/Geographic/Extent.js";
+import Fetcher from "../Provider/Fetcher.js";
+import LASParser from "../Parser/LASParser.js";
+import Source from "./Source.js";
+import * as THREE from 'three';
+
+/**
+ * @param {function(number, number):Promise<Uint8Array>} fetcher
+ */
+async function getHeaders(fetcher) {
+    const header = Las.Header.parse(await fetcher(0, Las.Constants.minHeaderLength));
+    const vlrs = await Las.Vlr.walk(fetcher, header);
+
+    // info VLR: required by COPC
+    const infoVlr = Las.Vlr.find(vlrs, 'copc', 1);
+    if (!infoVlr) {
+        return Promise.reject('COPC info VLR is required');
+    }
+    const info = Info.parse(await Las.Vlr.fetch(fetcher, infoVlr));
+
+    // OGC Coordinate System WKT: required by LAS1.4
+    const wktVlr = Las.Vlr.find(vlrs, 'LASF_Projection', 2112);
+    if (!wktVlr) {
+        return Promise.reject('LAS1.4 WKT VLR is required');
+    }
+    const wkt = Binary.toCString(await Las.Vlr.fetch(fetcher, wktVlr));
+
+    // Extra bytes: optional by LAS1.4
+    const ebVlr = Las.Vlr.find(vlrs, 'LASF_Spec', 4);
+    const eb = ebVlr ? Las.ExtraBytes.parse(await Las.Vlr.fetch(fetcher, ebVlr)) : [];
+    return {
+        header,
+        info,
+        wkt,
+        eb
+    };
+}
+
+/**
+ * @classdesc
+ * A source for [Cloud Optimised Point Cloud](https://copc.io/) (COPC) data.
+ * Such data consists of a [LAZ 1.4](https://www.ogc.org/standard/las/) file
+ * that stores compressed points data organized in a clustered octree.
+ *
+ * A freshly created source fetches and parses portions of the file
+ * corresponding to the LAS 1.4 header, all the Variable Length Record (VLR)
+ * headers as well the following VLRs:
+ * - COPC [`info`](https://copc.io/#info-vlr) record (mandatory)
+ * - LAS 1.4 `OGC Coordinate System WKT` record (mandatory, see [Las 1.4
+ * spec](https://portal.ogc.org/files/?artifact_id=74523))
+ * - LAS 1.4 `Extra Bytes` record (optional, see [Las 1.4
+ * spec](https://portal.ogc.org/files/?artifact_id=74523))
+ *
+ * @extends {Source}
+ *
+ * @property {boolean} isCopcSource - Read-only flag to check that a given
+ * object is of type CopcSource.
+ * @property {Object} header - LAS header of the source.
+ * @property {Object[]} eb - List of headers of each Variable Length Records
+ * (VLRs).
+ * @property {Object} info - COPC `info` VLR.
+ * @property {number[]} info.cube - Bounding box of the octree as a 6-elements.
+ * tuple `[minX, minY, minZ, maxX, maxY, maxZ]`. Computed from `center_x`,
+ * `center_y`, `center_z` and `halfSize` properties.
+ * @property {Object} info.rootHierarchyPage - Hierarchy page of the root node.
+ * @property {number} info.rootHierarchyPage.pageOffset - Absolute Offset to the
+ * root node data chunk.
+ * @property {number} info.rootHierarchyPage.pageOffset - Size (in bytes) of the
+ * root node data chunk.
+ * @property {number[]} gpsTimeRange - A 2-element tuple denoting the minimum
+ * and maximum values of attribute `gpsTime`.
+ */
+class CopcSource extends Source {
+    /**
+     * @param {Object} config - Source configuration
+     * @param {string} config.url - URL of the COPC resource.
+     * @param {8 | 16} [config.colorDepth=16] - Encoding of the `color`
+     * attribute. Either `8` or `16` bits.
+     * @param {string} [config._lazPerfBaseUrl] - (experimental) Overrides base
+     * url of the `las-zip.wasm` file of the `laz-perf` library.
+     * @param {string} [config.crs='EPSG:4326'] - Native CRS of the COPC
+     * ressource. Note that this is not for now inferred from the COPC header.
+     * @param {RequestInit} [config.networkOptions] - Fetch options (passed
+     * directly to `fetch()`), see [the syntax for more information]{@link
+     * https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Syntax}.
+     * @param {Object} [config.attribution] - Attribution of the data.
+     *
+     * @constructor
+     */
+    constructor(config) {
+        super(config);
+        this.isCopcSource = true;
+        this.parser = LASParser.parseChunk;
+        this.fetcher = Fetcher.arrayBuffer;
+        this.colorDepth = config.colorDepth ?? 16;
+        const get = ( /** @type {number} */begin, /** @type {number} */end) => this.fetcher(this.url, {
+            ...this.networkOptions,
+            headers: {
+                ...this.networkOptions.headers,
+                range: `bytes=${begin}-${end - 1}`
+            }
+        }).then(buffer => new Uint8Array(buffer));
+        this.whenReady = getHeaders(get).then(metadata => {
+            this.header = metadata.header;
+            this.info = metadata.info;
+            this.eb = metadata.eb;
+            // TODO: use wkt definition in `metadata.wkt` to infer/define crs
+            this.crs = config.crs || 'EPSG:4326';
+            const bbox = new THREE.Box3();
+            bbox.min.fromArray(this.info.cube, 0);
+            bbox.max.fromArray(this.info.cube, 3);
+            this.extent = Extent.fromBox3(this.crs, bbox);
+            return this;
+        });
+    }
+}
+export default CopcSource;
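Together with the new `CopcLayer` and `CopcNode` modules listed at the top of this diff, `CopcSource` is the entry point for streaming a Cloud Optimised Point Cloud over HTTP range requests; the new `examples/copc_simple_loader.html` exercises it. A hedged sketch of typical usage, assuming the two lines added to `package/lib/Main.js` export `CopcSource` and `CopcLayer`, that `CopcLayer` takes the usual `(id, config)` arguments of other point-cloud layers, and that a `view` already exists:

    import * as itowns from 'itowns';

    const source = new itowns.CopcSource({
        url: 'https://example.com/pointcloud.copc.laz',  // illustrative URL
        crs: 'EPSG:4326',  // default; not yet inferred from the embedded WKT (see the TODO above)
        colorDepth: 16,    // default; use 8 for files with 8-bit colour components
    });

    // whenReady resolves once the LAS header and the COPC info, WKT and
    // extra-bytes VLRs have been fetched through range requests.
    source.whenReady.then(() => {
        console.log(source.info.cube, source.extent);
        // Hypothetical layer construction; the exact options of CopcLayer live in
        // the new package/lib/Layer/CopcLayer.js, whose diff is not shown here.
        view.addLayer(new itowns.CopcLayer('copc', { source }));
    });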
package/lib/Source/Source.js
CHANGED
@@ -56,6 +56,8 @@ let uid = 0;
  * To extend a Source, it is necessary to implement two functions:
  * `urlFromExtent` and `extentInsideLimit`.
  *
+ * @extends InformationsData
+ *
  * @property {boolean} isSource - Used to checkout whether this source is a
  * Source. Default is true. You should not change this, as it is used internally
  * for optimisation.
@@ -97,7 +99,6 @@ class Source extends InformationsData {
     * Source. Only the `url` property is mandatory.
     *
     * @constructor
-    * @extends InformationsData
     */
    constructor(source) {
        super(source);
@@ -118,6 +119,7 @@ class Source extends InformationsData {
            crossOrigin: 'anonymous'
        };
        this.attribution = source.attribution;
+        /** @type {Promise<any>} */
        this.whenReady = Promise.resolve();
        this._featuresCaches = {};
        if (source.extent && !source.extent.isExtent) {
package/lib/ThreeExtended/loaders/DDSLoader.js
CHANGED
@@ -17,23 +17,29 @@ class DDSLoader extends CompressedTextureLoader {

     // All values and structures referenced from:
     // http://msdn.microsoft.com/en-us/library/bb943991.aspx/
+
     // const DDSD_CAPS = 0x1;
     // const DDSD_HEIGHT = 0x2;
     // const DDSD_WIDTH = 0x4;
     // const DDSD_PITCH = 0x8;
     // const DDSD_PIXELFORMAT = 0x1000;
+
     // const DDSD_LINEARSIZE = 0x80000;
     // const DDSD_DEPTH = 0x800000;
+
     // const DDSCAPS_COMPLEX = 0x8;
     // const DDSCAPS_MIPMAP = 0x400000;
     // const DDSCAPS_TEXTURE = 0x1000;
+
     // const DDSCAPS2_VOLUME = 0x200000;
+
     // const DDPF_ALPHAPIXELS = 0x1;
     // const DDPF_ALPHA = 0x2;
     // const DDPF_FOURCC = 0x4;
     // const DDPF_RGB = 0x40;
     // const DDPF_YUV = 0x200;
     // const DDPF_LUMINANCE = 0x20000;
+
     function fourCCToInt32(value) {
       return value.charCodeAt(0) + (value.charCodeAt(1) << 8) + (value.charCodeAt(2) << 16) + (value.charCodeAt(3) << 24);
     }
@@ -66,7 +72,6 @@ class DDSLoader extends CompressedTextureLoader {
         dst++; //a
       }
     }
-
     return byteArray;
   }
   const FOURCC_DXT1 = fourCCToInt32('DXT1');
@@ -78,11 +83,16 @@ class DDSLoader extends CompressedTextureLoader {
   const extendedHeaderLengthInt = 5; // The extended header length in 32 bit ints

   // Offsets into the header array
+
   // const off_pfFlags = 20;
+
   // const off_caps = 27;
+
   // const off_caps3 = 29;
   // const off_caps4 = 30;
+
   // If fourCC = DX10, the extended header starts after 32
+
   // Parse header

   const header = new Int32Array(buffer, 0, headerLengthInt);
package/lib/ThreeExtended/loaders/GLTFLoader.js
CHANGED
@@ -2627,6 +2627,7 @@ class GLTFParser {

     // Removes dangling associations, associations that reference a node that
     // didn't make it into the scene.
+
     parser.associations = (node => {
       const reducedAssociations = new Map();
       for (const [key, value] of parser.associations) {