@loaders.gl/flatgeobuf 4.2.0-alpha.4 → 4.2.0-alpha.6
This diff shows the published contents of the two package versions as released to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/dist/dist.dev.js +264 -218
- package/dist/dist.min.js +30 -0
- package/dist/flatgeobuf/3.27.2/column-meta.d.ts +1 -1
- package/dist/flatgeobuf/3.27.2/column-meta.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/column-meta.js +1 -2
- package/dist/flatgeobuf/3.27.2/config.js +13 -13
- package/dist/flatgeobuf/3.27.2/constants.js +3 -2
- package/dist/flatgeobuf/3.27.2/crs-meta.js +1 -2
- package/dist/flatgeobuf/3.27.2/dumptree.js +28 -23
- package/dist/flatgeobuf/3.27.2/feature.d.ts +1 -1
- package/dist/flatgeobuf/3.27.2/feature.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/feature.js +2 -3
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column-type.js +19 -19
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column.d.ts +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column.js +115 -117
- package/dist/flatgeobuf/3.27.2/flat-geobuf/crs.js +74 -76
- package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.d.ts +2 -2
- package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.js +94 -90
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry-type.js +22 -22
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.d.ts +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.js +222 -210
- package/dist/flatgeobuf/3.27.2/flat-geobuf/header.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/flat-geobuf/header.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/header.js +164 -160
- package/dist/flatgeobuf/3.27.2/flat-geobuf.d.ts +5 -5
- package/dist/flatgeobuf/3.27.2/flat-geobuf.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf.js +1 -1
- package/dist/flatgeobuf/3.27.2/generic/feature.d.ts +4 -4
- package/dist/flatgeobuf/3.27.2/generic/feature.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic/feature.js +195 -205
- package/dist/flatgeobuf/3.27.2/generic/featurecollection.d.ts +6 -6
- package/dist/flatgeobuf/3.27.2/generic/featurecollection.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic/featurecollection.js +150 -128
- package/dist/flatgeobuf/3.27.2/generic/geometry.d.ts +2 -2
- package/dist/flatgeobuf/3.27.2/generic/geometry.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic/geometry.js +78 -66
- package/dist/flatgeobuf/3.27.2/generic/header.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/generic/header.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic/header.js +18 -18
- package/dist/flatgeobuf/3.27.2/generic/index.d.ts +4 -4
- package/dist/flatgeobuf/3.27.2/generic/index.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic/index.js +2 -1
- package/dist/flatgeobuf/3.27.2/generic.d.ts +7 -7
- package/dist/flatgeobuf/3.27.2/generic.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/generic.js +11 -3
- package/dist/flatgeobuf/3.27.2/geojson/feature.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/geojson/feature.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/geojson/feature.js +8 -9
- package/dist/flatgeobuf/3.27.2/geojson/featurecollection.d.ts +2 -2
- package/dist/flatgeobuf/3.27.2/geojson/featurecollection.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/geojson/featurecollection.js +40 -38
- package/dist/flatgeobuf/3.27.2/geojson/geometry.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/geojson/geometry.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/geojson/geometry.js +107 -103
- package/dist/flatgeobuf/3.27.2/geojson/index.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/geojson/index.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/geojson/index.js +2 -1
- package/dist/flatgeobuf/3.27.2/geojson.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/geojson.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/geojson.js +17 -5
- package/dist/flatgeobuf/3.27.2/header-meta.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/header-meta.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/header-meta.js +46 -43
- package/dist/flatgeobuf/3.27.2/header.d.ts +1 -1
- package/dist/flatgeobuf/3.27.2/header.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/header.js +2 -3
- package/dist/flatgeobuf/3.27.2/http-reader.d.ts +3 -3
- package/dist/flatgeobuf/3.27.2/http-reader.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/http-reader.js +241 -171
- package/dist/flatgeobuf/3.27.2/index.d.ts +13 -13
- package/dist/flatgeobuf/3.27.2/index.d.ts.map +1 -1
- package/dist/flatgeobuf/3.27.2/index.js +5 -13
- package/dist/flatgeobuf/3.27.2/logger.js +36 -56
- package/dist/flatgeobuf/3.27.2/packedrtree.js +176 -123
- package/dist/flatgeobuf/3.27.2/streams/utils.js +59 -58
- package/dist/flatgeobuf-loader.js +29 -23
- package/dist/flatgeobuf-worker.js +40 -35
- package/dist/index.cjs +191 -213
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -1
- package/dist/lib/binary-geometries.js +115 -117
- package/dist/lib/get-schema-from-fgb-header.js +110 -85
- package/dist/lib/parse-flatgeobuf.d.ts +1 -1
- package/dist/lib/parse-flatgeobuf.d.ts.map +1 -1
- package/dist/lib/parse-flatgeobuf.js +125 -109
- package/dist/workers/flatgeobuf-worker.js +3 -1
- package/package.json +11 -6
- package/dist/LICENSE +0 -9
- package/dist/flatgeobuf/3.27.2/LICENSE +0 -29
- package/dist/flatgeobuf/3.27.2/README.md +0 -45
- package/dist/flatgeobuf/3.27.2/column-meta.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/config.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/constants.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/crs-meta.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/dumptree.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/feature.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column-type.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/column.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/crs.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry-type.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf/header.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/flat-geobuf.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic/feature.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic/featurecollection.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic/geometry.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic/header.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic/index.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/generic.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/geojson/feature.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/geojson/featurecollection.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/geojson/geometry.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/geojson/index.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/geojson.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/header-meta.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/header.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/http-reader.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/index.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/logger.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/packedrtree.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/streams/utils.js.map +0 -1
- package/dist/flatgeobuf/3.27.2/types/jsts.d.ts +0 -23
- package/dist/flatgeobuf/3.27.2/types/slice.d.ts +0 -16
- package/dist/flatgeobuf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/binary-geometries.js.map +0 -1
- package/dist/lib/get-schema-from-fgb-header.js.map +0 -1
- package/dist/lib/parse-flatgeobuf.js.map +0 -1
- package/dist/workers/flatgeobuf-worker.js.map +0 -1

package/dist/flatgeobuf/3.27.2/packedrtree.js
@@ -1,138 +1,191 @@
 import Config from "./config.js";
 import Logger from "./logger.js";
 export const NODE_ITEM_BYTE_LEN = 8 * 4 + 8;
+/**
+ * @deprecated Use `NODE_ITEM_BYTE_LEN` instead.
+ */
 export const NODE_ITEM_LEN = NODE_ITEM_BYTE_LEN;
+// default branching factor of a node in the rtree
+//
+// actual value will be specified in the header but
+// this can be useful for having reasonably sized guesses for fetch-sizes when
+// streaming results
 export const DEFAULT_NODE_SIZE = 16;
 export function calcTreeSize(numItems, nodeSize) {
- …
+    nodeSize = Math.min(Math.max(Number(nodeSize), 2), 65535);
+    let n = numItems;
+    let numNodes = n;
+    do {
+        n = Math.ceil(n / nodeSize);
+        numNodes += n;
+    } while (n !== 1);
+    return numNodes * NODE_ITEM_BYTE_LEN;
 }
+/**
+ * returns [levelOffset, numNodes] for each level
+ */
 export function generateLevelBounds(numItems, nodeSize) {
- …
+    if (nodeSize < 2)
+        throw new Error('Node size must be at least 2');
+    if (numItems === 0)
+        throw new Error('Number of items must be greater than 0');
+    // number of nodes per level in bottom-up order
+    let n = numItems;
+    let numNodes = n;
+    const levelNumNodes = [n];
+    do {
+        n = Math.ceil(n / nodeSize);
+        numNodes += n;
+        levelNumNodes.push(n);
+    } while (n !== 1);
+    // bounds per level in reversed storage order (top-down)
+    const levelOffsets = [];
+    n = numNodes;
+    for (const size of levelNumNodes) {
+        levelOffsets.push(n - size);
+        n -= size;
+    }
+    const levelBounds = [];
+    for (let i = 0; i < levelNumNodes.length; i++)
+        levelBounds.push([levelOffsets[i], levelOffsets[i] + levelNumNodes[i]]);
+    return levelBounds;
 }
+/**
+ * Yield's a `SearchResult` for each feature within the bounds of `rect`.
+ *
+ * Every node in the FGB index tree has a bounding rect, all of the nodes children
+ * are contained within that bounding rect. The leaf nodes of the tree represent
+ * the features of the collection.
+ *
+ * As we traverse the tree, starting from the root, we'll need to read more data
+ * from the index. When we don't already have this range data buffered locally,
+ * an HTTP fetch is triggered. For performance, we merge adjacent and nearby
+ * request ranges into a single request, reasoning that fetching a few extra
+ * bytes is a good tradeoff if it means we can reduce the number of requests.
+ */
 export async function* streamSearch(numItems, nodeSize, rect, readNode) {
- …
+    class NodeRange {
+        _level;
+        nodes;
+        constructor(nodes, level) {
+            this._level = level;
+            this.nodes = nodes;
+        }
+        level() {
+            return this._level;
+        }
+        startNodeIdx() {
+            return this.nodes[0];
+        }
+        endNodeIdx() {
+            return this.nodes[1];
+        }
+        extendEndNodeIdx(newIdx) {
+            console.assert(newIdx > this.nodes[1]);
+            this.nodes[1] = newIdx;
+        }
+        toString() {
+            return `[NodeRange level: ${this._level}, nodes: ${this.nodes[0]}-${this.nodes[1]}]`;
+        }
     }
- …
-    const {
-        minX,
-        minY,
-        maxX,
-        maxY
-    } = rect;
-    Logger.info(`tree items: ${numItems}, nodeSize: ${nodeSize}`);
-    const levelBounds = generateLevelBounds(numItems, nodeSize);
-    const firstLeafNodeIdx = levelBounds[0][0];
-    const rootNodeRange = (() => {
-        const range = [0, 1];
-        const level = levelBounds.length - 1;
-        return new NodeRange(range, level);
-    })();
-    const queue = [rootNodeRange];
-    Logger.debug(`starting stream search with queue: ${queue}, numItems: ${numItems}, nodeSize: ${nodeSize}, levelBounds: ${levelBounds}`);
-    while (queue.length != 0) {
-        const nodeRange = queue.shift();
-        Logger.debug(`popped node: ${nodeRange}, queueLength: ${queue.length}`);
-        const nodeRangeStartIdx = nodeRange.startNodeIdx();
-        const isLeafNode = nodeRangeStartIdx >= firstLeafNodeIdx;
-        const nodeRangeEndIdx = (() => {
-            const [, levelBound] = levelBounds[nodeRange.level()];
-            const nodeIdx = Math.min(nodeRange.endNodeIdx() + nodeSize, levelBound);
-            if (isLeafNode && nodeIdx < levelBound) {
-                return nodeIdx + 1;
-            }
-            return nodeIdx;
+    const { minX, minY, maxX, maxY } = rect;
+    Logger.info(`tree items: ${numItems}, nodeSize: ${nodeSize}`);
+    const levelBounds = generateLevelBounds(numItems, nodeSize);
+    const firstLeafNodeIdx = levelBounds[0][0];
+    const rootNodeRange = (() => {
+        const range = [0, 1];
+        const level = levelBounds.length - 1;
+        return new NodeRange(range, level);
     })();
- …
+    const queue = [rootNodeRange];
+    Logger.debug(`starting stream search with queue: ${queue}, numItems: ${numItems}, nodeSize: ${nodeSize}, levelBounds: ${levelBounds}`);
+    while (queue.length != 0) {
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        const nodeRange = queue.shift();
+        Logger.debug(`popped node: ${nodeRange}, queueLength: ${queue.length}`);
+        const nodeRangeStartIdx = nodeRange.startNodeIdx();
+        const isLeafNode = nodeRangeStartIdx >= firstLeafNodeIdx;
+        // find the end index of the node
+        const nodeRangeEndIdx = (() => {
+            const [, levelBound] = levelBounds[nodeRange.level()];
+            const nodeIdx = Math.min(nodeRange.endNodeIdx() + nodeSize, levelBound);
+            if (isLeafNode && nodeIdx < levelBound) {
+                // We can infer the length of *this* feature by getting the start of the *next*
+                // feature, so we get an extra node.
+                // This approach doesn't work for the final node in the index,
+                // but in that case we know that the feature runs to the end of the FGB file and
+                // could make an open ended range request to get "the rest of the data".
+                return nodeIdx + 1;
+            }
+            return nodeIdx;
         })();
- …
+        const numNodesInRange = nodeRangeEndIdx - nodeRangeStartIdx;
+        const buffer = await readNode(nodeRangeStartIdx * NODE_ITEM_BYTE_LEN, numNodesInRange * NODE_ITEM_BYTE_LEN);
+        const dataView = new DataView(buffer);
+        for (let nodeIdx = nodeRangeStartIdx; nodeIdx < nodeRangeEndIdx; nodeIdx++) {
+            const nodeIdxInDataView = nodeIdx - nodeRangeStartIdx;
+            const dataViewByteStart = nodeIdxInDataView * NODE_ITEM_BYTE_LEN;
+            if (maxX < dataView.getFloat64(dataViewByteStart + 0, true))
+                continue; // maxX < nodeMinX
+            if (maxY < dataView.getFloat64(dataViewByteStart + 8, true))
+                continue; // maxY < nodeMinY
+            if (minX > dataView.getFloat64(dataViewByteStart + 16, true))
+                continue; // minX > nodeMaxX
+            if (minY > dataView.getFloat64(dataViewByteStart + 24, true))
+                continue; // minY > nodeMaxY
+            // `offset` is:
+            // For leaf nodes: the byte-offset into the feature buffer.
+            // For inner nodes: the node-idx of its first child.
+            const offset = dataView.getBigUint64(dataViewByteStart + 32, true);
+            if (isLeafNode) {
+                const featureByteOffset = offset;
+                const featureLength = (() => {
+                    if (nodeIdx < numItems - 1) {
+                        // Since features are tightly packed, we infer the
+                        // length of _this_ feature by measuring to the _next_
+                        // feature's start.
+                        const nextPos = (nodeIdxInDataView + 1) * NODE_ITEM_BYTE_LEN;
+                        // console.debug(`nodeIdx: ${nodeIdx} of ${numItems}, nodeRangeStartIdx: ${nodeRangeStartIdx}, nextPos: ${nextPos}, dataView.byteLength: ${dataView.byteLength}`,);
+                        const nextOffset = dataView.getBigUint64(nextPos + 32, true);
+                        return nextOffset - featureByteOffset;
+                    }
+                    // This is the last feature - there's no "next" feature
+                    // to measure to, so we can't know it's length.
+                    return null;
+                })();
+                // Logger.debug(`featureByteOffset: ${featureByteOffset}, nodeIdx: ${nodeIdx}, featureLength: ${featureLength}`);
+                const featureIdx = nodeIdx - firstLeafNodeIdx;
+                yield [Number(featureByteOffset), featureIdx, Number(featureLength)];
+                continue;
+            }
+            const firstChildNodeIdx = offset;
+            // request up to this many nodes if it means we can eliminate an
+            // extra request
+            const extraRequestThresholdNodes = Config.global.extraRequestThreshold() / NODE_ITEM_BYTE_LEN;
+            // Since we're traversing the tree by monotonically increasing byte
+            // offset, the most recently enqueued node range will be the
+            // nearest, and thus presents the best candidate for merging.
+            const nearestNodeRange = queue[queue.length - 1];
+            if (nearestNodeRange !== undefined &&
+                nearestNodeRange.level() == nodeRange.level() - 1 &&
+                firstChildNodeIdx < nearestNodeRange.endNodeIdx() + extraRequestThresholdNodes) {
+                Logger.debug(`Merging "nodeRange" request into existing range: ${nearestNodeRange}, newEndNodeIdx: ${nearestNodeRange.endNodeIdx()} -> ${firstChildNodeIdx}`);
+                nearestNodeRange.extendEndNodeIdx(Number(firstChildNodeIdx));
+                continue;
+            }
+            const newNodeRange = (() => {
+                const level = nodeRange.level() - 1;
+                const range = [Number(firstChildNodeIdx), Number(firstChildNodeIdx) + 1];
+                return new NodeRange(range, level);
+            })();
+            // We're going to add a new node range - log the reason
+            if (nearestNodeRange !== undefined && nearestNodeRange.level() == newNodeRange.level()) {
+                Logger.info(`Same level, but too far away. Pushing new request for nodeIdx: ${firstChildNodeIdx} rather than merging with distant ${nearestNodeRange}`);
+            }
+            else {
+                Logger.info(`Pushing new level for ${newNodeRange} onto queue with nearestNodeRange: ${nearestNodeRange} since there's not already a range for this level.`);
+            }
+            queue.push(newNodeRange);
+        }
     }
-    }
 }
-//# sourceMappingURL=packedrtree.js.map
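The rewritten packedrtree.js above exposes the streaming index search through `calcTreeSize`, `generateLevelBounds` and the async generator `streamSearch(numItems, nodeSize, rect, readNode)`. The sketch below is not part of the package; it only illustrates how the `readNode(offset, length)` callback contract could be satisfied from an in-memory copy of the index. The names `indexBuffer`, `queryIndex` and `bbox` are placeholders, and the import path is assumed to match the dist layout in the file list.

```js
// Hypothetical driver for the streamSearch generator shown in the diff above.
// Assumes `indexBuffer` is an ArrayBuffer holding only the packed R-tree index.
import {calcTreeSize, streamSearch} from './packedrtree.js';

async function queryIndex(indexBuffer, numItems, nodeSize, bbox) {
  // Sanity check: the index should be exactly calcTreeSize() bytes long.
  console.assert(indexBuffer.byteLength === calcTreeSize(numItems, nodeSize));

  // streamSearch asks for byte ranges relative to the start of the index;
  // here we just slice them out of the in-memory buffer. An HTTP reader
  // would issue a range request instead (see http-reader.js in the file list).
  const readNode = async (offset, length) => indexBuffer.slice(offset, offset + length);

  const rect = {minX: bbox[0], minY: bbox[1], maxX: bbox[2], maxY: bbox[3]};
  const hits = [];
  // Each result is [featureByteOffset, featureIdx, featureLength]; the length
  // of the final feature cannot be inferred (see the comments in the diff).
  for await (const hit of streamSearch(numItems, nodeSize, rect, readNode)) {
    hits.push(hit);
  }
  return hits;
}
```

As a worked example of the size calculation: with the default `nodeSize` of 16 and 1,000 features, the levels hold 1000, 63, 4 and 1 nodes, so `calcTreeSize` reports (1000 + 63 + 4 + 1) × 40 = 42,720 bytes of index, the kind of figure a remote reader can use when sizing its fetches.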
package/dist/flatgeobuf/3.27.2/streams/utils.js
@@ -1,67 +1,68 @@
 import { ReadableStream } from 'web-streams-polyfill';
 import { ReadableStreamBuffer } from 'stream-buffers';
 export function arrayToStream(array) {
- …
+    const myReadableStreamBuffer = new ReadableStreamBuffer({
+        frequency: 10, // in milliseconds.
+        chunkSize: 2048 // in bytes.
+    });
+    myReadableStreamBuffer.put(Buffer.from(array));
+    myReadableStreamBuffer.stop();
+    const webReader = nodeToWeb(myReadableStreamBuffer);
+    return webReader;
 }
-export async function takeAsync(asyncIterable) {
- …
-    }
- …
-        result.push(value);
-    }
-    return result;
+export async function takeAsync(asyncIterable, count = Infinity) {
+    const result = [];
+    const iterator = asyncIterable[Symbol.asyncIterator]();
+    while (result.length < count) {
+        const { value, done } = await iterator.next();
+        if (done)
+            break;
+        result.push(value);
+    }
+    return result;
 }
 export function nodeToWeb(nodeStream) {
- …
+    let destroyed = false;
+    const listeners = {};
+    function start(controller) {
+        listeners.data = onData;
+        listeners.end = onData;
+        listeners.end = onDestroy;
+        listeners.close = onDestroy;
+        listeners.error = onDestroy;
+        for (const name in listeners)
+            nodeStream.on(name, listeners[name]);
+        nodeStream.pause();
+        function onData(chunk) {
+            if (destroyed)
+                return;
+            controller.enqueue(chunk);
+            nodeStream.pause();
+        }
+        function onDestroy(err) {
+            if (destroyed)
+                return;
+            destroyed = true;
+            for (const name in listeners)
+                nodeStream.removeListener(name, listeners[name]);
+            if (err)
+                controller.error(err);
+            else
+                controller.close();
+        }
+    }
+    function pull() {
+        if (destroyed)
+            return;
+        nodeStream.resume();
     }
- …
+    function cancel() {
+        destroyed = true;
+        for (const name in listeners)
+            nodeStream.removeListener(name, listeners[name]);
+        nodeStream.push(null);
+        nodeStream.pause();
+        nodeStream.destroy();
     }
- …
-    function pull() {
-        if (destroyed) return;
-        nodeStream.resume();
-    }
-    function cancel() {
-        destroyed = true;
-        for (const name in listeners) nodeStream.removeListener(name, listeners[name]);
-        nodeStream.push(null);
-        nodeStream.pause();
-        nodeStream.destroy();
-    }
-    return new ReadableStream({
-        start,
-        pull,
-        cancel
-    });
+    return new ReadableStream({ start, pull, cancel });
 }
-//# sourceMappingURL=utils.js.map
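The reworked streams/utils.js gives `takeAsync` an optional `count` argument (defaulting to `Infinity`), so a bounded number of items can be pulled from an async iterable. A small usage sketch follows; it is not part of the package, and the throwaway generator `firstN` only stands in for a real feature iterator such as the one `streamSearch` drives.

```js
// Hypothetical usage of the updated takeAsync helper from the diff above.
import {takeAsync} from './streams/utils.js';

// Stand-in async iterable for demonstration purposes.
async function* firstN(n) {
  for (let i = 0; i < n; i++) {
    yield i;
  }
}

async function demo() {
  // New in this version: stop after `count` items instead of draining the iterable.
  console.log(await takeAsync(firstN(10), 3)); // [0, 1, 2]
  // Omitting count keeps the previous behaviour of consuming until done.
  console.log(await takeAsync(firstN(3))); // [0, 1, 2]
}
```

Note that `arrayToStream` and `nodeToWeb` still go through `stream-buffers` and `Buffer`, so those two helpers remain Node-only.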
package/dist/flatgeobuf-loader.js
@@ -1,30 +1,36 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import { parseFlatGeobuf, parseFlatGeobufInBatches } from "./lib/parse-flatgeobuf.js";
- …
+// __VERSION__ is injected by babel-plugin-version-inline
+// @ts-ignore TS2304: Cannot find name '__VERSION__'.
+const VERSION = typeof "4.2.0-alpha.5" !== 'undefined' ? "4.2.0-alpha.5" : 'latest';
+// FGB\3FGB\1
 const FGB_MAGIC_NUMBER = [0x66, 0x67, 0x62, 0x03, 0x66, 0x67, 0x62, 0x01];
 export const FlatGeobufWorkerLoader = {
- …
+    id: 'flatgeobuf',
+    name: 'FlatGeobuf',
+    module: 'flatgeobuf',
+    version: VERSION,
+    worker: true,
+    extensions: ['fgb'],
+    mimeTypes: ['application/octet-stream'],
+    category: 'geometry',
+    tests: [new Uint8Array(FGB_MAGIC_NUMBER).buffer],
+    options: {
+        flatgeobuf: {
+            shape: 'geojson-table'
+        },
+        gis: {
+            reproject: false
+        }
     }
-}
 };
 export const FlatGeobufLoader = {
- …
+    ...FlatGeobufWorkerLoader,
+    parse: async (arrayBuffer, options) => parseFlatGeobuf(arrayBuffer, options),
+    parseSync: parseFlatGeobuf,
+    // @ts-expect-error this is a stream parser not an async iterator parser
+    parseInBatchesFromStream: parseFlatGeobufInBatches,
+    binary: true
 };
-//# sourceMappingURL=flatgeobuf-loader.js.map
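For context, the loader objects exported above follow the usual loaders.gl loader shape, so they are consumed through `@loaders.gl/core`. A minimal sketch, not part of this diff: the URL and function name are placeholders, and the options simply restate the defaults declared in `FlatGeobufWorkerLoader.options`.

```js
// Hypothetical consumer of the FlatGeobufLoader exported in the diff above.
import {load} from '@loaders.gl/core';
import {FlatGeobufLoader} from '@loaders.gl/flatgeobuf';

async function loadTable(url) {
  // Options mirror the defaults in the loader: geojson-table output,
  // no reprojection.
  const table = await load(url, FlatGeobufLoader, {
    flatgeobuf: {shape: 'geojson-table'},
    gis: {reproject: false}
  });
  return table; // a GeoJSON-style table of features
}
```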