@loaders.gl/shapefile 4.2.0-alpha.4 → 4.2.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dbf-loader.js +24 -19
- package/dist/dist.dev.js +227 -247
- package/dist/dist.min.js +12 -0
- package/dist/index.cjs +49 -57
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +6 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +300 -259
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -3
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +225 -184
- package/dist/lib/parsers/parse-shp-geometry.d.ts +1 -1
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +260 -168
- package/dist/lib/parsers/parse-shp-header.js +33 -23
- package/dist/lib/parsers/parse-shp.d.ts +1 -1
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +147 -110
- package/dist/lib/parsers/parse-shx.js +19 -15
- package/dist/lib/parsers/types.js +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js +150 -95
- package/dist/lib/streaming/binary-reader.js +49 -23
- package/dist/lib/streaming/zip-batch-iterators.js +61 -45
- package/dist/shapefile-loader.js +25 -18
- package/dist/shp-loader.js +24 -18
- package/dist/workers/dbf-worker.js +0 -1
- package/dist/workers/shp-worker.js +0 -1
- package/package.json +11 -7
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/parsers/types.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
package/dist/dist.dev.js
CHANGED
@@ -2,14 +2,19 @@
   if (typeof exports === 'object' && typeof module === 'object')
     module.exports = factory();
   else if (typeof define === 'function' && define.amd) define([], factory);
-  else if (typeof exports === 'object') exports['
-  else root['
+  else if (typeof exports === 'object') exports['loaders'] = factory();
+  else root['loaders'] = factory();})(globalThis, function () {
 "use strict";
 var __exports__ = (() => {
+  var __create = Object.create;
   var __defProp = Object.defineProperty;
   var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
   var __getOwnPropNames = Object.getOwnPropertyNames;
+  var __getProtoOf = Object.getPrototypeOf;
   var __hasOwnProp = Object.prototype.hasOwnProperty;
+  var __commonJS = (cb, mod) => function __require() {
+    return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+  };
   var __export = (target, all) => {
     for (var name in all)
       __defProp(target, name, { get: all[name], enumerable: true });
@@ -22,11 +27,27 @@ var __exports__ = (() => {
     }
     return to;
   };
+  var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+  var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+    // If the importer is in node compatibility mode or this is not an ESM
+    // file that has been converted to a CommonJS file using a Babel-
+    // compatible transform (i.e. "__esModule" has not been set), then set
+    // "default" to the CommonJS "module.exports" for node compatibility.
+    isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+    mod
+  ));
   var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
-  //
-  var
-
+  // external-global-plugin:@loaders.gl/core
+  var require_core = __commonJS({
+    "external-global-plugin:@loaders.gl/core"(exports4, module) {
+      module.exports = globalThis.loaders;
+    }
+  });
+
+  // bundle.ts
+  var bundle_exports = {};
+  __export(bundle_exports, {
     DBFLoader: () => DBFLoader,
     DBFWorkerLoader: () => DBFWorkerLoader,
     SHPLoader: () => SHPLoader,
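Note: the structural change here is that alpha.5 externalizes @loaders.gl/core instead of bundling it. The stub module registered under "external-global-plugin:@loaders.gl/core" just hands back globalThis.loaders, and the __reExport call in the next hunk re-exports those symbols from this bundle. A minimal sketch of that contract, with illustrative names only:

    // Sketch only: the @loaders.gl/core script bundle must be evaluated first,
    // because it is what installs globalThis.loaders.
    globalThis.loaders = globalThis.loaders || {}; // normally done by core's own bundle
    function requireCoreSketch() {
      // Mirrors the generated stub: the module body just aliases the global
      const mod = { exports: {} };
      mod.exports = globalThis.loaders;
      return mod.exports;
    }
    console.log(requireCoreSketch() === globalThis.loaders); // true: one shared copy, no duplicate core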
@@ -36,18 +57,20 @@
     _BinaryReader: () => BinaryReader,
     _zipBatchIterators: () => zipBatchIterators
   });
+  __reExport(bundle_exports, __toESM(require_core(), 1));
 
   // src/lib/streaming/binary-chunk-reader.ts
   var BinaryChunkReader = class {
     constructor(options) {
-      const {
-        maxRewindBytes = 0
-      } = options || {};
+      const { maxRewindBytes = 0 } = options || {};
       this.offset = 0;
       this.arrayBuffers = [];
       this.ended = false;
       this.maxRewindBytes = maxRewindBytes;
     }
+    /**
+     * @param arrayBuffer
+     */
     write(arrayBuffer) {
       this.arrayBuffers.push(arrayBuffer);
     }
@@ -55,6 +78,12 @@
       this.arrayBuffers = [];
       this.ended = true;
     }
+    /**
+     * Has enough bytes available in array buffers
+     *
+     * @param bytes Number of bytes
+     * @return boolean
+     */
     hasAvailableBytes(bytes) {
       let bytesAvailable = -this.offset;
       for (const arrayBuffer of this.arrayBuffers) {
@@ -65,6 +94,12 @@
       }
       return false;
     }
+    /**
+     * Find offsets of byte ranges within this.arrayBuffers
+     *
+     * @param bytes Byte length to read
+     * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
+     */
     findBufferOffsets(bytes) {
       let offset = -this.offset;
       const selectedBuffers = [];
@@ -88,6 +123,12 @@
       }
       return null;
     }
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes Number of bytes
+     * @return DataView with data
+     */
     getDataView(bytes) {
       const bufferOffsets = this.findBufferOffsets(bytes);
       if (!bufferOffsets && this.ended) {
@@ -109,12 +150,25 @@
       this.disposeBuffers();
       return view;
     }
+    /**
+     * Dispose of old array buffers
+     */
     disposeBuffers() {
       while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
         this.offset -= this.arrayBuffers[0].byteLength;
         this.arrayBuffers.shift();
       }
     }
+    /**
+     * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
+     *
+     * In contrast to concatenateArrayBuffers, this only copies the necessary
+     * portions of the source arrays, rather than first copying the entire arrays
+     * then taking a part of them.
+     *
+     * @param bufferOffsets List of internal array offsets
+     * @return New contiguous ArrayBuffer
+     */
     _combineArrayBuffers(bufferOffsets) {
       let byteLength = 0;
       for (const bufferOffset of bufferOffsets) {
@@ -131,9 +185,15 @@
       }
       return result.buffer;
     }
+    /**
+     * @param bytes
+     */
     skip(bytes) {
       this.offset += bytes;
     }
+    /**
+     * @param bytes
+     */
     rewind(bytes) {
       this.offset -= bytes;
     }
@@ -146,6 +206,7 @@
   function parseSHPHeader(headerView) {
     const header = {
       magic: headerView.getInt32(0, BIG_ENDIAN),
+      // Length is stored as # of 2-byte words; multiply by 2 to get # of bytes
       length: headerView.getInt32(24, BIG_ENDIAN) * 2,
       version: headerView.getInt32(28, LITTLE_ENDIAN),
       type: headerView.getInt32(32, LITTLE_ENDIAN),
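Note: the new comment on length captures the format quirk worth remembering: the SHP file-length field at byte 24 counts 16-bit words and is big-endian, while the version and shape-type fields after byte 28 are little-endian. A worked sketch of the arithmetic, assuming the standard 100-byte header:

    // A file of 1000 bytes stores 500 (words) at offset 24, big-endian.
    const view = new DataView(new ArrayBuffer(100));
    view.setInt32(24, 500, false);                    // big-endian word count
    const lengthBytes = view.getInt32(24, false) * 2;
    console.log(lengthBytes);                         // 1000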
@@ -172,9 +233,7 @@
   // src/lib/parsers/parse-shp-geometry.ts
   var LITTLE_ENDIAN2 = true;
   function parseRecord(view, options) {
-    const {
-      _maxDimensions = 4
-    } = options?.shp || {};
+    const { _maxDimensions = 4 } = options?.shp || {};
     let offset = 0;
     const type = view.getInt32(offset, LITTLE_ENDIAN2);
     offset += Int32Array.BYTES_PER_ELEMENT;
@@ -216,10 +275,7 @@
     let positions;
     [positions, offset] = parsePositions(view, offset, 1, dim);
     return {
-      positions: {
-        value: positions,
-        size: dim
-      },
+      positions: { value: positions, size: dim },
       type: "Point"
     };
   }
@@ -241,10 +297,7 @@
     }
     const positions = concatPositions(xyPositions, mPositions, zPositions);
     return {
-      positions: {
-        value: positions,
-        size: dim
-      },
+      positions: { value: positions, size: dim },
       type: "Point"
     };
   }
@@ -276,14 +329,8 @@
     if (type === "LineString") {
       return {
         type,
-        positions: {
-          value: positions,
-          size: dim
-        },
-        pathIndices: {
-          value: ringIndices,
-          size: 1
-        }
+        positions: { value: positions, size: dim },
+        pathIndices: { value: ringIndices, size: 1 }
       };
     }
     const polygonIndices = [];
@@ -299,24 +346,22 @@
     polygonIndices.push(nPoints);
     return {
       type,
-      positions: {
-        value: positions,
-        size: dim
-      },
-      primitivePolygonIndices: {
-        value: ringIndices,
-        size: 1
-      },
-      polygonIndices: {
-        value: new Uint32Array(polygonIndices),
-        size: 1
-      }
+      positions: { value: positions, size: dim },
+      primitivePolygonIndices: { value: ringIndices, size: 1 },
+      // TODO: Dynamically choose Uint32Array over Uint16Array only when
+      // necessary. I believe the implementation requires nPoints to be the
+      // largest value in the array, so you should be able to use Uint32Array only
+      // when nPoints > 65535.
+      polygonIndices: { value: new Uint32Array(polygonIndices), size: 1 }
     };
   }
   function parsePositions(view, offset, nPoints, dim) {
     const bufferOffset = view.byteOffset + offset;
     const bufferLength = nPoints * dim * Float64Array.BYTES_PER_ELEMENT;
-    return [
+    return [
+      new Float64Array(view.buffer.slice(bufferOffset, bufferOffset + bufferLength)),
+      offset + bufferLength
+    ];
   }
   function concatPositions(xyPositions, mPositions, zPositions) {
     if (!(mPositions || zPositions)) {
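Note: the restored TODO suggests sizing polygonIndices dynamically. Since nPoints is the largest value pushed into the array, the check it describes would look roughly like this (a hypothetical sketch, not code in this bundle):

    // Pick the smallest typed array that can hold every index.
    function makeIndexArray(indices, nPoints) {
      return nPoints > 65535 ? new Uint32Array(indices) : new Uint16Array(indices);
    }
    console.log(makeIndexArray([0, 10, 25], 25) instanceof Uint16Array);   // true
    console.log(makeIndexArray([0, 70000], 70000) instanceof Uint32Array); // true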
@@ -373,21 +418,21 @@
     ERROR: 3
   };
   var SHPParser = class {
-    options = {};
-    binaryReader = new BinaryChunkReader({
-      maxRewindBytes: SHP_RECORD_HEADER_SIZE
-    });
-    state = STATE.EXPECTING_HEADER;
-    result = {
-      geometries: [],
-      progress: {
-        bytesTotal: NaN,
-        bytesUsed: NaN,
-        rows: NaN
-      },
-      currentIndex: NaN
-    };
     constructor(options) {
+      this.options = {};
+      this.binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
+      this.state = STATE.EXPECTING_HEADER;
+      this.result = {
+        geometries: [],
+        // Initialize with number values to make TS happy
+        // These are initialized for real in STATE.EXPECTING_HEADER
+        progress: {
+          bytesTotal: NaN,
+          bytesUsed: NaN,
+          rows: NaN
+        },
+        currentIndex: NaN
+      };
       this.options = options;
     }
     write(arrayBuffer) {
@@ -455,7 +500,9 @@
     const recordHeaderView = binaryReader.getDataView(SHP_RECORD_HEADER_SIZE);
     const recordHeader = {
       recordNumber: recordHeaderView.getInt32(0, BIG_ENDIAN2),
+      // 2 byte words; includes the four words of record header
       byteLength: recordHeaderView.getInt32(4, BIG_ENDIAN2) * 2,
+      // This is actually part of the record, not the header...
       type: recordHeaderView.getInt32(8, LITTLE_ENDIAN3)
     };
     if (!binaryReader.hasAvailableBytes(recordHeader.byteLength - 4)) {
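Note: as in the file header, the record content length at byte 4 is stored in 16-bit words, and the shape type at byte 8 already belongs to the record body, which is why the availability check subtracts the 4 type bytes just consumed. With made-up numbers:

    // A record whose content is 56 bytes stores 28 (words) in its header.
    // The record-header view read above covers the 8-byte header plus the
    // 4-byte shape type, so byteLength - 4 content bytes are still needed.
    const contentWords = 28;
    const byteLength = contentWords * 2; // 56
    console.log(byteLength - 4);         // 52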
@@ -492,7 +539,7 @@
   }
 
   // src/shp-loader.ts
-  var VERSION =
+  var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
   var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
   var SHPWorkerLoader = {
     name: "SHP",
@@ -503,6 +550,7 @@
     category: "geometry",
     extensions: ["shp"],
     mimeTypes: ["application/octet-stream"],
+    // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...
     tests: [new Uint8Array(SHP_MAGIC_NUMBER2).buffer],
     options: {
       shp: {
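Note: the new ISSUE comment deserves emphasis: .shp and .shx share the magic number [0, 0, 39, 10] (0x0000270A) and an identical 100-byte header, so byte sniffing cannot distinguish them. A sketch of the kind of check the tests entry implies, assuming loaders.gl matches the initial bytes of the input against each ArrayBuffer in tests:

    function matchesShpMagic(arrayBuffer) {
      const bytes = new Uint8Array(arrayBuffer, 0, 4);
      const magic = [0, 0, 39, 10]; // shared by .shp and .shx index files
      return magic.every((byte, i) => bytes[i] === byte);
    }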
@@ -589,11 +637,11 @@
     }
   }
   function polygonToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
-    const {
-      positions
-    } = data;
+    const { positions } = data;
     const polygonIndices = data.polygonIndices.value.filter((x) => x >= startIndex && x <= endIndex);
-    const primitivePolygonIndices = data.primitivePolygonIndices.value.filter(
+    const primitivePolygonIndices = data.primitivePolygonIndices.value.filter(
+      (x) => x >= startIndex && x <= endIndex
+    );
     const multi = polygonIndices.length > 2;
     if (!multi) {
       const coordinates2 = [];
@@ -603,62 +651,44 @@
       const ringCoordinates = ringToGeoJson(positions, startRingIndex, endRingIndex);
       coordinates2.push(ringCoordinates);
     }
-    return {
-      type: "Polygon",
-      coordinates: coordinates2
-    };
+    return { type: "Polygon", coordinates: coordinates2 };
   }
   const coordinates = [];
   for (let i = 0; i < polygonIndices.length - 1; i++) {
     const startPolygonIndex = polygonIndices[i];
     const endPolygonIndex = polygonIndices[i + 1];
-    const polygonCoordinates = polygonToGeoJson(
+    const polygonCoordinates = polygonToGeoJson(
+      data,
+      startPolygonIndex,
+      endPolygonIndex
+    ).coordinates;
     coordinates.push(polygonCoordinates);
   }
-  return {
-    type: "MultiPolygon",
-    coordinates
-  };
+  return { type: "MultiPolygon", coordinates };
 }
 function lineStringToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
-  const {
-    positions
-  } = data;
+  const { positions } = data;
   const pathIndices = data.pathIndices.value.filter((x) => x >= startIndex && x <= endIndex);
   const multi = pathIndices.length > 2;
   if (!multi) {
     const coordinates2 = ringToGeoJson(positions, pathIndices[0], pathIndices[1]);
-    return {
-      type: "LineString",
-      coordinates: coordinates2
-    };
+    return { type: "LineString", coordinates: coordinates2 };
   }
   const coordinates = [];
   for (let i = 0; i < pathIndices.length - 1; i++) {
     const ringCoordinates = ringToGeoJson(positions, pathIndices[i], pathIndices[i + 1]);
     coordinates.push(ringCoordinates);
   }
-  return {
-    type: "MultiLineString",
-    coordinates
-  };
+  return { type: "MultiLineString", coordinates };
 }
 function pointToGeoJson(data, startIndex, endIndex) {
-  const {
-    positions
-  } = data;
+  const { positions } = data;
   const coordinates = ringToGeoJson(positions, startIndex, endIndex);
   const multi = coordinates.length > 1;
   if (multi) {
-    return {
-      type: "MultiPoint",
-      coordinates
-    };
+    return { type: "MultiPoint", coordinates };
   }
-  return {
-    type: "Point",
-    coordinates: coordinates[0]
-  };
+  return { type: "Point", coordinates: coordinates[0] };
 }
 function ringToGeoJson(positions, startIndex, endIndex) {
   startIndex = startIndex || 0;
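Note: all of these converters lean on the same binary-geometry convention: an indices array stores N + 1 offsets for N parts, which is why length > 2 signals a multi-part geometry. A worked example with hypothetical values:

    // Two line strings: positions 0-4 and positions 5-8.
    const pathIndices = [0, 5, 9];       // N + 1 = 3 offsets for N = 2 parts
    console.log(pathIndices.length > 2); // true, emitted as a MultiLineString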
@@ -6072,10 +6102,7 @@
     let iterator2Done = false;
     while (!iterator1Done && !iterator2Done) {
       if (batch1Data.length === 0 && !iterator1Done) {
-        const {
-          value,
-          done
-        } = await iterator1.next();
+        const { value, done } = await iterator1.next();
         if (done) {
           iterator1Done = true;
         } else {
@@ -6083,10 +6110,7 @@
         }
       }
       if (batch2Data.length === 0 && !iterator2Done) {
-        const {
-          value,
-          done
-        } = await iterator2.next();
+        const { value, done } = await iterator2.next();
         if (done) {
           iterator2Done = true;
         } else {
@@ -6118,23 +6142,18 @@
   // src/lib/parsers/parse-dbf.ts
   var LITTLE_ENDIAN4 = true;
   var DBF_HEADER_SIZE = 32;
-  var STATE2 = function(STATE3) {
-    STATE3[STATE3["START"] = 0] = "START";
-    STATE3[STATE3["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
-    STATE3[STATE3["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
-    STATE3[STATE3["END"] = 3] = "END";
-    STATE3[STATE3["ERROR"] = 4] = "ERROR";
-    return STATE3;
-  }(STATE2 || {});
   var DBFParser = class {
-    binaryReader = new BinaryChunkReader();
-    state = STATE2.START;
-    result = {
-      data: []
-    };
     constructor(options) {
+      this.binaryReader = new BinaryChunkReader();
+      this.state = 0 /* START */;
+      this.result = {
+        data: []
+      };
       this.textDecoder = new TextDecoder(options.encoding);
     }
+    /**
+     * @param arrayBuffer
+     */
     write(arrayBuffer) {
       this.binaryReader.write(arrayBuffer);
       this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
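Note: this hunk shows both of the rebuild's emit-style changes at once: alpha.4 materialized the TypeScript enum as a runtime STATE2 object and used class fields, while alpha.5 inlines enum members as numeric literals with /* NAME */ hints and initializes fields in the constructor. A sketch of the two enum emits for the same source enum:

    // Old style: a real lookup object exists at runtime.
    var STATE = function(S) {
      S[S["START"] = 0] = "START";
      S[S["END"] = 3] = "END";
      return S;
    }(STATE || {});
    const oldStyle = STATE.START; // property lookup

    // New style: the member value is inlined; no object is created.
    const newStyle = 0 /* START */;
    console.log(oldStyle === newStyle); // true, behavior is unchanged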
@@ -6142,25 +6161,18 @@
     end() {
       this.binaryReader.end();
       this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
-      if (this.state !== STATE2.END) {
-        this.state = STATE2.ERROR;
+      if (this.state !== 3 /* END */) {
+        this.state = 4 /* ERROR */;
         this.result.error = "DBF incomplete file";
       }
     }
   };
   function parseDBF(arrayBuffer, options = {}) {
-    const {
-      encoding = "latin1"
-    } = options.dbf || {};
-    const dbfParser = new DBFParser({
-      encoding
-    });
+    const { encoding = "latin1" } = options.dbf || {};
+    const dbfParser = new DBFParser({ encoding });
     dbfParser.write(arrayBuffer);
     dbfParser.end();
-    const {
-      data,
-      schema
-    } = dbfParser.result;
+    const { data, schema } = dbfParser.result;
     const shape = options?.dbf?.shape;
     switch (shape) {
       case "object-row-table": {
@@ -6172,22 +6184,15 @@
         return table;
       }
       case "table":
-        return {
-          schema,
-          rows: data
-        };
+        return { schema, rows: data };
       case "rows":
       default:
         return data;
     }
   }
   async function* parseDBFInBatches(asyncIterator, options = {}) {
-    const {
-      encoding = "latin1"
-    } = options.dbf || {};
-    const parser = new DBFParser({
-      encoding
-    });
+    const { encoding = "latin1" } = options.dbf || {};
+    const parser = new DBFParser({ encoding });
     let headerReturned = false;
     for await (const arrayBuffer of asyncIterator) {
       parser.write(arrayBuffer);
@@ -6209,10 +6214,10 @@
     while (true) {
       try {
         switch (state) {
-          case STATE2.ERROR:
-          case STATE2.END:
+          case 4 /* ERROR */:
+          case 3 /* END */:
             return state;
-          case STATE2.START:
+          case 0 /* START */:
             const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
             if (!dataView) {
               return state;
@@ -6223,10 +6228,13 @@
               rowsTotal: result.dbfHeader.nRecords,
               rows: 0
             };
-            state = STATE2.FIELD_DESCRIPTORS;
+            state = 1 /* FIELD_DESCRIPTORS */;
             break;
-          case STATE2.FIELD_DESCRIPTORS:
-            const fieldDescriptorView = binaryReader.getDataView(
+          case 1 /* FIELD_DESCRIPTORS */:
+            const fieldDescriptorView = binaryReader.getDataView(
+              // @ts-ignore
+              result.dbfHeader.headerLength - DBF_HEADER_SIZE
+            );
             if (!fieldDescriptorView) {
               return state;
             }
@@ -6235,14 +6243,11 @@
               fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
               metadata: {}
             };
-            state = STATE2.FIELD_PROPERTIES;
+            state = 2 /* FIELD_PROPERTIES */;
             binaryReader.skip(1);
             break;
-          case STATE2.FIELD_PROPERTIES:
-            const {
-              recordLength = 0,
-              nRecords = 0
-            } = result?.dbfHeader || {};
+          case 2 /* FIELD_PROPERTIES */:
+            const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
             while (result.data.length < nRecords) {
               const recordView = binaryReader.getDataView(recordLength - 1);
               if (!recordView) {
@@ -6253,15 +6258,15 @@
               result.data.push(row);
               result.progress.rows = result.data.length;
             }
-            state = STATE2.END;
+            state = 3 /* END */;
             break;
           default:
-            state = STATE2.ERROR;
+            state = 4 /* ERROR */;
             result.error = `illegal parser state ${state}`;
             return state;
         }
       } catch (error) {
-        state = STATE2.ERROR;
+        state = 4 /* ERROR */;
         result.error = `DBF parsing failed: ${error.message}`;
         return state;
       }
@@ -6269,12 +6274,17 @@
   }
   function parseDBFHeader(headerView) {
     return {
+      // Last updated date
      year: headerView.getUint8(1) + 1900,
      month: headerView.getUint8(2),
      day: headerView.getUint8(3),
+      // Number of records in data file
      nRecords: headerView.getUint32(4, LITTLE_ENDIAN4),
+      // Length of header in bytes
      headerLength: headerView.getUint16(8, LITTLE_ENDIAN4),
+      // Length of each record
      recordLength: headerView.getUint16(10, LITTLE_ENDIAN4),
+      // Not sure if this is usually set
      languageDriver: headerView.getUint8(29)
    };
  }
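Note: the new comments document the classic dBASE header layout: bytes 1-3 hold the last-update date (year counted from 1900), bytes 4-7 the record count, bytes 8-9 the header length, and bytes 10-11 the record length, all little-endian. A worked example with made-up header bytes:

    // Header claiming a last update of 2024-05-17 and 3 records.
    const view = new DataView(new ArrayBuffer(32));
    view.setUint8(1, 124); // 1900 + 124 = 2024
    view.setUint8(2, 5);
    view.setUint8(3, 17);
    view.setUint32(4, 3, true);
    console.log(view.getUint8(1) + 1900, view.getUint32(4, true)); // 2024 3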
@@ -6298,7 +6308,9 @@
     const out = {};
     let offset = 0;
     for (const field of fields) {
-      const text = textDecoder.decode(
+      const text = textDecoder.decode(
+        new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength)
+      );
       out[field.name] = parseField(text, field.dataType);
       offset += field.fieldLength;
     }
@@ -6337,69 +6349,29 @@
   function parseCharacter(text) {
     return text.trim() || null;
   }
-  function makeField({
-    name,
-    dataType,
-    fieldLength,
-    decimal
-  }) {
+  function makeField({ name, dataType, fieldLength, decimal }) {
     switch (dataType) {
       case "B":
-        return {
-          name,
-          type: "float64",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "float64", nullable: true, metadata: {} };
       case "C":
-        return {
-          name,
-          type: "utf8",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "utf8", nullable: true, metadata: {} };
       case "F":
-        return {
-          name,
-          type: "float64",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "float64", nullable: true, metadata: {} };
       case "N":
-        return {
-          name,
-          type: "float64",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "float64", nullable: true, metadata: {} };
       case "O":
-        return {
-          name,
-          type: "float64",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "float64", nullable: true, metadata: {} };
       case "D":
-        return {
-          name,
-          type: "timestamp-millisecond",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "timestamp-millisecond", nullable: true, metadata: {} };
       case "L":
-        return {
-          name,
-          type: "bool",
-          nullable: true,
-          metadata: {}
-        };
+        return { name, type: "bool", nullable: true, metadata: {} };
       default:
        throw new Error("Unsupported data type");
    }
  }
 
   // src/dbf-loader.ts
-  var VERSION2 =
+  var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
   var DBFWorkerLoader = {
     name: "DBF",
     id: "dbf",
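Note: the condensed makeField makes the dBASE-to-schema type mapping easy to scan: B, F, N and O become float64, C becomes utf8, D becomes timestamp-millisecond, L becomes bool, and anything else throws. For example, a hypothetical numeric column descriptor:

    const descriptor = { name: "POP2020", dataType: "N", fieldLength: 10, decimal: 0 };
    // Passing it through the bundle's makeField above yields:
    // { name: "POP2020", type: "float64", nullable: true, metadata: {} }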
@@ -6426,26 +6398,27 @@
 
   // src/lib/parsers/parse-shapefile.ts
   async function* parseShapefileInBatches(asyncIterator, options, context) {
-    const {
-      reproject = false,
-      _targetCrs = "WGS84"
-    } = options?.gis || {};
-    const {
-      shx,
-      cpg,
-      prj
-    } = await loadShapefileSidecarFiles(options, context);
-    const shapeIterable = await parseInBatchesFromContext(asyncIterator, SHPLoader, options, context);
+    const { reproject = false, _targetCrs = "WGS84" } = options?.gis || {};
+    const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
+    const shapeIterable = await parseInBatchesFromContext(
+      asyncIterator,
+      SHPLoader,
+      options,
+      context
+    );
     const shapeIterator = shapeIterable[Symbol.asyncIterator]?.() || shapeIterable[Symbol.iterator]?.();
     let propertyIterator = null;
     const dbfResponse = await context?.fetch(replaceExtension(context?.url || "", "dbf"));
     if (dbfResponse?.ok) {
-      const propertyIterable = await parseInBatchesFromContext(
-
-
-
-
-
+      const propertyIterable = await parseInBatchesFromContext(
+        dbfResponse,
+        DBFLoader,
+        {
+          ...options,
+          dbf: { encoding: cpg || "latin1" }
+        },
+        context
+      );
       propertyIterator = propertyIterable[Symbol.asyncIterator]?.() || propertyIterable[Symbol.iterator]();
     }
     let shapeHeader = (await shapeIterator.next()).value;
@@ -6488,29 +6461,19 @@
     }
   }
   async function parseShapefile(arrayBuffer, options, context) {
-    const {
-      reproject = false,
-      _targetCrs = "WGS84"
-    } = options?.gis || {};
-    const {
-      shx,
-      cpg,
-      prj
-    } = await loadShapefileSidecarFiles(options, context);
-    const {
-      header,
-      geometries
-    } = await parseFromContext(arrayBuffer, SHPLoader, options, context);
+    const { reproject = false, _targetCrs = "WGS84" } = options?.gis || {};
+    const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
+    const { header, geometries } = await parseFromContext(arrayBuffer, SHPLoader, options, context);
     const geojsonGeometries = parseGeometries(geometries);
     let propertyTable;
     const dbfResponse = await context?.fetch(replaceExtension(context?.url, "dbf"));
     if (dbfResponse?.ok) {
-      propertyTable = await parseFromContext(
-
-
-
-
-
+      propertyTable = await parseFromContext(
+        dbfResponse,
+        DBFLoader,
+        { dbf: { shape: "object-row-table", encoding: cpg || "latin1" } },
+        context
+      );
     }
     let features = joinProperties(geojsonGeometries, propertyTable?.data || []);
     if (reproject) {
@@ -6519,13 +6482,11 @@
     switch (options?.shapefile?.shape) {
       case "geojson-table":
         return {
+          // @ts-expect-error
           shape: "geojson-table",
           type: "FeatureCollection",
           encoding: cpg,
-          schema: propertyTable?.schema || {
-            metadata: {},
-            fields: []
-          },
+          schema: propertyTable?.schema || { metadata: {}, fields: [] },
           prj,
           shx,
           header,
@@ -6555,6 +6516,7 @@
       const feature = {
         type: "Feature",
         geometry,
+        // properties can be undefined if dbfResponse above was empty
         properties: properties && properties[i] || {}
       };
       features.push(feature);
@@ -6565,17 +6527,11 @@
     if (!sourceCrs && !targetCrs) {
       return features;
     }
-    const projection = new Proj4Projection({
-      from: sourceCrs || "WGS84",
-      to: targetCrs || "WGS84"
-    });
+    const projection = new Proj4Projection({ from: sourceCrs || "WGS84", to: targetCrs || "WGS84" });
     return transformGeoJsonCoords(features, (coord) => projection.project(coord));
   }
   async function loadShapefileSidecarFiles(options, context) {
-    const {
-      url,
-      fetch
-    } = context;
+    const { url, fetch } = context;
     const shxPromise = fetch(replaceExtension(url, "shx"));
     const cpgPromise = fetch(replaceExtension(url, "cpg"));
     const prjPromise = fetch(replaceExtension(url, "prj"));
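Note: loadShapefileSidecarFiles resolves the .shx, .cpg and .prj companions by swapping the extension on the .shp URL. replaceExtension itself is not part of this diff; a minimal sketch of the behavior the call sites assume:

    // Hypothetical re-implementation for illustration only.
    function replaceExtensionSketch(url, newExtension) {
      const pos = url.lastIndexOf(".");
      return `${pos >= 0 ? url.slice(0, pos) : url}.${newExtension}`;
    }
    console.log(replaceExtensionSketch("data/rivers.shp", "prj")); // data/rivers.prj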
@@ -6627,7 +6583,7 @@
   }
 
   // src/shapefile-loader.ts
-  var VERSION3 =
+  var VERSION3 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
   var ShapefileLoader = {
     name: "Shapefile",
     id: "shapefile",
@@ -6645,7 +6601,9 @@
         _maxDimensions: 4
       }
     },
+    // @ts-expect-error
     parse: parseShapefile,
+    // @ts-expect-error
     parseInBatches: parseShapefileInBatches
   };
 
@@ -6655,9 +6613,21 @@
       this.offset = 0;
       this.arrayBuffer = arrayBuffer;
     }
+    /**
+     * Checks if there are available bytes in data
+     *
+     * @param bytes
+     * @returns boolean
+     */
     hasAvailableBytes(bytes) {
       return this.arrayBuffer.byteLength - this.offset >= bytes;
     }
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes
+     * @returns Dataview
+     */
     getDataView(bytes) {
       if (bytes && !this.hasAvailableBytes(bytes)) {
         throw new Error("binary data exhausted");
@@ -6666,14 +6636,24 @@
       this.offset += bytes;
       return dataView;
     }
+    /**
+     * Skipping
+     *
+     * @param bytes
+     */
     skip(bytes) {
       this.offset += bytes;
     }
+    /**
+     * Rewinding
+     *
+     * @param bytes
+     */
     rewind(bytes) {
       this.offset -= bytes;
     }
   };
-  return __toCommonJS(
+  return __toCommonJS(bundle_exports);
 })();
 return __exports__;
 });