@loaders.gl/shapefile 4.2.0-alpha.4 → 4.2.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dbf-loader.js +25 -20
- package/dist/dbf-worker.js +14 -7
- package/dist/dist.dev.js +219 -232
- package/dist/dist.min.js +12 -0
- package/dist/index.cjs +74 -75
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +6 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +300 -258
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -3
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +225 -184
- package/dist/lib/parsers/parse-shp-geometry.d.ts +1 -1
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +260 -168
- package/dist/lib/parsers/parse-shp-header.js +33 -23
- package/dist/lib/parsers/parse-shp.d.ts +1 -1
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +146 -109
- package/dist/lib/parsers/parse-shx.js +19 -15
- package/dist/lib/parsers/types.js +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js +154 -95
- package/dist/lib/streaming/binary-reader.js +51 -23
- package/dist/lib/streaming/zip-batch-iterators.js +61 -45
- package/dist/shapefile-loader.js +26 -19
- package/dist/shp-loader.js +25 -19
- package/dist/shp-worker.js +22 -16
- package/dist/workers/dbf-worker.js +0 -1
- package/dist/workers/shp-worker.js +0 -1
- package/package.json +11 -7
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/parsers/types.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
package/dist/dist.dev.js
CHANGED

@@ -2,14 +2,19 @@
  if (typeof exports === 'object' && typeof module === 'object')
  module.exports = factory();
  else if (typeof define === 'function' && define.amd) define([], factory);
- else if (typeof exports === 'object') exports['
- else root['
+ else if (typeof exports === 'object') exports['loaders'] = factory();
+ else root['loaders'] = factory();})(globalThis, function () {
  "use strict";
  var __exports__ = (() => {
+ var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __commonJS = (cb, mod) => function __require() {
+ return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+ };
  var __export = (target, all) => {
  for (var name in all)
  __defProp(target, name, { get: all[name], enumerable: true });
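For orientation, the hunk above renames the UMD global: the rebuilt bundle now registers itself as `loaders` on `globalThis`. A minimal consumption sketch (an editorial illustration, not code from this package; it assumes the @loaders.gl/core script bundle is loaded on the same `loaders` global, which is what the external-global shim later in this diff expects, and `parcels.shp` is a placeholder URL):

  // Runs in a module script (top-level await) after both <script> bundles have executed.
  const {ShapefileLoader} = globalThis.loaders;
  const data = await globalThis.loaders.load('parcels.shp', ShapefileLoader);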
@@ -22,11 +27,27 @@ var __exports__ = (() => {
  }
  return to;
  };
+ var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

- //
- var
-
+ // external-global-plugin:@loaders.gl/core
+ var require_core = __commonJS({
+ "external-global-plugin:@loaders.gl/core"(exports4, module) {
+ module.exports = globalThis.loaders;
+ }
+ });
+
+ // bundle.ts
+ var bundle_exports = {};
+ __export(bundle_exports, {
  DBFLoader: () => DBFLoader,
  DBFWorkerLoader: () => DBFWorkerLoader,
  SHPLoader: () => SHPLoader,
@@ -36,18 +57,24 @@ var __exports__ = (() => {
  _BinaryReader: () => BinaryReader,
  _zipBatchIterators: () => zipBatchIterators
  });
+ __reExport(bundle_exports, __toESM(require_core(), 1));

  // src/lib/streaming/binary-chunk-reader.ts
  var BinaryChunkReader = class {
+ offset;
+ arrayBuffers;
+ ended;
+ maxRewindBytes;
  constructor(options) {
- const {
- maxRewindBytes = 0
- } = options || {};
+ const { maxRewindBytes = 0 } = options || {};
  this.offset = 0;
  this.arrayBuffers = [];
  this.ended = false;
  this.maxRewindBytes = maxRewindBytes;
  }
+ /**
+ * @param arrayBuffer
+ */
  write(arrayBuffer) {
  this.arrayBuffers.push(arrayBuffer);
  }
@@ -55,6 +82,12 @@
  this.arrayBuffers = [];
  this.ended = true;
  }
+ /**
+ * Has enough bytes available in array buffers
+ *
+ * @param bytes Number of bytes
+ * @return boolean
+ */
  hasAvailableBytes(bytes) {
  let bytesAvailable = -this.offset;
  for (const arrayBuffer of this.arrayBuffers) {
@@ -65,6 +98,12 @@
  }
  return false;
  }
+ /**
+ * Find offsets of byte ranges within this.arrayBuffers
+ *
+ * @param bytes Byte length to read
+ * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
+ */
  findBufferOffsets(bytes) {
  let offset = -this.offset;
  const selectedBuffers = [];
@@ -88,6 +127,12 @@
  }
  return null;
  }
+ /**
+ * Get the required number of bytes from the iterator
+ *
+ * @param bytes Number of bytes
+ * @return DataView with data
+ */
  getDataView(bytes) {
  const bufferOffsets = this.findBufferOffsets(bytes);
  if (!bufferOffsets && this.ended) {
@@ -109,12 +154,25 @@
  this.disposeBuffers();
  return view;
  }
+ /**
+ * Dispose of old array buffers
+ */
  disposeBuffers() {
  while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
  this.offset -= this.arrayBuffers[0].byteLength;
  this.arrayBuffers.shift();
  }
  }
+ /**
+ * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
+ *
+ * In contrast to concatenateArrayBuffers, this only copies the necessary
+ * portions of the source arrays, rather than first copying the entire arrays
+ * then taking a part of them.
+ *
+ * @param bufferOffsets List of internal array offsets
+ * @return New contiguous ArrayBuffer
+ */
  _combineArrayBuffers(bufferOffsets) {
  let byteLength = 0;
  for (const bufferOffset of bufferOffsets) {
@@ -131,9 +189,15 @@
  }
  return result.buffer;
  }
+ /**
+ * @param bytes
+ */
  skip(bytes) {
  this.offset += bytes;
  }
+ /**
+ * @param bytes
+ */
  rewind(bytes) {
  this.offset -= bytes;
  }
@@ -146,6 +210,7 @@
  function parseSHPHeader(headerView) {
  const header = {
  magic: headerView.getInt32(0, BIG_ENDIAN),
+ // Length is stored as # of 2-byte words; multiply by 2 to get # of bytes
  length: headerView.getInt32(24, BIG_ENDIAN) * 2,
  version: headerView.getInt32(28, LITTLE_ENDIAN),
  type: headerView.getInt32(32, LITTLE_ENDIAN),
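The comment added in the hunk above reflects the shapefile spec's convention that the file length stored at byte 24 (big-endian) is counted in 16-bit words. A tiny worked illustration with hypothetical values, not taken from this package:

  // A .shp file containing only its 100-byte header stores a length of 50 words.
  const headerView = new DataView(new ArrayBuffer(100));
  headerView.setInt32(24, 50, false);                        // big-endian word count
  const lengthInBytes = headerView.getInt32(24, false) * 2;  // 100 bytes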
@@ -172,9 +237,7 @@
  // src/lib/parsers/parse-shp-geometry.ts
  var LITTLE_ENDIAN2 = true;
  function parseRecord(view, options) {
- const {
- _maxDimensions = 4
- } = options?.shp || {};
+ const { _maxDimensions = 4 } = options?.shp || {};
  let offset = 0;
  const type = view.getInt32(offset, LITTLE_ENDIAN2);
  offset += Int32Array.BYTES_PER_ELEMENT;
@@ -216,10 +279,7 @@
  let positions;
  [positions, offset] = parsePositions(view, offset, 1, dim);
  return {
- positions: {
- value: positions,
- size: dim
- },
+ positions: { value: positions, size: dim },
  type: "Point"
  };
  }
@@ -241,10 +301,7 @@
  }
  const positions = concatPositions(xyPositions, mPositions, zPositions);
  return {
- positions: {
- value: positions,
- size: dim
- },
+ positions: { value: positions, size: dim },
  type: "Point"
  };
  }
@@ -276,14 +333,8 @@
  if (type === "LineString") {
  return {
  type,
- positions: {
-
- size: dim
- },
- pathIndices: {
- value: ringIndices,
- size: 1
- }
+ positions: { value: positions, size: dim },
+ pathIndices: { value: ringIndices, size: 1 }
  };
  }
  const polygonIndices = [];
@@ -299,24 +350,22 @@
  polygonIndices.push(nPoints);
  return {
  type,
- positions: {
-
-
-
-
-
-
- },
- polygonIndices: {
- value: new Uint32Array(polygonIndices),
- size: 1
- }
+ positions: { value: positions, size: dim },
+ primitivePolygonIndices: { value: ringIndices, size: 1 },
+ // TODO: Dynamically choose Uint32Array over Uint16Array only when
+ // necessary. I believe the implementation requires nPoints to be the
+ // largest value in the array, so you should be able to use Uint32Array only
+ // when nPoints > 65535.
+ polygonIndices: { value: new Uint32Array(polygonIndices), size: 1 }
  };
  }
  function parsePositions(view, offset, nPoints, dim) {
  const bufferOffset = view.byteOffset + offset;
  const bufferLength = nPoints * dim * Float64Array.BYTES_PER_ELEMENT;
- return [
+ return [
+ new Float64Array(view.buffer.slice(bufferOffset, bufferOffset + bufferLength)),
+ offset + bufferLength
+ ];
  }
  function concatPositions(xyPositions, mPositions, zPositions) {
  if (!(mPositions || zPositions)) {
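The TODO in the hunk above suggests sizing the index array from the data. One way to act on it (an illustrative sketch, not code from this package) is to pick the narrowest unsigned type that can hold nPoints, since nPoints is the largest value pushed into polygonIndices:

  // Use 16-bit indices when every index fits, otherwise fall back to 32-bit.
  const IndexArray = nPoints > 65535 ? Uint32Array : Uint16Array;
  const indices = new IndexArray(polygonIndices);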
@@ -374,12 +423,12 @@
  };
  var SHPParser = class {
  options = {};
- binaryReader = new BinaryChunkReader({
- maxRewindBytes: SHP_RECORD_HEADER_SIZE
- });
+ binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
  state = STATE.EXPECTING_HEADER;
  result = {
  geometries: [],
+ // Initialize with number values to make TS happy
+ // These are initialized for real in STATE.EXPECTING_HEADER
  progress: {
  bytesTotal: NaN,
  bytesUsed: NaN,
@@ -455,7 +504,9 @@
  const recordHeaderView = binaryReader.getDataView(SHP_RECORD_HEADER_SIZE);
  const recordHeader = {
  recordNumber: recordHeaderView.getInt32(0, BIG_ENDIAN2),
+ // 2 byte words; includes the four words of record header
  byteLength: recordHeaderView.getInt32(4, BIG_ENDIAN2) * 2,
+ // This is actually part of the record, not the header...
  type: recordHeaderView.getInt32(8, LITTLE_ENDIAN3)
  };
  if (!binaryReader.hasAvailableBytes(recordHeader.byteLength - 4)) {
@@ -492,7 +543,7 @@
  }

  // src/shp-loader.ts
- var VERSION =
+ var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
  var SHPWorkerLoader = {
  name: "SHP",
@@ -503,6 +554,7 @@
  category: "geometry",
  extensions: ["shp"],
  mimeTypes: ["application/octet-stream"],
+ // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...
  tests: [new Uint8Array(SHP_MAGIC_NUMBER2).buffer],
  options: {
  shp: {
@@ -589,11 +641,11 @@
  }
  }
  function polygonToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const polygonIndices = data.polygonIndices.value.filter((x) => x >= startIndex && x <= endIndex);
- const primitivePolygonIndices = data.primitivePolygonIndices.value.filter(
+ const primitivePolygonIndices = data.primitivePolygonIndices.value.filter(
+ (x) => x >= startIndex && x <= endIndex
+ );
  const multi = polygonIndices.length > 2;
  if (!multi) {
  const coordinates2 = [];
@@ -603,62 +655,44 @@
  const ringCoordinates = ringToGeoJson(positions, startRingIndex, endRingIndex);
  coordinates2.push(ringCoordinates);
  }
- return {
- type: "Polygon",
- coordinates: coordinates2
- };
+ return { type: "Polygon", coordinates: coordinates2 };
  }
  const coordinates = [];
  for (let i = 0; i < polygonIndices.length - 1; i++) {
  const startPolygonIndex = polygonIndices[i];
  const endPolygonIndex = polygonIndices[i + 1];
- const polygonCoordinates = polygonToGeoJson(
+ const polygonCoordinates = polygonToGeoJson(
+ data,
+ startPolygonIndex,
+ endPolygonIndex
+ ).coordinates;
  coordinates.push(polygonCoordinates);
  }
- return {
- type: "MultiPolygon",
- coordinates
- };
+ return { type: "MultiPolygon", coordinates };
  }
  function lineStringToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const pathIndices = data.pathIndices.value.filter((x) => x >= startIndex && x <= endIndex);
  const multi = pathIndices.length > 2;
  if (!multi) {
  const coordinates2 = ringToGeoJson(positions, pathIndices[0], pathIndices[1]);
- return {
- type: "LineString",
- coordinates: coordinates2
- };
+ return { type: "LineString", coordinates: coordinates2 };
  }
  const coordinates = [];
  for (let i = 0; i < pathIndices.length - 1; i++) {
  const ringCoordinates = ringToGeoJson(positions, pathIndices[i], pathIndices[i + 1]);
  coordinates.push(ringCoordinates);
  }
- return {
- type: "MultiLineString",
- coordinates
- };
+ return { type: "MultiLineString", coordinates };
  }
  function pointToGeoJson(data, startIndex, endIndex) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const coordinates = ringToGeoJson(positions, startIndex, endIndex);
  const multi = coordinates.length > 1;
  if (multi) {
- return {
- type: "MultiPoint",
- coordinates
- };
+ return { type: "MultiPoint", coordinates };
  }
- return {
- type: "Point",
- coordinates: coordinates[0]
- };
+ return { type: "Point", coordinates: coordinates[0] };
  }
  function ringToGeoJson(positions, startIndex, endIndex) {
  startIndex = startIndex || 0;
@@ -6072,10 +6106,7 @@
  let iterator2Done = false;
  while (!iterator1Done && !iterator2Done) {
  if (batch1Data.length === 0 && !iterator1Done) {
- const {
- value,
- done
- } = await iterator1.next();
+ const { value, done } = await iterator1.next();
  if (done) {
  iterator1Done = true;
  } else {
@@ -6083,10 +6114,7 @@
  }
  }
  if (batch2Data.length === 0 && !iterator2Done) {
- const {
- value,
- done
- } = await iterator2.next();
+ const { value, done } = await iterator2.next();
  if (done) {
  iterator2Done = true;
  } else {
@@ -6118,23 +6146,19 @@
  // src/lib/parsers/parse-dbf.ts
  var LITTLE_ENDIAN4 = true;
  var DBF_HEADER_SIZE = 32;
- var STATE2 = function(STATE3) {
- STATE3[STATE3["START"] = 0] = "START";
- STATE3[STATE3["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
- STATE3[STATE3["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
- STATE3[STATE3["END"] = 3] = "END";
- STATE3[STATE3["ERROR"] = 4] = "ERROR";
- return STATE3;
- }(STATE2 || {});
  var DBFParser = class {
  binaryReader = new BinaryChunkReader();
-
+ textDecoder;
+ state = 0 /* START */;
  result = {
  data: []
  };
  constructor(options) {
  this.textDecoder = new TextDecoder(options.encoding);
  }
+ /**
+ * @param arrayBuffer
+ */
  write(arrayBuffer) {
  this.binaryReader.write(arrayBuffer);
  this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
@@ -6142,25 +6166,18 @@
  end() {
  this.binaryReader.end();
  this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
- if (this.state !==
- this.state =
+ if (this.state !== 3 /* END */) {
+ this.state = 4 /* ERROR */;
  this.result.error = "DBF incomplete file";
  }
  }
  };
  function parseDBF(arrayBuffer, options = {}) {
- const {
-
- } = options.dbf || {};
- const dbfParser = new DBFParser({
- encoding
- });
+ const { encoding = "latin1" } = options.dbf || {};
+ const dbfParser = new DBFParser({ encoding });
  dbfParser.write(arrayBuffer);
  dbfParser.end();
- const {
- data,
- schema
- } = dbfParser.result;
+ const { data, schema } = dbfParser.result;
  const shape = options?.dbf?.shape;
  switch (shape) {
  case "object-row-table": {
@@ -6172,22 +6189,15 @@
  return table;
  }
  case "table":
- return {
- schema,
- rows: data
- };
+ return { schema, rows: data };
  case "rows":
  default:
  return data;
  }
  }
  async function* parseDBFInBatches(asyncIterator, options = {}) {
- const {
-
- } = options.dbf || {};
- const parser = new DBFParser({
- encoding
- });
+ const { encoding = "latin1" } = options.dbf || {};
+ const parser = new DBFParser({ encoding });
  let headerReturned = false;
  for await (const arrayBuffer of asyncIterator) {
  parser.write(arrayBuffer);
@@ -6209,10 +6219,10 @@
  while (true) {
  try {
  switch (state) {
- case
- case
+ case 4 /* ERROR */:
+ case 3 /* END */:
  return state;
- case
+ case 0 /* START */:
  const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
  if (!dataView) {
  return state;
@@ -6223,10 +6233,13 @@
  rowsTotal: result.dbfHeader.nRecords,
  rows: 0
  };
- state =
+ state = 1 /* FIELD_DESCRIPTORS */;
  break;
- case
- const fieldDescriptorView = binaryReader.getDataView(
+ case 1 /* FIELD_DESCRIPTORS */:
+ const fieldDescriptorView = binaryReader.getDataView(
+ // @ts-ignore
+ result.dbfHeader.headerLength - DBF_HEADER_SIZE
+ );
  if (!fieldDescriptorView) {
  return state;
  }
@@ -6235,14 +6248,11 @@
  fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
  metadata: {}
  };
- state =
+ state = 2 /* FIELD_PROPERTIES */;
  binaryReader.skip(1);
  break;
- case
- const {
- recordLength = 0,
- nRecords = 0
- } = result?.dbfHeader || {};
+ case 2 /* FIELD_PROPERTIES */:
+ const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
  while (result.data.length < nRecords) {
  const recordView = binaryReader.getDataView(recordLength - 1);
  if (!recordView) {
@@ -6253,15 +6263,15 @@
  result.data.push(row);
  result.progress.rows = result.data.length;
  }
- state =
+ state = 3 /* END */;
  break;
  default:
- state =
+ state = 4 /* ERROR */;
  result.error = `illegal parser state ${state}`;
  return state;
  }
  } catch (error) {
- state =
+ state = 4 /* ERROR */;
  result.error = `DBF parsing failed: ${error.message}`;
  return state;
  }
@@ -6269,12 +6279,17 @@
  }
  function parseDBFHeader(headerView) {
  return {
+ // Last updated date
  year: headerView.getUint8(1) + 1900,
  month: headerView.getUint8(2),
  day: headerView.getUint8(3),
+ // Number of records in data file
  nRecords: headerView.getUint32(4, LITTLE_ENDIAN4),
+ // Length of header in bytes
  headerLength: headerView.getUint16(8, LITTLE_ENDIAN4),
+ // Length of each record
  recordLength: headerView.getUint16(10, LITTLE_ENDIAN4),
+ // Not sure if this is usually set
  languageDriver: headerView.getUint8(29)
  };
  }
@@ -6298,7 +6313,9 @@
  const out = {};
  let offset = 0;
  for (const field of fields) {
- const text = textDecoder.decode(
+ const text = textDecoder.decode(
+ new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength)
+ );
  out[field.name] = parseField(text, field.dataType);
  offset += field.fieldLength;
  }
@@ -6337,69 +6354,29 @@
  function parseCharacter(text) {
  return text.trim() || null;
  }
- function makeField({
- name,
- dataType,
- fieldLength,
- decimal
- }) {
+ function makeField({ name, dataType, fieldLength, decimal }) {
  switch (dataType) {
  case "B":
- return {
- name,
- type: "float64",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "float64", nullable: true, metadata: {} };
  case "C":
- return {
- name,
- type: "utf8",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "utf8", nullable: true, metadata: {} };
  case "F":
- return {
- name,
- type: "float64",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "float64", nullable: true, metadata: {} };
  case "N":
- return {
- name,
- type: "float64",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "float64", nullable: true, metadata: {} };
  case "O":
- return {
- name,
- type: "float64",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "float64", nullable: true, metadata: {} };
  case "D":
- return {
- name,
- type: "timestamp-millisecond",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "timestamp-millisecond", nullable: true, metadata: {} };
  case "L":
- return {
- name,
- type: "bool",
- nullable: true,
- metadata: {}
- };
+ return { name, type: "bool", nullable: true, metadata: {} };
  default:
  throw new Error("Unsupported data type");
  }
  }

  // src/dbf-loader.ts
- var VERSION2 =
+ var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var DBFWorkerLoader = {
  name: "DBF",
  id: "dbf",
@@ -6426,26 +6403,27 @@

  // src/lib/parsers/parse-shapefile.ts
  async function* parseShapefileInBatches(asyncIterator, options, context) {
- const {
-
-
-
-
-
-
-
- } = await loadShapefileSidecarFiles(options, context);
- const shapeIterable = await parseInBatchesFromContext(asyncIterator, SHPLoader, options, context);
+ const { reproject = false, _targetCrs = "WGS84" } = options?.gis || {};
+ const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
+ const shapeIterable = await parseInBatchesFromContext(
+ asyncIterator,
+ SHPLoader,
+ options,
+ context
+ );
  const shapeIterator = shapeIterable[Symbol.asyncIterator]?.() || shapeIterable[Symbol.iterator]?.();
  let propertyIterator = null;
  const dbfResponse = await context?.fetch(replaceExtension(context?.url || "", "dbf"));
  if (dbfResponse?.ok) {
- const propertyIterable = await parseInBatchesFromContext(
-
-
-
-
-
+ const propertyIterable = await parseInBatchesFromContext(
+ dbfResponse,
+ DBFLoader,
+ {
+ ...options,
+ dbf: { encoding: cpg || "latin1" }
+ },
+ context
+ );
  propertyIterator = propertyIterable[Symbol.asyncIterator]?.() || propertyIterable[Symbol.iterator]();
  }
  let shapeHeader = (await shapeIterator.next()).value;
@@ -6488,29 +6466,19 @@
  }
  }
  async function parseShapefile(arrayBuffer, options, context) {
- const {
-
-
- } = options?.gis || {};
- const {
- shx,
- cpg,
- prj
- } = await loadShapefileSidecarFiles(options, context);
- const {
- header,
- geometries
- } = await parseFromContext(arrayBuffer, SHPLoader, options, context);
+ const { reproject = false, _targetCrs = "WGS84" } = options?.gis || {};
+ const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
+ const { header, geometries } = await parseFromContext(arrayBuffer, SHPLoader, options, context);
  const geojsonGeometries = parseGeometries(geometries);
  let propertyTable;
  const dbfResponse = await context?.fetch(replaceExtension(context?.url, "dbf"));
  if (dbfResponse?.ok) {
- propertyTable = await parseFromContext(
-
-
-
-
-
+ propertyTable = await parseFromContext(
+ dbfResponse,
+ DBFLoader,
+ { dbf: { shape: "object-row-table", encoding: cpg || "latin1" } },
+ context
+ );
  }
  let features = joinProperties(geojsonGeometries, propertyTable?.data || []);
  if (reproject) {
@@ -6519,13 +6487,11 @@
  switch (options?.shapefile?.shape) {
  case "geojson-table":
  return {
+ // @ts-expect-error
  shape: "geojson-table",
  type: "FeatureCollection",
  encoding: cpg,
- schema: propertyTable?.schema || {
- metadata: {},
- fields: []
- },
+ schema: propertyTable?.schema || { metadata: {}, fields: [] },
  prj,
  shx,
  header,
@@ -6555,6 +6521,7 @@
  const feature = {
  type: "Feature",
  geometry,
+ // properties can be undefined if dbfResponse above was empty
  properties: properties && properties[i] || {}
  };
  features.push(feature);
@@ -6565,17 +6532,11 @@
  if (!sourceCrs && !targetCrs) {
  return features;
  }
- const projection = new Proj4Projection({
- from: sourceCrs || "WGS84",
- to: targetCrs || "WGS84"
- });
+ const projection = new Proj4Projection({ from: sourceCrs || "WGS84", to: targetCrs || "WGS84" });
  return transformGeoJsonCoords(features, (coord) => projection.project(coord));
  }
  async function loadShapefileSidecarFiles(options, context) {
- const {
- url,
- fetch
- } = context;
+ const { url, fetch } = context;
  const shxPromise = fetch(replaceExtension(url, "shx"));
  const cpgPromise = fetch(replaceExtension(url, "cpg"));
  const prjPromise = fetch(replaceExtension(url, "prj"));
@@ -6627,7 +6588,7 @@
  }

  // src/shapefile-loader.ts
- var VERSION3 =
+ var VERSION3 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var ShapefileLoader = {
  name: "Shapefile",
  id: "shapefile",
@@ -6645,19 +6606,35 @@
  _maxDimensions: 4
  }
  },
+ // @ts-expect-error
  parse: parseShapefile,
+ // @ts-expect-error
  parseInBatches: parseShapefileInBatches
  };

  // src/lib/streaming/binary-reader.ts
  var BinaryReader = class {
+ offset;
+ arrayBuffer;
  constructor(arrayBuffer) {
  this.offset = 0;
  this.arrayBuffer = arrayBuffer;
  }
+ /**
+ * Checks if there are available bytes in data
+ *
+ * @param bytes
+ * @returns boolean
+ */
  hasAvailableBytes(bytes) {
  return this.arrayBuffer.byteLength - this.offset >= bytes;
  }
+ /**
+ * Get the required number of bytes from the iterator
+ *
+ * @param bytes
+ * @returns Dataview
+ */
  getDataView(bytes) {
  if (bytes && !this.hasAvailableBytes(bytes)) {
  throw new Error("binary data exhausted");
@@ -6666,14 +6643,24 @@
  this.offset += bytes;
  return dataView;
  }
+ /**
+ * Skipping
+ *
+ * @param bytes
+ */
  skip(bytes) {
  this.offset += bytes;
  }
+ /**
+ * Rewinding
+ *
+ * @param bytes
+ */
  rewind(bytes) {
  this.offset -= bytes;
  }
  };
- return __toCommonJS(
+ return __toCommonJS(bundle_exports);
  })();
  return __exports__;
  });
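For reference, a typical way to exercise the options that appear in this diff (shapefile.shape, gis.reproject, gis._targetCrs, and the dbf encoding taken from the .cpg sidecar) from a bundler build — an illustrative sketch, not taken from this package's documentation, with 'parcels.shp' as a placeholder URL:

  import {load} from '@loaders.gl/core';
  import {ShapefileLoader} from '@loaders.gl/shapefile';

  // Sidecar .dbf/.shx/.cpg/.prj files are fetched by replacing the extension,
  // as parseShapefile and loadShapefileSidecarFiles do in the diff above.
  const table = await load('parcels.shp', ShapefileLoader, {
    shapefile: {shape: 'geojson-table'},
    gis: {reproject: true, _targetCrs: 'WGS84'}
  });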