@loaders.gl/shapefile 3.1.0-alpha.4 → 3.1.0-beta.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/dist/bundle.d.ts +2 -0
  2. package/dist/bundle.d.ts.map +1 -0
  3. package/dist/bundle.js +7554 -0
  4. package/dist/dbf-loader.d.ts +8 -0
  5. package/dist/dbf-loader.d.ts.map +1 -0
  6. package/dist/dbf-loader.js +32 -0
  7. package/dist/dbf-worker.js +912 -2
  8. package/dist/es5/bundle.js +1 -1
  9. package/dist/es5/bundle.js.map +1 -1
  10. package/dist/es5/dbf-loader.js +5 -40
  11. package/dist/es5/dbf-loader.js.map +1 -1
  12. package/dist/es5/index.js +5 -5
  13. package/dist/es5/lib/parsers/parse-dbf.js +86 -232
  14. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  15. package/dist/es5/lib/parsers/parse-shapefile.js +151 -404
  16. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  17. package/dist/es5/lib/parsers/parse-shp-geometry.js +49 -96
  18. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  19. package/dist/es5/lib/parsers/parse-shp-header.js +4 -4
  20. package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -1
  21. package/dist/es5/lib/parsers/parse-shp.js +47 -165
  22. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  23. package/dist/es5/lib/parsers/parse-shx.js +11 -11
  24. package/dist/es5/lib/parsers/parse-shx.js.map +1 -1
  25. package/dist/es5/lib/streaming/binary-chunk-reader.js +99 -172
  26. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  27. package/dist/es5/lib/streaming/binary-reader.js +24 -35
  28. package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
  29. package/dist/es5/lib/streaming/zip-batch-iterators.js +37 -96
  30. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  31. package/dist/es5/shapefile-loader.js +3 -3
  32. package/dist/es5/shapefile-loader.js.map +1 -1
  33. package/dist/es5/shp-loader.js +6 -41
  34. package/dist/es5/shp-loader.js.map +1 -1
  35. package/dist/es5/workers/dbf-worker.js +7 -3
  36. package/dist/es5/workers/dbf-worker.js.map +1 -0
  37. package/dist/es5/workers/shp-worker.js +7 -3
  38. package/dist/es5/workers/shp-worker.js.map +1 -0
  39. package/dist/esm/dbf-loader.js +1 -1
  40. package/dist/esm/dbf-loader.js.map +1 -1
  41. package/dist/esm/lib/parsers/parse-dbf.js +4 -6
  42. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
  43. package/dist/esm/lib/parsers/parse-shapefile.js +1 -1
  44. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  45. package/dist/esm/lib/parsers/parse-shp-geometry.js +1 -1
  46. package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -1
  47. package/dist/esm/lib/parsers/parse-shp-header.js +2 -2
  48. package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -1
  49. package/dist/esm/lib/parsers/parse-shp.js +2 -2
  50. package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
  51. package/dist/esm/shapefile-loader.js +1 -1
  52. package/dist/esm/shapefile-loader.js.map +1 -1
  53. package/dist/esm/shp-loader.js +1 -1
  54. package/dist/esm/shp-loader.js.map +1 -1
  55. package/dist/esm/workers/dbf-worker.js +3 -3
  56. package/dist/esm/workers/dbf-worker.js.map +1 -0
  57. package/dist/esm/workers/shp-worker.js +3 -3
  58. package/dist/esm/workers/shp-worker.js.map +1 -0
  59. package/dist/index.d.ts +4 -0
  60. package/dist/index.d.ts.map +1 -0
  61. package/dist/index.js +11 -0
  62. package/dist/lib/parsers/parse-dbf.d.ts +28 -0
  63. package/dist/lib/parsers/parse-dbf.d.ts.map +1 -0
  64. package/dist/lib/parsers/parse-dbf.js +335 -0
  65. package/dist/lib/parsers/parse-shapefile.d.ts +54 -0
  66. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -0
  67. package/dist/lib/parsers/parse-shapefile.js +245 -0
  68. package/dist/lib/parsers/parse-shp-geometry.d.ts +11 -0
  69. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -0
  70. package/dist/lib/parsers/parse-shp-geometry.js +287 -0
  71. package/dist/lib/parsers/parse-shp-header.d.ts +26 -0
  72. package/dist/lib/parsers/parse-shp-header.d.ts.map +1 -0
  73. package/dist/lib/parsers/parse-shp-header.js +43 -0
  74. package/dist/lib/parsers/parse-shp.d.ts +9 -0
  75. package/dist/lib/parsers/parse-shp.d.ts.map +1 -0
  76. package/dist/lib/parsers/parse-shp.js +170 -0
  77. package/dist/lib/parsers/parse-shx.d.ts +10 -0
  78. package/dist/lib/parsers/parse-shx.d.ts.map +1 -0
  79. package/dist/lib/parsers/parse-shx.js +28 -0
  80. package/dist/lib/streaming/binary-chunk-reader.d.ts +59 -0
  81. package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -0
  82. package/dist/lib/streaming/binary-chunk-reader.js +161 -0
  83. package/dist/lib/streaming/binary-reader.d.ts +32 -0
  84. package/dist/lib/streaming/binary-reader.d.ts.map +1 -0
  85. package/dist/lib/streaming/binary-reader.js +52 -0
  86. package/dist/lib/streaming/zip-batch-iterators.d.ts +8 -0
  87. package/dist/lib/streaming/zip-batch-iterators.d.ts.map +1 -0
  88. package/dist/lib/streaming/zip-batch-iterators.js +61 -0
  89. package/dist/shapefile-loader.d.ts +26 -0
  90. package/dist/shapefile-loader.d.ts.map +1 -0
  91. package/dist/shapefile-loader.js +31 -0
  92. package/dist/shp-loader.d.ts +9 -0
  93. package/dist/shp-loader.d.ts.map +1 -0
  94. package/dist/shp-loader.js +35 -0
  95. package/dist/shp-worker.js +606 -2
  96. package/dist/workers/dbf-worker.d.ts +2 -0
  97. package/dist/workers/dbf-worker.d.ts.map +1 -0
  98. package/dist/workers/dbf-worker.js +5 -0
  99. package/dist/workers/shp-worker.d.ts +2 -0
  100. package/dist/workers/shp-worker.d.ts.map +1 -0
  101. package/dist/workers/shp-worker.js +5 -0
  102. package/package.json +10 -10
  103. package/src/lib/parsers/parse-dbf.ts +5 -3
  104. package/src/lib/parsers/parse-shp-geometry.ts +1 -1
  105. package/src/workers/{dbf-worker.js → dbf-worker.ts} +0 -0
  106. package/src/workers/{shp-worker.js → shp-worker.ts} +0 -0
  107. package/dist/dbf-worker.js.map +0 -1
  108. package/dist/dist.min.js +0 -2
  109. package/dist/dist.min.js.map +0 -1
  110. package/dist/shp-worker.js.map +0 -1
@@ -0,0 +1,170 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseSHPInBatches = exports.parseSHP = void 0;
const binary_chunk_reader_1 = __importDefault(require("../streaming/binary-chunk-reader"));
const parse_shp_header_1 = require("./parse-shp-header");
const parse_shp_geometry_1 = require("./parse-shp-geometry");
const LITTLE_ENDIAN = true;
const BIG_ENDIAN = false;
const SHP_HEADER_SIZE = 100;
// According to the spec, the record header is just 8 bytes, but here we set it
// to 12 so that we can also access the record's type
const SHP_RECORD_HEADER_SIZE = 12;
const STATE = {
    EXPECTING_HEADER: 0,
    EXPECTING_RECORD: 1,
    END: 2,
    ERROR: 3
};
/**
 * Incremental SHP parser: feed chunks with write(), finish with end().
 * Parsed geometries accumulate in `this.result.geometries`.
 */
class SHPParser {
    constructor(options) {
        this.options = {};
        // maxRewindBytes lets parseState un-read a record header when the full
        // record body has not arrived yet
        this.binaryReader = new binary_chunk_reader_1.default({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
        this.state = STATE.EXPECTING_HEADER;
        this.result = {
            geometries: []
        };
        this.options = options;
    }
    /** Feed one chunk and advance the state machine as far as possible */
    write(arrayBuffer) {
        this.binaryReader.write(arrayBuffer);
        this.state = parseState(this.state, this.result, this.binaryReader, this.options);
    }
    /** Signal end-of-input; flags an error if the file was truncated */
    end() {
        this.binaryReader.end();
        this.state = parseState(this.state, this.result, this.binaryReader, this.options);
        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
        if (this.state !== STATE.END) {
            this.state = STATE.ERROR;
            this.result.error = 'SHP incomplete file';
        }
    }
}
/**
 * Parse a complete in-memory SHP file
 *
 * @param arrayBuffer Entire .shp file contents
 * @param options Passed through to the geometry parser
 * @returns Result object with header, geometries and progress info
 */
function parseSHP(arrayBuffer, options) {
    const shpParser = new SHPParser(options);
    shpParser.write(arrayBuffer);
    shpParser.end();
    // @ts-ignore
    return shpParser.result;
}
exports.parseSHP = parseSHP;
/**
 * Parse SHP data in batches: yields the file header once it is available,
 * then arrays of geometries as they are parsed.
 *
 * @param asyncIterator Async iterable of ArrayBuffer chunks
 * @param options Passed through to the geometry parser
 * @returns Async generator of header, then geometry batches
 */
async function* parseSHPInBatches(asyncIterator, options) {
    const parser = new SHPParser(options);
    let headerReturned = false;
    for await (const arrayBuffer of asyncIterator) {
        parser.write(arrayBuffer);
        if (!headerReturned && parser.result.header) {
            headerReturned = true;
            yield parser.result.header;
        }
        if (parser.result.geometries.length > 0) {
            yield parser.result.geometries;
            parser.result.geometries = [];
        }
    }
    parser.end();
    if (parser.result.geometries.length > 0) {
        yield parser.result.geometries;
    }
    return;
}
exports.parseSHPInBatches = parseSHPInBatches;
/**
 * State-machine parser for SHP data
 *
 * Note that whenever more data is needed, a `return`, not a `break`, is
 * necessary, as the `break` keeps the context within `parseState`, while
 * `return` releases context so that more data can be written into the
 * BinaryChunkReader.
 *
 * @param state Current state
 * @param result An object to hold result data
 * @param binaryReader
 * @return State at end of current parsing
 */
/* eslint-disable complexity, max-depth */
function parseState(state, result, binaryReader, options) {
    // eslint-disable-next-line no-constant-condition
    while (true) {
        try {
            switch (state) {
                case STATE.ERROR:
                case STATE.END:
                    return state;
                case STATE.EXPECTING_HEADER: {
                    // Parse initial file header
                    const dataView = binaryReader.getDataView(SHP_HEADER_SIZE);
                    if (!dataView) {
                        return state;
                    }
                    result.header = (0, parse_shp_header_1.parseSHPHeader)(dataView);
                    result.progress = {
                        bytesUsed: 0,
                        bytesTotal: result.header.length,
                        rows: 0
                    };
                    // index numbering starts at 1
                    result.currentIndex = 1;
                    state = STATE.EXPECTING_RECORD;
                    break;
                }
                case STATE.EXPECTING_RECORD: {
                    while (binaryReader.hasAvailableBytes(SHP_RECORD_HEADER_SIZE)) {
                        const recordHeaderView = binaryReader.getDataView(SHP_RECORD_HEADER_SIZE);
                        const recordHeader = {
                            recordNumber: recordHeaderView.getInt32(0, BIG_ENDIAN),
                            // 2 byte words; includes the four words of record header
                            byteLength: recordHeaderView.getInt32(4, BIG_ENDIAN) * 2,
                            // This is actually part of the record, not the header...
                            type: recordHeaderView.getInt32(8, LITTLE_ENDIAN)
                        };
                        if (!binaryReader.hasAvailableBytes(recordHeader.byteLength - 4)) {
                            binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
                            return state;
                        }
                        const invalidRecord = recordHeader.byteLength < 4 ||
                            recordHeader.type !== result.header.type ||
                            recordHeader.recordNumber !== result.currentIndex;
                        // All records must have at least four bytes (for the record shape type)
                        if (invalidRecord) {
                            // Malformed record, try again, advancing just 4 bytes
                            // Note: this is a rewind because binaryReader.getDataView above
                            // moved the pointer forward 12 bytes, so rewinding 8 bytes still
                            // leaves us 4 bytes ahead
                            binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
                        }
                        else {
                            // Note: type is actually part of the record, not the header, so
                            // rewind 4 bytes before reading record
                            binaryReader.rewind(4);
                            const recordView = binaryReader.getDataView(recordHeader.byteLength);
                            const geometry = (0, parse_shp_geometry_1.parseRecord)(recordView, options);
                            result.geometries.push(geometry);
                            result.currentIndex++;
                            result.progress.rows = result.currentIndex - 1;
                        }
                    }
                    if (binaryReader.ended) {
                        state = STATE.END;
                    }
                    return state;
                }
                default: {
                    // BUGFIX: capture the offending value before overwriting `state`;
                    // previously the template interpolated the already-assigned
                    // STATE.ERROR, so the message always read "illegal parser state 3"
                    const illegalState = state;
                    state = STATE.ERROR;
                    result.error = `illegal parser state ${illegalState}`;
                    return state;
                }
            }
        }
        catch (error) {
            state = STATE.ERROR;
            result.error = `SHP parsing failed: ${error?.message}`;
            return state;
        }
    }
}
@@ -0,0 +1,10 @@
1
+ export interface SHXOutput {
2
+ offsets: Int32Array;
3
+ lengths: Int32Array;
4
+ }
5
+ /**
6
+ * Parse a .shx (shapefile index) file. @param arrayBuffer - complete contents of the .shx file
7
+ * @returns SHXOutput - parallel arrays holding each record's offset and length
8
+ */
9
+ export declare function parseShx(arrayBuffer: ArrayBuffer): SHXOutput;
10
+ //# sourceMappingURL=parse-shx.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parse-shx.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shx.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,SAAS;IACxB,OAAO,EAAE,UAAU,CAAC;IACpB,OAAO,EAAE,UAAU,CAAC;CACrB;AAKD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,CAoB5D"}
@@ -0,0 +1,28 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseShx = void 0;
const parse_shp_header_1 = require("./parse-shp-header");
const SHX_HEADER_SIZE = 100;
// Index records are stored big-endian (littleEndian flag = false)
const BIG_ENDIAN = false;
/**
 * Parse a .shx (shapefile index) file
 *
 * @param arrayBuffer Complete contents of the .shx file
 * @returns SHXOutput Parallel arrays with each record's offset and length
 */
function parseShx(arrayBuffer) {
    // SHX header is identical to SHP Header
    const headerView = new DataView(arrayBuffer, 0, SHX_HEADER_SIZE);
    const header = (0, parse_shp_header_1.parseSHPHeader)(headerView);
    const contentLength = header.length - SHX_HEADER_SIZE;
    const contentView = new DataView(arrayBuffer, SHX_HEADER_SIZE, contentLength);
    // BUGFIX: each index record is 8 bytes (int32 offset + int32 length), so
    // there are contentLength / 8 records. The arrays were previously sized
    // `contentLength`, over-allocating 8x and padding the result with zeros.
    const recordCount = contentLength / 8;
    const offsets = new Int32Array(recordCount);
    const lengths = new Int32Array(recordCount);
    for (let i = 0; i < recordCount; i++) {
        offsets[i] = contentView.getInt32(i * 8, BIG_ENDIAN);
        lengths[i] = contentView.getInt32(i * 8 + 4, BIG_ENDIAN);
    }
    return {
        offsets,
        lengths
    };
}
exports.parseShx = parseShx;
@@ -0,0 +1,59 @@
1
+ export default class BinaryChunkReader {
2
+ offset: number;
3
+ arrayBuffers: ArrayBuffer[];
4
+ ended: boolean;
5
+ maxRewindBytes: number;
6
+ constructor(options?: {
7
+ [key: string]: any;
8
+ });
9
+ /**
10
+ * @param arrayBuffer - chunk to append to the reader's internal buffer queue
11
+ */
12
+ write(arrayBuffer: ArrayBuffer): void;
13
+ end(): void;
14
+ /**
15
+ * Has enough bytes available in array buffers
16
+ *
17
+ * @param bytes Number of bytes
18
+ * @return boolean
19
+ */
20
+ hasAvailableBytes(bytes: number): boolean;
21
+ /**
22
+ * Find offsets of byte ranges within this.arrayBuffers
23
+ *
24
+ * @param bytes Byte length to read
25
+ * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
26
+ */
27
+ findBufferOffsets(bytes: number): any[] | null;
28
+ /**
29
+ * Get the required number of bytes from the iterator
30
+ *
31
+ * @param bytes Number of bytes
32
+ * @return DataView with data
33
+ */
34
+ getDataView(bytes: number): DataView | null;
35
+ /**
36
+ * Dispose of old array buffers
37
+ */
38
+ disposeBuffers(): void;
39
+ /**
40
+ * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
41
+ *
42
+ * In contrast to concatenateArrayBuffers, this only copies the necessary
43
+ * portions of the source arrays, rather than first copying the entire arrays
44
+ * then taking a part of them.
45
+ *
46
+ * @param bufferOffsets List of internal array offsets
47
+ * @return New contiguous ArrayBuffer
48
+ */
49
+ _combineArrayBuffers(bufferOffsets: any[]): ArrayBufferLike;
50
+ /**
51
+ * @param bytes - number of bytes to advance the read offset (no data returned)
52
+ */
53
+ skip(bytes: number): void;
54
+ /**
55
+ * @param bytes - number of bytes to move the read offset back; only up to maxRewindBytes are guaranteed to still be buffered
56
+ */
57
+ rewind(bytes: number): void;
58
+ }
59
+ //# sourceMappingURL=binary-chunk-reader.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"binary-chunk-reader.d.ts","sourceRoot":"","sources":["../../../src/lib/streaming/binary-chunk-reader.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,OAAO,iBAAiB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,WAAW,EAAE,CAAC;IAC5B,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,MAAM,CAAC;gBAEX,OAAO,CAAC,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAC;IAY1C;;OAEG;IACH,KAAK,CAAC,WAAW,EAAE,WAAW,GAAG,IAAI;IAIrC,GAAG,IAAI,IAAI;IAKX;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO;IAWzC;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,GAAG,EAAE,GAAG,IAAI;IAwC9C;;;;;OAKG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IA+B3C;;OAEG;IACH,cAAc,IAAI,IAAI;IAUtB;;;;;;;;;OASG;IACH,oBAAoB,CAAC,aAAa,EAAE,GAAG,EAAE,GAAG,eAAe;IAoB3D;;OAEG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAGzB;;OAEG;IACH,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAI5B"}
@@ -0,0 +1,161 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Chunked binary reader: accepts ArrayBuffers incrementally via write() and
 * serves contiguous DataViews that may span chunk boundaries, retaining up to
 * `maxRewindBytes` of already-consumed data so callers can rewind().
 */
class BinaryChunkReader {
    constructor(options) {
        const { maxRewindBytes = 0 } = options || {};
        /** current global offset into current array buffer*/
        this.offset = 0;
        /** current buffer from iterator */
        this.arrayBuffers = [];
        this.ended = false;
        /** bytes behind offset to hold on to */
        this.maxRewindBytes = maxRewindBytes;
    }
    /**
     * Append one chunk to the internal queue
     * @param arrayBuffer
     */
    write(arrayBuffer) {
        this.arrayBuffers.push(arrayBuffer);
    }
    /** Mark the stream finished; queued buffers are released */
    end() {
        this.arrayBuffers = [];
        this.ended = true;
    }
    /**
     * Has enough bytes available in array buffers
     *
     * @param bytes Number of bytes
     * @return boolean
     */
    hasAvailableBytes(bytes) {
        // Walk the queue until the cumulative length (minus the bytes already
        // consumed, i.e. this.offset) covers the request
        let needed = bytes + this.offset;
        for (const buffer of this.arrayBuffers) {
            needed -= buffer.byteLength;
            if (needed <= 0) {
                return true;
            }
        }
        return false;
    }
    /**
     * Find offsets of byte ranges within this.arrayBuffers
     *
     * @param bytes Byte length to read
     * @return Nested array of [bufferIndex, [start, end]] entries, or null if
     *   the queued buffers cannot satisfy the request
     */
    findBufferOffsets(bytes) {
        // relativeOffset tracks the global read position relative to the start
        // of the buffer currently being examined (negative = read starts ahead)
        let relativeOffset = -this.offset;
        const ranges = [];
        for (let index = 0; index < this.arrayBuffers.length; index++) {
            const byteLength = this.arrayBuffers[index].byteLength;
            // This buffer lies entirely before the current global offset: skip it
            if (relativeOffset + byteLength <= 0) {
                relativeOffset += byteLength;
                // eslint-disable-next-line no-continue
                continue;
            }
            // A negative relativeOffset means the read starts inside this buffer;
            // a non-negative one means earlier buffers already contributed bytes
            const start = relativeOffset < 0 ? -relativeOffset : 0;
            if (start + bytes <= byteLength) {
                // The request ends within this buffer: done
                ranges.push([index, [start, start + bytes]]);
                return ranges;
            }
            // Take the tail of this buffer and continue into the next one
            ranges.push([index, [start, byteLength]]);
            bytes -= byteLength - start;
            relativeOffset += byteLength;
        }
        // Exhausted the queue without satisfying the request
        return null;
    }
    /**
     * Get the required number of bytes from the iterator
     *
     * @param bytes Number of bytes
     * @return DataView with data, or null if more data is needed
     */
    getDataView(bytes) {
        const ranges = this.findBufferOffsets(bytes);
        if (!ranges) {
            // After end() no more data can ever arrive: that is a hard error
            if (this.ended) {
                throw new Error('binary data exhausted');
            }
            // @ts-ignore
            return null;
        }
        let view;
        if (ranges.length === 1) {
            // Fast path: the whole range lives inside a single buffer
            const [bufferIndex, [start, end]] = ranges[0];
            view = new DataView(this.arrayBuffers[bufferIndex], start, end - start);
        }
        else {
            // Slow path: copy the pieces into one contiguous buffer
            view = new DataView(this._combineArrayBuffers(ranges));
        }
        this.offset += bytes;
        this.disposeBuffers();
        return view;
    }
    /**
     * Dispose of old array buffers
     */
    disposeBuffers() {
        // Drop leading buffers that are fully behind the rewind window
        while (this.arrayBuffers.length > 0 &&
            this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
            this.offset -= this.arrayBuffers[0].byteLength;
            this.arrayBuffers.shift();
        }
    }
    /**
     * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
     *
     * In contrast to concatenateArrayBuffers, this only copies the necessary
     * portions of the source arrays, rather than first copying the entire arrays
     * then taking a part of them.
     *
     * @param bufferOffsets List of internal array offsets
     * @return New contiguous ArrayBuffer
     */
    _combineArrayBuffers(bufferOffsets) {
        let totalBytes = 0;
        for (const [, [start, end]] of bufferOffsets) {
            totalBytes += end - start;
        }
        const target = new Uint8Array(totalBytes);
        // Copy the subarrays
        let targetOffset = 0;
        for (const [bufferIndex, [start, end]] of bufferOffsets) {
            const source = new Uint8Array(this.arrayBuffers[bufferIndex]);
            target.set(source.subarray(start, end), targetOffset);
            targetOffset += end - start;
        }
        return target.buffer;
    }
    /**
     * @param bytes
     */
    skip(bytes) {
        this.offset += bytes;
    }
    /**
     * @param bytes
     */
    rewind(bytes) {
        // TODO - only works if offset is already set
        this.offset -= bytes;
    }
}
exports.default = BinaryChunkReader;
@@ -0,0 +1,32 @@
1
+ export default class BinaryReader {
2
+ offset: number;
3
+ arrayBuffer: ArrayBuffer;
4
+ constructor(arrayBuffer: ArrayBuffer);
5
+ /**
6
+ * Checks whether at least `bytes` unread bytes remain past the current offset
7
+ *
8
+ * @param bytes - number of bytes required
9
+ * @returns true when the buffer still holds that many unread bytes
10
+ */
11
+ hasAvailableBytes(bytes: number): boolean;
12
+ /**
13
+ * Returns a DataView over the next `bytes` bytes and advances the offset
14
+ *
15
+ * @param bytes - view length in bytes; throws when not enough data remains
16
+ * @returns DataView over the requested byte range
17
+ */
18
+ getDataView(bytes: number): DataView;
19
+ /**
20
+ * Advances the read offset without returning any data
21
+ *
22
+ * @param bytes - number of bytes to skip forward
23
+ */
24
+ skip(bytes: number): void;
25
+ /**
26
+ * Moves the read offset backwards, e.g. to re-read a header
27
+ *
28
+ * @param bytes - number of bytes to rewind
29
+ */
30
+ rewind(bytes: number): void;
31
+ }
32
+ //# sourceMappingURL=binary-reader.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"binary-reader.d.ts","sourceRoot":"","sources":["../../../src/lib/streaming/binary-reader.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,OAAO,YAAY;IAC/B,MAAM,EAAE,MAAM,CAAC;IACf,WAAW,EAAE,WAAW,CAAC;gBAEb,WAAW,EAAE,WAAW;IAMpC;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO;IAIzC;;;;;OAKG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,QAAQ;IAYpC;;;;OAIG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAIzB;;;;OAIG;IACH,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAG5B"}
@@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Simple reader over a single in-memory ArrayBuffer, tracking a byte offset.
 */
class BinaryReader {
    constructor(arrayBuffer) {
        /** current global (stream) offset */
        this.offset = 0;
        /** current buffer from iterator */
        this.arrayBuffer = arrayBuffer;
    }
    /**
     * Checks if there are available bytes in data
     *
     * @param bytes
     * @returns boolean
     */
    hasAvailableBytes(bytes) {
        const remaining = this.arrayBuffer.byteLength - this.offset;
        return remaining >= bytes;
    }
    /**
     * Get the required number of bytes from the iterator
     *
     * @param bytes - view length; when falsy, the view extends to the end of
     *   the buffer
     * @returns DataView
     */
    getDataView(bytes) {
        if (bytes && !this.hasAvailableBytes(bytes)) {
            throw new Error('binary data exhausted');
        }
        let dataView;
        if (bytes) {
            dataView = new DataView(this.arrayBuffer, this.offset, bytes);
        }
        else {
            dataView = new DataView(this.arrayBuffer, this.offset);
        }
        this.offset += bytes;
        return dataView;
    }
    /**
     * Advance the read offset
     *
     * @param bytes
     */
    skip(bytes) {
        this.offset += bytes;
    }
    /**
     * Move the read offset backwards
     *
     * @param bytes
     */
    rewind(bytes) {
        this.offset -= bytes;
    }
}
exports.default = BinaryReader;
@@ -0,0 +1,8 @@
1
+ /**
2
+ * Zip two async batch iterators together, yielding pairs of equal-length batches
3
+ *
4
+ * @param iterator1 - first async iterator of batches (arrays)
5
+ * @param iterator2 - second async iterator of batches (arrays)
6
+ */
7
+ export declare function zipBatchIterators(iterator1: AsyncIterator<any[]>, iterator2: AsyncIterator<any[]>): AsyncGenerator<number[][], void, unknown>;
8
+ //# sourceMappingURL=zip-batch-iterators.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"zip-batch-iterators.d.ts","sourceRoot":"","sources":["../../../src/lib/streaming/zip-batch-iterators.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,wBAAuB,iBAAiB,CACtC,SAAS,EAAE,aAAa,CAAC,GAAG,EAAE,CAAC,EAC/B,SAAS,EAAE,aAAa,CAAC,GAAG,EAAE,CAAC,GAC9B,cAAc,CAAC,MAAM,EAAE,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CA8B3C"}
@@ -0,0 +1,61 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.zipBatchIterators = void 0;
/**
 * Zip two async batch iterators together, yielding pairs of equal-length
 * batches. Iteration stops once either input iterator is exhausted.
 *
 * @param iterator1
 * @param iterator2
 */
async function* zipBatchIterators(iterator1, iterator2) {
    let pending1 = [];
    let pending2 = [];
    let done1 = false;
    let done2 = false;
    // TODO - one could let all iterators flow at full speed using `Promise.race`
    // however we might end up with a big temporary buffer
    while (!done1 && !done2) {
        // Refill at most one of the pending buffers per pass
        if (pending1.length === 0 && !done1) {
            const { value, done } = await iterator1.next();
            done1 = Boolean(done);
            if (!done) {
                pending1 = value;
            }
        }
        else if (pending2.length === 0 && !done2) {
            const { value, done } = await iterator2.next();
            done2 = Boolean(done);
            if (!done) {
                pending2 = value;
            }
        }
        const zipped = extractBatch(pending1, pending2);
        if (zipped) {
            yield zipped;
        }
    }
}
exports.zipBatchIterators = zipBatchIterators;
/**
 * Remove and return a pair of equal-length prefixes from the two batches.
 * Mutates both input arrays in place.
 *
 * @param batch1
 * @param batch2
 * @return [prefix1, prefix2] or null when either batch is empty
 */
function extractBatch(batch1, batch2) {
    const count = Math.min(batch1.length, batch2.length);
    if (!count) {
        return null;
    }
    // Non interleaved arrays: splice both removes the prefix from the source
    // batch and returns it
    return [batch1.splice(0, count), batch2.splice(0, count)];
}
@@ -0,0 +1,26 @@
1
+ import type { LoaderWithParser } from '@loaders.gl/loader-utils';
2
+ import { parseShapefile, parseShapefileInBatches } from './lib/parsers/parse-shapefile';
3
+ /**
4
+ * Shapefile loader descriptor: id/extension metadata plus attached parse and parseInBatches functions
5
+ * @note Shapefile is a multi-file format and requires providing additional files alongside the .shp
6
+ */
7
+ export declare const ShapefileLoader: {
8
+ name: string;
9
+ id: string;
10
+ module: string;
11
+ version: any;
12
+ category: string;
13
+ extensions: string[];
14
+ mimeTypes: string[];
15
+ tests: ArrayBufferLike[];
16
+ options: {
17
+ shapefile: {};
18
+ shp: {
19
+ _maxDimensions: number;
20
+ };
21
+ };
22
+ parse: typeof parseShapefile;
23
+ parseInBatches: typeof parseShapefileInBatches;
24
+ };
25
+ export declare const _typecheckShapefileLoader: LoaderWithParser;
26
+ //# sourceMappingURL=shapefile-loader.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"shapefile-loader.d.ts","sourceRoot":"","sources":["../src/shapefile-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAE/D,OAAO,EAAC,cAAc,EAAE,uBAAuB,EAAC,MAAM,+BAA+B,CAAC;AAMtF;;;GAGG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;;;;CAiB3B,CAAC;AAEF,eAAO,MAAM,yBAAyB,EAAE,gBAAkC,CAAC"}
@@ -0,0 +1,31 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports._typecheckShapefileLoader = exports.ShapefileLoader = void 0;
const shp_loader_1 = require("./shp-loader");
const parse_shapefile_1 = require("./lib/parsers/parse-shapefile");
// __VERSION__ is injected by babel-plugin-version-inline
// @ts-ignore TS2304: Cannot find name '__VERSION__'.
const VERSION = typeof __VERSION__ === 'undefined' ? 'latest' : __VERSION__;
// Magic-number buffer used by loader auto-detection
const SHP_MAGIC_BYTES = new Uint8Array(shp_loader_1.SHP_MAGIC_NUMBER).buffer;
/**
 * Shapefile loader
 * @note Shapefile is multifile format and requires providing additional files
 */
const ShapefileLoader = {
    name: 'Shapefile',
    id: 'shapefile',
    module: 'shapefile',
    version: VERSION,
    category: 'geometry',
    extensions: ['shp'],
    mimeTypes: ['application/octet-stream'],
    tests: [SHP_MAGIC_BYTES],
    options: {
        shapefile: {},
        shp: {
            _maxDimensions: 4
        }
    },
    parse: parse_shapefile_1.parseShapefile,
    parseInBatches: parse_shapefile_1.parseShapefileInBatches
};
exports.ShapefileLoader = ShapefileLoader;
exports._typecheckShapefileLoader = ShapefileLoader;
@@ -0,0 +1,9 @@
1
+ import type { Loader, LoaderWithParser } from '@loaders.gl/loader-utils';
2
+ export declare const SHP_MAGIC_NUMBER: number[];
3
+ /**
4
+ * SHP file loader descriptor without an attached parser (plain Loader) — presumably for worker-thread use; confirm against src/shp-loader.ts
5
+ */
6
+ export declare const SHPWorkerLoader: Loader;
7
+ /** SHP file loader with attached parse/parseInBatches functions (LoaderWithParser) */
8
+ export declare const SHPLoader: LoaderWithParser;
9
+ //# sourceMappingURL=shp-loader.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"shp-loader.d.ts","sourceRoot":"","sources":["../src/shp-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAE,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAOvE,eAAO,MAAM,gBAAgB,UAA2B,CAAC;AAEzD;;GAEG;AACH,eAAO,MAAM,eAAe,EAAE,MAgB7B,CAAC;AAEF,sBAAsB;AACtB,eAAO,MAAM,SAAS,EAAE,gBAKvB,CAAC"}