@loaders.gl/shapefile 4.0.0-alpha.5 → 4.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.js +2 -2
- package/dist/dbf-loader.js +29 -20
- package/dist/dbf-worker.js +73 -447
- package/dist/dist.min.js +130 -489
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/dbf-loader.js +53 -0
- package/dist/es5/dbf-loader.js.map +1 -0
- package/dist/es5/index.js +39 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/lib/parsers/parse-dbf.js +394 -0
- package/dist/es5/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shapefile.js +373 -0
- package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js +220 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-header.js +35 -0
- package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp.js +227 -0
- package/dist/es5/lib/parsers/parse-shp.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shx.js +26 -0
- package/dist/es5/lib/parsers/parse-shx.js.map +1 -0
- package/dist/es5/lib/parsers/types.js +2 -0
- package/dist/es5/lib/parsers/types.js.map +1 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js +178 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/es5/lib/streaming/binary-reader.js +48 -0
- package/dist/es5/lib/streaming/binary-reader.js.map +1 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js +91 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/es5/shapefile-loader.js +31 -0
- package/dist/es5/shapefile-loader.js.map +1 -0
- package/dist/es5/shp-loader.js +56 -0
- package/dist/es5/shp-loader.js.map +1 -0
- package/dist/es5/workers/dbf-worker.js +6 -0
- package/dist/es5/workers/dbf-worker.js.map +1 -0
- package/dist/es5/workers/shp-worker.js +6 -0
- package/dist/es5/workers/shp-worker.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/dbf-loader.js +24 -0
- package/dist/esm/dbf-loader.js.map +1 -0
- package/dist/esm/index.js +4 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lib/parsers/parse-dbf.js +296 -0
- package/dist/esm/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shapefile.js +187 -0
- package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js +191 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-header.js +29 -0
- package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp.js +134 -0
- package/dist/esm/lib/parsers/parse-shp.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shx.js +20 -0
- package/dist/esm/lib/parsers/parse-shx.js.map +1 -0
- package/dist/esm/lib/parsers/types.js +2 -0
- package/dist/esm/lib/parsers/types.js.map +1 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js +106 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/esm/lib/streaming/binary-reader.js +27 -0
- package/dist/esm/lib/streaming/binary-reader.js.map +1 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js +44 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/esm/shapefile-loader.js +23 -0
- package/dist/esm/shapefile-loader.js.map +1 -0
- package/dist/esm/shp-loader.js +26 -0
- package/dist/esm/shp-loader.js.map +1 -0
- package/dist/esm/workers/dbf-worker.js +4 -0
- package/dist/esm/workers/dbf-worker.js.map +1 -0
- package/dist/esm/workers/shp-worker.js +4 -0
- package/dist/esm/workers/shp-worker.js.map +1 -0
- package/dist/index.js +11 -4
- package/dist/lib/parsers/parse-dbf.d.ts +4 -18
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +309 -264
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +227 -209
- package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +265 -212
- package/dist/lib/parsers/parse-shp-header.js +38 -27
- package/dist/lib/parsers/parse-shp.d.ts +3 -2
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +160 -136
- package/dist/lib/parsers/parse-shx.js +25 -19
- package/dist/lib/parsers/types.d.ts +68 -0
- package/dist/lib/parsers/types.d.ts.map +1 -0
- package/dist/lib/parsers/types.js +2 -0
- package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
- package/dist/lib/streaming/binary-chunk-reader.js +152 -128
- package/dist/lib/streaming/binary-reader.js +50 -33
- package/dist/lib/streaming/zip-batch-iterators.js +57 -48
- package/dist/shapefile-loader.js +30 -22
- package/dist/shp-loader.js +32 -22
- package/dist/shp-worker.js +57 -19
- package/dist/workers/dbf-worker.js +5 -4
- package/dist/workers/shp-worker.js +5 -4
- package/package.json +7 -7
- package/src/lib/parsers/parse-dbf.ts +41 -67
- package/src/lib/parsers/parse-shapefile.ts +3 -6
- package/src/lib/parsers/parse-shp-geometry.ts +3 -2
- package/src/lib/parsers/parse-shp.ts +26 -12
- package/src/lib/parsers/types.ts +79 -0
- package/src/lib/streaming/binary-chunk-reader.ts +5 -1
- package/src/lib/streaming/zip-batch-iterators.ts +2 -2
- package/dist/bundle.js.map +0 -1
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
package/dist/lib/streaming/binary-chunk-reader.js
CHANGED
@@ -1,137 +1,161 @@
-
-
-
-
-
-
-
-
-
-
-
-
-      maxRewindBytes = 0
-    } = options || {};
-    this.offset = 0;
-    this.arrayBuffers = [];
-    this.ended = false;
-    this.maxRewindBytes = maxRewindBytes;
-  }
-
-  write(arrayBuffer) {
-    this.arrayBuffers.push(arrayBuffer);
-  }
-
-  end() {
-    this.arrayBuffers = [];
-    this.ended = true;
-  }
-
-  hasAvailableBytes(bytes) {
-    let bytesAvailable = -this.offset;
-
-    for (const arrayBuffer of this.arrayBuffers) {
-      bytesAvailable += arrayBuffer.byteLength;
-
-      if (bytesAvailable >= bytes) {
-        return true;
-      }
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+class BinaryChunkReader {
+    constructor(options) {
+        const { maxRewindBytes = 0 } = options || {};
+        /** current global offset into current array buffer*/
+        this.offset = 0;
+        /** current buffer from iterator */
+        this.arrayBuffers = [];
+        this.ended = false;
+        /** bytes behind offset to hold on to */
+        this.maxRewindBytes = maxRewindBytes;
     }
-
-
-
-
-
-    let offset = -this.offset;
-    const selectedBuffers = [];
-
-    for (let i = 0; i < this.arrayBuffers.length; i++) {
-      const buf = this.arrayBuffers[i];
-
-      if (offset + buf.byteLength <= 0) {
-        offset += buf.byteLength;
-        continue;
-      }
-
-      const start = offset <= 0 ? Math.abs(offset) : 0;
-      let end;
-
-      if (start + bytes <= buf.byteLength) {
-        end = start + bytes;
-        selectedBuffers.push([i, [start, end]]);
-        return selectedBuffers;
-      }
-
-      end = buf.byteLength;
-      selectedBuffers.push([i, [start, end]]);
-      bytes -= buf.byteLength - start;
-      offset += buf.byteLength;
+    /**
+     * @param arrayBuffer
+     */
+    write(arrayBuffer) {
+        this.arrayBuffers.push(arrayBuffer);
    }
-
-
-
-
-  getDataView(bytes) {
-    const bufferOffsets = this.findBufferOffsets(bytes);
-
-    if (!bufferOffsets && this.ended) {
-      throw new Error('binary data exhausted');
+    end() {
+        this.arrayBuffers = [];
+        this.ended = true;
    }
-
-
-
+    /**
+     * Has enough bytes available in array buffers
+     *
+     * @param bytes Number of bytes
+     * @return boolean
+     */
+    hasAvailableBytes(bytes) {
+        let bytesAvailable = -this.offset;
+        for (const arrayBuffer of this.arrayBuffers) {
+            bytesAvailable += arrayBuffer.byteLength;
+            if (bytesAvailable >= bytes) {
+                return true;
+            }
+        }
+        return false;
    }
-
-
-
-
-
-
-
-
+    /**
+     * Find offsets of byte ranges within this.arrayBuffers
+     *
+     * @param bytes Byte length to read
+     * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
+     */
+    findBufferOffsets(bytes) {
+        let offset = -this.offset;
+        const selectedBuffers = [];
+        for (let i = 0; i < this.arrayBuffers.length; i++) {
+            const buf = this.arrayBuffers[i];
+            // Current buffer isn't long enough to reach global offset
+            if (offset + buf.byteLength <= 0) {
+                offset += buf.byteLength;
+                // eslint-disable-next-line no-continue
+                continue;
+            }
+            // Find start/end offsets for this buffer
+            // When offset < 0, need to skip over Math.abs(offset) bytes
+            // When offset > 0, implies bytes in previous buffer, start at 0
+            const start = offset <= 0 ? Math.abs(offset) : 0;
+            let end;
+            // Length of requested bytes is contained in current buffer
+            if (start + bytes <= buf.byteLength) {
+                end = start + bytes;
+                selectedBuffers.push([i, [start, end]]);
+                return selectedBuffers;
+            }
+            // Will need to look into next buffer
+            end = buf.byteLength;
+            selectedBuffers.push([i, [start, end]]);
+            // Need to read fewer bytes in next iter
+            bytes -= buf.byteLength - start;
+            offset += buf.byteLength;
+        }
+        // Should only finish loop if exhausted all arrays
+        return null;
    }
-
-
-
-
-
-
-
-
-
-
-
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes Number of bytes
+     * @return DataView with data
+     */
+    getDataView(bytes) {
+        const bufferOffsets = this.findBufferOffsets(bytes);
+        // return `null` if not enough data, except if end() already called, in
+        // which case throw an error.
+        if (!bufferOffsets && this.ended) {
+            throw new Error('binary data exhausted');
+        }
+        if (!bufferOffsets) {
+            // @ts-ignore
+            return null;
+        }
+        // If only one arrayBuffer needed, return DataView directly
+        if (bufferOffsets.length === 1) {
+            const [bufferIndex, [start, end]] = bufferOffsets[0];
+            const arrayBuffer = this.arrayBuffers[bufferIndex];
+            const view = new DataView(arrayBuffer, start, end - start);
+            this.offset += bytes;
+            this.disposeBuffers();
+            return view;
+        }
+        // Concatenate portions of multiple ArrayBuffers
+        const view = new DataView(this._combineArrayBuffers(bufferOffsets));
+        this.offset += bytes;
+        this.disposeBuffers();
+        return view;
    }
-
-
-
-
-
-
-
-
+    /**
+     * Dispose of old array buffers
+     */
+    disposeBuffers() {
+        while (this.arrayBuffers.length > 0 &&
+            this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
+            this.offset -= this.arrayBuffers[0].byteLength;
+            this.arrayBuffers.shift();
+        }
    }
-
-
-
-
-
-
-
-
-
+    /**
+     * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
+     *
+     * In contrast to concatenateArrayBuffers, this only copies the necessary
+     * portions of the source arrays, rather than first copying the entire arrays
+     * then taking a part of them.
+     *
+     * @param bufferOffsets List of internal array offsets
+     * @return New contiguous ArrayBuffer
+     */
+    _combineArrayBuffers(bufferOffsets) {
+        let byteLength = 0;
+        for (const bufferOffset of bufferOffsets) {
+            const [start, end] = bufferOffset[1];
+            byteLength += end - start;
+        }
+        const result = new Uint8Array(byteLength);
+        // Copy the subarrays
+        let resultOffset = 0;
+        for (const bufferOffset of bufferOffsets) {
+            const [bufferIndex, [start, end]] = bufferOffset;
+            const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);
+            result.set(sourceArray.subarray(start, end), resultOffset);
+            resultOffset += end - start;
+        }
+        return result.buffer;
+    }
+    /**
+     * @param bytes
+     */
+    skip(bytes) {
+        this.offset += bytes;
+    }
+    /**
+     * @param bytes
+     */
+    rewind(bytes) {
+        // TODO - only works if offset is already set
+        this.offset -= bytes;
    }
-
-    return result.buffer;
-  }
-
-  skip(bytes) {
-    this.offset += bytes;
-  }
-
-  rewind(bytes) {
-    this.offset -= bytes;
-  }
-
 }
-
+exports.default = BinaryChunkReader;
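Note: a minimal usage sketch of the BinaryChunkReader class shown above (not taken from the package; the buffers and byte counts are made up). It illustrates that getDataView() can return a view spanning chunks pushed by separate write() calls:

const reader = new BinaryChunkReader({maxRewindBytes: 8});
reader.write(new Uint8Array([1, 2, 3, 4]).buffer);
reader.write(new Uint8Array([5, 6, 7, 8]).buffer);
// The requested range spans both chunks, so the bytes are copied into one contiguous DataView
const view = reader.getDataView(6);
// view.byteLength === 6, view.getUint8(0) === 1
reader.end();
// After end(), any further getDataView() call throws 'binary data exhausted'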
package/dist/lib/streaming/binary-reader.js
CHANGED
@@ -1,35 +1,52 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+class BinaryReader {
+    constructor(arrayBuffer) {
+        /** current global (stream) offset */
+        this.offset = 0;
+        /** current buffer from iterator */
+        this.arrayBuffer = arrayBuffer;
+    }
+    /**
+     * Checks if there are available bytes in data
+     *
+     * @param bytes
+     * @returns boolean
+     */
+    hasAvailableBytes(bytes) {
+        return this.arrayBuffer.byteLength - this.offset >= bytes;
+    }
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes
+     * @returns Dataview
+     */
+    getDataView(bytes) {
+        if (bytes && !this.hasAvailableBytes(bytes)) {
+            throw new Error('binary data exhausted');
+        }
+        const dataView = bytes
+            ? new DataView(this.arrayBuffer, this.offset, bytes)
+            : new DataView(this.arrayBuffer, this.offset);
+        this.offset += bytes;
+        return dataView;
+    }
+    /**
+     * Skipping
+     *
+     * @param bytes
+     */
+    skip(bytes) {
+        this.offset += bytes;
+    }
+    /**
+     * Rewinding
+     *
+     * @param bytes
+     */
+    rewind(bytes) {
+        this.offset -= bytes;
    }
-
-    const dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
-    this.offset += bytes;
-    return dataView;
-  }
-
-  skip(bytes) {
-    this.offset += bytes;
-  }
-
-  rewind(bytes) {
-    this.offset -= bytes;
-  }
-
 }
-
+exports.default = BinaryReader;
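Note: a minimal sketch of the non-streaming BinaryReader above, which operates on a single ArrayBuffer (the values are made up for illustration):

const reader = new BinaryReader(new Uint8Array([1, 2, 3, 4]).buffer);
reader.skip(2);
const view = reader.getDataView(2); // view.getUint8(0) === 3
reader.rewind(4); // back to the start of the buffer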
package/dist/lib/streaming/zip-batch-iterators.js
CHANGED
@@ -1,52 +1,61 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.zipBatchIterators = void 0;
+/**
+ * Zip two iterators together
+ *
+ * @param iterator1
+ * @param iterator2
+ */
+async function* zipBatchIterators(iterator1, iterator2) {
+    let batch1 = [];
+    let batch2 = [];
+    let iterator1Done = false;
+    let iterator2Done = false;
+    // TODO - one could let all iterators flow at full speed using `Promise.race`
+    // however we might end up with a big temporary buffer
+    while (!iterator1Done && !iterator2Done) {
+        if (batch1.length === 0 && !iterator1Done) {
+            const { value, done } = await iterator1.next();
+            if (done) {
+                iterator1Done = true;
+            }
+            else {
+                batch1 = value;
+            }
+        }
+        else if (batch2.length === 0 && !iterator2Done) {
+            const { value, done } = await iterator2.next();
+            if (done) {
+                iterator2Done = true;
+            }
+            else {
+                batch2 = value;
+            }
+        }
+        const batch = extractBatch(batch1, batch2);
+        if (batch) {
+            yield batch;
+        }
    }
-
-    const batch = extractBatch(batch1, batch2);
-
-    if (batch) {
-      yield batch;
-    }
-  }
 }
-
+exports.zipBatchIterators = zipBatchIterators;
+/**
+ * Extract batch of same length from two batches
+ *
+ * @param batch1
+ * @param batch2
+ * @return array | null
+ */
 function extractBatch(batch1, batch2) {
-
-
-
-
-
-
-
-
-
-
+    const batchLength = Math.min(batch1.length, batch2.length);
+    if (batchLength === 0) {
+        return null;
+    }
+    // Non interleaved arrays
+    const batch = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];
+    // Modify the 2 batches
+    batch1.splice(0, batchLength);
+    batch2.splice(0, batchLength);
+    return batch;
 }
-//# sourceMappingURL=zip-batch-iterators.js.map
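Note: a minimal sketch of how zipBatchIterators pairs up two async batch iterators; the input generators are made up for illustration:

async function* numbers() { yield [1, 2, 3]; yield [4]; }
async function* letters() { yield ['a']; yield ['b', 'c', 'd']; }
(async () => {
  for await (const [nums, chars] of zipBatchIterators(numbers(), letters())) {
    // Yields equal-length pairs: [1] with ['a'], then [2, 3] with ['b', 'c'], then [4] with ['d']
    console.log(nums, chars);
  }
})();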
package/dist/shapefile-loader.js
CHANGED
@@ -1,23 +1,31 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports._typecheckShapefileLoader = exports.ShapefileLoader = void 0;
+const shp_loader_1 = require("./shp-loader");
+const parse_shapefile_1 = require("./lib/parsers/parse-shapefile");
+// __VERSION__ is injected by babel-plugin-version-inline
+// @ts-ignore TS2304: Cannot find name '__VERSION__'.
+const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
+/**
+ * Shapefile loader
+ * @note Shapefile is multifile format and requires providing additional files
+ */
+exports.ShapefileLoader = {
+    name: 'Shapefile',
+    id: 'shapefile',
+    module: 'shapefile',
+    version: VERSION,
+    category: 'geometry',
+    extensions: ['shp'],
+    mimeTypes: ['application/octet-stream'],
+    tests: [new Uint8Array(shp_loader_1.SHP_MAGIC_NUMBER).buffer],
+    options: {
+        shapefile: {},
+        shp: {
+            _maxDimensions: 4
+        }
+    },
+    parse: parse_shapefile_1.parseShapefile,
+    parseInBatches: parse_shapefile_1.parseShapefileInBatches
 };
-
-//# sourceMappingURL=shapefile-loader.js.map
+exports._typecheckShapefileLoader = exports.ShapefileLoader;
package/dist/shp-loader.js
CHANGED
@@ -1,25 +1,35 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SHPLoader = exports.SHPWorkerLoader = exports.SHP_MAGIC_NUMBER = void 0;
+const parse_shp_1 = require("./lib/parsers/parse-shp");
+// __VERSION__ is injected by babel-plugin-version-inline
+// @ts-ignore TS2304: Cannot find name '__VERSION__'.
+const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
+exports.SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];
+/**
+ * SHP file loader
+ */
+exports.SHPWorkerLoader = {
+    name: 'SHP',
+    id: 'shp',
+    module: 'shapefile',
+    version: VERSION,
+    worker: true,
+    category: 'geometry',
+    extensions: ['shp'],
+    mimeTypes: ['application/octet-stream'],
+    // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...
+    tests: [new Uint8Array(exports.SHP_MAGIC_NUMBER).buffer],
+    options: {
+        shp: {
+            _maxDimensions: 4
+        }
    }
-  }
 };
-
-
-
-
+/** SHP file loader */
+exports.SHPLoader = {
+    ...exports.SHPWorkerLoader,
+    parse: async (arrayBuffer, options) => (0, parse_shp_1.parseSHP)(arrayBuffer, options),
+    parseSync: parse_shp_1.parseSHP,
+    parseInBatches: parse_shp_1.parseSHPInBatches
 };
-//# sourceMappingURL=shp-loader.js.map
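Note: a minimal sketch of using the loader objects above with the standard loaders.gl load() API from @loaders.gl/core; the file path is a placeholder and the shape of the returned data is not shown here:

const {load} = require('@loaders.gl/core');
const {ShapefileLoader, SHPLoader} = require('@loaders.gl/shapefile');

(async () => {
  // Full shapefile: per the @note above, the sibling .dbf/.shx/.prj files are expected alongside the .shp
  const shapefile = await load('data/example.shp', ShapefileLoader); // placeholder path
  // Geometry only, parsed from the .shp file alone
  const geometries = await load('data/example.shp', SHPLoader);
  console.log(shapefile, geometries);
})();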