@loaders.gl/shapefile 4.0.0-beta.2 → 4.0.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dbf-loader.d.ts.map +1 -0
- package/dist/{esm/dbf-loader.js → dbf-loader.js} +2 -2
- package/dist/dbf-loader.js.map +1 -0
- package/dist/dbf-worker.js +82 -26
- package/dist/{dist.min.js → dist.dev.js} +1449 -1973
- package/dist/index.cjs +1107 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-dbf.js +8 -9
- package/dist/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shapefile.js +5 -5
- package/dist/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp-geometry.js +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/lib/parsers/parse-shp-header.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp-header.js +2 -2
- package/dist/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp.js +11 -12
- package/dist/lib/parsers/parse-shp.js.map +1 -0
- package/dist/lib/parsers/parse-shx.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shx.js +1 -1
- package/dist/lib/parsers/parse-shx.js.map +1 -0
- package/dist/lib/parsers/types.d.ts.map +1 -0
- package/dist/lib/parsers/types.js.map +1 -0
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/streaming/binary-chunk-reader.js +4 -5
- package/dist/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/lib/streaming/binary-reader.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/streaming/binary-reader.js +2 -3
- package/dist/lib/streaming/binary-reader.js.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.d.ts.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/shapefile-loader.d.ts.map +1 -0
- package/dist/{esm/shapefile-loader.js → shapefile-loader.js} +3 -3
- package/dist/shapefile-loader.js.map +1 -0
- package/dist/shp-loader.d.ts.map +1 -0
- package/dist/{esm/shp-loader.js → shp-loader.js} +2 -2
- package/dist/shp-loader.js.map +1 -0
- package/dist/shp-worker.js +64 -3
- package/dist/{src/workers → workers}/dbf-worker.d.ts.map +1 -1
- package/dist/{esm/workers → workers}/dbf-worker.js +1 -1
- package/dist/workers/dbf-worker.js.map +1 -0
- package/dist/{src/workers → workers}/shp-worker.d.ts.map +1 -1
- package/dist/{esm/workers → workers}/shp-worker.js +1 -1
- package/dist/workers/shp-worker.js.map +1 -0
- package/package.json +18 -10
- package/dist/es5/bundle.js +0 -6
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/dbf-loader.js +0 -55
- package/dist/es5/dbf-loader.js.map +0 -1
- package/dist/es5/index.js +0 -60
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/lib/parsers/parse-dbf.js +0 -394
- package/dist/es5/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shapefile.js +0 -377
- package/dist/es5/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-geometry.js +0 -220
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-header.js +0 -35
- package/dist/es5/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp.js +0 -227
- package/dist/es5/lib/parsers/parse-shp.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shx.js +0 -26
- package/dist/es5/lib/parsers/parse-shx.js.map +0 -1
- package/dist/es5/lib/parsers/types.js +0 -2
- package/dist/es5/lib/parsers/types.js.map +0 -1
- package/dist/es5/lib/streaming/binary-chunk-reader.js +0 -178
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/es5/lib/streaming/binary-reader.js +0 -48
- package/dist/es5/lib/streaming/binary-reader.js.map +0 -1
- package/dist/es5/lib/streaming/zip-batch-iterators.js +0 -95
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/es5/shapefile-loader.js +0 -29
- package/dist/es5/shapefile-loader.js.map +0 -1
- package/dist/es5/shp-loader.js +0 -58
- package/dist/es5/shp-loader.js.map +0 -1
- package/dist/es5/workers/dbf-worker.js +0 -6
- package/dist/es5/workers/dbf-worker.js.map +0 -1
- package/dist/es5/workers/shp-worker.js +0 -6
- package/dist/es5/workers/shp-worker.js.map +0 -1
- package/dist/esm/bundle.js +0 -4
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/dbf-loader.js.map +0 -1
- package/dist/esm/index.js +0 -7
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shx.js.map +0 -1
- package/dist/esm/lib/parsers/types.js.map +0 -1
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/esm/lib/streaming/binary-reader.js.map +0 -1
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/esm/shapefile-loader.js.map +0 -1
- package/dist/esm/shp-loader.js.map +0 -1
- package/dist/esm/workers/dbf-worker.js.map +0 -1
- package/dist/esm/workers/shp-worker.js.map +0 -1
- package/dist/src/bundle.d.ts +0 -2
- package/dist/src/bundle.d.ts.map +0 -1
- package/dist/src/dbf-loader.d.ts.map +0 -1
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-dbf.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shapefile.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp-geometry.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp-header.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shx.d.ts.map +0 -1
- package/dist/src/lib/parsers/types.d.ts.map +0 -1
- package/dist/src/lib/streaming/binary-chunk-reader.d.ts.map +0 -1
- package/dist/src/lib/streaming/binary-reader.d.ts.map +0 -1
- package/dist/src/lib/streaming/zip-batch-iterators.d.ts.map +0 -1
- package/dist/src/shapefile-loader.d.ts.map +0 -1
- package/dist/src/shp-loader.d.ts.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/src/bundle.ts +0 -4
- /package/dist/{src/dbf-loader.d.ts → dbf-loader.d.ts} +0 -0
- /package/dist/{src/index.d.ts → index.d.ts} +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-dbf.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shapefile.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp-geometry.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp-header.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shx.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/types.d.ts +0 -0
- /package/dist/{esm/lib → lib}/parsers/types.js +0 -0
- /package/dist/{src/lib → lib}/streaming/binary-chunk-reader.d.ts +0 -0
- /package/dist/{src/lib → lib}/streaming/binary-reader.d.ts +0 -0
- /package/dist/{src/lib → lib}/streaming/zip-batch-iterators.d.ts +0 -0
- /package/dist/{esm/lib → lib}/streaming/zip-batch-iterators.js +0 -0
- /package/dist/{src/shapefile-loader.d.ts → shapefile-loader.d.ts} +0 -0
- /package/dist/{src/shp-loader.d.ts → shp-loader.d.ts} +0 -0
- /package/dist/{src/workers → workers}/dbf-worker.d.ts +0 -0
- /package/dist/{src/workers → workers}/shp-worker.d.ts +0 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1107 @@
|
|
|
1
|
+
"use strict";
// esbuild CommonJS interop prelude: exposes the bundle's ESM named exports
// as lazy getters on module.exports.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each entry of `all` as an enumerable getter on `target`,
// so exports are resolved lazily at access time.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` onto `to` (skipping `except` and keys already
// present), preserving enumerability via the source property descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Mark the export object as an ES module and copy the exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var src_exports = {};
// Public API of @loaders.gl/shapefile; underscore-prefixed entries are
// exported for internal/experimental use.
__export(src_exports, {
  DBFLoader: () => DBFLoader,
  DBFWorkerLoader: () => DBFWorkerLoader,
  SHPLoader: () => SHPLoader,
  SHPWorkerLoader: () => SHPWorkerLoader,
  ShapefileLoader: () => ShapefileLoader,
  _BinaryChunkReader: () => BinaryChunkReader,
  _BinaryReader: () => BinaryReader,
  _zipBatchIterators: () => zipBatchIterators
});
module.exports = __toCommonJS(src_exports);
|
|
33
|
+
|
|
34
|
+
// src/lib/streaming/binary-chunk-reader.ts
// Incremental reader over a sequence of ArrayBuffer chunks. Supports reading
// DataViews that span chunk boundaries, skipping, and bounded rewinding.
var BinaryChunkReader = class {
  constructor(options) {
    const { maxRewindBytes = 0 } = options || {};
    // Byte offset of the read cursor, relative to the start of arrayBuffers[0]
    this.offset = 0;
    // Queue of chunks not yet fully consumed (oldest first)
    this.arrayBuffers = [];
    this.ended = false;
    // Number of already-consumed bytes to retain so rewind() can back up
    this.maxRewindBytes = maxRewindBytes;
  }
  /**
   * @param arrayBuffer
   */
  write(arrayBuffer) {
    this.arrayBuffers.push(arrayBuffer);
  }
  // Signal end of stream. NOTE: also drops any remaining buffered chunks.
  end() {
    this.arrayBuffers = [];
    this.ended = true;
  }
  /**
   * Has enough bytes available in array buffers
   *
   * @param bytes Number of bytes
   * @return boolean
   */
  hasAvailableBytes(bytes) {
    // Start negative: bytes before `offset` in the first chunk are consumed
    let bytesAvailable = -this.offset;
    for (const arrayBuffer of this.arrayBuffers) {
      bytesAvailable += arrayBuffer.byteLength;
      if (bytesAvailable >= bytes) {
        return true;
      }
    }
    return false;
  }
  /**
   * Find offsets of byte ranges within this.arrayBuffers
   *
   * @param bytes Byte length to read
   * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
   *         Returns null when fewer than `bytes` bytes are buffered.
   */
  findBufferOffsets(bytes) {
    let offset = -this.offset;
    const selectedBuffers = [];
    for (let i = 0; i < this.arrayBuffers.length; i++) {
      const buf = this.arrayBuffers[i];
      // Chunk lies entirely before the read cursor; skip it
      if (offset + buf.byteLength <= 0) {
        offset += buf.byteLength;
        continue;
      }
      const start = offset <= 0 ? Math.abs(offset) : 0;
      let end;
      // Remaining bytes fit inside this chunk: done
      if (start + bytes <= buf.byteLength) {
        end = start + bytes;
        selectedBuffers.push([i, [start, end]]);
        return selectedBuffers;
      }
      // Take the rest of this chunk and continue into the next one
      end = buf.byteLength;
      selectedBuffers.push([i, [start, end]]);
      bytes -= buf.byteLength - start;
      offset += buf.byteLength;
    }
    return null;
  }
  /**
   * Get the required number of bytes from the iterator
   *
   * @param bytes Number of bytes
   * @return DataView with data, or null if not enough bytes are buffered yet
   */
  getDataView(bytes) {
    const bufferOffsets = this.findBufferOffsets(bytes);
    if (!bufferOffsets && this.ended) {
      throw new Error("binary data exhausted");
    }
    if (!bufferOffsets) {
      return null;
    }
    // Fast path: range lies within a single chunk — zero-copy view
    if (bufferOffsets.length === 1) {
      const [bufferIndex, [start, end]] = bufferOffsets[0];
      const arrayBuffer = this.arrayBuffers[bufferIndex];
      const view2 = new DataView(arrayBuffer, start, end - start);
      this.offset += bytes;
      this.disposeBuffers();
      return view2;
    }
    // Slow path: range spans chunks — copy into one contiguous buffer
    const view = new DataView(this._combineArrayBuffers(bufferOffsets));
    this.offset += bytes;
    this.disposeBuffers();
    return view;
  }
  /**
   * Dispose of old array buffers
   * (keeps up to maxRewindBytes of consumed data so rewind() stays valid)
   */
  disposeBuffers() {
    while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
      this.offset -= this.arrayBuffers[0].byteLength;
      this.arrayBuffers.shift();
    }
  }
  /**
   * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
   *
   * In contrast to concatenateArrayBuffers, this only copies the necessary
   * portions of the source arrays, rather than first copying the entire arrays
   * then taking a part of them.
   *
   * @param bufferOffsets List of internal array offsets
   * @return New contiguous ArrayBuffer
   */
  _combineArrayBuffers(bufferOffsets) {
    // First pass: total size of the selected ranges
    let byteLength = 0;
    for (const bufferOffset of bufferOffsets) {
      const [start, end] = bufferOffset[1];
      byteLength += end - start;
    }
    const result = new Uint8Array(byteLength);
    // Second pass: copy each selected range into place
    let resultOffset = 0;
    for (const bufferOffset of bufferOffsets) {
      const [bufferIndex, [start, end]] = bufferOffset;
      const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);
      result.set(sourceArray.subarray(start, end), resultOffset);
      resultOffset += end - start;
    }
    return result.buffer;
  }
  /**
   * Advance the read cursor without reading.
   * @param bytes
   */
  skip(bytes) {
    this.offset += bytes;
  }
  /**
   * Move the read cursor backwards; only safe within maxRewindBytes
   * of already-consumed data.
   * @param bytes
   */
  rewind(bytes) {
    this.offset -= bytes;
  }
};
|
|
173
|
+
|
|
174
|
+
// src/lib/parsers/parse-shp-header.ts
var LITTLE_ENDIAN = true;
var BIG_ENDIAN = false;
var SHP_MAGIC_NUMBER = 9994;
/**
 * Parse the fixed 100-byte header shared by SHP and SHX files.
 *
 * @param headerView DataView positioned at the start of the file
 * @returns header record: magic, length (bytes), version, shape type
 *          and the 2/3/4-dimensional bounding box
 */
function parseSHPHeader(headerView) {
  const magic = headerView.getInt32(0, BIG_ENDIAN);
  // Length is stored as # of 2-byte words; multiply by 2 to get # of bytes
  const length = headerView.getInt32(24, BIG_ENDIAN) * 2;
  const version = headerView.getInt32(28, LITTLE_ENDIAN);
  const type = headerView.getInt32(32, LITTLE_ENDIAN);
  // X/Y extents are interleaved (min pair then max pair); Z and M ranges follow
  const bbox = {
    minX: headerView.getFloat64(36, LITTLE_ENDIAN),
    minY: headerView.getFloat64(44, LITTLE_ENDIAN),
    minZ: headerView.getFloat64(68, LITTLE_ENDIAN),
    minM: headerView.getFloat64(84, LITTLE_ENDIAN),
    maxX: headerView.getFloat64(52, LITTLE_ENDIAN),
    maxY: headerView.getFloat64(60, LITTLE_ENDIAN),
    maxZ: headerView.getFloat64(76, LITTLE_ENDIAN),
    maxM: headerView.getFloat64(92, LITTLE_ENDIAN)
  };
  const header = { magic, length, version, type, bbox };
  // Diagnostics only — parsing continues even on a bad header
  if (magic !== SHP_MAGIC_NUMBER) {
    console.error(`SHP file: bad magic number ${magic}`);
  }
  if (version !== 1e3) {
    console.error(`SHP file: bad version ${version}`);
  }
  return header;
}
|
|
204
|
+
|
|
205
|
+
// src/lib/parsers/parse-shp-geometry.ts
var LITTLE_ENDIAN2 = true;
/**
 * Parse the body of one SHP record (shape type word plus shape data)
 * into a binary geometry object, or null for a Null Shape.
 *
 * @param view DataView over the record contents
 * @param options loader options; `options.shp._maxDimensions` caps output dimensions
 * @throws Error on unknown shape type codes
 */
function parseRecord(view, options) {
  const { _maxDimensions = 4 } = (options == null ? void 0 : options.shp) || {};
  let offset = 0;
  const type = view.getInt32(offset, LITTLE_ENDIAN2);
  offset += Int32Array.BYTES_PER_ELEMENT;
  if (type === 0) {
    return parseNull();
  }
  // Shape-type code -> [native dimensions stored in the file, geometry kind]
  // 1/3/5/8 = XY, 11/13/15/18 = XYZM, 21/23/25/28 = XYM
  const SHAPE_TABLE = {
    1: [2, "Point"],
    3: [2, "LineString"],
    5: [2, "Polygon"],
    8: [2, "MultiPoint"],
    11: [4, "Point"],
    13: [4, "LineString"],
    15: [4, "Polygon"],
    18: [4, "MultiPoint"],
    21: [3, "Point"],
    23: [3, "LineString"],
    25: [3, "Polygon"],
    28: [3, "MultiPoint"]
  };
  const entry = SHAPE_TABLE[type];
  if (!entry) {
    throw new Error(`unsupported shape type: ${type}`);
  }
  const [nativeDim, kind] = entry;
  // Never emit more dimensions than the caller asked for
  const dim = Math.min(nativeDim, _maxDimensions);
  switch (kind) {
    case "Point":
      return parsePoint(view, offset, dim);
    case "MultiPoint":
      return parseMultiPoint(view, offset, dim);
    default:
      return parsePoly(view, offset, dim, kind);
  }
}
|
|
243
|
+
/** Null Shape (type 0): carries no geometry at all. */
function parseNull() {
  return null;
}
|
|
246
|
+
/**
 * Parse a single Point / PointZ / PointM record into a binary Point geometry.
 * @param view DataView over the record body
 * @param offset byte offset just past the shape type word
 * @param dim number of coordinate dimensions to read (2-4)
 */
function parsePoint(view, offset, dim) {
  const [positions] = parsePositions(view, offset, 1, dim);
  return {
    positions: { value: positions, size: dim },
    type: "Point"
  };
}
|
|
254
|
+
/**
 * Parse a MultiPoint / MultiPointZ / MultiPointM record.
 * Emitted as a binary "Point" geometry holding all vertices.
 */
function parseMultiPoint(view, offset, dim) {
  // Skip the 4-float64 bounding box that precedes the point count
  offset += 4 * Float64Array.BYTES_PER_ELEMENT;
  const nPoints = view.getInt32(offset, LITTLE_ENDIAN2);
  offset += Int32Array.BYTES_PER_ELEMENT;
  let zPositions = null;
  let mPositions = null;
  let xyPositions = null;
  [xyPositions, offset] = parsePositions(view, offset, nPoints, 2);
  if (dim === 4) {
    // Skip the (min, max) z range before the z values
    offset += 2 * Float64Array.BYTES_PER_ELEMENT;
    [zPositions, offset] = parsePositions(view, offset, nPoints, 1);
  }
  if (dim >= 3) {
    // Skip the (min, max) m range before the m values
    offset += 2 * Float64Array.BYTES_PER_ELEMENT;
    [mPositions, offset] = parsePositions(view, offset, nPoints, 1);
  }
  return {
    positions: { value: concatPositions(xyPositions, mPositions, zPositions), size: dim },
    type: "Point"
  };
}
|
|
276
|
+
/**
 * Parse a PolyLine or Polygon record (with optional Z/M data) into a binary
 * LineString or Polygon geometry.
 *
 * @param view DataView over the record body
 * @param offset byte offset just past the shape type word
 * @param dim output coordinate dimensions (2-4)
 * @param type "LineString" or "Polygon"
 */
function parsePoly(view, offset, dim, type) {
  // Skip the 4-float64 bounding box
  offset += 4 * Float64Array.BYTES_PER_ELEMENT;
  const nParts = view.getInt32(offset, LITTLE_ENDIAN2);
  offset += Int32Array.BYTES_PER_ELEMENT;
  const nPoints = view.getInt32(offset, LITTLE_ENDIAN2);
  offset += Int32Array.BYTES_PER_ELEMENT;
  const bufferOffset = view.byteOffset + offset;
  const bufferLength = nParts * Int32Array.BYTES_PER_ELEMENT;
  // Ring/part start indices, with nPoints appended as a final sentinel so that
  // ring i spans [ringIndices[i], ringIndices[i + 1])
  const ringIndices = new Int32Array(nParts + 1);
  ringIndices.set(new Int32Array(view.buffer.slice(bufferOffset, bufferOffset + bufferLength)));
  ringIndices[nParts] = nPoints;
  offset += nParts * Int32Array.BYTES_PER_ELEMENT;
  let xyPositions = null;
  let mPositions = null;
  let zPositions = null;
  [xyPositions, offset] = parsePositions(view, offset, nPoints, 2);
  if (dim === 4) {
    // Skip the (min, max) z range before the z values
    offset += 2 * Float64Array.BYTES_PER_ELEMENT;
    [zPositions, offset] = parsePositions(view, offset, nPoints, 1);
  }
  if (dim >= 3) {
    // Skip the (min, max) m range before the m values
    offset += 2 * Float64Array.BYTES_PER_ELEMENT;
    [mPositions, offset] = parsePositions(view, offset, nPoints, 1);
  }
  const positions = concatPositions(xyPositions, mPositions, zPositions);
  if (type === "LineString") {
    return {
      type,
      positions: { value: positions, size: dim },
      pathIndices: { value: ringIndices, size: 1 }
    };
  }
  // Polygon: rings whose signed area is positive under getSignedArea start a
  // new polygon; others are treated as holes of the preceding outer ring.
  // NOTE(review): presumably this maps shapefile ring-orientation convention
  // (outer rings clockwise) onto polygon boundaries — confirm against spec.
  const polygonIndices = [];
  for (let i = 1; i < ringIndices.length; i++) {
    const startRingIndex = ringIndices[i - 1];
    const endRingIndex = ringIndices[i];
    const ring = xyPositions.subarray(startRingIndex * 2, endRingIndex * 2);
    const sign = getWindingDirection(ring);
    if (sign > 0) {
      polygonIndices.push(startRingIndex);
    }
  }
  polygonIndices.push(nPoints);
  return {
    type,
    positions: { value: positions, size: dim },
    primitivePolygonIndices: { value: ringIndices, size: 1 },
    // TODO: Dynamically choose Uint32Array over Uint16Array only when
    // necessary. I believe the implementation requires nPoints to be the
    // largest value in the array, so you should be able to use Uint32Array only
    // when nPoints > 65535.
    polygonIndices: { value: new Uint32Array(polygonIndices), size: 1 }
  };
}
|
|
330
|
+
/**
 * Read `nPoints * dim` little-endian float64 values starting at `offset`.
 * Copies the bytes out of the source buffer.
 *
 * @returns [Float64Array of values, offset advanced past the values]
 */
function parsePositions(view, offset, nPoints, dim) {
  const start = view.byteOffset + offset;
  const byteCount = nPoints * dim * Float64Array.BYTES_PER_ELEMENT;
  const positions = new Float64Array(view.buffer.slice(start, start + byteCount));
  return [positions, offset + byteCount];
}
|
|
338
|
+
/**
 * Interleave separate XY / Z / M coordinate arrays into one contiguous
 * positions array: [x, y, (z), (m)] per vertex, z before m when both exist.
 * Returns `xyPositions` unchanged when neither Z nor M data is provided.
 */
function concatPositions(xyPositions, mPositions, zPositions) {
  if (!zPositions && !mPositions) {
    return xyPositions;
  }
  const hasZ = Boolean(zPositions && zPositions.length);
  const hasM = Boolean(mPositions && mPositions.length);
  // Output stride: 2 base dimensions plus one slot each for present Z / M
  const nDim = 2 + (hasZ ? 1 : 0) + (hasM ? 1 : 0);
  const nPoints = xyPositions.length / 2;
  const totalLength =
    xyPositions.length + (hasZ ? zPositions.length : 0) + (hasM ? mPositions.length : 0);
  const positions = new Float64Array(totalLength);
  for (let i = 0; i < nPoints; i++) {
    positions[nDim * i] = xyPositions[2 * i];
    positions[nDim * i + 1] = xyPositions[2 * i + 1];
  }
  if (hasZ) {
    for (let i = 0; i < zPositions.length; i++) {
      positions[nDim * i + 2] = zPositions[i];
    }
  }
  if (hasM) {
    // M always occupies the last slot of each vertex
    for (let i = 0; i < mPositions.length; i++) {
      positions[nDim * i + (nDim - 1)] = mPositions[i];
    }
  }
  return positions;
}
|
|
369
|
+
/**
 * Winding direction of a flat [x0, y0, x1, y1, ...] ring.
 * @returns 1, -1, or 0 — the sign of the ring's signed area
 */
function getWindingDirection(positions) {
  return Math.sign(getSignedArea(positions));
}
/**
 * Signed area of a flat coordinate ring via the shoelace formula.
 * Assumes a closed ring (first vertex repeated at the end), so the last
 * coordinate pair is not treated as the start of a new edge.
 */
function getSignedArea(positions) {
  const nCoords = positions.length / 2 - 1;
  let doubleArea = 0;
  for (let i = 0; i < nCoords; i++) {
    const x0 = positions[2 * i];
    const y0 = positions[2 * i + 1];
    const x1 = positions[2 * i + 2];
    const y1 = positions[2 * i + 3];
    doubleArea += (x0 + x1) * (y0 - y1);
  }
  return doubleArea / 2;
}
|
|
380
|
+
|
|
381
|
+
// src/lib/parsers/parse-shp.ts
var LITTLE_ENDIAN3 = true;
var BIG_ENDIAN2 = false;
// Fixed 100-byte file header per the ESRI shapefile layout used above
var SHP_HEADER_SIZE = 100;
// The record header proper is 8 bytes; the streaming parser reads the
// record's 4-byte shape type together with it (hence 12).
var SHP_RECORD_HEADER_SIZE = 12;
// States of the streaming SHP parser (see parseState)
var STATE = {
  EXPECTING_HEADER: 0,
  EXPECTING_RECORD: 1,
  END: 2,
  ERROR: 3
};
|
|
392
|
+
/**
 * Streaming SHP parser: feed chunks with write(), finish with end().
 * Parsed output accumulates in `this.result` (header, geometries, progress).
 */
var SHPParser = class {
  constructor(options) {
    this.options = {};
    // Retain enough consumed bytes to rewind a speculatively read record header
    this.binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
    this.state = STATE.EXPECTING_HEADER;
    this.result = {
      geometries: [],
      // Initialize with number values to make TS happy
      // These are initialized for real in STATE.EXPECTING_HEADER
      progress: {
        bytesTotal: NaN,
        bytesUsed: NaN,
        rows: NaN
      },
      currentIndex: NaN
    };
    this.options = options;
  }
  // Feed one chunk of the file and advance the state machine as far as possible.
  write(arrayBuffer) {
    this.binaryReader.write(arrayBuffer);
    this.state = parseState(this.state, this.result, this.binaryReader, this.options);
  }
  // Signal end-of-input; flags an error if the file was truncated mid-record.
  end() {
    this.binaryReader.end();
    this.state = parseState(this.state, this.result, this.binaryReader, this.options);
    if (this.state !== STATE.END) {
      this.state = STATE.ERROR;
      this.result.error = "SHP incomplete file";
    }
  }
};
|
|
423
|
+
/**
 * Parse a complete SHP file held in a single ArrayBuffer (atomic,
 * non-streaming entry point).
 */
function parseSHP(arrayBuffer, options) {
  const parser = new SHPParser(options);
  parser.write(arrayBuffer);
  parser.end();
  return parser.result;
}
|
|
429
|
+
/**
 * Parse an SHP file incrementally from an async iterator of ArrayBuffers.
 * Yields the file header once (as soon as it is available), then arrays of
 * geometries as records are decoded, then any final geometries after end().
 */
async function* parseSHPInBatches(asyncIterator, options) {
  const parser = new SHPParser(options);
  let headerYielded = false;
  for await (const chunk of asyncIterator) {
    parser.write(chunk);
    // The header batch is emitted exactly once, before any geometry batches
    if (!headerYielded && parser.result.header) {
      headerYielded = true;
      yield parser.result.header;
    }
    const { geometries } = parser.result;
    if (geometries.length > 0) {
      yield geometries;
      parser.result.geometries = [];
    }
  }
  parser.end();
  // Flush geometries completed by the final (possibly rewound) bytes
  if (parser.result.geometries.length > 0) {
    yield parser.result.geometries;
  }
}
|
|
449
|
+
/**
 * Advance the SHP state machine as far as the buffered bytes allow.
 * Mutates `result` in place (header, geometries, progress, error) and
 * returns the new state. Safe to call repeatedly as chunks arrive.
 */
function parseState(state, result, binaryReader, options) {
  var _a;
  while (true) {
    try {
      switch (state) {
        case STATE.ERROR:
        case STATE.END:
          return state;
        case STATE.EXPECTING_HEADER:
          // Need the full 100-byte header before anything else
          const dataView = binaryReader.getDataView(SHP_HEADER_SIZE);
          if (!dataView) {
            return state;
          }
          result.header = parseSHPHeader(dataView);
          result.progress = {
            bytesUsed: 0,
            bytesTotal: result.header.length,
            rows: 0
          };
          // SHP record numbers are 1-based
          result.currentIndex = 1;
          state = STATE.EXPECTING_RECORD;
          break;
        case STATE.EXPECTING_RECORD:
          while (binaryReader.hasAvailableBytes(SHP_RECORD_HEADER_SIZE)) {
            // Speculatively read 12 bytes: 8-byte record header + 4-byte shape type
            const recordHeaderView = binaryReader.getDataView(SHP_RECORD_HEADER_SIZE);
            const recordHeader = {
              recordNumber: recordHeaderView.getInt32(0, BIG_ENDIAN2),
              // 2 byte words; includes the four words of record header
              byteLength: recordHeaderView.getInt32(4, BIG_ENDIAN2) * 2,
              // This is actually part of the record, not the header...
              type: recordHeaderView.getInt32(8, LITTLE_ENDIAN3)
            };
            // Record body not fully buffered yet: undo the header read and wait
            if (!binaryReader.hasAvailableBytes(recordHeader.byteLength - 4)) {
              binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
              return state;
            }
            const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== ((_a = result.header) == null ? void 0 : _a.type) || recordHeader.recordNumber !== result.currentIndex;
            if (invalidRecord) {
              // Resynchronize: step back to just after the 4 bytes we keep
              binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
            } else {
              // Step back over the shape type so the record view includes it
              binaryReader.rewind(4);
              const recordView = binaryReader.getDataView(recordHeader.byteLength);
              const geometry = parseRecord(recordView, options);
              result.geometries.push(geometry);
              result.currentIndex++;
              result.progress.rows = result.currentIndex - 1;
            }
          }
          if (binaryReader.ended) {
            state = STATE.END;
          }
          return state;
        default:
          state = STATE.ERROR;
          result.error = `illegal parser state ${state}`;
          return state;
      }
    } catch (error) {
      // Any parse failure is terminal for this stream
      state = STATE.ERROR;
      result.error = `SHP parsing failed: ${error == null ? void 0 : error.message}`;
      return state;
    }
  }
}
|
|
513
|
+
|
|
514
|
+
// src/shp-loader.ts
// __VERSION__ is injected at build time; fall back to "latest" for dev builds.
var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
// First four bytes of every .shp file (the magic number 9994, big-endian)
var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
// Worker loader descriptor: metadata only, no parser functions (parsing
// happens on a worker thread).
var SHPWorkerLoader = {
  name: "SHP",
  id: "shp",
  module: "shapefile",
  version: VERSION,
  worker: true,
  category: "geometry",
  extensions: ["shp"],
  mimeTypes: ["application/octet-stream"],
  // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...
  tests: [new Uint8Array(SHP_MAGIC_NUMBER2).buffer],
  options: {
    shp: {
      _maxDimensions: 4
    }
  }
};
// Main-thread loader: same descriptor plus the actual parser entry points.
var SHPLoader = {
  ...SHPWorkerLoader,
  parse: async (arrayBuffer, options) => parseSHP(arrayBuffer, options),
  parseSync: parseSHP,
  parseInBatches: (arrayBufferIterator, options) => parseSHPInBatches(arrayBufferIterator, options)
};
|
|
540
|
+
|
|
541
|
+
// src/lib/parsers/parse-shapefile.ts
|
|
542
|
+
var import_loader_utils = require("@loaders.gl/loader-utils");
|
|
543
|
+
var import_gis = require("@loaders.gl/gis");
|
|
544
|
+
var import_proj4 = require("@math.gl/proj4");
|
|
545
|
+
|
|
546
|
+
// src/lib/parsers/parse-shx.ts
var SHX_HEADER_SIZE = 100;
var BIG_ENDIAN3 = false;
/**
 * Parse an SHX (shapefile index) file.
 *
 * The SHX shares the 100-byte SHP header; after it, each 8-byte record holds
 * a big-endian int32 offset and int32 content length (both in 16-bit words).
 *
 * @param arrayBuffer the complete SHX file
 * @returns parallel Int32Arrays of record offsets and record lengths
 */
function parseShx(arrayBuffer) {
  const headerView = new DataView(arrayBuffer, 0, SHX_HEADER_SIZE);
  const header = parseSHPHeader(headerView);
  const contentLength = header.length - SHX_HEADER_SIZE;
  const contentView = new DataView(arrayBuffer, SHX_HEADER_SIZE, contentLength);
  // FIX: one index record per 8 bytes of content. The previous code allocated
  // `contentLength` entries (8x too many), leaving trailing zero entries.
  const recordCount = contentLength / 8;
  const offsets = new Int32Array(recordCount);
  const lengths = new Int32Array(recordCount);
  for (let i = 0; i < recordCount; i++) {
    offsets[i] = contentView.getInt32(i * 8, BIG_ENDIAN3);
    lengths[i] = contentView.getInt32(i * 8 + 4, BIG_ENDIAN3);
  }
  return {
    offsets,
    lengths
  };
}
|
|
565
|
+
|
|
566
|
+
// src/lib/streaming/zip-batch-iterators.ts
/**
 * Zip two async iterators of row batches together, yielding batches whose
 * `data` is a pair of equal-length row slices, one from each input.
 * Iteration stops as soon as either input reports done; surplus rows
 * buffered from the longer input are dropped (min-length zip semantics).
 *
 * @param iterator1 async iterator of row arrays (e.g. geometries)
 * @param iterator2 async iterator of row arrays (e.g. property rows)
 * @param shape batch shape label copied onto each yielded batch
 */
async function* zipBatchIterators(iterator1, iterator2, shape) {
  const rows1 = [];
  const rows2 = [];
  let done1 = false;
  let done2 = false;
  while (!done1 && !done2) {
    // Refill whichever buffer is empty before extracting a zipped batch
    if (rows1.length === 0 && !done1) {
      const { value, done } = await iterator1.next();
      if (done) {
        done1 = true;
      } else {
        rows1.push(...value);
      }
    }
    if (rows2.length === 0 && !done2) {
      const { value, done } = await iterator2.next();
      if (done) {
        done2 = true;
      } else {
        rows2.push(...value);
      }
    }
    const batchData = extractBatchData(rows1, rows2);
    if (batchData) {
      yield {
        batchType: "data",
        shape,
        length: batchData.length,
        data: batchData
      };
    }
  }
}
|
|
600
|
+
/**
 * Take the largest equal-length prefix from both row buffers, removing the
 * consumed rows from the buffers in place, and return the pair of prefixes.
 * Returns null when either buffer is empty (nothing to zip yet).
 */
function extractBatchData(batch1, batch2) {
  const count = Math.min(batch1.length, batch2.length);
  if (count === 0) {
    return null;
  }
  // splice() both removes the consumed rows and returns them
  return [batch1.splice(0, count), batch2.splice(0, count)];
}
|
|
610
|
+
|
|
611
|
+
// src/lib/parsers/parse-dbf.ts
// DBF multi-byte numeric header fields are little-endian; this flag is
// passed as the littleEndian argument to the DataView getters below
var LITTLE_ENDIAN4 = true;
// Size in bytes of the fixed DBF file header (field descriptors follow it)
var DBF_HEADER_SIZE = 32;
|
|
614
|
+
/**
 * Incremental DBF parser. Feed chunks with write(), finish with end(), and
 * read rows/schema/progress/errors from `this.result` between calls.
 */
var DBFParser = class {
  constructor(options) {
    // Decoder for field names and character data, per the chosen encoding
    this.textDecoder = new TextDecoder(options.encoding);
    // Buffers incoming chunks and hands out DataViews across chunk borders
    this.binaryReader = new BinaryChunkReader();
    // Accumulates parsed header, schema and row objects
    this.result = {
      data: []
    };
    this.state = 0 /* START */;
  }
  /**
   * Append the next chunk of the .dbf file and advance the state machine
   * as far as the buffered bytes allow.
   * @param arrayBuffer
   */
  write(arrayBuffer) {
    this.binaryReader.write(arrayBuffer);
    this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
  }
  /**
   * Signal end-of-input; records an error when the file ended mid-parse.
   */
  end() {
    this.binaryReader.end();
    this.state = parseState2(this.state, this.result, this.binaryReader, this.textDecoder);
    if (this.state !== 3 /* END */) {
      this.state = 4 /* ERROR */;
      this.result.error = "DBF incomplete file";
    }
  }
};
|
|
639
|
+
/**
 * Synchronously parse a complete DBF file.
 *
 * @param arrayBuffer full .dbf contents
 * @param options `options.dbf.encoding` selects the text encoding;
 *   `options.tables.format` or `options.dbf.shape` selects the output shape
 * @returns row objects, a {schema, rows} table, or an object-row-table,
 *   depending on the requested shape (plain rows is the default)
 */
function parseDBF(arrayBuffer, options = {}) {
  const dbfOptions = options.dbf || {};
  const { encoding = "latin1" } = dbfOptions;
  const dbfParser = new DBFParser({ encoding });
  dbfParser.write(arrayBuffer);
  dbfParser.end();
  const { data, schema } = dbfParser.result;
  // tables.format takes precedence over the dbf-specific shape option
  const tablesOptions = (options && options.tables) || {};
  const shape = tablesOptions.format || dbfOptions.shape;
  switch (shape) {
    case "object-row-table":
      return {
        shape: "object-row-table",
        schema,
        data
      };
    case "table":
      return { schema, rows: data };
    case "rows":
    default:
      return data;
  }
}
|
|
663
|
+
/**
 * Parse a DBF file incrementally from an async iterator of ArrayBuffers.
 * Yields the parsed DBF header once (as soon as it is available), then
 * arrays of row objects as rows accumulate, and a final batch after
 * end-of-input.
 */
async function* parseDBFInBatches(asyncIterator, options = {}) {
  const { encoding = "latin1" } = options.dbf || {};
  const parser = new DBFParser({ encoding });
  let headerReturned = false;
  for await (const arrayBuffer of asyncIterator) {
    parser.write(arrayBuffer);
    // The header batch is emitted exactly once
    if (!headerReturned && parser.result.dbfHeader) {
      headerReturned = true;
      yield parser.result.dbfHeader;
    }
    if (parser.result.data.length > 0) {
      // Hand off the accumulated rows and start a fresh buffer
      const rows = parser.result.data;
      parser.result.data = [];
      yield rows;
    }
  }
  parser.end();
  if (parser.result.data.length > 0) {
    yield parser.result.data;
  }
}
|
|
683
|
+
/**
 * Run the DBF parsing state machine as far as the buffered data allows.
 *
 * States: 0 START (fixed file header) -> 1 FIELD_DESCRIPTORS (schema) ->
 * 2 FIELD_PROPERTIES (records) -> 3 END, with 4 as the ERROR sink.
 *
 * When the reader cannot yet provide enough bytes (getDataView returns a
 * falsy value) the current state is returned so the next write() resumes
 * where parsing stopped. Header, schema, rows, progress and errors are
 * accumulated on `result`.
 *
 * @returns the state to resume from (END or ERROR when finished)
 */
function parseState2(state, result, binaryReader, textDecoder) {
  while (true) {
    try {
      switch (state) {
        case 4 /* ERROR */:
        case 3 /* END */:
          return state;
        case 0 /* START */: {
          // Fixed 32-byte file header
          const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
          if (!dataView) {
            return state;
          }
          result.dbfHeader = parseDBFHeader(dataView);
          result.progress = {
            bytesUsed: 0,
            rowsTotal: result.dbfHeader.nRecords,
            rows: 0
          };
          state = 1 /* FIELD_DESCRIPTORS */;
          break;
        }
        case 1 /* FIELD_DESCRIPTORS */: {
          // Everything between the fixed header and headerLength holds the
          // 32-byte field descriptors (plus the 0x0D terminator byte,
          // which parseFieldDescriptors accounts for)
          const fieldDescriptorView = binaryReader.getDataView(
            result.dbfHeader.headerLength - DBF_HEADER_SIZE
          );
          if (!fieldDescriptorView) {
            return state;
          }
          result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
          result.schema = {
            fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
            metadata: {}
          };
          state = 2 /* FIELD_PROPERTIES */;
          // Consume one flag byte before the first record's data
          // (record reads below stay offset by one byte per record)
          binaryReader.skip(1);
          break;
        }
        case 2 /* FIELD_PROPERTIES */: {
          const { recordLength = 0, nRecords = 0 } = (result == null ? void 0 : result.dbfHeader) || {};
          while (result.data.length < nRecords) {
            // recordLength includes the 1-byte deletion flag; read the
            // data portion, then skip the flag byte preceding the next record
            const recordView = binaryReader.getDataView(recordLength - 1);
            if (!recordView) {
              return state;
            }
            binaryReader.skip(1);
            const row = parseRow(recordView, result.dbfFields, textDecoder);
            result.data.push(row);
            result.progress.rows = result.data.length;
          }
          state = 3 /* END */;
          break;
        }
        default:
          // Format the message before overwriting `state` — the previous
          // code assigned ERROR first, so it always reported "state 4"
          // instead of the state that was actually illegal
          result.error = `illegal parser state ${state}`;
          state = 4 /* ERROR */;
          return state;
      }
    } catch (error) {
      state = 4 /* ERROR */;
      result.error = `DBF parsing failed: ${error.message}`;
      return state;
    }
  }
}
|
|
745
|
+
/**
 * Decode the fixed 32-byte DBF file header.
 * @param headerView DataView over the first 32 bytes of the file
 * @returns last-updated date parts, record count, header length in bytes,
 *   per-record length in bytes, and the language-driver (code page) id
 */
function parseDBFHeader(headerView) {
  // Last-updated date: the year byte is stored as an offset from 1900
  const year = headerView.getUint8(1) + 1900;
  const month = headerView.getUint8(2);
  const day = headerView.getUint8(3);
  return {
    year,
    month,
    day,
    // Total number of records in the file
    nRecords: headerView.getUint32(4, LITTLE_ENDIAN4),
    // Header size in bytes (includes the field descriptors)
    headerLength: headerView.getUint16(8, LITTLE_ENDIAN4),
    // Size in bytes of every record
    recordLength: headerView.getUint16(10, LITTLE_ENDIAN4),
    // Code-page hint; not reliably populated by all writers
    languageDriver: headerView.getUint8(29)
  };
}
|
|
761
|
+
/**
 * Decode the array of 32-byte field descriptors that follows the DBF
 * header. `view` covers the descriptor area including the trailing
 * terminator byte — hence the `- 1` when computing the descriptor count.
 *
 * @param view DataView over the descriptor area
 * @param textDecoder decoder used for the NUL-padded field names
 * @returns one {name, dataType, fieldLength, decimal} per field
 */
function parseFieldDescriptors(view, textDecoder) {
  const descriptorCount = (view.byteLength - 1) / 32;
  const fields = [];
  for (let index = 0; index < descriptorCount; index++) {
    const base = index * 32;
    // Field name: up to 11 bytes, NUL-padded
    const nameBytes = new Uint8Array(view.buffer, view.byteOffset + base, 11);
    const name = textDecoder.decode(nameBytes).replace(/\u0000/g, "");
    fields.push({
      name,
      // Single-character type code, e.g. 'C', 'N', 'D', 'L'
      dataType: String.fromCharCode(view.getUint8(base + 11)),
      fieldLength: view.getUint8(base + 16),
      decimal: view.getUint8(base + 17)
    });
  }
  return fields;
}
|
|
777
|
+
/**
 * Decode one fixed-width DBF record into a {fieldName: value} object.
 * Walks the record's bytes field by field, slicing `fieldLength` bytes
 * per field and converting each slice per its type code.
 *
 * @param view DataView over the record's data bytes
 * @param fields field descriptors from parseFieldDescriptors
 * @param textDecoder decoder for the raw field bytes
 */
function parseRow(view, fields, textDecoder) {
  const row = {};
  let cursor = 0;
  for (const field of fields) {
    const fieldBytes = new Uint8Array(view.buffer, view.byteOffset + cursor, field.fieldLength);
    row[field.name] = parseField(textDecoder.decode(fieldBytes), field.dataType);
    cursor += field.fieldLength;
  }
  return row;
}
|
|
789
|
+
/**
 * Convert one fixed-width DBF field's raw text into a JS value based on
 * the field's single-character type code.
 *
 * @param text raw (still padded) field text
 * @param dataType DBF type code
 * @returns number | string | boolean | UTC timestamp (ms) | null
 * @throws {Error} on an unrecognized type code (e.g. 'M' memo fields)
 */
function parseField(text, dataType) {
  switch (dataType) {
    // All numeric flavors (binary double, float, numeric, OLE double)
    case "B":
    case "F":
    case "N":
    case "O":
      return parseNumber(text);
    case "C":
      return parseCharacter(text);
    case "D":
      return parseDate(text);
    case "L":
      return parseBoolean(text);
    default:
      // Include the offending code so unsupported files are diagnosable
      // (the previous message omitted it entirely)
      throw new Error(`Unsupported data type: ${dataType}`);
  }
}
|
|
809
|
+
/**
 * Parse a DBF 'D' field ("YYYYMMDD") into a UTC timestamp in milliseconds.
 * Blank or garbled input yields NaN (Date.UTC coerces non-numeric parts).
 */
function parseDate(str) {
  const year = str.slice(0, 4);
  const monthIndex = parseInt(str.slice(4, 6), 10) - 1;
  const day = str.slice(6, 8);
  return Date.UTC(year, monthIndex, day);
}
|
|
812
|
+
/**
 * Parse a DBF 'L' (logical) field: n/f -> false, y/t -> true
 * (case-insensitive); anything else (e.g. '?' or blank) -> null.
 */
function parseBoolean(value) {
  if (/^[nf]$/i.test(value)) {
    return false;
  }
  if (/^[yt]$/i.test(value)) {
    return true;
  }
  return null;
}
|
|
815
|
+
/**
 * Parse a numeric DBF field; blank or non-numeric text becomes null.
 */
function parseNumber(text) {
  const value = parseFloat(text);
  // parseFloat always returns a number, so Number.isNaN is sufficient
  return Number.isNaN(value) ? null : value;
}
|
|
819
|
+
/**
 * Parse a character DBF field: strip the fixed-width padding and map
 * all-blank values to null.
 */
function parseCharacter(text) {
  const trimmed = text.trim();
  return trimmed === "" ? null : trimmed;
}
|
|
822
|
+
/**
 * Build a schema field descriptor for one DBF field.
 * `fieldLength` and `decimal` are accepted (currently unused) so a raw
 * descriptor object from parseFieldDescriptors can be passed through.
 *
 * @returns {name, type, nullable, metadata}
 * @throws {Error} on an unrecognized type code
 */
function makeField({ name, dataType, fieldLength, decimal }) {
  switch (dataType) {
    // All numeric DBF types map to float64
    case "B":
    case "F":
    case "N":
    case "O":
      return { name, type: "float64", nullable: true, metadata: {} };
    case "C":
      return { name, type: "utf8", nullable: true, metadata: {} };
    case "D":
      return { name, type: "timestamp-millisecond", nullable: true, metadata: {} };
    case "L":
      return { name, type: "bool", nullable: true, metadata: {} };
    default:
      // Name the offending code so schema failures are diagnosable
      // (the previous message omitted it entirely)
      throw new Error(`Unsupported data type: ${dataType}`);
  }
}
|
|
842
|
+
|
|
843
|
+
// src/dbf-loader.ts
// __VERSION__ is injected at build time; fall back to "latest" when absent
var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
// Worker-capable loader descriptor for standalone .dbf attribute files
var DBFWorkerLoader = {
  name: "DBF",
  id: "dbf",
  module: "shapefile",
  version: VERSION2,
  worker: true,
  category: "table",
  extensions: ["dbf"],
  mimeTypes: ["application/x-dbf"],
  options: {
    dbf: {
      // Default text encoding for character fields (overridable per-load)
      encoding: "latin1"
    }
  }
};
|
|
860
|
+
// Main-thread DBF loader: the worker descriptor plus parse implementations
var DBFLoader = {
  ...DBFWorkerLoader,
  // parseDBF is synchronous; the async wrapper just satisfies the loader API
  parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),
  parseSync: parseDBF,
  parseInBatches(arrayBufferIterator, options) {
    return parseDBFInBatches(arrayBufferIterator, options);
  }
};
|
|
868
|
+
|
|
869
|
+
// src/lib/parsers/parse-shapefile.ts
/**
 * Stream-parse a shapefile: geometry batches from the .shp stream are
 * zipped with attribute-row batches from the sibling .dbf file (when one
 * exists) and yielded as batches of GeoJSON features.
 *
 * @param asyncIterator chunks of the .shp file
 * @param options `options.gis.reproject` / `options.gis._targetCrs` control reprojection
 * @param context loader context supplying `fetch` and `url` for sidecar files
 */
async function* parseShapefileInBatches(asyncIterator, options, context) {
  var _a, _b, _c;
  const { reproject = false, _targetCrs = "WGS84" } = (options == null ? void 0 : options.gis) || {};
  // Fetch the .shx/.cpg/.prj sidecar files up front
  const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
  const shapeIterable = await (0, import_loader_utils.parseInBatchesFromContext)(
    asyncIterator,
    SHPLoader,
    options,
    context
  );
  // Accept either an async or a sync iterable from the SHP loader
  const shapeIterator = ((_a = shapeIterable[Symbol.asyncIterator]) == null ? void 0 : _a.call(shapeIterable)) || ((_b = shapeIterable[Symbol.iterator]) == null ? void 0 : _b.call(shapeIterable));
  let propertyIterator = null;
  // Attribute rows come from the sibling .dbf file, when present
  const dbfResponse = await (context == null ? void 0 : context.fetch(replaceExtension((context == null ? void 0 : context.url) || "", "dbf")));
  if (dbfResponse == null ? void 0 : dbfResponse.ok) {
    const propertyIterable = await (0, import_loader_utils.parseInBatchesFromContext)(
      dbfResponse,
      DBFLoader,
      {
        ...options,
        // The .cpg sidecar overrides the default DBF text encoding
        dbf: { encoding: cpg || "latin1" }
      },
      context
    );
    propertyIterator = ((_c = propertyIterable[Symbol.asyncIterator]) == null ? void 0 : _c.call(propertyIterable)) || propertyIterable[Symbol.iterator]();
  }
  // The first batch from each stream may be a metadata batch; if so, the
  // next batch is the header proper
  let shapeHeader = (await shapeIterator.next()).value;
  if (shapeHeader && shapeHeader.batchType === "metadata") {
    shapeHeader = (await shapeIterator.next()).value;
  }
  let dbfHeader = {};
  if (propertyIterator) {
    dbfHeader = (await propertyIterator.next()).value;
    if (dbfHeader && dbfHeader.batchType === "metadata") {
      dbfHeader = (await propertyIterator.next()).value;
    }
  }
  // Zip geometries with property rows; with no .dbf, stream geometries alone
  const zippedIterator = propertyIterator ? zipBatchIterators(shapeIterator, propertyIterator, "object-row-table") : shapeIterator;
  const zippedBatchIterable = {
    [Symbol.asyncIterator]() {
      return zippedIterator;
    }
  };
  for await (const batch of zippedBatchIterable) {
    let geometries;
    let properties;
    if (!propertyIterator) {
      geometries = batch;
    } else {
      // Zipped batches carry [geometryBatch, propertyBatch] in .data
      [geometries, properties] = batch.data;
    }
    const geojsonGeometries = parseGeometries(geometries);
    let features = joinProperties(geojsonGeometries, properties);
    if (reproject) {
      features = reprojectFeatures(features, prj, _targetCrs);
    }
    yield {
      encoding: cpg,
      prj,
      shx,
      header: shapeHeader,
      data: features
    };
  }
}
|
|
934
|
+
/**
 * Parse a complete .shp file, plus its sidecar .dbf/.shx/.cpg/.prj files
 * fetched through the loader context, into GeoJSON features.
 *
 * @param arrayBuffer full .shp contents
 * @param options `options.gis.reproject` / `options.gis._targetCrs` control reprojection
 * @param context loader context supplying `fetch` and `url` for sidecar files
 * @returns {encoding, prj, shx, header, data: features}
 */
async function parseShapefile(arrayBuffer, options, context) {
  const { reproject = false, _targetCrs = "WGS84" } = (options == null ? void 0 : options.gis) || {};
  // Fetch the .shx/.cpg/.prj sidecar files
  const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
  // Parse the geometry content of the .shp file itself
  const { header, geometries } = await (0, import_loader_utils.parseFromContext)(arrayBuffer, SHPLoader, options, context);
  const geojsonGeometries = parseGeometries(geometries);
  // Attribute rows come from the sibling .dbf file, when present
  let properties = [];
  const dbfResponse = await (context == null ? void 0 : context.fetch(replaceExtension(context == null ? void 0 : context.url, "dbf")));
  if (dbfResponse == null ? void 0 : dbfResponse.ok) {
    properties = await (0, import_loader_utils.parseFromContext)(
      dbfResponse,
      DBFLoader,
      // The .cpg sidecar overrides the default DBF text encoding
      { dbf: { encoding: cpg || "latin1" } },
      context
    );
  }
  let features = joinProperties(geojsonGeometries, properties);
  if (reproject) {
    features = reprojectFeatures(features, prj, _targetCrs);
  }
  return {
    encoding: cpg,
    prj,
    shx,
    header,
    data: features
  };
}
|
|
961
|
+
/**
 * Convert an array of loaders.gl binary geometries into GeoJSON geometries.
 */
function parseGeometries(geometries) {
  return geometries.map((geom) => (0, import_gis.binaryToGeometry)(geom));
}
|
|
968
|
+
/**
 * Pair each GeoJSON geometry with the property object at the same index,
 * producing GeoJSON Features. Missing properties (no .dbf file, or fewer
 * property rows than geometries) default to an empty object.
 */
function joinProperties(geometries, properties) {
  return geometries.map((geometry, index) => ({
    type: "Feature",
    geometry,
    // properties can be undefined if the .dbf response was empty
    properties: (properties && properties[index]) || {}
  }));
}
|
|
982
|
+
/**
 * Reproject GeoJSON feature coordinates from sourceCrs to targetCrs.
 * Returns the features untouched when neither CRS is given; when only one
 * side is given, the missing side defaults to WGS84.
 */
function reprojectFeatures(features, sourceCrs, targetCrs) {
  if (!sourceCrs && !targetCrs) {
    return features;
  }
  const from = sourceCrs || "WGS84";
  const to = targetCrs || "WGS84";
  const projection = new import_proj4.Proj4Projection({ from, to });
  return (0, import_gis.transformGeoJsonCoords)(features, (coord) => projection.project(coord));
}
|
|
989
|
+
/**
 * Fetch the shapefile sidecar files (.shx index, .cpg encoding, .prj CRS)
 * that live next to the .shp at context.url. All three requests are issued
 * in parallel; each returned field is undefined when its request was not OK.
 *
 * @param options unused (kept for call-site compatibility)
 * @param context loader context supplying `url` and `fetch`
 * @returns {shx, cpg, prj}
 */
async function loadShapefileSidecarFiles(options, context) {
  const { url, fetch } = context;
  const [shxResponse, cpgResponse, prjResponse] = await Promise.all([
    fetch(replaceExtension(url, "shx")),
    fetch(replaceExtension(url, "cpg")),
    fetch(replaceExtension(url, "prj"))
  ]);
  let shx;
  if (shxResponse.ok) {
    const arrayBuffer = await shxResponse.arrayBuffer();
    shx = parseShx(arrayBuffer);
  }
  let cpg;
  if (cpgResponse.ok) {
    cpg = await cpgResponse.text();
  }
  let prj;
  if (prjResponse.ok) {
    prj = await prjResponse.text();
  }
  return { shx, cpg, prj };
}
|
|
1017
|
+
/**
 * Swap a URL's file extension, preserving the case convention of the
 * original extension (an all-caps extension like ".SHP" yields ".DBF").
 */
function replaceExtension(url, newExtension) {
  const stem = basename(url);
  const oldExtension = extname(url);
  const extension =
    oldExtension === oldExtension.toUpperCase() ? newExtension.toUpperCase() : newExtension;
  return `${stem}.${extension}`;
}
|
|
1026
|
+
/**
 * Return the URL up to (not including) the last '.', or '' when there is
 * no dot. Falsy input (e.g. '' or undefined) is returned unchanged.
 */
function basename(url) {
  const dotIndex = url && url.lastIndexOf(".");
  if (typeof dotIndex !== "number") {
    // `url` was falsy; pass it through as-is, matching the original contract
    return dotIndex;
  }
  return dotIndex >= 0 ? url.slice(0, dotIndex) : "";
}
|
|
1033
|
+
/**
 * Return the extension after the last '.' (without the dot), or '' when
 * there is none. Falsy input is returned unchanged.
 */
function extname(url) {
  const dotIndex = url && url.lastIndexOf(".");
  if (typeof dotIndex !== "number") {
    // `url` was falsy; pass it through as-is, matching the original contract
    return dotIndex;
  }
  return dotIndex >= 0 ? url.slice(dotIndex + 1) : "";
}
|
|
1040
|
+
|
|
1041
|
+
// src/shapefile-loader.ts
// __VERSION__ is injected at build time; fall back to "latest" when absent
var VERSION3 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
// Loader descriptor for .shp files (with .dbf/.shx/.cpg/.prj sidecars)
var ShapefileLoader = {
  name: "Shapefile",
  id: "shapefile",
  module: "shapefile",
  version: VERSION3,
  category: "geometry",
  extensions: ["shp"],
  mimeTypes: ["application/octet-stream"],
  // Content sniff test: the SHP magic number at the start of the file
  tests: [new Uint8Array(SHP_MAGIC_NUMBER2).buffer],
  options: {
    shapefile: {},
    shp: {
      // presumably caps parsed coordinate dimensions at XYZM — TODO confirm in parse-shp
      _maxDimensions: 4
    }
  },
  parse: parseShapefile,
  parseInBatches: parseShapefileInBatches
};
|
|
1061
|
+
|
|
1062
|
+
// src/lib/streaming/binary-reader.ts
/**
 * Cursor-style reader over a single ArrayBuffer: tracks a byte offset and
 * hands out DataViews over successive slices.
 */
var BinaryReader = class {
  constructor(arrayBuffer) {
    // Current read position in bytes
    this.offset = 0;
    this.arrayBuffer = arrayBuffer;
  }
  /**
   * Checks if there are `bytes` available after the current offset
   *
   * @param bytes
   * @returns boolean
   */
  hasAvailableBytes(bytes) {
    return this.arrayBuffer.byteLength - this.offset >= bytes;
  }
  /**
   * Get a DataView over the next `bytes` bytes and advance the offset.
   * When `bytes` is omitted, the view covers the remainder of the buffer
   * and the offset advances to the end.
   *
   * @param bytes byte count, or undefined for "the rest of the buffer"
   * @returns DataView
   * @throws {Error} when fewer than `bytes` bytes remain
   */
  getDataView(bytes) {
    if (bytes && !this.hasAvailableBytes(bytes)) {
      throw new Error("binary data exhausted");
    }
    const dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
    // The previous code did `this.offset += bytes` unconditionally, which
    // set the offset to NaN whenever `bytes` was omitted and broke every
    // subsequent read. Advance by the returned view's length in that case.
    this.offset += bytes === void 0 ? dataView.byteLength : bytes;
    return dataView;
  }
  /**
   * Advance the offset by `bytes` without reading
   *
   * @param bytes
   */
  skip(bytes) {
    this.offset += bytes;
  }
  /**
   * Move the offset back by `bytes`
   *
   * @param bytes
   */
  rewind(bytes) {
    this.offset -= bytes;
  }
};