@loaders.gl/shapefile 4.2.0-alpha.4 → 4.2.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/dbf-loader.js +25 -20
  2. package/dist/dbf-worker.js +14 -7
  3. package/dist/dist.dev.js +219 -232
  4. package/dist/dist.min.js +12 -0
  5. package/dist/index.cjs +74 -75
  6. package/dist/index.cjs.map +7 -0
  7. package/dist/index.d.ts +6 -6
  8. package/dist/index.d.ts.map +1 -1
  9. package/dist/index.js +1 -1
  10. package/dist/lib/parsers/parse-dbf.d.ts +1 -1
  11. package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
  12. package/dist/lib/parsers/parse-dbf.js +300 -258
  13. package/dist/lib/parsers/parse-shapefile.d.ts +3 -3
  14. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
  15. package/dist/lib/parsers/parse-shapefile.js +225 -184
  16. package/dist/lib/parsers/parse-shp-geometry.d.ts +1 -1
  17. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
  18. package/dist/lib/parsers/parse-shp-geometry.js +260 -168
  19. package/dist/lib/parsers/parse-shp-header.js +33 -23
  20. package/dist/lib/parsers/parse-shp.d.ts +1 -1
  21. package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
  22. package/dist/lib/parsers/parse-shp.js +146 -109
  23. package/dist/lib/parsers/parse-shx.js +19 -15
  24. package/dist/lib/parsers/types.js +0 -1
  25. package/dist/lib/streaming/binary-chunk-reader.js +154 -95
  26. package/dist/lib/streaming/binary-reader.js +51 -23
  27. package/dist/lib/streaming/zip-batch-iterators.js +61 -45
  28. package/dist/shapefile-loader.js +26 -19
  29. package/dist/shp-loader.js +25 -19
  30. package/dist/shp-worker.js +22 -16
  31. package/dist/workers/dbf-worker.js +0 -1
  32. package/dist/workers/shp-worker.js +0 -1
  33. package/package.json +11 -7
  34. package/dist/dbf-loader.js.map +0 -1
  35. package/dist/index.js.map +0 -1
  36. package/dist/lib/parsers/parse-dbf.js.map +0 -1
  37. package/dist/lib/parsers/parse-shapefile.js.map +0 -1
  38. package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
  39. package/dist/lib/parsers/parse-shp-header.js.map +0 -1
  40. package/dist/lib/parsers/parse-shp.js.map +0 -1
  41. package/dist/lib/parsers/parse-shx.js.map +0 -1
  42. package/dist/lib/parsers/types.js.map +0 -1
  43. package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
  44. package/dist/lib/streaming/binary-reader.js.map +0 -1
  45. package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
  46. package/dist/shapefile-loader.js.map +0 -1
  47. package/dist/shp-loader.js.map +0 -1
  48. package/dist/workers/dbf-worker.js.map +0 -1
  49. package/dist/workers/shp-worker.js.map +0 -1
@@ -1,295 +1,337 @@
1
1
  import { BinaryChunkReader } from "../streaming/binary-chunk-reader.js";
2
2
  const LITTLE_ENDIAN = true;
3
3
  const DBF_HEADER_SIZE = 32;
4
- var STATE = function (STATE) {
5
- STATE[STATE["START"] = 0] = "START";
6
- STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
7
- STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
8
- STATE[STATE["END"] = 3] = "END";
9
- STATE[STATE["ERROR"] = 4] = "ERROR";
10
- return STATE;
11
- }(STATE || {});
4
+ var STATE;
5
+ (function (STATE) {
6
+ STATE[STATE["START"] = 0] = "START";
7
+ STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
8
+ STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
9
+ STATE[STATE["END"] = 3] = "END";
10
+ STATE[STATE["ERROR"] = 4] = "ERROR";
11
+ })(STATE || (STATE = {}));
12
12
  class DBFParser {
13
- constructor(options) {
14
- this.binaryReader = new BinaryChunkReader();
15
- this.textDecoder = void 0;
16
- this.state = STATE.START;
17
- this.result = {
18
- data: []
13
+ binaryReader = new BinaryChunkReader();
14
+ textDecoder;
15
+ state = STATE.START;
16
+ result = {
17
+ data: []
19
18
  };
20
- this.textDecoder = new TextDecoder(options.encoding);
21
- }
22
- write(arrayBuffer) {
23
- this.binaryReader.write(arrayBuffer);
24
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
25
- }
26
- end() {
27
- this.binaryReader.end();
28
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
29
- if (this.state !== STATE.END) {
30
- this.state = STATE.ERROR;
31
- this.result.error = 'DBF incomplete file';
19
+ constructor(options) {
20
+ this.textDecoder = new TextDecoder(options.encoding);
21
+ }
22
+ /**
23
+ * @param arrayBuffer
24
+ */
25
+ write(arrayBuffer) {
26
+ this.binaryReader.write(arrayBuffer);
27
+ this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
28
+ // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
29
+ // important events:
30
+ // - schema available
31
+ // - first rows available
32
+ // - all rows available
33
+ }
34
+ end() {
35
+ this.binaryReader.end();
36
+ this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
37
+ // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
38
+ if (this.state !== STATE.END) {
39
+ this.state = STATE.ERROR;
40
+ this.result.error = 'DBF incomplete file';
41
+ }
32
42
  }
33
- }
34
43
  }
35
- export function parseDBF(arrayBuffer) {
36
- var _options$dbf;
37
- let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
38
- const {
39
- encoding = 'latin1'
40
- } = options.dbf || {};
41
- const dbfParser = new DBFParser({
42
- encoding
43
- });
44
- dbfParser.write(arrayBuffer);
45
- dbfParser.end();
46
- const {
47
- data,
48
- schema
49
- } = dbfParser.result;
50
- const shape = options === null || options === void 0 ? void 0 : (_options$dbf = options.dbf) === null || _options$dbf === void 0 ? void 0 : _options$dbf.shape;
51
- switch (shape) {
52
- case 'object-row-table':
53
- {
54
- const table = {
55
- shape: 'object-row-table',
56
- schema,
57
- data
58
- };
59
- return table;
60
- }
61
- case 'table':
62
- return {
63
- schema,
64
- rows: data
65
- };
66
- case 'rows':
67
- default:
68
- return data;
69
- }
44
+ /**
45
+ * @param arrayBuffer
46
+ * @param options
47
+ * @returns DBFTable or rows
48
+ */
49
+ export function parseDBF(arrayBuffer, options = {}) {
50
+ const { encoding = 'latin1' } = options.dbf || {};
51
+ const dbfParser = new DBFParser({ encoding });
52
+ dbfParser.write(arrayBuffer);
53
+ dbfParser.end();
54
+ const { data, schema } = dbfParser.result;
55
+ const shape = options?.dbf?.shape;
56
+ switch (shape) {
57
+ case 'object-row-table': {
58
+ const table = {
59
+ shape: 'object-row-table',
60
+ schema,
61
+ data
62
+ };
63
+ return table;
64
+ }
65
+ case 'table':
66
+ return { schema, rows: data };
67
+ case 'rows':
68
+ default:
69
+ return data;
70
+ }
70
71
  }
71
- export function parseDBFInBatches(asyncIterator) {
72
- try {
73
- let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
74
- return async function* () {
75
- const {
76
- encoding = 'latin1'
77
- } = options.dbf || {};
78
- const parser = new DBFParser({
79
- encoding
80
- });
81
- let headerReturned = false;
82
- for await (const arrayBuffer of asyncIterator) {
72
+ /**
73
+ * @param asyncIterator
74
+ * @param options
75
+ */
76
+ export async function* parseDBFInBatches(asyncIterator, options = {}) {
77
+ const { encoding = 'latin1' } = options.dbf || {};
78
+ const parser = new DBFParser({ encoding });
79
+ let headerReturned = false;
80
+ for await (const arrayBuffer of asyncIterator) {
83
81
  parser.write(arrayBuffer);
84
82
  if (!headerReturned && parser.result.dbfHeader) {
85
- headerReturned = true;
86
- yield parser.result.dbfHeader;
83
+ headerReturned = true;
84
+ yield parser.result.dbfHeader;
87
85
  }
88
86
  if (parser.result.data.length > 0) {
89
- yield parser.result.data;
90
- parser.result.data = [];
87
+ yield parser.result.data;
88
+ parser.result.data = [];
91
89
  }
92
- }
93
- parser.end();
94
- if (parser.result.data.length > 0) {
90
+ }
91
+ parser.end();
92
+ if (parser.result.data.length > 0) {
95
93
  yield parser.result.data;
96
- }
97
- }();
98
- } catch (e) {
99
- return Promise.reject(e);
100
- }
94
+ }
101
95
  }
96
+ /**
97
+ * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm
98
+ * @param state
99
+ * @param result
100
+ * @param binaryReader
101
+ * @param textDecoder
102
+ * @returns
103
+ */
104
+ /* eslint-disable complexity, max-depth */
102
105
  function parseState(state, result, binaryReader, textDecoder) {
103
- while (true) {
104
- try {
105
- switch (state) {
106
- case STATE.ERROR:
107
- case STATE.END:
108
- return state;
109
- case STATE.START:
110
- const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
111
- if (!dataView) {
112
- return state;
113
- }
114
- result.dbfHeader = parseDBFHeader(dataView);
115
- result.progress = {
116
- bytesUsed: 0,
117
- rowsTotal: result.dbfHeader.nRecords,
118
- rows: 0
119
- };
120
- state = STATE.FIELD_DESCRIPTORS;
121
- break;
122
- case STATE.FIELD_DESCRIPTORS:
123
- const fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE);
124
- if (!fieldDescriptorView) {
125
- return state;
126
- }
127
- result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
128
- result.schema = {
129
- fields: result.dbfFields.map(dbfField => makeField(dbfField)),
130
- metadata: {}
131
- };
132
- state = STATE.FIELD_PROPERTIES;
133
- binaryReader.skip(1);
134
- break;
135
- case STATE.FIELD_PROPERTIES:
136
- const {
137
- recordLength = 0,
138
- nRecords = 0
139
- } = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {};
140
- while (result.data.length < nRecords) {
141
- const recordView = binaryReader.getDataView(recordLength - 1);
142
- if (!recordView) {
143
- return state;
106
+ // eslint-disable-next-line no-constant-condition
107
+ while (true) {
108
+ try {
109
+ switch (state) {
110
+ case STATE.ERROR:
111
+ case STATE.END:
112
+ return state;
113
+ case STATE.START:
114
+ // Parse initial file header
115
+ // DBF Header
116
+ const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
117
+ if (!dataView) {
118
+ return state;
119
+ }
120
+ result.dbfHeader = parseDBFHeader(dataView);
121
+ result.progress = {
122
+ bytesUsed: 0,
123
+ rowsTotal: result.dbfHeader.nRecords,
124
+ rows: 0
125
+ };
126
+ state = STATE.FIELD_DESCRIPTORS;
127
+ break;
128
+ case STATE.FIELD_DESCRIPTORS:
129
+ // Parse DBF field descriptors (schema)
130
+ const fieldDescriptorView = binaryReader.getDataView(
131
+ // @ts-ignore
132
+ result.dbfHeader.headerLength - DBF_HEADER_SIZE);
133
+ if (!fieldDescriptorView) {
134
+ return state;
135
+ }
136
+ result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
137
+ result.schema = {
138
+ fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
139
+ metadata: {}
140
+ };
141
+ state = STATE.FIELD_PROPERTIES;
142
+ // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?
143
+ // parsedbf uses ((fields.length + 1) << 5) + 2;
144
+ binaryReader.skip(1);
145
+ break;
146
+ case STATE.FIELD_PROPERTIES:
147
+ const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
148
+ while (result.data.length < nRecords) {
149
+ const recordView = binaryReader.getDataView(recordLength - 1);
150
+ if (!recordView) {
151
+ return state;
152
+ }
153
+ // Note: Avoid actually reading the last byte, which may not be present
154
+ binaryReader.skip(1);
155
+ // @ts-ignore
156
+ const row = parseRow(recordView, result.dbfFields, textDecoder);
157
+ result.data.push(row);
158
+ // @ts-ignore
159
+ result.progress.rows = result.data.length;
160
+ }
161
+ state = STATE.END;
162
+ break;
163
+ default:
164
+ state = STATE.ERROR;
165
+ result.error = `illegal parser state ${state}`;
166
+ return state;
144
167
  }
145
- binaryReader.skip(1);
146
- const row = parseRow(recordView, result.dbfFields, textDecoder);
147
- result.data.push(row);
148
- result.progress.rows = result.data.length;
149
- }
150
- state = STATE.END;
151
- break;
152
- default:
153
- state = STATE.ERROR;
154
- result.error = `illegal parser state ${state}`;
155
- return state;
156
- }
157
- } catch (error) {
158
- state = STATE.ERROR;
159
- result.error = `DBF parsing failed: ${error.message}`;
160
- return state;
168
+ }
169
+ catch (error) {
170
+ state = STATE.ERROR;
171
+ result.error = `DBF parsing failed: ${error.message}`;
172
+ return state;
173
+ }
161
174
  }
162
- }
163
175
  }
176
+ /**
177
+ * @param headerView
178
+ */
164
179
  function parseDBFHeader(headerView) {
165
- return {
166
- year: headerView.getUint8(1) + 1900,
167
- month: headerView.getUint8(2),
168
- day: headerView.getUint8(3),
169
- nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
170
- headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
171
- recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
172
- languageDriver: headerView.getUint8(29)
173
- };
180
+ return {
181
+ // Last updated date
182
+ year: headerView.getUint8(1) + 1900,
183
+ month: headerView.getUint8(2),
184
+ day: headerView.getUint8(3),
185
+ // Number of records in data file
186
+ nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
187
+ // Length of header in bytes
188
+ headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
189
+ // Length of each record
190
+ recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
191
+ // Not sure if this is usually set
192
+ languageDriver: headerView.getUint8(29)
193
+ };
174
194
  }
195
+ /**
196
+ * @param view
197
+ */
175
198
  function parseFieldDescriptors(view, textDecoder) {
176
- const nFields = (view.byteLength - 1) / 32;
177
- const fields = [];
178
- let offset = 0;
179
- for (let i = 0; i < nFields; i++) {
180
- const name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11)).replace(/\u0000/g, '');
181
- fields.push({
182
- name,
183
- dataType: String.fromCharCode(view.getUint8(offset + 11)),
184
- fieldLength: view.getUint8(offset + 16),
185
- decimal: view.getUint8(offset + 17)
186
- });
187
- offset += 32;
199
+ // NOTE: this might overestimate the number of fields if the "Database
200
+ // Container" container exists and is included in the headerLength
201
+ const nFields = (view.byteLength - 1) / 32;
202
+ const fields = [];
203
+ let offset = 0;
204
+ for (let i = 0; i < nFields; i++) {
205
+ const name = textDecoder
206
+ .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))
207
+ // eslint-disable-next-line no-control-regex
208
+ .replace(/\u0000/g, '');
209
+ fields.push({
210
+ name,
211
+ dataType: String.fromCharCode(view.getUint8(offset + 11)),
212
+ fieldLength: view.getUint8(offset + 16),
213
+ decimal: view.getUint8(offset + 17)
214
+ });
215
+ offset += 32;
216
+ }
217
+ return fields;
218
+ }
219
+ /*
220
+ * @param {BinaryChunkReader} binaryReader
221
+ function parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {
222
+ const rows = [];
223
+ for (let i = 0; i < nRecords; i++) {
224
+ const recordView = binaryReader.getDataView(recordLength - 1);
225
+ binaryReader.skip(1);
226
+ // @ts-ignore
227
+ rows.push(parseRow(recordView, fields, textDecoder));
188
228
  }
189
- return fields;
229
+ return rows;
190
230
  }
231
+ */
232
+ /**
233
+ *
234
+ * @param view
235
+ * @param fields
236
+ * @param textDecoder
237
+ * @returns
238
+ */
191
239
  function parseRow(view, fields, textDecoder) {
192
- const out = {};
193
- let offset = 0;
194
- for (const field of fields) {
195
- const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
196
- out[field.name] = parseField(text, field.dataType);
197
- offset += field.fieldLength;
198
- }
199
- return out;
240
+ const out = {};
241
+ let offset = 0;
242
+ for (const field of fields) {
243
+ const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
244
+ out[field.name] = parseField(text, field.dataType);
245
+ offset += field.fieldLength;
246
+ }
247
+ return out;
200
248
  }
249
+ /**
250
+ * Should NaN be coerced to null?
251
+ * @param text
252
+ * @param dataType
253
+ * @returns Field depends on a type of the data
254
+ */
201
255
  function parseField(text, dataType) {
202
- switch (dataType) {
203
- case 'B':
204
- return parseNumber(text);
205
- case 'C':
206
- return parseCharacter(text);
207
- case 'F':
208
- return parseNumber(text);
209
- case 'N':
210
- return parseNumber(text);
211
- case 'O':
212
- return parseNumber(text);
213
- case 'D':
214
- return parseDate(text);
215
- case 'L':
216
- return parseBoolean(text);
217
- default:
218
- throw new Error('Unsupported data type');
219
- }
256
+ switch (dataType) {
257
+ case 'B':
258
+ return parseNumber(text);
259
+ case 'C':
260
+ return parseCharacter(text);
261
+ case 'F':
262
+ return parseNumber(text);
263
+ case 'N':
264
+ return parseNumber(text);
265
+ case 'O':
266
+ return parseNumber(text);
267
+ case 'D':
268
+ return parseDate(text);
269
+ case 'L':
270
+ return parseBoolean(text);
271
+ default:
272
+ throw new Error('Unsupported data type');
273
+ }
220
274
  }
275
+ /**
276
+ * Parse YYYYMMDD to date in milliseconds
277
+ * @param str YYYYMMDD
278
+ * @returns new Date as a number
279
+ */
221
280
  function parseDate(str) {
222
- return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
281
+ return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
223
282
  }
283
+ /**
284
+ * Read boolean value
285
+ * any of Y, y, T, t coerce to true
286
+ * any of N, n, F, f coerce to false
287
+ * otherwise null
288
+ * @param value
289
+ * @returns boolean | null
290
+ */
224
291
  function parseBoolean(value) {
225
- return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
292
+ return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
226
293
  }
294
+ /**
295
+ * Return null instead of NaN
296
+ * @param text
297
+ * @returns number | null
298
+ */
227
299
  function parseNumber(text) {
228
- const number = parseFloat(text);
229
- return isNaN(number) ? null : number;
300
+ const number = parseFloat(text);
301
+ return isNaN(number) ? null : number;
230
302
  }
303
+ /**
304
+ *
305
+ * @param text
306
+ * @returns string | null
307
+ */
231
308
  function parseCharacter(text) {
232
- return text.trim() || null;
309
+ return text.trim() || null;
233
310
  }
234
- function makeField(_ref) {
235
- let {
236
- name,
237
- dataType,
238
- fieldLength,
239
- decimal
240
- } = _ref;
241
- switch (dataType) {
242
- case 'B':
243
- return {
244
- name,
245
- type: 'float64',
246
- nullable: true,
247
- metadata: {}
248
- };
249
- case 'C':
250
- return {
251
- name,
252
- type: 'utf8',
253
- nullable: true,
254
- metadata: {}
255
- };
256
- case 'F':
257
- return {
258
- name,
259
- type: 'float64',
260
- nullable: true,
261
- metadata: {}
262
- };
263
- case 'N':
264
- return {
265
- name,
266
- type: 'float64',
267
- nullable: true,
268
- metadata: {}
269
- };
270
- case 'O':
271
- return {
272
- name,
273
- type: 'float64',
274
- nullable: true,
275
- metadata: {}
276
- };
277
- case 'D':
278
- return {
279
- name,
280
- type: 'timestamp-millisecond',
281
- nullable: true,
282
- metadata: {}
283
- };
284
- case 'L':
285
- return {
286
- name,
287
- type: 'bool',
288
- nullable: true,
289
- metadata: {}
290
- };
291
- default:
292
- throw new Error('Unsupported data type');
293
- }
311
+ /**
312
+ * Create a standard Arrow-style `Field` from field descriptor.
313
+ * TODO - use `fieldLength` and `decimal` to generate smaller types?
314
+ * @param param0
315
+ * @returns Field
316
+ */
317
+ // eslint-disable
318
+ function makeField({ name, dataType, fieldLength, decimal }) {
319
+ switch (dataType) {
320
+ case 'B':
321
+ return { name, type: 'float64', nullable: true, metadata: {} };
322
+ case 'C':
323
+ return { name, type: 'utf8', nullable: true, metadata: {} };
324
+ case 'F':
325
+ return { name, type: 'float64', nullable: true, metadata: {} };
326
+ case 'N':
327
+ return { name, type: 'float64', nullable: true, metadata: {} };
328
+ case 'O':
329
+ return { name, type: 'float64', nullable: true, metadata: {} };
330
+ case 'D':
331
+ return { name, type: 'timestamp-millisecond', nullable: true, metadata: {} };
332
+ case 'L':
333
+ return { name, type: 'bool', nullable: true, metadata: {} };
334
+ default:
335
+ throw new Error('Unsupported data type');
336
+ }
294
337
  }
295
- //# sourceMappingURL=parse-dbf.js.map
@@ -1,7 +1,7 @@
1
1
  import { LoaderContext } from '@loaders.gl/loader-utils';
2
- import type { SHXOutput } from './parse-shx';
3
- import type { SHPHeader } from './parse-shp-header';
4
- import type { ShapefileLoaderOptions } from './types';
2
+ import type { SHXOutput } from "./parse-shx.js";
3
+ import type { SHPHeader } from "./parse-shp-header.js";
4
+ import type { ShapefileLoaderOptions } from "./types.js";
5
5
  interface ShapefileOutput {
6
6
  encoding?: string;
7
7
  prj?: string;
@@ -1 +1 @@
1
- {"version":3,"file":"parse-shapefile.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shapefile.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,aAAa,EAA8C,MAAM,0BAA0B,CAAC;AAUpG,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,EAAC,sBAAsB,EAAC,MAAM,SAAS,CAAC;AAOpD,UAAU,eAAe;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,MAAM,EAAE,SAAS,CAAC;IAClB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AACD;;GAEG;AAEH,wBAAuB,uBAAuB,CAC5C,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,eAAe,CAAC,CAkFhC;AAED;;;;;;;GAOG;AACH,wBAAsB,cAAc,CAClC,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,eAAe,CAAC,CAiD1B;AAwDD;;;;;GAKG;AAEH,wBAAsB,yBAAyB,CAC7C,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC;IACT,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,CAAC,CAkCD;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,MAAM,CAQ1E"}
1
+ {"version":3,"file":"parse-shapefile.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shapefile.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,aAAa,EAA8C,MAAM,0BAA0B,CAAC;AAUpG,OAAO,KAAK,EAAC,SAAS,EAAC,uBAAoB;AAC3C,OAAO,KAAK,EAAC,SAAS,EAAC,8BAA2B;AAClD,OAAO,KAAK,EAAC,sBAAsB,EAAC,mBAAgB;AAOpD,UAAU,eAAe;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,MAAM,EAAE,SAAS,CAAC;IAClB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AACD;;GAEG;AAEH,wBAAuB,uBAAuB,CAC5C,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,eAAe,CAAC,CAkFhC;AAED;;;;;;;GAOG;AACH,wBAAsB,cAAc,CAClC,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,eAAe,CAAC,CAiD1B;AAwDD;;;;;GAKG;AAEH,wBAAsB,yBAAyB,CAC7C,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC;IACT,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,CAAC,CAkCD;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,MAAM,CAQ1E"}