@loaders.gl/shapefile 4.0.0-alpha.5 → 4.0.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. package/dist/bundle.js +2 -2
  2. package/dist/dbf-loader.js +29 -20
  3. package/dist/dbf-worker.js +73 -447
  4. package/dist/dist.min.js +130 -489
  5. package/dist/es5/bundle.js +6 -0
  6. package/dist/es5/bundle.js.map +1 -0
  7. package/dist/es5/dbf-loader.js +53 -0
  8. package/dist/es5/dbf-loader.js.map +1 -0
  9. package/dist/es5/index.js +39 -0
  10. package/dist/es5/index.js.map +1 -0
  11. package/dist/es5/lib/parsers/parse-dbf.js +394 -0
  12. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -0
  13. package/dist/es5/lib/parsers/parse-shapefile.js +373 -0
  14. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -0
  15. package/dist/es5/lib/parsers/parse-shp-geometry.js +220 -0
  16. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -0
  17. package/dist/es5/lib/parsers/parse-shp-header.js +35 -0
  18. package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -0
  19. package/dist/es5/lib/parsers/parse-shp.js +227 -0
  20. package/dist/es5/lib/parsers/parse-shp.js.map +1 -0
  21. package/dist/es5/lib/parsers/parse-shx.js +26 -0
  22. package/dist/es5/lib/parsers/parse-shx.js.map +1 -0
  23. package/dist/es5/lib/parsers/types.js +2 -0
  24. package/dist/es5/lib/parsers/types.js.map +1 -0
  25. package/dist/es5/lib/streaming/binary-chunk-reader.js +178 -0
  26. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -0
  27. package/dist/es5/lib/streaming/binary-reader.js +48 -0
  28. package/dist/es5/lib/streaming/binary-reader.js.map +1 -0
  29. package/dist/es5/lib/streaming/zip-batch-iterators.js +91 -0
  30. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -0
  31. package/dist/es5/shapefile-loader.js +31 -0
  32. package/dist/es5/shapefile-loader.js.map +1 -0
  33. package/dist/es5/shp-loader.js +56 -0
  34. package/dist/es5/shp-loader.js.map +1 -0
  35. package/dist/es5/workers/dbf-worker.js +6 -0
  36. package/dist/es5/workers/dbf-worker.js.map +1 -0
  37. package/dist/es5/workers/shp-worker.js +6 -0
  38. package/dist/es5/workers/shp-worker.js.map +1 -0
  39. package/dist/esm/bundle.js +4 -0
  40. package/dist/esm/bundle.js.map +1 -0
  41. package/dist/esm/dbf-loader.js +24 -0
  42. package/dist/esm/dbf-loader.js.map +1 -0
  43. package/dist/esm/index.js +4 -0
  44. package/dist/esm/index.js.map +1 -0
  45. package/dist/esm/lib/parsers/parse-dbf.js +296 -0
  46. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -0
  47. package/dist/esm/lib/parsers/parse-shapefile.js +187 -0
  48. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -0
  49. package/dist/esm/lib/parsers/parse-shp-geometry.js +191 -0
  50. package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -0
  51. package/dist/esm/lib/parsers/parse-shp-header.js +29 -0
  52. package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -0
  53. package/dist/esm/lib/parsers/parse-shp.js +134 -0
  54. package/dist/esm/lib/parsers/parse-shp.js.map +1 -0
  55. package/dist/esm/lib/parsers/parse-shx.js +20 -0
  56. package/dist/esm/lib/parsers/parse-shx.js.map +1 -0
  57. package/dist/esm/lib/parsers/types.js +2 -0
  58. package/dist/esm/lib/parsers/types.js.map +1 -0
  59. package/dist/esm/lib/streaming/binary-chunk-reader.js +106 -0
  60. package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -0
  61. package/dist/esm/lib/streaming/binary-reader.js +27 -0
  62. package/dist/esm/lib/streaming/binary-reader.js.map +1 -0
  63. package/dist/esm/lib/streaming/zip-batch-iterators.js +44 -0
  64. package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -0
  65. package/dist/esm/shapefile-loader.js +23 -0
  66. package/dist/esm/shapefile-loader.js.map +1 -0
  67. package/dist/esm/shp-loader.js +26 -0
  68. package/dist/esm/shp-loader.js.map +1 -0
  69. package/dist/esm/workers/dbf-worker.js +4 -0
  70. package/dist/esm/workers/dbf-worker.js.map +1 -0
  71. package/dist/esm/workers/shp-worker.js +4 -0
  72. package/dist/esm/workers/shp-worker.js.map +1 -0
  73. package/dist/index.js +11 -4
  74. package/dist/lib/parsers/parse-dbf.d.ts +4 -18
  75. package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
  76. package/dist/lib/parsers/parse-dbf.js +309 -264
  77. package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
  78. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
  79. package/dist/lib/parsers/parse-shapefile.js +227 -209
  80. package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
  81. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
  82. package/dist/lib/parsers/parse-shp-geometry.js +265 -212
  83. package/dist/lib/parsers/parse-shp-header.js +38 -27
  84. package/dist/lib/parsers/parse-shp.d.ts +3 -2
  85. package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
  86. package/dist/lib/parsers/parse-shp.js +160 -136
  87. package/dist/lib/parsers/parse-shx.js +25 -19
  88. package/dist/lib/parsers/types.d.ts +68 -0
  89. package/dist/lib/parsers/types.d.ts.map +1 -0
  90. package/dist/lib/parsers/types.js +2 -0
  91. package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
  92. package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
  93. package/dist/lib/streaming/binary-chunk-reader.js +152 -128
  94. package/dist/lib/streaming/binary-reader.js +50 -33
  95. package/dist/lib/streaming/zip-batch-iterators.js +57 -48
  96. package/dist/shapefile-loader.js +30 -22
  97. package/dist/shp-loader.js +32 -22
  98. package/dist/shp-worker.js +57 -19
  99. package/dist/workers/dbf-worker.js +5 -4
  100. package/dist/workers/shp-worker.js +5 -4
  101. package/package.json +7 -7
  102. package/src/lib/parsers/parse-dbf.ts +41 -67
  103. package/src/lib/parsers/parse-shapefile.ts +3 -6
  104. package/src/lib/parsers/parse-shp-geometry.ts +3 -2
  105. package/src/lib/parsers/parse-shp.ts +26 -12
  106. package/src/lib/parsers/types.ts +79 -0
  107. package/src/lib/streaming/binary-chunk-reader.ts +5 -1
  108. package/src/lib/streaming/zip-batch-iterators.ts +2 -2
  109. package/dist/bundle.js.map +0 -1
  110. package/dist/dbf-loader.js.map +0 -1
  111. package/dist/index.js.map +0 -1
  112. package/dist/lib/parsers/parse-dbf.js.map +0 -1
  113. package/dist/lib/parsers/parse-shapefile.js.map +0 -1
  114. package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
  115. package/dist/lib/parsers/parse-shp-header.js.map +0 -1
  116. package/dist/lib/parsers/parse-shp.js.map +0 -1
  117. package/dist/lib/parsers/parse-shx.js.map +0 -1
  118. package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
  119. package/dist/lib/streaming/binary-reader.js.map +0 -1
  120. package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
  121. package/dist/shapefile-loader.js.map +0 -1
  122. package/dist/shp-loader.js.map +0 -1
  123. package/dist/workers/dbf-worker.js.map +0 -1
  124. package/dist/workers/shp-worker.js.map +0 -1
@@ -132,7 +132,7 @@
132
132
  // src/lib/parsers/parse-shp-geometry.ts
133
133
  var LITTLE_ENDIAN2 = true;
134
134
  function parseRecord(view, options) {
135
- const { _maxDimensions } = options?.shp || {};
135
+ const { _maxDimensions = 4 } = options?.shp || {};
136
136
  let offset = 0;
137
137
  const type = view.getInt32(offset, LITTLE_ENDIAN2);
138
138
  offset += Int32Array.BYTES_PER_ELEMENT;
@@ -318,7 +318,13 @@
318
318
  this.binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
319
319
  this.state = STATE.EXPECTING_HEADER;
320
320
  this.result = {
321
- geometries: []
321
+ geometries: [],
322
+ progress: {
323
+ bytesTotal: NaN,
324
+ bytesUsed: NaN,
325
+ rows: NaN
326
+ },
327
+ currentIndex: NaN
322
328
  };
323
329
  this.options = options;
324
330
  }
@@ -394,7 +400,7 @@
394
400
  binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
395
401
  return state;
396
402
  }
397
- const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header.type || recordHeader.recordNumber !== result.currentIndex;
403
+ const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header?.type || recordHeader.recordNumber !== result.currentIndex;
398
404
  if (invalidRecord) {
399
405
  binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
400
406
  } else {
@@ -424,7 +430,7 @@
424
430
  }
425
431
 
426
432
  // src/shp-loader.ts
427
- var VERSION = true ? "4.0.0-alpha.5" : "latest";
433
+ var VERSION = true ? "4.0.0-alpha.6" : "latest";
428
434
  var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
429
435
  var SHPWorkerLoader = {
430
436
  name: "SHP",
@@ -485,16 +491,33 @@
485
491
  }
486
492
 
487
493
  // ../worker-utils/src/lib/worker-farm/worker-body.ts
494
+ function getParentPort() {
495
+ let parentPort;
496
+ try {
497
+ eval("globalThis.parentPort = require('worker_threads').parentPort");
498
+ parentPort = globalThis.parentPort;
499
+ } catch {
500
+ }
501
+ return parentPort;
502
+ }
488
503
  var onMessageWrapperMap = new Map();
489
504
  var WorkerBody = class {
505
+ static inWorkerThread() {
506
+ return typeof self !== "undefined" || Boolean(getParentPort());
507
+ }
490
508
  static set onmessage(onMessage) {
491
- self.onmessage = (message) => {
492
- if (!isKnownMessage(message)) {
493
- return;
494
- }
495
- const { type, payload } = message.data;
509
+ function handleMessage(message) {
510
+ const parentPort3 = getParentPort();
511
+ const { type, payload } = parentPort3 ? message : message.data;
496
512
  onMessage(type, payload);
497
- };
513
+ }
514
+ const parentPort2 = getParentPort();
515
+ if (parentPort2) {
516
+ parentPort2.on("message", handleMessage);
517
+ parentPort2.on("exit", () => console.debug("Node worker closing"));
518
+ } else {
519
+ globalThis.onmessage = handleMessage;
520
+ }
498
521
  }
499
522
  static addEventListener(onMessage) {
500
523
  let onMessageWrapper = onMessageWrapperMap.get(onMessage);
@@ -503,22 +526,36 @@
503
526
  if (!isKnownMessage(message)) {
504
527
  return;
505
528
  }
506
- const { type, payload } = message.data;
529
+ const parentPort3 = getParentPort();
530
+ const { type, payload } = parentPort3 ? message : message.data;
507
531
  onMessage(type, payload);
508
532
  };
509
533
  }
510
- self.addEventListener("message", onMessageWrapper);
534
+ const parentPort2 = getParentPort();
535
+ if (parentPort2) {
536
+ console.error("not implemented");
537
+ } else {
538
+ globalThis.addEventListener("message", onMessageWrapper);
539
+ }
511
540
  }
512
541
  static removeEventListener(onMessage) {
513
542
  const onMessageWrapper = onMessageWrapperMap.get(onMessage);
514
543
  onMessageWrapperMap.delete(onMessage);
515
- self.removeEventListener("message", onMessageWrapper);
544
+ const parentPort2 = getParentPort();
545
+ if (parentPort2) {
546
+ console.error("not implemented");
547
+ } else {
548
+ globalThis.removeEventListener("message", onMessageWrapper);
549
+ }
516
550
  }
517
551
  static postMessage(type, payload) {
518
- if (self) {
519
- const data = { source: "loaders.gl", type, payload };
520
- const transferList = getTransferList(payload);
521
- self.postMessage(data, transferList);
552
+ const data = { source: "loaders.gl", type, payload };
553
+ const transferList = getTransferList(payload);
554
+ const parentPort2 = getParentPort();
555
+ if (parentPort2) {
556
+ parentPort2.postMessage(data, transferList);
557
+ } else {
558
+ globalThis.postMessage(data, transferList);
522
559
  }
523
560
  }
524
561
  };
@@ -530,19 +567,20 @@
530
567
  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
531
568
  var requestId = 0;
532
569
  function createLoaderWorker(loader) {
533
- if (typeof self === "undefined") {
570
+ if (!WorkerBody.inWorkerThread()) {
534
571
  return;
535
572
  }
536
573
  WorkerBody.onmessage = async (type, payload) => {
537
574
  switch (type) {
538
575
  case "process":
539
576
  try {
540
- const { input, options = {} } = payload;
577
+ const { input, options = {}, context = {} } = payload;
541
578
  const result = await parseData({
542
579
  loader,
543
580
  arrayBuffer: input,
544
581
  options,
545
582
  context: {
583
+ ...context,
546
584
  parse: parseOnMainThread
547
585
  }
548
586
  });
@@ -1,4 +1,5 @@
1
- import { DBFLoader } from '../dbf-loader';
2
- import { createLoaderWorker } from '@loaders.gl/loader-utils';
3
- createLoaderWorker(DBFLoader);
4
- //# sourceMappingURL=dbf-worker.js.map
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const dbf_loader_1 = require("../dbf-loader");
4
+ const loader_utils_1 = require("@loaders.gl/loader-utils");
5
+ (0, loader_utils_1.createLoaderWorker)(dbf_loader_1.DBFLoader);
@@ -1,4 +1,5 @@
1
- import { SHPLoader } from '../shp-loader';
2
- import { createLoaderWorker } from '@loaders.gl/loader-utils';
3
- createLoaderWorker(SHPLoader);
4
- //# sourceMappingURL=shp-worker.js.map
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const shp_loader_1 = require("../shp-loader");
4
+ const loader_utils_1 = require("@loaders.gl/loader-utils");
5
+ (0, loader_utils_1.createLoaderWorker)(shp_loader_1.SHPLoader);
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@loaders.gl/shapefile",
3
3
  "description": "Loader for the Shapefile Format",
4
- "version": "4.0.0-alpha.5",
4
+ "version": "4.0.0-alpha.6",
5
5
  "license": "MIT",
6
6
  "publishConfig": {
7
7
  "access": "public"
@@ -18,8 +18,8 @@
18
18
  "shp"
19
19
  ],
20
20
  "types": "dist/index.d.ts",
21
- "main": "dist/index.js",
22
- "module": "dist/index.js",
21
+ "main": "dist/es5/index.js",
22
+ "module": "dist/esm/index.js",
23
23
  "sideEffects": false,
24
24
  "browser": {
25
25
  "./src/lib/filesystems/node-filesystem.js": false,
@@ -37,10 +37,10 @@
37
37
  "build-worker-dbf": "esbuild src/workers/dbf-worker.ts --bundle --outfile=dist/dbf-worker.js --define:__VERSION__=\\\"$npm_package_version\\\""
38
38
  },
39
39
  "dependencies": {
40
- "@loaders.gl/gis": "4.0.0-alpha.5",
41
- "@loaders.gl/loader-utils": "4.0.0-alpha.5",
42
- "@loaders.gl/schema": "4.0.0-alpha.5",
40
+ "@loaders.gl/gis": "4.0.0-alpha.6",
41
+ "@loaders.gl/loader-utils": "4.0.0-alpha.6",
42
+ "@loaders.gl/schema": "4.0.0-alpha.6",
43
43
  "@math.gl/proj4": "^3.5.1"
44
44
  },
45
- "gitHead": "7a71a54bdf1ddf985cc3af3db90b82e7fa97d025"
45
+ "gitHead": "acc1985050dfaa0f1f0c066f8da5bce7454a046c"
46
46
  }
@@ -1,47 +1,13 @@
1
- import {Schema, Field, Bool, Utf8, Float64, TimestampMillisecond} from '@loaders.gl/schema';
1
+ import {Field, ObjectRowTable} from '@loaders.gl/schema';
2
2
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
3
-
4
- type DBFRowsOutput = object[];
5
-
6
- interface DBFTableOutput {
7
- schema?: Schema;
8
- rows: DBFRowsOutput;
9
- }
10
-
11
- type DBFHeader = {
12
- // Last updated date
13
- year: number;
14
- month: number;
15
- day: number;
16
- // Number of records in data file
17
- nRecords: number;
18
- // Length of header in bytes
19
- headerLength: number;
20
- // Length of each record
21
- recordLength: number;
22
- // Not sure if this is usually set
23
- languageDriver: number;
24
- };
25
-
26
- type DBFField = {
27
- name: string;
28
- dataType: string;
29
- fieldLength: number;
30
- decimal: number;
31
- };
32
-
33
- type DBFResult = {
34
- data: {[key: string]: any}[];
35
- schema?: Schema;
36
- error?: string;
37
- dbfHeader?: DBFHeader;
38
- dbfFields?: DBFField[];
39
- progress?: {
40
- bytesUsed: number;
41
- rowsTotal: number;
42
- rows: number;
43
- };
44
- };
3
+ import {
4
+ DBFLoaderOptions,
5
+ DBFResult,
6
+ DBFTableOutput,
7
+ DBFHeader,
8
+ DBFRowsOutput,
9
+ DBFField
10
+ } from './types';
45
11
 
46
12
  const LITTLE_ENDIAN = true;
47
13
  const DBF_HEADER_SIZE = 32;
@@ -98,21 +64,27 @@ class DBFParser {
98
64
  */
99
65
  export function parseDBF(
100
66
  arrayBuffer: ArrayBuffer,
101
- options: any = {}
102
- ): DBFRowsOutput | DBFTableOutput {
103
- const loaderOptions = options.dbf || {};
104
- const {encoding} = loaderOptions;
67
+ options: DBFLoaderOptions = {}
68
+ ): DBFRowsOutput | DBFTableOutput | ObjectRowTable {
69
+ const {encoding = 'latin1'} = options.dbf || {};
105
70
 
106
71
  const dbfParser = new DBFParser({encoding});
107
72
  dbfParser.write(arrayBuffer);
108
73
  dbfParser.end();
109
74
 
110
75
  const {data, schema} = dbfParser.result;
111
- switch (options.tables && options.tables.format) {
76
+ const shape = options?.tables?.format || options?.dbf?.shape;
77
+ switch (shape) {
78
+ case 'object-row-table': {
79
+ const table: ObjectRowTable = {
80
+ shape: 'object-row-table',
81
+ schema,
82
+ data
83
+ };
84
+ return table;
85
+ }
112
86
  case 'table':
113
- // TODO - parse columns
114
87
  return {schema, rows: data};
115
-
116
88
  case 'rows':
117
89
  default:
118
90
  return data;
@@ -124,10 +96,9 @@ export function parseDBF(
124
96
  */
125
97
  export async function* parseDBFInBatches(
126
98
  asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
127
- options: any = {}
99
+ options: DBFLoaderOptions = {}
128
100
  ): AsyncIterable<DBFHeader | DBFRowsOutput | DBFTableOutput> {
129
- const loaderOptions = options.dbf || {};
130
- const {encoding} = loaderOptions;
101
+ const {encoding = 'latin1'} = options.dbf || {};
131
102
 
132
103
  const parser = new DBFParser({encoding});
133
104
  let headerReturned = false;
@@ -160,7 +131,7 @@ export async function* parseDBFInBatches(
160
131
  function parseState(
161
132
  state: STATE,
162
133
  result: DBFResult,
163
- binaryReader: {[key: string]: any},
134
+ binaryReader: BinaryChunkReader,
164
135
  textDecoder: TextDecoder
165
136
  ): STATE {
166
137
  // eslint-disable-next-line no-constant-condition
@@ -173,7 +144,8 @@ function parseState(
173
144
 
174
145
  case STATE.START:
175
146
  // Parse initial file header
176
- const dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
147
+ // DBF Header
148
+ const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
177
149
  if (!dataView) {
178
150
  return state;
179
151
  }
@@ -190,15 +162,17 @@ function parseState(
190
162
  // Parse DBF field descriptors (schema)
191
163
  const fieldDescriptorView = binaryReader.getDataView(
192
164
  // @ts-ignore
193
- result.dbfHeader.headerLength - DBF_HEADER_SIZE,
194
- 'DBF field descriptors'
165
+ result.dbfHeader.headerLength - DBF_HEADER_SIZE
195
166
  );
196
167
  if (!fieldDescriptorView) {
197
168
  return state;
198
169
  }
199
170
 
200
171
  result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
201
- result.schema = new Schema(result.dbfFields.map((dbfField) => makeField(dbfField)));
172
+ result.schema = {
173
+ fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
174
+ metadata: {}
175
+ };
202
176
 
203
177
  state = STATE.FIELD_PROPERTIES;
204
178
 
@@ -311,7 +285,7 @@ function parseRow(
311
285
  fields: DBFField[],
312
286
  textDecoder: TextDecoder
313
287
  ): {[key: string]: any} {
314
- const out = {};
288
+ const out: {[key: string]: string | number | boolean | null} = {};
315
289
  let offset = 0;
316
290
  for (const field of fields) {
317
291
  const text = textDecoder.decode(
@@ -398,22 +372,22 @@ function parseCharacter(text: string): string | null {
398
372
  * @returns Field
399
373
  */
400
374
  // eslint-disable
401
- function makeField({name, dataType, fieldLength, decimal}): Field {
375
+ function makeField({name, dataType, fieldLength, decimal}: DBFField): Field {
402
376
  switch (dataType) {
403
377
  case 'B':
404
- return new Field(name, new Float64(), true);
378
+ return {name, type: 'float64', nullable: true, metadata: {}};
405
379
  case 'C':
406
- return new Field(name, new Utf8(), true);
380
+ return {name, type: 'utf8', nullable: true, metadata: {}};
407
381
  case 'F':
408
- return new Field(name, new Float64(), true);
382
+ return {name, type: 'float64', nullable: true, metadata: {}};
409
383
  case 'N':
410
- return new Field(name, new Float64(), true);
384
+ return {name, type: 'float64', nullable: true, metadata: {}};
411
385
  case 'O':
412
- return new Field(name, new Float64(), true);
386
+ return {name, type: 'float64', nullable: true, metadata: {}};
413
387
  case 'D':
414
- return new Field(name, new TimestampMillisecond(), true);
388
+ return {name, type: 'timestamp-millisecond', nullable: true, metadata: {}};
415
389
  case 'L':
416
- return new Field(name, new Bool(), true);
390
+ return {name, type: 'bool', nullable: true, metadata: {}};
417
391
  default:
418
392
  throw new Error('Unsupported data type');
419
393
  }
@@ -2,6 +2,7 @@
2
2
  import type {SHXOutput} from './parse-shx';
3
3
  import type {SHPHeader} from './parse-shp-header';
4
4
  import type {LoaderContext} from '@loaders.gl/loader-utils';
5
+ import type {ShapefileLoaderOptions} from './types';
5
6
 
6
7
  import {binaryToGeometry, transformGeoJsonCoords} from '@loaders.gl/gis';
7
8
  import {Proj4Projection} from '@math.gl/proj4';
@@ -20,15 +21,11 @@ interface ShapefileOutput {
20
21
  }
21
22
  /**
22
23
  * Parsing of file in batches
23
- *
24
- * @param asyncIterator
25
- * @param options
26
- * @param context
27
24
  */
28
25
  // eslint-disable-next-line max-statements, complexity
29
26
  export async function* parseShapefileInBatches(
30
27
  asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
31
- options?: any,
28
+ options?: ShapefileLoaderOptions,
32
29
  context?: LoaderContext
33
30
  ): AsyncIterable<ShapefileOutput> {
34
31
  const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
@@ -109,7 +106,7 @@ export async function* parseShapefileInBatches(
109
106
  */
110
107
  export async function parseShapefile(
111
108
  arrayBuffer: ArrayBuffer,
112
- options?: {[key: string]: any},
109
+ options?: ShapefileLoaderOptions,
113
110
  context?: LoaderContext
114
111
  ): Promise<ShapefileOutput> {
115
112
  const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
@@ -1,4 +1,5 @@
1
1
  import {BinaryGeometry, BinaryGeometryType} from '@loaders.gl/schema';
2
+ import {SHPLoaderOptions} from './types';
2
3
 
3
4
  const LITTLE_ENDIAN = true;
4
5
 
@@ -9,8 +10,8 @@ const LITTLE_ENDIAN = true;
9
10
  * @return Binary Geometry Object
10
11
  */
11
12
  // eslint-disable-next-line complexity
12
- export function parseRecord(view: DataView, options?: {shp?: any}): BinaryGeometry | null {
13
- const {_maxDimensions} = options?.shp || {};
13
+ export function parseRecord(view: DataView, options?: SHPLoaderOptions): BinaryGeometry | null {
14
+ const {_maxDimensions = 4} = options?.shp || {};
14
15
 
15
16
  let offset = 0;
16
17
  const type: number = view.getInt32(offset, LITTLE_ENDIAN);
@@ -1,8 +1,8 @@
1
- import type {LoaderOptions} from '@loaders.gl/loader-utils';
2
1
  import type {BinaryGeometry} from '@loaders.gl/schema';
3
2
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
4
- import {parseSHPHeader} from './parse-shp-header';
3
+ import {parseSHPHeader, SHPHeader} from './parse-shp-header';
5
4
  import {parseRecord} from './parse-shp-geometry';
5
+ import {SHPLoaderOptions} from './types';
6
6
 
7
7
  const LITTLE_ENDIAN = true;
8
8
  const BIG_ENDIAN = false;
@@ -20,20 +20,34 @@ const STATE = {
20
20
  };
21
21
 
22
22
  type SHPResult = {
23
- geometries: [];
24
- header?: {};
23
+ geometries: (BinaryGeometry | null)[];
24
+ header?: SHPHeader;
25
25
  error?: string;
26
+ progress: {
27
+ bytesUsed: number;
28
+ bytesTotal: number;
29
+ rows: number;
30
+ };
31
+ currentIndex: number;
26
32
  };
27
33
 
28
34
  class SHPParser {
29
- options?: any = {};
35
+ options?: SHPLoaderOptions = {};
30
36
  binaryReader = new BinaryChunkReader({maxRewindBytes: SHP_RECORD_HEADER_SIZE});
31
37
  state = STATE.EXPECTING_HEADER;
32
38
  result: SHPResult = {
33
- geometries: []
39
+ geometries: [],
40
+ // Initialize with number values to make TS happy
41
+ // These are initialized for real in STATE.EXPECTING_HEADER
42
+ progress: {
43
+ bytesTotal: NaN,
44
+ bytesUsed: NaN,
45
+ rows: NaN
46
+ },
47
+ currentIndex: NaN
34
48
  };
35
49
 
36
- constructor(options?: LoaderOptions) {
50
+ constructor(options?: SHPLoaderOptions) {
37
51
  this.options = options;
38
52
  }
39
53
 
@@ -53,7 +67,7 @@ class SHPParser {
53
67
  }
54
68
  }
55
69
 
56
- export function parseSHP(arrayBuffer: ArrayBuffer, options?: object): BinaryGeometry[] {
70
+ export function parseSHP(arrayBuffer: ArrayBuffer, options?: SHPLoaderOptions): BinaryGeometry[] {
57
71
  const shpParser = new SHPParser(options);
58
72
  shpParser.write(arrayBuffer);
59
73
  shpParser.end();
@@ -69,7 +83,7 @@ export function parseSHP(arrayBuffer: ArrayBuffer, options?: object): BinaryGeom
69
83
  */
70
84
  export async function* parseSHPInBatches(
71
85
  asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
72
- options?: object
86
+ options?: SHPLoaderOptions
73
87
  ): AsyncIterable<BinaryGeometry | object> {
74
88
  const parser = new SHPParser(options);
75
89
  let headerReturned = false;
@@ -109,9 +123,9 @@ export async function* parseSHPInBatches(
109
123
  /* eslint-disable complexity, max-depth */
110
124
  function parseState(
111
125
  state: number,
112
- result: {[key: string]: any},
126
+ result: SHPResult,
113
127
  binaryReader: BinaryChunkReader,
114
- options: {shp?: any}
128
+ options?: SHPLoaderOptions
115
129
  ): number {
116
130
  // eslint-disable-next-line no-constant-condition
117
131
  while (true) {
@@ -157,7 +171,7 @@ function parseState(
157
171
 
158
172
  const invalidRecord =
159
173
  recordHeader.byteLength < 4 ||
160
- recordHeader.type !== result.header.type ||
174
+ recordHeader.type !== result.header?.type ||
161
175
  recordHeader.recordNumber !== result.currentIndex;
162
176
 
163
177
  // All records must have at least four bytes (for the record shape type)
@@ -0,0 +1,79 @@
1
+ import {Schema, ObjectRowTable} from '@loaders.gl/schema';
2
+ import type {LoaderOptions} from '@loaders.gl/loader-utils';
3
+
4
+ export type SHPLoaderOptions = LoaderOptions & {
5
+ shp?: {
6
+ _maxDimensions?: number;
7
+ };
8
+ };
9
+
10
+ export type DBFLoaderOptions = LoaderOptions & {
11
+ dbf?: {
12
+ encoding?: string;
13
+ shape?: 'rows' | 'table' | 'object-row-table';
14
+ };
15
+ /** @deprecated */
16
+ tables?: {
17
+ /** @deprecated */
18
+ format?: 'rows' | 'table' | 'object-row-table';
19
+ };
20
+ };
21
+
22
+ export type ShapefileLoaderOptions = LoaderOptions &
23
+ SHPLoaderOptions & {
24
+ shapefile?: {
25
+ shape?: 'geojson';
26
+ };
27
+ gis?: {
28
+ reproject?: boolean;
29
+ _targetCrs?: string;
30
+ /** @deprecated. Use options.shapefile.shape */
31
+ format?: 'geojson';
32
+ };
33
+ };
34
+
35
+ export type DBFRowsOutput = ObjectRowTable['data'];
36
+
37
+ /**
38
+ * DBF Table output. Deprecated in favor of ObjectRowTable
39
+ * @deprecated
40
+ */
41
+ export interface DBFTableOutput {
42
+ schema?: Schema;
43
+ rows: DBFRowsOutput;
44
+ }
45
+
46
+ export type DBFHeader = {
47
+ // Last updated date
48
+ year: number;
49
+ month: number;
50
+ day: number;
51
+ // Number of records in data file
52
+ nRecords: number;
53
+ // Length of header in bytes
54
+ headerLength: number;
55
+ // Length of each record
56
+ recordLength: number;
57
+ // Not sure if this is usually set
58
+ languageDriver: number;
59
+ };
60
+
61
+ export type DBFField = {
62
+ name: string;
63
+ dataType: string;
64
+ fieldLength: number;
65
+ decimal: number;
66
+ };
67
+
68
+ export type DBFResult = {
69
+ data: {[key: string]: any}[];
70
+ schema?: Schema;
71
+ error?: string;
72
+ dbfHeader?: DBFHeader;
73
+ dbfFields?: DBFField[];
74
+ progress?: {
75
+ bytesUsed: number;
76
+ rowsTotal: number;
77
+ rows: number;
78
+ };
79
+ };
@@ -1,10 +1,14 @@
1
+ type BinaryChunkReaderOptions = {
2
+ maxRewindBytes: number;
3
+ };
4
+
1
5
  export default class BinaryChunkReader {
2
6
  offset: number;
3
7
  arrayBuffers: ArrayBuffer[];
4
8
  ended: boolean;
5
9
  maxRewindBytes: number;
6
10
 
7
- constructor(options?: {[key: string]: any}) {
11
+ constructor(options?: BinaryChunkReaderOptions) {
8
12
  const {maxRewindBytes = 0} = options || {};
9
13
 
10
14
  /** current global offset into current array buffer*/
@@ -8,8 +8,8 @@ export async function* zipBatchIterators(
8
8
  iterator1: AsyncIterator<any[]>,
9
9
  iterator2: AsyncIterator<any[]>
10
10
  ): AsyncGenerator<number[][], void, unknown> {
11
- let batch1 = [];
12
- let batch2 = [];
11
+ let batch1: number[] = [];
12
+ let batch2: number[] = [];
13
13
  let iterator1Done: boolean = false;
14
14
  let iterator2Done: boolean = false;
15
15
 
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":"AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/dbf-loader.ts"],"names":["parseDBF","parseDBFInBatches","VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseInBatches"],"mappings":"AACA,SAAQA,QAAR,EAAkBC,iBAAlB,QAA0C,yBAA1C;AAIA,MAAMC,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AAKA,OAAO,MAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEL,OAJ4B;AAKrCM,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,OAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,mBAAD,CAR0B;AASrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,QAAQ,EAAE;AADP;AADE;AAT4B,CAAhC;AAiBP,OAAO,MAAMC,SAA2B,GAAG,EACzC,GAAGZ,eADsC;AAEzCa,EAAAA,KAAK,EAAE,OAAOC,WAAP,EAAoBL,OAApB,KAAgCZ,QAAQ,CAACiB,WAAD,EAAcL,OAAd,CAFN;AAGzCM,EAAAA,SAAS,EAAElB,QAH8B;AAIzCmB,EAAAA,cAAc,EAAElB;AAJyB,CAApC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"file":"dbf-loader.js"}
package/dist/index.js.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/index.ts"],"names":["ShapefileLoader","DBFLoader","DBFWorkerLoader","SHPLoader","SHPWorkerLoader"],"mappings":"AAAA,SAAQA,eAAR,QAA8B,oBAA9B;AACA,SAAQC,SAAR,EAAmBC,eAAnB,QAAyC,cAAzC;AACA,SAAQC,SAAR,EAAmBC,eAAnB,QAAyC,cAAzC","sourcesContent":["export {ShapefileLoader} from './shapefile-loader';\nexport {DBFLoader, DBFWorkerLoader} from './dbf-loader';\nexport {SHPLoader, SHPWorkerLoader} from './shp-loader';\n"],"file":"index.js"}