@loaders.gl/shapefile 3.1.7 → 3.2.0-alpha.2

This diff compares the publicly released contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (58)
  1. package/dist/dbf-worker.js +60 -22
  2. package/dist/dist.min.js +23 -10
  3. package/dist/es5/dbf-loader.js +1 -1
  4. package/dist/es5/dbf-loader.js.map +1 -1
  5. package/dist/es5/lib/parsers/parse-dbf.js +69 -53
  6. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  7. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  8. package/dist/es5/lib/parsers/parse-shp-geometry.js +2 -1
  9. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  10. package/dist/es5/lib/parsers/parse-shp.js +10 -2
  11. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  12. package/dist/es5/lib/parsers/types.js +2 -0
  13. package/dist/es5/lib/parsers/types.js.map +1 -0
  14. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  15. package/dist/es5/shapefile-loader.js +1 -1
  16. package/dist/es5/shapefile-loader.js.map +1 -1
  17. package/dist/es5/shp-loader.js +1 -1
  18. package/dist/es5/shp-loader.js.map +1 -1
  19. package/dist/esm/dbf-loader.js +1 -1
  20. package/dist/esm/dbf-loader.js.map +1 -1
  21. package/dist/esm/lib/parsers/parse-dbf.js +20 -9
  22. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
  23. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  24. package/dist/esm/lib/parsers/parse-shp-geometry.js +1 -1
  25. package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -1
  26. package/dist/esm/lib/parsers/parse-shp.js +10 -2
  27. package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
  28. package/dist/esm/lib/parsers/types.js +2 -0
  29. package/dist/esm/lib/parsers/types.js.map +1 -0
  30. package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -1
  31. package/dist/esm/shapefile-loader.js +1 -1
  32. package/dist/esm/shapefile-loader.js.map +1 -1
  33. package/dist/esm/shp-loader.js +1 -1
  34. package/dist/esm/shp-loader.js.map +1 -1
  35. package/dist/lib/parsers/parse-dbf.d.ts +4 -18
  36. package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
  37. package/dist/lib/parsers/parse-dbf.js +15 -8
  38. package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
  39. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
  40. package/dist/lib/parsers/parse-shapefile.js +0 -4
  41. package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
  42. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
  43. package/dist/lib/parsers/parse-shp-geometry.js +1 -1
  44. package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
  45. package/dist/lib/parsers/parse-shp.js +10 -2
  46. package/dist/lib/parsers/types.d.ts +63 -0
  47. package/dist/lib/parsers/types.d.ts.map +1 -0
  48. package/dist/lib/parsers/types.js +2 -0
  49. package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
  50. package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
  51. package/dist/shp-worker.js +55 -18
  52. package/package.json +5 -5
  53. package/src/lib/parsers/parse-dbf.ts +37 -58
  54. package/src/lib/parsers/parse-shapefile.ts +3 -6
  55. package/src/lib/parsers/parse-shp-geometry.ts +3 -2
  56. package/src/lib/parsers/parse-shp.ts +24 -10
  57. package/src/lib/parsers/types.ts +74 -0
  58. package/src/lib/streaming/binary-chunk-reader.ts +5 -1
package/dist/lib/parsers/types.d.ts ADDED
@@ -0,0 +1,63 @@
+ import { Schema, ObjectRowTable } from '@loaders.gl/schema';
+ import type { LoaderOptions } from '@loaders.gl/loader-utils';
+ export declare type SHPLoaderOptions = LoaderOptions & {
+     shp?: {
+         _maxDimensions?: number;
+     };
+ };
+ export declare type DBFLoaderOptions = LoaderOptions & {
+     dbf?: {
+         encoding?: string;
+         shape?: 'rows' | 'table' | 'object-row-table';
+     };
+ };
+ export declare type ShapefileLoaderOptions = LoaderOptions & SHPLoaderOptions & {
+     shapefile?: {
+         shape?: 'geojson';
+     };
+     gis?: {
+         reproject?: boolean;
+         _targetCrs?: string;
+         /** @deprecated. Use options.shapefile.shape */
+         format?: 'geojson';
+     };
+ };
+ export declare type DBFRowsOutput = ObjectRowTable['data'];
+ /**
+  * DBF Table output. Deprecated in favor of ObjectRowTable
+  * @deprecated
+  */
+ export interface DBFTableOutput {
+     schema?: Schema;
+     rows: DBFRowsOutput;
+ }
+ export declare type DBFHeader = {
+     year: number;
+     month: number;
+     day: number;
+     nRecords: number;
+     headerLength: number;
+     recordLength: number;
+     languageDriver: number;
+ };
+ export declare type DBFField = {
+     name: string;
+     dataType: string;
+     fieldLength: number;
+     decimal: number;
+ };
+ export declare type DBFResult = {
+     data: {
+         [key: string]: any;
+     }[];
+     schema?: Schema;
+     error?: string;
+     dbfHeader?: DBFHeader;
+     dbfFields?: DBFField[];
+     progress?: {
+         bytesUsed: number;
+         rowsTotal: number;
+         rows: number;
+     };
+ };
+ //# sourceMappingURL=types.d.ts.map
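These declarations give each loader a typed options bag. A minimal usage sketch, assuming the loaders are invoked through `load` from `@loaders.gl/core` (which is outside this diff); the file name and option values are illustrative:

```ts
import {load} from '@loaders.gl/core';
import {ShapefileLoader} from '@loaders.gl/shapefile';

// Option keys mirror the types declared above; values are illustrative.
const data = await load('parcels.shp', ShapefileLoader, {
  shp: {_maxDimensions: 4}, // SHPLoaderOptions: cap parsed coordinate dimensions
  shapefile: {shape: 'geojson'}, // ShapefileLoaderOptions: output format
  gis: {reproject: true, _targetCrs: 'WGS84'} // reproject using the sidecar .prj
});
```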
package/dist/lib/parsers/types.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/types.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,MAAM,EAAE,cAAc,EAAC,MAAM,oBAAoB,CAAC;AAC1D,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAE5D,oBAAY,gBAAgB,GAAG,aAAa,GAAG;IAC7C,GAAG,CAAC,EAAE;QACJ,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,CAAC;CACH,CAAC;AAEF,oBAAY,gBAAgB,GAAG,aAAa,GAAG;IAC7C,GAAG,CAAC,EAAE;QACJ,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,KAAK,CAAC,EAAE,MAAM,GAAG,OAAO,GAAG,kBAAkB,CAAC;KAC/C,CAAC;CACH,CAAC;AAEF,oBAAY,sBAAsB,GAAG,aAAa,GAChD,gBAAgB,GAAG;IACjB,SAAS,CAAC,EAAE;QACV,KAAK,CAAC,EAAE,SAAS,CAAC;KACnB,CAAC;IACF,GAAG,CAAC,EAAE;QACJ,SAAS,CAAC,EAAE,OAAO,CAAC;QACpB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,+CAA+C;QAC/C,MAAM,CAAC,EAAE,SAAS,CAAC;KACpB,CAAC;CACH,CAAC;AAEJ,oBAAY,aAAa,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC;AAEnD;;;GAGG;AACH,MAAM,WAAW,cAAc;IAC7B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,EAAE,aAAa,CAAC;CACrB;AAED,oBAAY,SAAS,GAAG;IAEtB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,MAAM,CAAC;IAEZ,QAAQ,EAAE,MAAM,CAAC;IAEjB,YAAY,EAAE,MAAM,CAAC;IAErB,YAAY,EAAE,MAAM,CAAC;IAErB,cAAc,EAAE,MAAM,CAAC;CACxB,CAAC;AAEF,oBAAY,QAAQ,GAAG;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,EAAE,MAAM,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,oBAAY,SAAS,GAAG;IACtB,IAAI,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAC,EAAE,CAAC;IAC7B,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,SAAS,CAAC;IACtB,SAAS,CAAC,EAAE,QAAQ,EAAE,CAAC;IACvB,QAAQ,CAAC,EAAE;QACT,SAAS,EAAE,MAAM,CAAC;QAClB,SAAS,EAAE,MAAM,CAAC;QAClB,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;CACH,CAAC"}
package/dist/lib/parsers/types.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/dist/lib/streaming/binary-chunk-reader.d.ts CHANGED
@@ -1,11 +1,12 @@
+ declare type BinaryChunkReaderOptions = {
+     maxRewindBytes: number;
+ };
  export default class BinaryChunkReader {
      offset: number;
      arrayBuffers: ArrayBuffer[];
      ended: boolean;
      maxRewindBytes: number;
-     constructor(options?: {
-         [key: string]: any;
-     });
+     constructor(options?: BinaryChunkReaderOptions);
      /**
       * @param arrayBuffer
       */
@@ -56,4 +57,5 @@ export default class BinaryChunkReader {
       */
      rewind(bytes: number): void;
  }
+ export {};
  //# sourceMappingURL=binary-chunk-reader.d.ts.map
package/dist/lib/streaming/binary-chunk-reader.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"binary-chunk-reader.d.ts","sourceRoot":"","sources":["../../../src/lib/streaming/binary-chunk-reader.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,OAAO,iBAAiB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,WAAW,EAAE,CAAC;IAC5B,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,MAAM,CAAC;gBAEX,OAAO,CAAC,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAC;IAY1C;;OAEG;IACH,KAAK,CAAC,WAAW,EAAE,WAAW,GAAG,IAAI;IAIrC,GAAG,IAAI,IAAI;IAKX;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO;IAWzC;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,GAAG,EAAE,GAAG,IAAI;IAwC9C;;;;;OAKG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IA+B3C;;OAEG;IACH,cAAc,IAAI,IAAI;IAUtB;;;;;;;;;OASG;IACH,oBAAoB,CAAC,aAAa,EAAE,GAAG,EAAE,GAAG,eAAe;IAoB3D;;OAEG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAGzB;;OAEG;IACH,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAI5B"}
+ {"version":3,"file":"binary-chunk-reader.d.ts","sourceRoot":"","sources":["../../../src/lib/streaming/binary-chunk-reader.ts"],"names":[],"mappings":"AAAA,aAAK,wBAAwB,GAAG;IAC9B,cAAc,EAAE,MAAM,CAAC;CACxB,CAAC;AAEF,MAAM,CAAC,OAAO,OAAO,iBAAiB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,EAAE,WAAW,EAAE,CAAC;IAC5B,KAAK,EAAE,OAAO,CAAC;IACf,cAAc,EAAE,MAAM,CAAC;gBAEX,OAAO,CAAC,EAAE,wBAAwB;IAY9C;;OAEG;IACH,KAAK,CAAC,WAAW,EAAE,WAAW,GAAG,IAAI;IAIrC,GAAG,IAAI,IAAI;IAKX;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO;IAWzC;;;;;OAKG;IACH,iBAAiB,CAAC,KAAK,EAAE,MAAM,GAAG,GAAG,EAAE,GAAG,IAAI;IAwC9C;;;;;OAKG;IACH,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IA+B3C;;OAEG;IACH,cAAc,IAAI,IAAI;IAUtB;;;;;;;;;OASG;IACH,oBAAoB,CAAC,aAAa,EAAE,GAAG,EAAE,GAAG,eAAe;IAoB3D;;OAEG;IACH,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAGzB;;OAEG;IACH,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;CAI5B"}
package/dist/shp-worker.js CHANGED
@@ -132,7 +132,7 @@
  // src/lib/parsers/parse-shp-geometry.ts
  var LITTLE_ENDIAN2 = true;
  function parseRecord(view, options) {
-   const { _maxDimensions } = options?.shp || {};
+   const { _maxDimensions = 4 } = options?.shp || {};
    let offset = 0;
    const type = view.getInt32(offset, LITTLE_ENDIAN2);
    offset += Int32Array.BYTES_PER_ELEMENT;
@@ -318,7 +318,13 @@
      this.binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
      this.state = STATE.EXPECTING_HEADER;
      this.result = {
-       geometries: []
+       geometries: [],
+       progress: {
+         bytesTotal: NaN,
+         bytesUsed: NaN,
+         rows: NaN
+       },
+       currentIndex: NaN
      };
      this.options = options;
    }
@@ -394,7 +400,7 @@
        binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
        return state;
      }
-     const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header.type || recordHeader.recordNumber !== result.currentIndex;
+     const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header?.type || recordHeader.recordNumber !== result.currentIndex;
      if (invalidRecord) {
        binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
      } else {
@@ -424,7 +430,7 @@
  }

  // src/shp-loader.ts
- var VERSION = true ? "3.1.7" : "latest";
+ var VERSION = true ? "3.2.0-alpha.2" : "latest";
  var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
  var SHPWorkerLoader = {
    name: "SHP",
@@ -485,16 +491,33 @@
  }

  // ../worker-utils/src/lib/worker-farm/worker-body.ts
+ function getParentPort() {
+   let parentPort;
+   try {
+     eval("globalThis.parentPort = require('worker_threads').parentPort");
+     parentPort = globalThis.parentPort;
+   } catch {
+   }
+   return parentPort;
+ }
  var onMessageWrapperMap = new Map();
  var WorkerBody = class {
+   static inWorkerThread() {
+     return typeof self !== "undefined" || Boolean(getParentPort());
+   }
    static set onmessage(onMessage) {
-     self.onmessage = (message) => {
-       if (!isKnownMessage(message)) {
-         return;
-       }
-       const { type, payload } = message.data;
+     function handleMessage(message) {
+       const parentPort3 = getParentPort();
+       const { type, payload } = parentPort3 ? message : message.data;
        onMessage(type, payload);
-     };
+     }
+     const parentPort2 = getParentPort();
+     if (parentPort2) {
+       parentPort2.on("message", handleMessage);
+       parentPort2.on("exit", () => console.debug("Node worker closing"));
+     } else {
+       globalThis.onmessage = handleMessage;
+     }
    }
    static addEventListener(onMessage) {
      let onMessageWrapper = onMessageWrapperMap.get(onMessage);
@@ -503,22 +526,36 @@
        if (!isKnownMessage(message)) {
          return;
        }
-       const { type, payload } = message.data;
+       const parentPort3 = getParentPort();
+       const { type, payload } = parentPort3 ? message : message.data;
        onMessage(type, payload);
      };
    }
-   self.addEventListener("message", onMessageWrapper);
+   const parentPort2 = getParentPort();
+   if (parentPort2) {
+     console.error("not implemented");
+   } else {
+     globalThis.addEventListener("message", onMessageWrapper);
+   }
  }
  static removeEventListener(onMessage) {
    const onMessageWrapper = onMessageWrapperMap.get(onMessage);
    onMessageWrapperMap.delete(onMessage);
-   self.removeEventListener("message", onMessageWrapper);
+   const parentPort2 = getParentPort();
+   if (parentPort2) {
+     console.error("not implemented");
+   } else {
+     globalThis.removeEventListener("message", onMessageWrapper);
+   }
  }
  static postMessage(type, payload) {
-   if (self) {
-     const data = { source: "loaders.gl", type, payload };
-     const transferList = getTransferList(payload);
-     self.postMessage(data, transferList);
+   const data = { source: "loaders.gl", type, payload };
+   const transferList = getTransferList(payload);
+   const parentPort2 = getParentPort();
+   if (parentPort2) {
+     parentPort2.postMessage(data, transferList);
+   } else {
+     globalThis.postMessage(data, transferList);
    }
  }
  };
@@ -530,7 +567,7 @@
  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
  var requestId = 0;
  function createLoaderWorker(loader) {
-   if (typeof self === "undefined") {
+   if (!WorkerBody.inWorkerThread()) {
      return;
    }
    WorkerBody.onmessage = async (type, payload) => {
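The worker changes above let the bundled worker run under Node.js `worker_threads` as well as in browser workers. A hedged sketch of the detection pattern, following the names in the bundle above (the `eval` keeps bundlers such as esbuild from statically resolving `worker_threads` in browser builds):

```ts
// Sketch of the pattern used in the bundle above, not a drop-in implementation.
function getParentPort(): any {
  try {
    // eval() hides the require from bundlers so browser builds don't break.
    // eslint-disable-next-line no-eval
    return eval("require('worker_threads').parentPort");
  } catch {
    return undefined; // not running under Node.js worker_threads
  }
}

function postToHost(data: unknown, transferList: Transferable[] = []): void {
  const parentPort = getParentPort();
  if (parentPort) {
    parentPort.postMessage(data, transferList); // Node.js worker thread
  } else {
    (globalThis as any).postMessage(data, transferList); // browser worker
  }
}
```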
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@loaders.gl/shapefile",
    "description": "Loader for the Shapefile Format",
-   "version": "3.1.7",
+   "version": "3.2.0-alpha.2",
    "license": "MIT",
    "publishConfig": {
      "access": "public"
@@ -37,10 +37,10 @@
      "build-worker-dbf": "esbuild src/workers/dbf-worker.ts --bundle --outfile=dist/dbf-worker.js --define:__VERSION__=\\\"$npm_package_version\\\""
    },
    "dependencies": {
-     "@loaders.gl/gis": "3.1.7",
-     "@loaders.gl/loader-utils": "3.1.7",
-     "@loaders.gl/schema": "3.1.7",
+     "@loaders.gl/gis": "3.2.0-alpha.2",
+     "@loaders.gl/loader-utils": "3.2.0-alpha.2",
+     "@loaders.gl/schema": "3.2.0-alpha.2",
      "@math.gl/proj4": "^3.5.1"
    },
-   "gitHead": "e48d29e426e9f23879875763e73efc5c58345be1"
+   "gitHead": "52a602739cbfce60fc314f474efc984d199dff78"
  }
package/src/lib/parsers/parse-dbf.ts CHANGED
@@ -1,47 +1,21 @@
- import {Schema, Field, Bool, Utf8, Float64, TimestampMillisecond} from '@loaders.gl/schema';
+ import {
+   Schema,
+   Field,
+   Bool,
+   Utf8,
+   Float64,
+   TimestampMillisecond,
+   ObjectRowTable
+ } from '@loaders.gl/schema';
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
-
- type DBFRowsOutput = object[];
-
- interface DBFTableOutput {
-   schema?: Schema;
-   rows: DBFRowsOutput;
- }
-
- type DBFHeader = {
-   // Last updated date
-   year: number;
-   month: number;
-   day: number;
-   // Number of records in data file
-   nRecords: number;
-   // Length of header in bytes
-   headerLength: number;
-   // Length of each record
-   recordLength: number;
-   // Not sure if this is usually set
-   languageDriver: number;
- };
-
- type DBFField = {
-   name: string;
-   dataType: string;
-   fieldLength: number;
-   decimal: number;
- };
-
- type DBFResult = {
-   data: {[key: string]: any}[];
-   schema?: Schema;
-   error?: string;
-   dbfHeader?: DBFHeader;
-   dbfFields?: DBFField[];
-   progress?: {
-     bytesUsed: number;
-     rowsTotal: number;
-     rows: number;
-   };
- };
+ import {
+   DBFLoaderOptions,
+   DBFResult,
+   DBFTableOutput,
+   DBFHeader,
+   DBFRowsOutput,
+   DBFField
+ } from './types';

  const LITTLE_ENDIAN = true;
  const DBF_HEADER_SIZE = 32;
@@ -98,21 +72,27 @@ class DBFParser {
   */
  export function parseDBF(
    arrayBuffer: ArrayBuffer,
-   options: any = {}
- ): DBFRowsOutput | DBFTableOutput {
-   const loaderOptions = options.dbf || {};
-   const {encoding} = loaderOptions;
+   options: DBFLoaderOptions = {}
+ ): DBFRowsOutput | DBFTableOutput | ObjectRowTable {
+   const {encoding = 'latin1'} = options.dbf || {};

    const dbfParser = new DBFParser({encoding});
    dbfParser.write(arrayBuffer);
    dbfParser.end();

    const {data, schema} = dbfParser.result;
-   switch (options.tables && options.tables.format) {
+   const shape = options?.tables?.format || options?.dbf?.shape;
+   switch (shape) {
+     case 'object-row-table': {
+       const table: ObjectRowTable = {
+         shape: 'object-row-table',
+         schema,
+         data
+       };
+       return table;
+     }
      case 'table':
-       // TODO - parse columns
        return {schema, rows: data};
-
      case 'rows':
      default:
        return data;
@@ -124,10 +104,9 @@
   */
  export async function* parseDBFInBatches(
    asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
-   options: any = {}
+   options: DBFLoaderOptions = {}
  ): AsyncIterable<DBFHeader | DBFRowsOutput | DBFTableOutput> {
-   const loaderOptions = options.dbf || {};
-   const {encoding} = loaderOptions;
+   const {encoding = 'latin1'} = options.dbf || {};

    const parser = new DBFParser({encoding});
    let headerReturned = false;
@@ -160,7 +139,7 @@ export async function* parseDBFInBatches(
  function parseState(
    state: STATE,
    result: DBFResult,
-   binaryReader: {[key: string]: any},
+   binaryReader: BinaryChunkReader,
    textDecoder: TextDecoder
  ): STATE {
    // eslint-disable-next-line no-constant-condition
@@ -173,7 +152,8 @@

      case STATE.START:
        // Parse initial file header
-       const dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
+       // DBF Header
+       const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
        if (!dataView) {
          return state;
        }
@@ -190,8 +170,7 @@
        // Parse DBF field descriptors (schema)
        const fieldDescriptorView = binaryReader.getDataView(
          // @ts-ignore
-         result.dbfHeader.headerLength - DBF_HEADER_SIZE,
-         'DBF field descriptors'
+         result.dbfHeader.headerLength - DBF_HEADER_SIZE
        );
        if (!fieldDescriptorView) {
          return state;
@@ -398,7 +377,7 @@ function parseCharacter(text: string): string | null {
   * @returns Field
   */
  // eslint-disable
- function makeField({name, dataType, fieldLength, decimal}): Field {
+ function makeField({name, dataType, fieldLength, decimal}: DBFField): Field {
    switch (dataType) {
      case 'B':
        return new Field(name, new Float64(), true);
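The new `dbf.shape` option selects among these return shapes. A minimal sketch of driving it through `DBFLoader`, loading via `@loaders.gl/core` (outside this diff); the file name is illustrative:

```ts
import {load} from '@loaders.gl/core';
import {DBFLoader} from '@loaders.gl/shapefile';

// 'object-row-table' wraps the rows together with the derived schema;
// 'rows' (the default) keeps returning the bare array of row objects.
const table = await load('attributes.dbf', DBFLoader, {
  dbf: {encoding: 'latin1', shape: 'object-row-table'}
});
// Expected result shape, per ObjectRowTable:
// {shape: 'object-row-table', schema, data: [{FIELD_A: ..., FIELD_B: ...}, ...]}
```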
package/src/lib/parsers/parse-shapefile.ts CHANGED
@@ -2,6 +2,7 @@
  import type {SHXOutput} from './parse-shx';
  import type {SHPHeader} from './parse-shp-header';
  import type {LoaderContext} from '@loaders.gl/loader-utils';
+ import type {ShapefileLoaderOptions} from './types';

  import {binaryToGeometry, transformGeoJsonCoords} from '@loaders.gl/gis';
  import {Proj4Projection} from '@math.gl/proj4';
@@ -20,15 +21,11 @@ interface ShapefileOutput {
  }
  /**
   * Parsing of file in batches
-  *
-  * @param asyncIterator
-  * @param options
-  * @param context
   */
  // eslint-disable-next-line max-statements, complexity
  export async function* parseShapefileInBatches(
    asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
-   options?: any,
+   options?: ShapefileLoaderOptions,
    context?: LoaderContext
  ): AsyncIterable<ShapefileOutput> {
    const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
@@ -109,7 +106,7 @@ export async function* parseShapefileInBatches(
   */
  export async function parseShapefile(
    arrayBuffer: ArrayBuffer,
-   options?: {[key: string]: any},
+   options?: ShapefileLoaderOptions,
    context?: LoaderContext
  ): Promise<ShapefileOutput> {
    const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
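For context, `reproject` and `_targetCrs` feed the reprojection step built on the `Proj4Projection` and `transformGeoJsonCoords` imports shown above. A hedged, simplified sketch of that step (the real code reads the source CRS from the sidecar .prj file; the helper name is hypothetical):

```ts
import {Proj4Projection} from '@math.gl/proj4';
import {transformGeoJsonCoords} from '@loaders.gl/gis';

// Simplified sketch: reproject GeoJSON features from the shapefile's CRS
// (a WKT string taken from the .prj file) into the requested target CRS.
function reprojectFeatures(features: any[], prjWkt: string, _targetCrs = 'WGS84') {
  const projection = new Proj4Projection({from: prjWkt, to: _targetCrs});
  return transformGeoJsonCoords(features, (coord) => projection.project(coord));
}
```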
package/src/lib/parsers/parse-shp-geometry.ts CHANGED
@@ -1,4 +1,5 @@
  import {BinaryGeometry, BinaryGeometryType} from '@loaders.gl/schema';
+ import {SHPLoaderOptions} from './types';

  const LITTLE_ENDIAN = true;

@@ -9,8 +10,8 @@ const LITTLE_ENDIAN = true;
   * @return Binary Geometry Object
   */
  // eslint-disable-next-line complexity
- export function parseRecord(view: DataView, options?: {shp?: any}): BinaryGeometry | null {
-   const {_maxDimensions} = options?.shp || {};
+ export function parseRecord(view: DataView, options?: SHPLoaderOptions): BinaryGeometry | null {
+   const {_maxDimensions = 4} = options?.shp || {};

    let offset = 0;
    const type: number = view.getInt32(offset, LITTLE_ENDIAN);
package/src/lib/parsers/parse-shp.ts CHANGED
@@ -1,8 +1,8 @@
- import type {LoaderOptions} from '@loaders.gl/loader-utils';
  import type {BinaryGeometry} from '@loaders.gl/schema';
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
- import {parseSHPHeader} from './parse-shp-header';
+ import {parseSHPHeader, SHPHeader} from './parse-shp-header';
  import {parseRecord} from './parse-shp-geometry';
+ import {SHPLoaderOptions} from './types';

  const LITTLE_ENDIAN = true;
  const BIG_ENDIAN = false;
@@ -20,20 +20,34 @@ const STATE = {
  };

  type SHPResult = {
-   geometries: [];
-   header?: {};
+   geometries: (BinaryGeometry | null)[];
+   header?: SHPHeader;
    error?: string;
+   progress: {
+     bytesUsed: number;
+     bytesTotal: number;
+     rows: number;
+   };
+   currentIndex: number;
  };

  class SHPParser {
-   options?: any = {};
+   options?: SHPLoaderOptions = {};
    binaryReader = new BinaryChunkReader({maxRewindBytes: SHP_RECORD_HEADER_SIZE});
    state = STATE.EXPECTING_HEADER;
    result: SHPResult = {
-     geometries: []
+     geometries: [],
+     // Initialize with number values to make TS happy
+     // These are initialized for real in STATE.EXPECTING_HEADER
+     progress: {
+       bytesTotal: NaN,
+       bytesUsed: NaN,
+       rows: NaN
+     },
+     currentIndex: NaN
    };

-   constructor(options?: LoaderOptions) {
+   constructor(options?: SHPLoaderOptions) {
      this.options = options;
    }

@@ -109,9 +123,9 @@ export async function* parseSHPInBatches(
  /* eslint-disable complexity, max-depth */
  function parseState(
    state: number,
-   result: {[key: string]: any},
+   result: SHPResult,
    binaryReader: BinaryChunkReader,
-   options: {shp?: any}
+   options?: SHPLoaderOptions
  ): number {
    // eslint-disable-next-line no-constant-condition
    while (true) {
@@ -157,7 +171,7 @@

    const invalidRecord =
      recordHeader.byteLength < 4 ||
-     recordHeader.type !== result.header.type ||
+     recordHeader.type !== result.header?.type ||
      recordHeader.recordNumber !== result.currentIndex;

    // All records must have at least four bytes (for the record shape type)
package/src/lib/parsers/types.ts ADDED
@@ -0,0 +1,74 @@
+ import {Schema, ObjectRowTable} from '@loaders.gl/schema';
+ import type {LoaderOptions} from '@loaders.gl/loader-utils';
+
+ export type SHPLoaderOptions = LoaderOptions & {
+   shp?: {
+     _maxDimensions?: number;
+   };
+ };
+
+ export type DBFLoaderOptions = LoaderOptions & {
+   dbf?: {
+     encoding?: string;
+     shape?: 'rows' | 'table' | 'object-row-table';
+   };
+ };
+
+ export type ShapefileLoaderOptions = LoaderOptions &
+   SHPLoaderOptions & {
+     shapefile?: {
+       shape?: 'geojson';
+     };
+     gis?: {
+       reproject?: boolean;
+       _targetCrs?: string;
+       /** @deprecated. Use options.shapefile.shape */
+       format?: 'geojson';
+     };
+   };
+
+ export type DBFRowsOutput = ObjectRowTable['data'];
+
+ /**
+  * DBF Table output. Deprecated in favor of ObjectRowTable
+  * @deprecated
+  */
+ export interface DBFTableOutput {
+   schema?: Schema;
+   rows: DBFRowsOutput;
+ }
+
+ export type DBFHeader = {
+   // Last updated date
+   year: number;
+   month: number;
+   day: number;
+   // Number of records in data file
+   nRecords: number;
+   // Length of header in bytes
+   headerLength: number;
+   // Length of each record
+   recordLength: number;
+   // Not sure if this is usually set
+   languageDriver: number;
+ };
+
+ export type DBFField = {
+   name: string;
+   dataType: string;
+   fieldLength: number;
+   decimal: number;
+ };
+
+ export type DBFResult = {
+   data: {[key: string]: any}[];
+   schema?: Schema;
+   error?: string;
+   dbfHeader?: DBFHeader;
+   dbfFields?: DBFField[];
+   progress?: {
+     bytesUsed: number;
+     rowsTotal: number;
+     rows: number;
+   };
+ };
package/src/lib/streaming/binary-chunk-reader.ts CHANGED
@@ -1,10 +1,14 @@
+ type BinaryChunkReaderOptions = {
+   maxRewindBytes: number;
+ };
+
  export default class BinaryChunkReader {
    offset: number;
    arrayBuffers: ArrayBuffer[];
    ended: boolean;
    maxRewindBytes: number;

-   constructor(options?: {[key: string]: any}) {
+   constructor(options?: BinaryChunkReaderOptions) {
      const {maxRewindBytes = 0} = options || {};

      /** current global offset into current array buffer*/
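The constructor's former `{[key: string]: any}` bag is narrowed to a dedicated options type, so misspelled keys fail at compile time. An illustrative sketch (BinaryChunkReader is an internal module, so the relative import path is hypothetical):

```ts
import BinaryChunkReader from './lib/streaming/binary-chunk-reader'; // hypothetical path

// The SHP parser above constructs its reader as
//   new BinaryChunkReader({maxRewindBytes: SHP_RECORD_HEADER_SIZE})
// so it can rewind past a partially received record header.
const reader = new BinaryChunkReader({maxRewindBytes: 12}); // 12 is illustrative

// With the typed options, a misspelled key is now a compile-time error
// instead of being silently ignored:
// new BinaryChunkReader({maxRewindbytes: 12}); // error: unknown property
```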