@loaders.gl/shapefile 3.2.0-alpha.1 → 3.2.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/dbf-worker.js +17 -9
  2. package/dist/dist.min.js +23 -10
  3. package/dist/es5/dbf-loader.js +1 -1
  4. package/dist/es5/lib/parsers/parse-dbf.js +69 -53
  5. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  6. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  7. package/dist/es5/lib/parsers/parse-shp-geometry.js +2 -1
  8. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  9. package/dist/es5/lib/parsers/parse-shp.js +10 -2
  10. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  11. package/dist/es5/lib/parsers/types.js +2 -0
  12. package/dist/es5/lib/parsers/types.js.map +1 -0
  13. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  14. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  15. package/dist/es5/shapefile-loader.js +1 -1
  16. package/dist/es5/shp-loader.js +1 -1
  17. package/dist/esm/dbf-loader.js +1 -1
  18. package/dist/esm/lib/parsers/parse-dbf.js +20 -9
  19. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
  20. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  21. package/dist/esm/lib/parsers/parse-shp-geometry.js +1 -1
  22. package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -1
  23. package/dist/esm/lib/parsers/parse-shp.js +10 -2
  24. package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
  25. package/dist/esm/lib/parsers/types.js +2 -0
  26. package/dist/esm/lib/parsers/types.js.map +1 -0
  27. package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -1
  28. package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -1
  29. package/dist/esm/shapefile-loader.js +1 -1
  30. package/dist/esm/shp-loader.js +1 -1
  31. package/dist/lib/parsers/parse-dbf.d.ts +4 -18
  32. package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
  33. package/dist/lib/parsers/parse-dbf.js +15 -8
  34. package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
  35. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
  36. package/dist/lib/parsers/parse-shapefile.js +0 -4
  37. package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
  38. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
  39. package/dist/lib/parsers/parse-shp-geometry.js +1 -1
  40. package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
  41. package/dist/lib/parsers/parse-shp.js +10 -2
  42. package/dist/lib/parsers/types.d.ts +63 -0
  43. package/dist/lib/parsers/types.d.ts.map +1 -0
  44. package/dist/lib/parsers/types.js +2 -0
  45. package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
  46. package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
  47. package/dist/shp-worker.js +12 -5
  48. package/package.json +5 -5
  49. package/src/lib/parsers/parse-dbf.ts +37 -58
  50. package/src/lib/parsers/parse-shapefile.ts +3 -6
  51. package/src/lib/parsers/parse-shp-geometry.ts +3 -2
  52. package/src/lib/parsers/parse-shp.ts +24 -10
  53. package/src/lib/parsers/types.ts +74 -0
  54. package/src/lib/streaming/binary-chunk-reader.ts +5 -1
  55. package/src/lib/streaming/zip-batch-iterators.ts +2 -2
package/dist/shp-worker.js CHANGED
@@ -132,7 +132,7 @@
  // src/lib/parsers/parse-shp-geometry.ts
  var LITTLE_ENDIAN2 = true;
  function parseRecord(view, options) {
-   const { _maxDimensions } = options?.shp || {};
+   const { _maxDimensions = 4 } = options?.shp || {};
    let offset = 0;
    const type = view.getInt32(offset, LITTLE_ENDIAN2);
    offset += Int32Array.BYTES_PER_ELEMENT;
@@ -318,7 +318,13 @@
    this.binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
    this.state = STATE.EXPECTING_HEADER;
    this.result = {
-     geometries: []
+     geometries: [],
+     progress: {
+       bytesTotal: NaN,
+       bytesUsed: NaN,
+       rows: NaN
+     },
+     currentIndex: NaN
    };
    this.options = options;
  }
@@ -394,7 +400,7 @@
      binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
      return state;
    }
-   const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header.type || recordHeader.recordNumber !== result.currentIndex;
+   const invalidRecord = recordHeader.byteLength < 4 || recordHeader.type !== result.header?.type || recordHeader.recordNumber !== result.currentIndex;
    if (invalidRecord) {
      binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
    } else {
@@ -424,7 +430,7 @@
  }

  // src/shp-loader.ts
- var VERSION = true ? "3.2.0-alpha.1" : "latest";
+ var VERSION = true ? "3.2.0-alpha.4" : "latest";
  var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
  var SHPWorkerLoader = {
    name: "SHP",
@@ -568,12 +574,13 @@
    switch (type) {
      case "process":
        try {
-         const { input, options = {} } = payload;
+         const { input, options = {}, context = {} } = payload;
          const result = await parseData({
            loader,
            arrayBuffer: input,
            options,
            context: {
+             ...context,
              parse: parseOnMainThread
            }
          });
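Note: besides the version bump, the worker's `process` handler now forwards the `context` received in the message payload into `parseData`, so loaders invoked on the worker thread inherit the outer parse context instead of only `parse: parseOnMainThread`. A minimal consumption sketch (hypothetical file URL; assumes the standard @loaders.gl/core `load` API and its `worker` option):

    import {load} from '@loaders.gl/core';
    import {SHPLoader} from '@loaders.gl/shapefile';

    // Runs dist/shp-worker.js on a worker thread when workers are available
    const binaryGeometries = await load('data/parcels.shp', SHPLoader, {worker: true});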
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "name": "@loaders.gl/shapefile",
    "description": "Loader for the Shapefile Format",
-   "version": "3.2.0-alpha.1",
+   "version": "3.2.0-alpha.4",
    "license": "MIT",
    "publishConfig": {
      "access": "public"
@@ -37,10 +37,10 @@
      "build-worker-dbf": "esbuild src/workers/dbf-worker.ts --bundle --outfile=dist/dbf-worker.js --define:__VERSION__=\\\"$npm_package_version\\\""
    },
    "dependencies": {
-     "@loaders.gl/gis": "3.2.0-alpha.1",
-     "@loaders.gl/loader-utils": "3.2.0-alpha.1",
-     "@loaders.gl/schema": "3.2.0-alpha.1",
+     "@loaders.gl/gis": "3.2.0-alpha.4",
+     "@loaders.gl/loader-utils": "3.2.0-alpha.4",
+     "@loaders.gl/schema": "3.2.0-alpha.4",
      "@math.gl/proj4": "^3.5.1"
    },
-   "gitHead": "423a2815092b08dcf93ad5b7dc2940b167305afe"
+   "gitHead": "f030d39d5cdedb137e43e757a9da10dd637857fd"
  }
package/src/lib/parsers/parse-dbf.ts CHANGED
@@ -1,47 +1,21 @@
- import {Schema, Field, Bool, Utf8, Float64, TimestampMillisecond} from '@loaders.gl/schema';
+ import {
+   Schema,
+   Field,
+   Bool,
+   Utf8,
+   Float64,
+   TimestampMillisecond,
+   ObjectRowTable
+ } from '@loaders.gl/schema';
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
-
- type DBFRowsOutput = object[];
-
- interface DBFTableOutput {
-   schema?: Schema;
-   rows: DBFRowsOutput;
- }
-
- type DBFHeader = {
-   // Last updated date
-   year: number;
-   month: number;
-   day: number;
-   // Number of records in data file
-   nRecords: number;
-   // Length of header in bytes
-   headerLength: number;
-   // Length of each record
-   recordLength: number;
-   // Not sure if this is usually set
-   languageDriver: number;
- };
-
- type DBFField = {
-   name: string;
-   dataType: string;
-   fieldLength: number;
-   decimal: number;
- };
-
- type DBFResult = {
-   data: {[key: string]: any}[];
-   schema?: Schema;
-   error?: string;
-   dbfHeader?: DBFHeader;
-   dbfFields?: DBFField[];
-   progress?: {
-     bytesUsed: number;
-     rowsTotal: number;
-     rows: number;
-   };
- };
+ import {
+   DBFLoaderOptions,
+   DBFResult,
+   DBFTableOutput,
+   DBFHeader,
+   DBFRowsOutput,
+   DBFField
+ } from './types';

  const LITTLE_ENDIAN = true;
  const DBF_HEADER_SIZE = 32;
@@ -98,21 +72,27 @@ class DBFParser {
   */
  export function parseDBF(
    arrayBuffer: ArrayBuffer,
-   options: any = {}
- ): DBFRowsOutput | DBFTableOutput {
-   const loaderOptions = options.dbf || {};
-   const {encoding} = loaderOptions;
+   options: DBFLoaderOptions = {}
+ ): DBFRowsOutput | DBFTableOutput | ObjectRowTable {
+   const {encoding = 'latin1'} = options.dbf || {};

    const dbfParser = new DBFParser({encoding});
    dbfParser.write(arrayBuffer);
    dbfParser.end();

    const {data, schema} = dbfParser.result;
-   switch (options.tables && options.tables.format) {
+   const shape = options?.tables?.format || options?.dbf?.shape;
+   switch (shape) {
+     case 'object-row-table': {
+       const table: ObjectRowTable = {
+         shape: 'object-row-table',
+         schema,
+         data
+       };
+       return table;
+     }
      case 'table':
-       // TODO - parse columns
        return {schema, rows: data};
-
      case 'rows':
      default:
        return data;
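The new `'object-row-table'` branch returns the rows together with their schema in one typed object. A hedged usage sketch (hypothetical .dbf URL; assumes the package's public DBFLoader export and @loaders.gl/core):

    import {load} from '@loaders.gl/core';
    import {DBFLoader} from '@loaders.gl/shapefile';

    const table = await load('data/attributes.dbf', DBFLoader, {
      dbf: {shape: 'object-row-table', encoding: 'latin1'}
    });
    // table.shape === 'object-row-table'; table.data holds the rows, table.schema the fields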
@@ -124,10 +104,9 @@
   */
  export async function* parseDBFInBatches(
    asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
-   options: any = {}
+   options: DBFLoaderOptions = {}
  ): AsyncIterable<DBFHeader | DBFRowsOutput | DBFTableOutput> {
-   const loaderOptions = options.dbf || {};
-   const {encoding} = loaderOptions;
+   const {encoding = 'latin1'} = options.dbf || {};

    const parser = new DBFParser({encoding});
    let headerReturned = false;
@@ -160,7 +139,7 @@
  function parseState(
    state: STATE,
    result: DBFResult,
-   binaryReader: {[key: string]: any},
+   binaryReader: BinaryChunkReader,
    textDecoder: TextDecoder
  ): STATE {
    // eslint-disable-next-line no-constant-condition
@@ -173,7 +152,8 @@ function parseState

      case STATE.START:
        // Parse initial file header
-       const dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
+       // DBF Header
+       const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
        if (!dataView) {
          return state;
        }
@@ -190,8 +170,7 @@
        // Parse DBF field descriptors (schema)
        const fieldDescriptorView = binaryReader.getDataView(
          // @ts-ignore
-         result.dbfHeader.headerLength - DBF_HEADER_SIZE,
-         'DBF field descriptors'
+         result.dbfHeader.headerLength - DBF_HEADER_SIZE
        );
        if (!fieldDescriptorView) {
          return state;
@@ -398,7 +377,7 @@
   * @returns Field
   */
  // eslint-disable
- function makeField({name, dataType, fieldLength, decimal}): Field {
+ function makeField({name, dataType, fieldLength, decimal}: DBFField): Field {
    switch (dataType) {
      case 'B':
        return new Field(name, new Float64(), true);
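With the `DBFField` annotation, the descriptor-to-schema mapping is now type-checked. For the `'B'` case shown above, a call like the following (illustrative values) yields a nullable Float64 field:

    const field = makeField({name: 'elevation', dataType: 'B', fieldLength: 8, decimal: 0});
    // => new Field('elevation', new Float64(), true)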
package/src/lib/parsers/parse-shapefile.ts CHANGED
@@ -2,6 +2,7 @@
  import type {SHXOutput} from './parse-shx';
  import type {SHPHeader} from './parse-shp-header';
  import type {LoaderContext} from '@loaders.gl/loader-utils';
+ import type {ShapefileLoaderOptions} from './types';

  import {binaryToGeometry, transformGeoJsonCoords} from '@loaders.gl/gis';
  import {Proj4Projection} from '@math.gl/proj4';
@@ -20,15 +21,11 @@ interface ShapefileOutput {
  }
  /**
   * Parsing of file in batches
-  *
-  * @param asyncIterator
-  * @param options
-  * @param context
   */
  // eslint-disable-next-line max-statements, complexity
  export async function* parseShapefileInBatches(
    asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
-   options?: any,
+   options?: ShapefileLoaderOptions,
    context?: LoaderContext
  ): AsyncIterable<ShapefileOutput> {
    const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
@@ -109,7 +106,7 @@ export async function* parseShapefileInBatches(
   */
  export async function parseShapefile(
    arrayBuffer: ArrayBuffer,
-   options?: {[key: string]: any},
+   options?: ShapefileLoaderOptions,
    context?: LoaderContext
  ): Promise<ShapefileOutput> {
    const {reproject = false, _targetCrs = 'WGS84'} = options?.gis || {};
package/src/lib/parsers/parse-shp-geometry.ts CHANGED
@@ -1,4 +1,5 @@
  import {BinaryGeometry, BinaryGeometryType} from '@loaders.gl/schema';
+ import {SHPLoaderOptions} from './types';

  const LITTLE_ENDIAN = true;

@@ -9,8 +10,8 @@ const LITTLE_ENDIAN = true;
   * @return Binary Geometry Object
   */
  // eslint-disable-next-line complexity
- export function parseRecord(view: DataView, options?: {shp?: any}): BinaryGeometry | null {
-   const {_maxDimensions} = options?.shp || {};
+ export function parseRecord(view: DataView, options?: SHPLoaderOptions): BinaryGeometry | null {
+   const {_maxDimensions = 4} = options?.shp || {};

    let offset = 0;
    const type: number = view.getInt32(offset, LITTLE_ENDIAN);
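Defaulting `_maxDimensions` to 4 keeps all of X, Y, Z and M when present; previously an undefined value could propagate into the dimension handling. Callers can still clamp explicitly, e.g. (a sketch, assuming `recordView` is a DataView over one record's contents):

    // Drop Z and M values from e.g. PointZ/PolygonZ records, keeping XY only
    const geometry = parseRecord(recordView, {shp: {_maxDimensions: 2}});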
package/src/lib/parsers/parse-shp.ts CHANGED
@@ -1,8 +1,8 @@
- import type {LoaderOptions} from '@loaders.gl/loader-utils';
  import type {BinaryGeometry} from '@loaders.gl/schema';
  import BinaryChunkReader from '../streaming/binary-chunk-reader';
- import {parseSHPHeader} from './parse-shp-header';
+ import {parseSHPHeader, SHPHeader} from './parse-shp-header';
  import {parseRecord} from './parse-shp-geometry';
+ import {SHPLoaderOptions} from './types';

  const LITTLE_ENDIAN = true;
  const BIG_ENDIAN = false;
@@ -20,20 +20,34 @@ const STATE = {
  };

  type SHPResult = {
-   geometries: [];
-   header?: {};
+   geometries: (BinaryGeometry | null)[];
+   header?: SHPHeader;
    error?: string;
+   progress: {
+     bytesUsed: number;
+     bytesTotal: number;
+     rows: number;
+   };
+   currentIndex: number;
  };

  class SHPParser {
-   options?: any = {};
+   options?: SHPLoaderOptions = {};
    binaryReader = new BinaryChunkReader({maxRewindBytes: SHP_RECORD_HEADER_SIZE});
    state = STATE.EXPECTING_HEADER;
    result: SHPResult = {
-     geometries: []
+     geometries: [],
+     // Initialize with number values to make TS happy
+     // These are initialized for real in STATE.EXPECTING_HEADER
+     progress: {
+       bytesTotal: NaN,
+       bytesUsed: NaN,
+       rows: NaN
+     },
+     currentIndex: NaN
    };

-   constructor(options?: LoaderOptions) {
+   constructor(options?: SHPLoaderOptions) {
      this.options = options;
    }

@@ -109,9 +123,9 @@ export async function* parseSHPInBatches(
  /* eslint-disable complexity, max-depth */
  function parseState(
    state: number,
-   result: {[key: string]: any},
+   result: SHPResult,
    binaryReader: BinaryChunkReader,
-   options: {shp?: any}
+   options?: SHPLoaderOptions
  ): number {
    // eslint-disable-next-line no-constant-condition
    while (true) {
@@ -157,7 +171,7 @@ function parseState(

      const invalidRecord =
        recordHeader.byteLength < 4 ||
-       recordHeader.type !== result.header.type ||
+       recordHeader.type !== result.header?.type ||
        recordHeader.recordNumber !== result.currentIndex;

      // All records must have at least four bytes (for the record shape type)
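For context, the validation above follows the ESRI shapefile record layout: a big-endian record number and content length (counted in 16-bit words), followed by a little-endian shape type as the first four content bytes, which is why `byteLength < 4` marks a record invalid. An illustrative reading of one record header (a sketch, not the package's actual helper):

    function readRecordHeader(view: DataView, offset: number) {
      return {
        recordNumber: view.getInt32(offset, false), // big-endian per spec
        byteLength: view.getInt32(offset + 4, false) * 2, // length is in 16-bit words
        type: view.getInt32(offset + 8, true) // little-endian shape type
      };
    }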
package/src/lib/parsers/types.ts ADDED
@@ -0,0 +1,74 @@
+ import {Schema, ObjectRowTable} from '@loaders.gl/schema';
+ import type {LoaderOptions} from '@loaders.gl/loader-utils';
+
+ export type SHPLoaderOptions = LoaderOptions & {
+   shp?: {
+     _maxDimensions?: number;
+   };
+ };
+
+ export type DBFLoaderOptions = LoaderOptions & {
+   dbf?: {
+     encoding?: string;
+     shape?: 'rows' | 'table' | 'object-row-table';
+   };
+ };
+
+ export type ShapefileLoaderOptions = LoaderOptions &
+   SHPLoaderOptions & {
+     shapefile?: {
+       shape?: 'geojson';
+     };
+     gis?: {
+       reproject?: boolean;
+       _targetCrs?: string;
+       /** @deprecated. Use options.shapefile.shape */
+       format?: 'geojson';
+     };
+   };
+
+ export type DBFRowsOutput = ObjectRowTable['data'];
+
+ /**
+  * DBF Table output. Deprecated in favor of ObjectRowTable
+  * @deprecated
+  */
+ export interface DBFTableOutput {
+   schema?: Schema;
+   rows: DBFRowsOutput;
+ }
+
+ export type DBFHeader = {
+   // Last updated date
+   year: number;
+   month: number;
+   day: number;
+   // Number of records in data file
+   nRecords: number;
+   // Length of header in bytes
+   headerLength: number;
+   // Length of each record
+   recordLength: number;
+   // Not sure if this is usually set
+   languageDriver: number;
+ };
+
+ export type DBFField = {
+   name: string;
+   dataType: string;
+   fieldLength: number;
+   decimal: number;
+ };
+
+ export type DBFResult = {
+   data: {[key: string]: any}[];
+   schema?: Schema;
+   error?: string;
+   dbfHeader?: DBFHeader;
+   dbfFields?: DBFField[];
+   progress?: {
+     bytesUsed: number;
+     rowsTotal: number;
+     rows: number;
+   };
+ };
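Because each option type intersects `LoaderOptions`, a single object can carry loader-specific and shared settings. An illustrative value (the `_targetCrs` string is an example, not a tested projection):

    import type {ShapefileLoaderOptions} from './types';

    const options: ShapefileLoaderOptions = {
      shp: {_maxDimensions: 2}, // clamp geometries to XY
      gis: {reproject: true, _targetCrs: 'EPSG:4326'}
    };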
package/src/lib/streaming/binary-chunk-reader.ts CHANGED
@@ -1,10 +1,14 @@
+ type BinaryChunkReaderOptions = {
+   maxRewindBytes: number;
+ };
+
  export default class BinaryChunkReader {
    offset: number;
    arrayBuffers: ArrayBuffer[];
    ended: boolean;
    maxRewindBytes: number;

-   constructor(options?: {[key: string]: any}) {
+   constructor(options?: BinaryChunkReaderOptions) {
      const {maxRewindBytes = 0} = options || {};

      /** current global offset into current array buffer*/
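Note that `maxRewindBytes` is required in the new options type even though the runtime still defaults it to 0 when no options object is passed; supplying an options object now forces the field to be set. Sketch:

    const reader = new BinaryChunkReader(); // OK: options omitted, maxRewindBytes defaults to 0
    const rewindable = new BinaryChunkReader({maxRewindBytes: 12}); // options given, field required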
package/src/lib/streaming/zip-batch-iterators.ts CHANGED
@@ -8,8 +8,8 @@ export async function* zipBatchIterators(
    iterator1: AsyncIterator<any[]>,
    iterator2: AsyncIterator<any[]>
  ): AsyncGenerator<number[][], void, unknown> {
-   let batch1 = [];
-   let batch2 = [];
+   let batch1: number[] = [];
+   let batch2: number[] = [];
    let iterator1Done: boolean = false;
    let iterator2Done: boolean = false;
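The explicit `number[]` annotations pin down element types that were previously inferred as `any`. A hedged usage sketch with two toy async generators (assumes the iterator aligns batches to the shorter available length when zipping):

    async function* batches(...chunks: number[][]) {
      for (const chunk of chunks) yield chunk;
    }

    for await (const [a, b] of zipBatchIterators(batches([1, 2], [3]), batches([10, 20], [30]))) {
      console.log(a, b); // expected: [1, 2] [10, 20], then [3] [30]
    }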