@loaders.gl/shapefile 4.0.0-alpha.23 → 4.0.0-alpha.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/dist/dbf-worker.js +4 -2
  2. package/dist/dist.min.js +77 -31
  3. package/dist/es5/dbf-loader.js +4 -2
  4. package/dist/es5/dbf-loader.js.map +1 -1
  5. package/dist/es5/index.js +21 -0
  6. package/dist/es5/index.js.map +1 -1
  7. package/dist/es5/lib/parsers/parse-dbf.js +2 -2
  8. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  9. package/dist/es5/lib/parsers/parse-shapefile.js +67 -64
  10. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  11. package/dist/es5/lib/parsers/parse-shp.js +2 -2
  12. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  13. package/dist/es5/lib/streaming/binary-chunk-reader.js +2 -2
  14. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  15. package/dist/es5/lib/streaming/binary-reader.js +2 -2
  16. package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
  17. package/dist/es5/lib/streaming/zip-batch-iterators.js +30 -26
  18. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  19. package/dist/es5/shapefile-loader.js +1 -1
  20. package/dist/es5/shp-loader.js +4 -2
  21. package/dist/es5/shp-loader.js.map +1 -1
  22. package/dist/esm/dbf-loader.js +4 -2
  23. package/dist/esm/dbf-loader.js.map +1 -1
  24. package/dist/esm/index.js +3 -0
  25. package/dist/esm/index.js.map +1 -1
  26. package/dist/esm/lib/parsers/parse-dbf.js +1 -1
  27. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
  28. package/dist/esm/lib/parsers/parse-shapefile.js +21 -17
  29. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  30. package/dist/esm/lib/parsers/parse-shp.js +1 -1
  31. package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
  32. package/dist/esm/lib/streaming/binary-chunk-reader.js +1 -1
  33. package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -1
  34. package/dist/esm/lib/streaming/binary-reader.js +1 -1
  35. package/dist/esm/lib/streaming/binary-reader.js.map +1 -1
  36. package/dist/esm/lib/streaming/zip-batch-iterators.js +17 -11
  37. package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -1
  38. package/dist/esm/shapefile-loader.js +1 -1
  39. package/dist/esm/shp-loader.js +2 -2
  40. package/dist/esm/shp-loader.js.map +1 -1
  41. package/dist/shp-worker.js +2 -2
  42. package/dist/src/bundle.d.ts.map +1 -0
  43. package/dist/src/dbf-loader.d.ts.map +1 -0
  44. package/dist/src/index.d.ts +7 -0
  45. package/dist/src/index.d.ts.map +1 -0
  46. package/dist/src/lib/parsers/parse-dbf.d.ts.map +1 -0
  47. package/dist/{lib → src/lib}/parsers/parse-shapefile.d.ts +1 -1
  48. package/dist/src/lib/parsers/parse-shapefile.d.ts.map +1 -0
  49. package/dist/src/lib/parsers/parse-shp-geometry.d.ts.map +1 -0
  50. package/dist/src/lib/parsers/parse-shp-header.d.ts.map +1 -0
  51. package/dist/{lib → src/lib}/parsers/parse-shp.d.ts +1 -1
  52. package/dist/src/lib/parsers/parse-shp.d.ts.map +1 -0
  53. package/dist/src/lib/parsers/parse-shx.d.ts.map +1 -0
  54. package/dist/src/lib/parsers/types.d.ts.map +1 -0
  55. package/dist/{lib → src/lib}/streaming/binary-chunk-reader.d.ts +2 -3
  56. package/dist/src/lib/streaming/binary-chunk-reader.d.ts.map +1 -0
  57. package/dist/{lib → src/lib}/streaming/binary-reader.d.ts +1 -1
  58. package/dist/src/lib/streaming/binary-reader.d.ts.map +1 -0
  59. package/dist/src/lib/streaming/zip-batch-iterators.d.ts +11 -0
  60. package/dist/src/lib/streaming/zip-batch-iterators.d.ts.map +1 -0
  61. package/dist/src/shapefile-loader.d.ts.map +1 -0
  62. package/dist/src/shp-loader.d.ts.map +1 -0
  63. package/dist/{workers → src/workers}/dbf-worker.d.ts.map +1 -1
  64. package/dist/{workers → src/workers}/shp-worker.d.ts.map +1 -1
  65. package/dist/tsconfig.tsbuildinfo +1 -0
  66. package/package.json +5 -6
  67. package/src/dbf-loader.ts +3 -1
  68. package/src/index.ts +5 -0
  69. package/src/lib/parsers/parse-dbf.ts +1 -1
  70. package/src/lib/parsers/parse-shapefile.ts +31 -22
  71. package/src/lib/parsers/parse-shp.ts +2 -2
  72. package/src/lib/streaming/binary-chunk-reader.ts +4 -3
  73. package/src/lib/streaming/binary-reader.ts +3 -1
  74. package/src/lib/streaming/zip-batch-iterators.ts +28 -14
  75. package/src/shp-loader.ts +4 -1
  76. package/dist/bundle.d.ts.map +0 -1
  77. package/dist/bundle.js +0 -5
  78. package/dist/dbf-loader.d.ts.map +0 -1
  79. package/dist/dbf-loader.js +0 -32
  80. package/dist/index.d.ts +0 -4
  81. package/dist/index.d.ts.map +0 -1
  82. package/dist/index.js +0 -11
  83. package/dist/lib/parsers/parse-dbf.d.ts.map +0 -1
  84. package/dist/lib/parsers/parse-dbf.js +0 -344
  85. package/dist/lib/parsers/parse-shapefile.d.ts.map +0 -1
  86. package/dist/lib/parsers/parse-shapefile.js +0 -240
  87. package/dist/lib/parsers/parse-shp-geometry.d.ts.map +0 -1
  88. package/dist/lib/parsers/parse-shp-geometry.js +0 -287
  89. package/dist/lib/parsers/parse-shp-header.d.ts.map +0 -1
  90. package/dist/lib/parsers/parse-shp-header.js +0 -43
  91. package/dist/lib/parsers/parse-shp.d.ts.map +0 -1
  92. package/dist/lib/parsers/parse-shp.js +0 -178
  93. package/dist/lib/parsers/parse-shx.d.ts.map +0 -1
  94. package/dist/lib/parsers/parse-shx.js +0 -28
  95. package/dist/lib/parsers/types.d.ts.map +0 -1
  96. package/dist/lib/parsers/types.js +0 -2
  97. package/dist/lib/streaming/binary-chunk-reader.d.ts.map +0 -1
  98. package/dist/lib/streaming/binary-chunk-reader.js +0 -161
  99. package/dist/lib/streaming/binary-reader.d.ts.map +0 -1
  100. package/dist/lib/streaming/binary-reader.js +0 -52
  101. package/dist/lib/streaming/zip-batch-iterators.d.ts +0 -8
  102. package/dist/lib/streaming/zip-batch-iterators.d.ts.map +0 -1
  103. package/dist/lib/streaming/zip-batch-iterators.js +0 -61
  104. package/dist/shapefile-loader.d.ts.map +0 -1
  105. package/dist/shapefile-loader.js +0 -30
  106. package/dist/shp-loader.d.ts.map +0 -1
  107. package/dist/shp-loader.js +0 -35
  108. package/dist/workers/dbf-worker.js +0 -5
  109. package/dist/workers/shp-worker.js +0 -5
  110. /package/dist/{bundle.d.ts → src/bundle.d.ts} +0 -0
  111. /package/dist/{dbf-loader.d.ts → src/dbf-loader.d.ts} +0 -0
  112. /package/dist/{lib → src/lib}/parsers/parse-dbf.d.ts +0 -0
  113. /package/dist/{lib → src/lib}/parsers/parse-shp-geometry.d.ts +0 -0
  114. /package/dist/{lib → src/lib}/parsers/parse-shp-header.d.ts +0 -0
  115. /package/dist/{lib → src/lib}/parsers/parse-shx.d.ts +0 -0
  116. /package/dist/{lib → src/lib}/parsers/types.d.ts +0 -0
  117. /package/dist/{shapefile-loader.d.ts → src/shapefile-loader.d.ts} +0 -0
  118. /package/dist/{shp-loader.d.ts → src/shp-loader.d.ts} +0 -0
  119. /package/dist/{workers → src/workers}/dbf-worker.d.ts +0 -0
  120. /package/dist/{workers → src/workers}/shp-worker.d.ts +0 -0
package/src/lib/parsers/parse-shapefile.ts CHANGED
@@ -1,6 +1,7 @@
  // import type {Feature} from '@loaders.gl/gis';
  import {LoaderContext, parseInBatchesFromContext, parseFromContext} from '@loaders.gl/loader-utils';
  import {binaryToGeometry, transformGeoJsonCoords} from '@loaders.gl/gis';
+ import type {BinaryGeometry, Geometry, ObjectRowTableBatch} from '@loaders.gl/schema';
  import {Proj4Projection} from '@math.gl/proj4';

  import type {SHXOutput} from './parse-shx';
@@ -32,19 +33,22 @@ export async function* parseShapefileInBatches(
  const {shx, cpg, prj} = await loadShapefileSidecarFiles(options, context);

  // parse geometries
- const shapeIterable: any = await parseInBatchesFromContext(
+ const shapeIterable = await parseInBatchesFromContext(
  asyncIterator,
  SHPLoader,
  options,
  context!
  );

+ const shapeIterator: AsyncIterator<any> =
+ shapeIterable[Symbol.asyncIterator]?.() || shapeIterable[Symbol.iterator]?.();
+
  // parse properties
- let propertyIterable: any;
+ let propertyIterator: AsyncIterator<any> | null = null;
  const dbfResponse = await context?.fetch(replaceExtension(context?.url || '', 'dbf'));
  if (dbfResponse?.ok) {
- propertyIterable = await parseInBatchesFromContext(
- dbfResponse as any,
+ const propertyIterable = await parseInBatchesFromContext(
+ dbfResponse,
  DBFLoader,
  {
  ...options,
@@ -52,39 +56,44 @@ export async function* parseShapefileInBatches(
  },
  context!
  );
+ propertyIterator =
+ propertyIterable[Symbol.asyncIterator]?.() || propertyIterable[Symbol.iterator]();
  }

  // When `options.metadata` is `true`, there's an extra initial `metadata`
  // object before the iterator starts. zipBatchIterators expects to receive
  // batches of Array objects, and will fail with non-iterable batches, so it's
  // important to skip over the first batch.
- let shapeHeader = (await shapeIterable.next()).value;
+ let shapeHeader = (await shapeIterator.next()).value;
  if (shapeHeader && shapeHeader.batchType === 'metadata') {
- shapeHeader = (await shapeIterable.next()).value;
+ shapeHeader = (await shapeIterator.next()).value;
  }

  let dbfHeader: {batchType?: string} = {};
- if (propertyIterable) {
- dbfHeader = (await propertyIterable.next()).value;
+ if (propertyIterator) {
+ dbfHeader = (await propertyIterator.next()).value;
  if (dbfHeader && dbfHeader.batchType === 'metadata') {
- dbfHeader = (await propertyIterable.next()).value;
+ dbfHeader = (await propertyIterator.next()).value;
  }
  }

- let iterator: any;
- if (propertyIterable) {
- iterator = zipBatchIterators(shapeIterable, propertyIterable);
- } else {
- iterator = shapeIterable;
- }
+ const zippedIterator: AsyncIterator<ObjectRowTableBatch> = propertyIterator
+ ? zipBatchIterators(shapeIterator, propertyIterator, 'object-row-table')
+ : shapeIterator;

- for await (const item of iterator) {
+ const zippedBatchIterable: AsyncIterable<ObjectRowTableBatch> = {
+ [Symbol.asyncIterator]() {
+ return zippedIterator;
+ }
+ };
+
+ for await (const batch of zippedBatchIterable) {
  let geometries: any;
  let properties: any;
- if (!propertyIterable) {
- geometries = item;
+ if (!propertyIterator) {
+ geometries = batch;
  } else {
- [geometries, properties] = item;
+ [geometries, properties] = batch.data;
  }

  const geojsonGeometries = parseGeometries(geometries);
@@ -157,7 +166,7 @@ export async function parseShapefile(
  * @param geometries
  * @returns geometries as an array
  */
- function parseGeometries(geometries: any[]): any[] {
+ function parseGeometries(geometries: BinaryGeometry[]): Geometry[] {
  const geojsonGeometries: any[] = [];
  for (const geom of geometries) {
  geojsonGeometries.push(binaryToGeometry(geom));
@@ -172,7 +181,7 @@ function parseGeometries(geometries: any[]): any[] {
  * @param properties [description]
  * @return [description]
  */
- function joinProperties(geometries: object[], properties: object[]): Feature[] {
+ function joinProperties(geometries: Geometry[], properties: object[]): Feature[] {
  const features: Feature[] = [];
  for (let i = 0; i < geometries.length; i++) {
  const geometry = geometries[i];
@@ -213,7 +222,7 @@ function reprojectFeatures(features: Feature[], sourceCrs?: string, targetCrs?:
  */
  // eslint-disable-next-line max-statements
  export async function loadShapefileSidecarFiles(
- options?: object,
+ options?: ShapefileLoaderOptions,
  context?: LoaderContext
  ): Promise<{
  shx?: SHXOutput;
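
Note on the batching change above: `parseShapefileInBatches` now normalizes whatever `parseInBatchesFromContext` returns into an explicit iterator via `Symbol.asyncIterator`/`Symbol.iterator`, then keeps the existing logic that skips the optional leading `metadata` batch. A minimal standalone sketch of that consumption pattern (the `firstDataBatch` helper name is illustrative only, not part of the package):

```ts
// Normalize an (async) iterable to an AsyncIterator, then skip an initial
// {batchType: 'metadata'} batch, mirroring the pattern in the diff above.
async function firstDataBatch(iterable: any): Promise<any> {
  const iterator: AsyncIterator<any> =
    iterable[Symbol.asyncIterator]?.() || iterable[Symbol.iterator]?.();
  let batch = (await iterator.next()).value;
  if (batch && batch.batchType === 'metadata') {
    batch = (await iterator.next()).value;
  }
  return batch;
}
```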
package/src/lib/parsers/parse-shp.ts CHANGED
@@ -1,5 +1,5 @@
  import type {BinaryGeometry} from '@loaders.gl/schema';
- import BinaryChunkReader from '../streaming/binary-chunk-reader';
+ import {BinaryChunkReader} from '../streaming/binary-chunk-reader';
  import {parseSHPHeader, SHPHeader} from './parse-shp-header';
  import {parseRecord} from './parse-shp-geometry';
  import {SHPLoaderOptions} from './types';
@@ -84,7 +84,7 @@ export function parseSHP(arrayBuffer: ArrayBuffer, options?: SHPLoaderOptions):
  export async function* parseSHPInBatches(
  asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
  options?: SHPLoaderOptions
- ): AsyncIterable<BinaryGeometry | object> {
+ ): AsyncGenerator<BinaryGeometry | object> {
  const parser = new SHPParser(options);
  let headerReturned = false;
  for await (const arrayBuffer of asyncIterator) {
package/src/lib/streaming/binary-chunk-reader.ts CHANGED
@@ -1,8 +1,10 @@
- type BinaryChunkReaderOptions = {
+ // loaders.gl, MIT license
+
+ export type BinaryChunkReaderOptions = {
  maxRewindBytes: number;
  };

- export default class BinaryChunkReader {
+ export class BinaryChunkReader {
  offset: number;
  arrayBuffers: ArrayBuffer[];
  ended: boolean;
@@ -110,7 +112,6 @@ export default class BinaryChunkReader {
  }

  if (!bufferOffsets) {
- // @ts-ignore
  return null;
  }

package/src/lib/streaming/binary-reader.ts CHANGED
@@ -1,4 +1,6 @@
- export default class BinaryReader {
+ // loaders.gl, MIT license
+
+ export class BinaryReader {
  offset: number;
  arrayBuffer: ArrayBuffer;

package/src/lib/streaming/zip-batch-iterators.ts CHANGED
@@ -1,3 +1,9 @@
+ // loaders.gl, MIT license
+
+ import type {ObjectRowTableBatch, ArrayRowTableBatch} from '@loaders.gl/schema';
+
+ type RowTableBatch = ObjectRowTableBatch | ArrayRowTableBatch;
+
  /**
  * Zip two iterators together
  *
@@ -5,36 +11,44 @@
  * @param iterator2
  */
  export async function* zipBatchIterators(
- iterator1: AsyncIterator<any[]>,
- iterator2: AsyncIterator<any[]>
- ): AsyncGenerator<number[][], void, unknown> {
- let batch1: number[] = [];
- let batch2: number[] = [];
+ iterator1: AsyncIterator<RowTableBatch> | Iterator<RowTableBatch>,
+ iterator2: AsyncIterator<RowTableBatch> | Iterator<RowTableBatch>,
+ shape: 'object-row-table' | 'array-row-table'
+ ): AsyncGenerator<RowTableBatch, void, unknown> {
+ const batch1Data: unknown[] = [];
+ const batch2Data: unknown[] = [];
  let iterator1Done: boolean = false;
  let iterator2Done: boolean = false;

  // TODO - one could let all iterators flow at full speed using `Promise.race`
  // however we might end up with a big temporary buffer
  while (!iterator1Done && !iterator2Done) {
- if (batch1.length === 0 && !iterator1Done) {
+ if (batch1Data.length === 0 && !iterator1Done) {
  const {value, done} = await iterator1.next();
  if (done) {
  iterator1Done = true;
  } else {
- batch1 = value;
+ // @ts-expect-error
+ batch1Data.push(...value);
  }
- } else if (batch2.length === 0 && !iterator2Done) {
+ }
+ if (batch2Data.length === 0 && !iterator2Done) {
  const {value, done} = await iterator2.next();
  if (done) {
  iterator2Done = true;
  } else {
- batch2 = value;
+ batch2Data.push(...value);
  }
  }

- const batch = extractBatch(batch1, batch2);
- if (batch) {
- yield batch;
+ const batchData = extractBatchData(batch1Data, batch2Data);
+ if (batchData) {
+ yield {
+ batchType: 'data',
+ shape,
+ length: batchData.length,
+ data: batchData
+ };
  }
  }
  }
@@ -46,14 +60,14 @@ export async function* zipBatchIterators(
  * @param batch2
  * @return array | null
  */
- function extractBatch(batch1: number[], batch2: number[]): number[][] | null {
+ function extractBatchData(batch1: any[], batch2: any[]): any[] | null {
  const batchLength: number = Math.min(batch1.length, batch2.length);
  if (batchLength === 0) {
  return null;
  }

  // Non interleaved arrays
- const batch: number[][] = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];
+ const batch: any[] = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];

  // Modify the 2 batches
  batch1.splice(0, batchLength);
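
The rewritten `zipBatchIterators` takes a new required `shape` argument and yields row-table batch objects (`{batchType: 'data', shape, length, data}`) instead of bare arrays of zipped values. A hypothetical consumption sketch, assuming `zipBatchIterators` from the module above is in scope; the generator names and sample data are made up (the real inputs are the SHP and DBF batch iterators):

```ts
// Generator objects satisfy the iterator parameter types because they implement next().
async function* geometryBatches(): AsyncGenerator<any> {
  yield [1, 2, 3]; // the real SHP iterator yields arrays of binary geometries
}
async function* propertyBatches(): AsyncGenerator<any> {
  yield ['a', 'b', 'c']; // the real DBF iterator yields arrays of property rows
}

async function demo(): Promise<void> {
  for await (const batch of zipBatchIterators(geometryBatches(), propertyBatches(), 'array-row-table')) {
    // batch.batchType === 'data', batch.shape === 'array-row-table'
    // batch.data -> [[1, 2, 3], ['a', 'b', 'c']] (non-interleaved, per extractBatchData)
    console.log(batch.data);
  }
}
```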
package/src/shp-loader.ts CHANGED
@@ -33,5 +33,8 @@ export const SHPLoader: LoaderWithParser = {
  ...SHPWorkerLoader,
  parse: async (arrayBuffer, options?) => parseSHP(arrayBuffer, options),
  parseSync: parseSHP,
- parseInBatches: parseSHPInBatches
+ parseInBatches: (
+ arrayBufferIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
+ options
+ ) => parseSHPInBatches(arrayBufferIterator, options)
  };
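
The `SHPLoader` change wraps `parseSHPInBatches` in an arrow function so the exported `parseInBatches` matches the loader-object signature. A minimal usage sketch, assuming the standard `parseInBatches` entry point from `@loaders.gl/core` and an illustrative URL:

```ts
import {parseInBatches} from '@loaders.gl/core';
import {SHPLoader} from '@loaders.gl/shapefile';

async function logShpBatches(url: string): Promise<void> {
  const batches = await parseInBatches(fetch(url), SHPLoader);
  for await (const batch of batches) {
    // Per parseSHPInBatches above, the SHP header is yielded first,
    // followed by batches of parsed binary geometries.
    console.log(batch);
  }
}
```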
package/dist/bundle.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"bundle.d.ts","sourceRoot":"","sources":["../src/bundle.ts"],"names":[],"mappings":"AACA,QAAA,MAAM,aAAa,KAAqB,CAAC"}
package/dist/bundle.js DELETED
@@ -1,5 +0,0 @@
- "use strict";
- // @ts-nocheck
- const moduleExports = require('./index');
- globalThis.loaders = globalThis.loaders || {};
- module.exports = Object.assign(globalThis.loaders, moduleExports);
package/dist/dbf-loader.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"dbf-loader.d.ts","sourceRoot":"","sources":["../src/dbf-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAE,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAOvE;;GAEG;AACH,eAAO,MAAM,eAAe,EAAE,MAc7B,CAAC;AAEF,sBAAsB;AACtB,eAAO,MAAM,SAAS,EAAE,gBAKvB,CAAC"}
package/dist/dbf-loader.js DELETED
@@ -1,32 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.DBFLoader = exports.DBFWorkerLoader = void 0;
- const parse_dbf_1 = require("./lib/parsers/parse-dbf");
- // __VERSION__ is injected by babel-plugin-version-inline
- // @ts-ignore TS2304: Cannot find name '__VERSION__'.
- const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
- /**
- * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles
- */
- exports.DBFWorkerLoader = {
- name: 'DBF',
- id: 'dbf',
- module: 'shapefile',
- version: VERSION,
- worker: true,
- category: 'table',
- extensions: ['dbf'],
- mimeTypes: ['application/x-dbf'],
- options: {
- dbf: {
- encoding: 'latin1'
- }
- }
- };
- /** DBF file loader */
- exports.DBFLoader = {
- ...exports.DBFWorkerLoader,
- parse: async (arrayBuffer, options) => (0, parse_dbf_1.parseDBF)(arrayBuffer, options),
- parseSync: parse_dbf_1.parseDBF,
- parseInBatches: parse_dbf_1.parseDBFInBatches
- };
package/dist/index.d.ts DELETED
@@ -1,4 +0,0 @@
- export { ShapefileLoader } from './shapefile-loader';
- export { DBFLoader, DBFWorkerLoader } from './dbf-loader';
- export { SHPLoader, SHPWorkerLoader } from './shp-loader';
- //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,eAAe,EAAC,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAC,SAAS,EAAE,eAAe,EAAC,MAAM,cAAc,CAAC;AACxD,OAAO,EAAC,SAAS,EAAE,eAAe,EAAC,MAAM,cAAc,CAAC"}
package/dist/index.js DELETED
@@ -1,11 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.SHPWorkerLoader = exports.SHPLoader = exports.DBFWorkerLoader = exports.DBFLoader = exports.ShapefileLoader = void 0;
- var shapefile_loader_1 = require("./shapefile-loader");
- Object.defineProperty(exports, "ShapefileLoader", { enumerable: true, get: function () { return shapefile_loader_1.ShapefileLoader; } });
- var dbf_loader_1 = require("./dbf-loader");
- Object.defineProperty(exports, "DBFLoader", { enumerable: true, get: function () { return dbf_loader_1.DBFLoader; } });
- Object.defineProperty(exports, "DBFWorkerLoader", { enumerable: true, get: function () { return dbf_loader_1.DBFWorkerLoader; } });
- var shp_loader_1 = require("./shp-loader");
- Object.defineProperty(exports, "SHPLoader", { enumerable: true, get: function () { return shp_loader_1.SHPLoader; } });
- Object.defineProperty(exports, "SHPWorkerLoader", { enumerable: true, get: function () { return shp_loader_1.SHPWorkerLoader; } });
package/dist/lib/parsers/parse-dbf.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"parse-dbf.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-dbf.ts"],"names":[],"mappings":"AAAA,OAAO,EAAQ,cAAc,EAAC,MAAM,oBAAoB,CAAC;AAEzD,OAAO,EACL,gBAAgB,EAEhB,cAAc,EACd,SAAS,EACT,aAAa,EAEd,MAAM,SAAS,CAAC;AAkDjB;;;;GAIG;AACH,wBAAgB,QAAQ,CACtB,WAAW,EAAE,WAAW,EACxB,OAAO,GAAE,gBAAqB,GAC7B,aAAa,GAAG,cAAc,GAAG,cAAc,CAwBjD;AACD;;;GAGG;AACH,wBAAuB,iBAAiB,CACtC,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,GAAE,gBAAqB,GAC7B,aAAa,CAAC,SAAS,GAAG,aAAa,GAAG,cAAc,CAAC,CAqB3D"}
package/dist/lib/parsers/parse-dbf.js DELETED
@@ -1,344 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.parseDBFInBatches = exports.parseDBF = void 0;
- const binary_chunk_reader_1 = __importDefault(require("../streaming/binary-chunk-reader"));
- const LITTLE_ENDIAN = true;
- const DBF_HEADER_SIZE = 32;
- var STATE;
- (function (STATE) {
- STATE[STATE["START"] = 0] = "START";
- STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
- STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
- STATE[STATE["END"] = 3] = "END";
- STATE[STATE["ERROR"] = 4] = "ERROR";
- })(STATE || (STATE = {}));
- class DBFParser {
- constructor(options) {
- this.binaryReader = new binary_chunk_reader_1.default();
- this.state = STATE.START;
- this.result = {
- data: []
- };
- this.textDecoder = new TextDecoder(options.encoding);
- }
- /**
- * @param arrayBuffer
- */
- write(arrayBuffer) {
- this.binaryReader.write(arrayBuffer);
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
- // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
- // important events:
- // - schema available
- // - first rows available
- // - all rows available
- }
- end() {
- this.binaryReader.end();
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
- // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
- if (this.state !== STATE.END) {
- this.state = STATE.ERROR;
- this.result.error = 'DBF incomplete file';
- }
- }
- }
- /**
- * @param arrayBuffer
- * @param options
- * @returns DBFTable or rows
- */
- function parseDBF(arrayBuffer, options = {}) {
- const { encoding = 'latin1' } = options.dbf || {};
- const dbfParser = new DBFParser({ encoding });
- dbfParser.write(arrayBuffer);
- dbfParser.end();
- const { data, schema } = dbfParser.result;
- const shape = options?.tables?.format || options?.dbf?.shape;
- switch (shape) {
- case 'object-row-table': {
- const table = {
- shape: 'object-row-table',
- schema,
- data
- };
- return table;
- }
- case 'table':
- return { schema, rows: data };
- case 'rows':
- default:
- return data;
- }
- }
- exports.parseDBF = parseDBF;
- /**
- * @param asyncIterator
- * @param options
- */
- async function* parseDBFInBatches(asyncIterator, options = {}) {
- const { encoding = 'latin1' } = options.dbf || {};
- const parser = new DBFParser({ encoding });
- let headerReturned = false;
- for await (const arrayBuffer of asyncIterator) {
- parser.write(arrayBuffer);
- if (!headerReturned && parser.result.dbfHeader) {
- headerReturned = true;
- yield parser.result.dbfHeader;
- }
- if (parser.result.data.length > 0) {
- yield parser.result.data;
- parser.result.data = [];
- }
- }
- parser.end();
- if (parser.result.data.length > 0) {
- yield parser.result.data;
- }
- }
- exports.parseDBFInBatches = parseDBFInBatches;
- /**
- * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm
- * @param state
- * @param result
- * @param binaryReader
- * @param textDecoder
- * @returns
- */
- /* eslint-disable complexity, max-depth */
- function parseState(state, result, binaryReader, textDecoder) {
- // eslint-disable-next-line no-constant-condition
- while (true) {
- try {
- switch (state) {
- case STATE.ERROR:
- case STATE.END:
- return state;
- case STATE.START:
- // Parse initial file header
- // DBF Header
- const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
- if (!dataView) {
- return state;
- }
- result.dbfHeader = parseDBFHeader(dataView);
- result.progress = {
- bytesUsed: 0,
- rowsTotal: result.dbfHeader.nRecords,
- rows: 0
- };
- state = STATE.FIELD_DESCRIPTORS;
- break;
- case STATE.FIELD_DESCRIPTORS:
- // Parse DBF field descriptors (schema)
- const fieldDescriptorView = binaryReader.getDataView(
- // @ts-ignore
- result.dbfHeader.headerLength - DBF_HEADER_SIZE);
- if (!fieldDescriptorView) {
- return state;
- }
- result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
- result.schema = {
- fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
- metadata: {}
- };
- state = STATE.FIELD_PROPERTIES;
- // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?
- // parsedbf uses ((fields.length + 1) << 5) + 2;
- binaryReader.skip(1);
- break;
- case STATE.FIELD_PROPERTIES:
- const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
- while (result.data.length < nRecords) {
- const recordView = binaryReader.getDataView(recordLength - 1);
- if (!recordView) {
- return state;
- }
- // Note: Avoid actually reading the last byte, which may not be present
- binaryReader.skip(1);
- // @ts-ignore
- const row = parseRow(recordView, result.dbfFields, textDecoder);
- result.data.push(row);
- // @ts-ignore
- result.progress.rows = result.data.length;
- }
- state = STATE.END;
- break;
- default:
- state = STATE.ERROR;
- result.error = `illegal parser state ${state}`;
- return state;
- }
- }
- catch (error) {
- state = STATE.ERROR;
- result.error = `DBF parsing failed: ${error.message}`;
- return state;
- }
- }
- }
- /**
- * @param headerView
- */
- function parseDBFHeader(headerView) {
- return {
- // Last updated date
- year: headerView.getUint8(1) + 1900,
- month: headerView.getUint8(2),
- day: headerView.getUint8(3),
- // Number of records in data file
- nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
- // Length of header in bytes
- headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
- // Length of each record
- recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
- // Not sure if this is usually set
- languageDriver: headerView.getUint8(29)
- };
- }
- /**
- * @param view
- */
- function parseFieldDescriptors(view, textDecoder) {
- // NOTE: this might overestimate the number of fields if the "Database
- // Container" container exists and is included in the headerLength
- const nFields = (view.byteLength - 1) / 32;
- const fields = [];
- let offset = 0;
- for (let i = 0; i < nFields; i++) {
- const name = textDecoder
- .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))
- // eslint-disable-next-line no-control-regex
- .replace(/\u0000/g, '');
- fields.push({
- name,
- dataType: String.fromCharCode(view.getUint8(offset + 11)),
- fieldLength: view.getUint8(offset + 16),
- decimal: view.getUint8(offset + 17)
- });
- offset += 32;
- }
- return fields;
- }
- /*
- * @param {BinaryChunkReader} binaryReader
- function parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {
- const rows = [];
- for (let i = 0; i < nRecords; i++) {
- const recordView = binaryReader.getDataView(recordLength - 1);
- binaryReader.skip(1);
- // @ts-ignore
- rows.push(parseRow(recordView, fields, textDecoder));
- }
- return rows;
- }
- */
- /**
- *
- * @param view
- * @param fields
- * @param textDecoder
- * @returns
- */
- function parseRow(view, fields, textDecoder) {
- const out = {};
- let offset = 0;
- for (const field of fields) {
- const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
- out[field.name] = parseField(text, field.dataType);
- offset += field.fieldLength;
- }
- return out;
- }
- /**
- * Should NaN be coerced to null?
- * @param text
- * @param dataType
- * @returns Field depends on a type of the data
- */
- function parseField(text, dataType) {
- switch (dataType) {
- case 'B':
- return parseNumber(text);
- case 'C':
- return parseCharacter(text);
- case 'F':
- return parseNumber(text);
- case 'N':
- return parseNumber(text);
- case 'O':
- return parseNumber(text);
- case 'D':
- return parseDate(text);
- case 'L':
- return parseBoolean(text);
- default:
- throw new Error('Unsupported data type');
- }
- }
- /**
- * Parse YYYYMMDD to date in milliseconds
- * @param str YYYYMMDD
- * @returns new Date as a number
- */
- function parseDate(str) {
- return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
- }
- /**
- * Read boolean value
- * any of Y, y, T, t coerce to true
- * any of N, n, F, f coerce to false
- * otherwise null
- * @param value
- * @returns boolean | null
- */
- function parseBoolean(value) {
- return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
- }
- /**
- * Return null instead of NaN
- * @param text
- * @returns number | null
- */
- function parseNumber(text) {
- const number = parseFloat(text);
- return isNaN(number) ? null : number;
- }
- /**
- *
- * @param text
- * @returns string | null
- */
- function parseCharacter(text) {
- return text.trim() || null;
- }
- /**
- * Create a standard Arrow-style `Field` from field descriptor.
- * TODO - use `fieldLength` and `decimal` to generate smaller types?
- * @param param0
- * @returns Field
- */
- // eslint-disable
- function makeField({ name, dataType, fieldLength, decimal }) {
- switch (dataType) {
- case 'B':
- return { name, type: 'float64', nullable: true, metadata: {} };
- case 'C':
- return { name, type: 'utf8', nullable: true, metadata: {} };
- case 'F':
- return { name, type: 'float64', nullable: true, metadata: {} };
- case 'N':
- return { name, type: 'float64', nullable: true, metadata: {} };
- case 'O':
- return { name, type: 'float64', nullable: true, metadata: {} };
- case 'D':
- return { name, type: 'timestamp-millisecond', nullable: true, metadata: {} };
- case 'L':
- return { name, type: 'bool', nullable: true, metadata: {} };
- default:
- throw new Error('Unsupported data type');
- }
- }