@loaders.gl/shapefile 3.3.0-alpha.5 → 3.3.0-alpha.7

This diff compares the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (68)
  1. package/dist/dbf-worker.js +1 -1
  2. package/dist/dist.min.js +47 -11
  3. package/dist/es5/bundle.js +0 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/dbf-loader.js +5 -16
  6. package/dist/es5/dbf-loader.js.map +1 -1
  7. package/dist/es5/index.js +6 -9
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/parsers/parse-dbf.js +69 -154
  10. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  11. package/dist/es5/lib/parsers/parse-shapefile.js +34 -148
  12. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  13. package/dist/es5/lib/parsers/parse-shp-geometry.js +4 -57
  14. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  15. package/dist/es5/lib/parsers/parse-shp-header.js +0 -3
  16. package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -1
  17. package/dist/es5/lib/parsers/parse-shp.js +39 -98
  18. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  19. package/dist/es5/lib/parsers/parse-shx.js +0 -4
  20. package/dist/es5/lib/parsers/parse-shx.js.map +1 -1
  21. package/dist/es5/lib/parsers/types.js.map +1 -1
  22. package/dist/es5/lib/streaming/binary-chunk-reader.js +39 -55
  23. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  24. package/dist/es5/lib/streaming/binary-reader.js +11 -12
  25. package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
  26. package/dist/es5/lib/streaming/zip-batch-iterators.js +1 -24
  27. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  28. package/dist/es5/shapefile-loader.js +1 -3
  29. package/dist/es5/shapefile-loader.js.map +1 -1
  30. package/dist/es5/shp-loader.js +6 -17
  31. package/dist/es5/shp-loader.js.map +1 -1
  32. package/dist/es5/workers/dbf-worker.js +0 -2
  33. package/dist/es5/workers/dbf-worker.js.map +1 -1
  34. package/dist/es5/workers/shp-worker.js +0 -2
  35. package/dist/es5/workers/shp-worker.js.map +1 -1
  36. package/dist/esm/bundle.js +1 -1
  37. package/dist/esm/bundle.js.map +1 -1
  38. package/dist/esm/dbf-loader.js +6 -2
  39. package/dist/esm/dbf-loader.js.map +1 -1
  40. package/dist/esm/index.js.map +1 -1
  41. package/dist/esm/lib/parsers/parse-dbf.js +18 -58
  42. package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
  43. package/dist/esm/lib/parsers/parse-shapefile.js +12 -35
  44. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  45. package/dist/esm/lib/parsers/parse-shp-geometry.js +3 -28
  46. package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -1
  47. package/dist/esm/lib/parsers/parse-shp-header.js +1 -3
  48. package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -1
  49. package/dist/esm/lib/parsers/parse-shp.js +2 -26
  50. package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
  51. package/dist/esm/lib/parsers/parse-shx.js +1 -2
  52. package/dist/esm/lib/parsers/parse-shx.js.map +1 -1
  53. package/dist/esm/lib/parsers/types.js.map +1 -1
  54. package/dist/esm/lib/streaming/binary-chunk-reader.js +4 -19
  55. package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -1
  56. package/dist/esm/lib/streaming/binary-reader.js +0 -5
  57. package/dist/esm/lib/streaming/binary-reader.js.map +1 -1
  58. package/dist/esm/lib/streaming/zip-batch-iterators.js +2 -5
  59. package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -1
  60. package/dist/esm/shapefile-loader.js +3 -1
  61. package/dist/esm/shapefile-loader.js.map +1 -1
  62. package/dist/esm/shp-loader.js +6 -2
  63. package/dist/esm/shp-loader.js.map +1 -1
  64. package/dist/esm/workers/dbf-worker.js.map +1 -1
  65. package/dist/esm/workers/shp-worker.js.map +1 -1
  66. package/dist/shp-worker.js +1 -1
  67. package/package.json +5 -5
  68. package/src/lib/parsers/parse-dbf.ts +1 -1
@@ -1,26 +1,19 @@
  "use strict";

  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
- exports.SHPLoader = exports.SHPWorkerLoader = exports.SHP_MAGIC_NUMBER = void 0;
-
+ exports.SHP_MAGIC_NUMBER = exports.SHPWorkerLoader = exports.SHPLoader = void 0;
  var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
  var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
  var _parseShp = require("./lib/parsers/parse-shp");
-
- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
-
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
- var VERSION = typeof "3.3.0-alpha.5" !== 'undefined' ? "3.3.0-alpha.5" : 'latest';
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+ var VERSION = typeof "3.3.0-alpha.7" !== 'undefined' ? "3.3.0-alpha.7" : 'latest';
  var SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];
+
  exports.SHP_MAGIC_NUMBER = SHP_MAGIC_NUMBER;
  var SHPWorkerLoader = {
  name: 'SHP',
@@ -38,8 +31,8 @@ var SHPWorkerLoader = {
  }
  }
  };
- exports.SHPWorkerLoader = SHPWorkerLoader;

+ exports.SHPWorkerLoader = SHPWorkerLoader;
  var SHPLoader = _objectSpread(_objectSpread({}, SHPWorkerLoader), {}, {
  parse: function () {
  var _parse = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(arrayBuffer, options) {
@@ -48,7 +41,6 @@ var SHPLoader = _objectSpread(_objectSpread({}, SHPWorkerLoader), {}, {
  switch (_context.prev = _context.next) {
  case 0:
  return _context.abrupt("return", (0, _parseShp.parseSHP)(arrayBuffer, options));
-
  case 1:
  case "end":
  return _context.stop();
@@ -56,16 +48,13 @@ var SHPLoader = _objectSpread(_objectSpread({}, SHPWorkerLoader), {}, {
  }
  }, _callee);
  }));
-
  function parse(_x, _x2) {
  return _parse.apply(this, arguments);
  }
-
  return parse;
  }(),
  parseSync: _parseShp.parseSHP,
  parseInBatches: _parseShp.parseSHPInBatches
  });
-
  exports.SHPLoader = SHPLoader;
  //# sourceMappingURL=shp-loader.js.map
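Note: the changes above are Babel output churn plus the injected version-string bump; the loader's public surface (parse, parseSync, parseInBatches, SHP_MAGIC_NUMBER) is unchanged. For reference, a minimal consumption sketch (not part of this package; assumes @loaders.gl/core and a placeholder URL):

// Sketch only: how the SHPLoader built by this file is typically consumed.
import {parse} from '@loaders.gl/core';
import {SHPLoader} from '@loaders.gl/shapefile';

async function loadShp(url: string) {
  // parse() ends up calling the async parse above, i.e. parseSHP(arrayBuffer, options)
  return await parse(fetch(url), SHPLoader, {shp: {_maxDimensions: 4}});
}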
@@ -1 +1 @@
- {"version":3,"sources":["../../src/shp-loader.ts"],"names":["VERSION","SHP_MAGIC_NUMBER","SHPWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","tests","Uint8Array","buffer","options","shp","_maxDimensions","SHPLoader","parse","arrayBuffer","parseSync","parseSHP","parseInBatches","parseSHPInBatches"],"mappings":";;;;;;;;;;;;;;;AACA;;;;;;AAIA,IAAMA,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AAEO,IAAMC,gBAAgB,GAAG,CAAC,IAAD,EAAO,IAAP,EAAa,IAAb,EAAmB,IAAnB,CAAzB;;AAKA,IAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEN,OAJ4B;AAKrCO,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,UAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CAR0B;AAUrCC,EAAAA,KAAK,EAAE,CAAC,IAAIC,UAAJ,CAAeX,gBAAf,EAAiCY,MAAlC,CAV8B;AAWrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,cAAc,EAAE;AADb;AADE;AAX4B,CAAhC;;;AAmBA,IAAMC,SAA2B,mCACnCf,eADmC;AAEtCgB,EAAAA,KAAK;AAAA,2EAAE,iBAAOC,WAAP,EAAoBL,OAApB;AAAA;AAAA;AAAA;AAAA;AAAA,+CAAiC,wBAASK,WAAT,EAAsBL,OAAtB,CAAjC;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAAF;;AAAA;AAAA;AAAA;;AAAA;AAAA,KAFiC;AAGtCM,EAAAA,SAAS,EAAEC,kBAH2B;AAItCC,EAAAA,cAAc,EAAEC;AAJsB,EAAjC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSHP, parseSHPInBatches} from './lib/parsers/parse-shp';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport const SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];\n\n/**\n * SHP file loader\n */\nexport const SHPWorkerLoader: Loader = {\n name: 'SHP',\n id: 'shp',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'geometry',\n extensions: ['shp'],\n mimeTypes: ['application/octet-stream'],\n // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...\n tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],\n options: {\n shp: {\n _maxDimensions: 4\n }\n }\n};\n\n/** SHP file loader */\nexport const SHPLoader: LoaderWithParser = {\n ...SHPWorkerLoader,\n parse: async (arrayBuffer, options?) => parseSHP(arrayBuffer, options),\n parseSync: parseSHP,\n parseInBatches: parseSHPInBatches\n};\n"],"file":"shp-loader.js"}
+ {"version":3,"file":"shp-loader.js","names":["VERSION","SHP_MAGIC_NUMBER","SHPWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","tests","Uint8Array","buffer","options","shp","_maxDimensions","SHPLoader","parse","arrayBuffer","parseSHP","parseSync","parseInBatches","parseSHPInBatches"],"sources":["../../src/shp-loader.ts"],"sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSHP, parseSHPInBatches} from './lib/parsers/parse-shp';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport const SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];\n\n/**\n * SHP file loader\n */\nexport const SHPWorkerLoader: Loader = {\n name: 'SHP',\n id: 'shp',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'geometry',\n extensions: ['shp'],\n mimeTypes: ['application/octet-stream'],\n // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...\n tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],\n options: {\n shp: {\n _maxDimensions: 4\n }\n }\n};\n\n/** SHP file loader */\nexport const SHPLoader: LoaderWithParser = {\n ...SHPWorkerLoader,\n parse: async (arrayBuffer, options?) => parseSHP(arrayBuffer, options),\n parseSync: parseSHP,\n parseInBatches: parseSHPInBatches\n};\n"],"mappings":";;;;;;;;;;AACA;AAAoE;AAAA;AAIpE,IAAMA,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;AAEpE,IAAMC,gBAAgB,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;;AAAC;AAKlD,IAAMC,eAAuB,GAAG;EACrCC,IAAI,EAAE,KAAK;EACXC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEN,OAAO;EAChBO,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,UAAU;EACpBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EAEvCC,KAAK,EAAE,CAAC,IAAIC,UAAU,CAACX,gBAAgB,CAAC,CAACY,MAAM,CAAC;EAChDC,OAAO,EAAE;IACPC,GAAG,EAAE;MACHC,cAAc,EAAE;IAClB;EACF;AACF,CAAC;;AAAC;AAGK,IAAMC,SAA2B,mCACnCf,eAAe;EAClBgB,KAAK;IAAA,uEAAE,iBAAOC,WAAW,EAAEL,OAAQ;MAAA;QAAA;UAAA;YAAA;cAAA,iCAAK,IAAAM,kBAAQ,EAACD,WAAW,EAAEL,OAAO,CAAC;YAAA;YAAA;cAAA;UAAA;QAAA;MAAA;IAAA;IAAA;MAAA;IAAA;IAAA;EAAA;EACtEO,SAAS,EAAED,kBAAQ;EACnBE,cAAc,EAAEC;AAAiB,EAClC;AAAC"}
@@ -1,8 +1,6 @@
  "use strict";

  var _dbfLoader = require("../dbf-loader");
-
  var _loaderUtils = require("@loaders.gl/loader-utils");
-
  (0, _loaderUtils.createLoaderWorker)(_dbfLoader.DBFLoader);
  //# sourceMappingURL=dbf-worker.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/workers/dbf-worker.ts"],"names":["DBFLoader"],"mappings":";;AAAA;;AACA;;AAEA,qCAAmBA,oBAAnB","sourcesContent":["import {DBFLoader} from '../dbf-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(DBFLoader);\n"],"file":"dbf-worker.js"}
+ {"version":3,"file":"dbf-worker.js","names":["createLoaderWorker","DBFLoader"],"sources":["../../../src/workers/dbf-worker.ts"],"sourcesContent":["import {DBFLoader} from '../dbf-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(DBFLoader);\n"],"mappings":";;AAAA;AACA;AAEA,IAAAA,+BAAkB,EAACC,oBAAS,CAAC"}
@@ -1,8 +1,6 @@
  "use strict";

  var _shpLoader = require("../shp-loader");
-
  var _loaderUtils = require("@loaders.gl/loader-utils");
-
  (0, _loaderUtils.createLoaderWorker)(_shpLoader.SHPLoader);
  //# sourceMappingURL=shp-worker.js.map
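Note: both worker entry points only wrap their loader in createLoaderWorker from @loaders.gl/loader-utils. A hedged sketch of the consuming side (assumes @loaders.gl/core; not shown in this diff):

// Sketch: SHPWorkerLoader/DBFWorkerLoader declare worker: true, so @loaders.gl/core
// can delegate parsing to the dist/*-worker.js bundles built from these files.
import {parse} from '@loaders.gl/core';
import {SHPWorkerLoader} from '@loaders.gl/shapefile';

async function parseOnWorkerThread(arrayBuffer: ArrayBuffer) {
  // (worker use can typically be disabled via the core options, per loaders.gl docs)
  return await parse(arrayBuffer, SHPWorkerLoader);
}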
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/workers/shp-worker.ts"],"names":["SHPLoader"],"mappings":";;AAAA;;AACA;;AAEA,qCAAmBA,oBAAnB","sourcesContent":["import {SHPLoader} from '../shp-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(SHPLoader);\n"],"file":"shp-worker.js"}
+ {"version":3,"file":"shp-worker.js","names":["createLoaderWorker","SHPLoader"],"sources":["../../../src/workers/shp-worker.ts"],"sourcesContent":["import {SHPLoader} from '../shp-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(SHPLoader);\n"],"mappings":";;AAAA;AACA;AAEA,IAAAA,+BAAkB,EAACC,oBAAS,CAAC"}
@@ -1,5 +1,5 @@
- const moduleExports = require('./index');

+ const moduleExports = require('./index');
  globalThis.loaders = globalThis.loaders || {};
  module.exports = Object.assign(globalThis.loaders, moduleExports);
  //# sourceMappingURL=bundle.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":"AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
+ {"version":3,"file":"bundle.js","names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"sources":["../../src/bundle.ts"],"sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"mappings":";AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAS,CAAC;AACxCC,UAAU,CAACC,OAAO,GAAGD,UAAU,CAACC,OAAO,IAAI,CAAC,CAAC;AAC7CC,MAAM,CAACC,OAAO,GAAGC,MAAM,CAACC,MAAM,CAACL,UAAU,CAACC,OAAO,EAAEH,aAAa,CAAC"}
@@ -1,5 +1,7 @@
  import { parseDBF, parseDBFInBatches } from './lib/parsers/parse-dbf';
- const VERSION = typeof "3.3.0-alpha.5" !== 'undefined' ? "3.3.0-alpha.5" : 'latest';
+
+ const VERSION = typeof "3.3.0-alpha.7" !== 'undefined' ? "3.3.0-alpha.7" : 'latest';
+
  export const DBFWorkerLoader = {
  name: 'DBF',
  id: 'dbf',
@@ -15,7 +17,9 @@ export const DBFWorkerLoader = {
  }
  }
  };
- export const DBFLoader = { ...DBFWorkerLoader,
+
+ export const DBFLoader = {
+ ...DBFWorkerLoader,
  parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),
  parseSync: parseDBF,
  parseInBatches: parseDBFInBatches
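Note: functionally identical; only the injected version string and formatting change. For context, a usage sketch (assumption, not package code) showing the dbf.encoding option that these loaders forward to TextDecoder:

// Sketch: parsing a .dbf attribute table with an explicit encoding.
import {parse} from '@loaders.gl/core';
import {DBFLoader} from '@loaders.gl/shapefile';

async function readAttributes(arrayBuffer: ArrayBuffer) {
  // 'latin1' is the default declared in DBFWorkerLoader.options above
  return await parse(arrayBuffer, DBFLoader, {dbf: {encoding: 'latin1'}});
}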
@@ -1 +1 @@
- {"version":3,"sources":["../../src/dbf-loader.ts"],"names":["parseDBF","parseDBFInBatches","VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseInBatches"],"mappings":"AACA,SAAQA,QAAR,EAAkBC,iBAAlB,QAA0C,yBAA1C;AAIA,MAAMC,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AAKA,OAAO,MAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEL,OAJ4B;AAKrCM,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,OAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,mBAAD,CAR0B;AASrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,QAAQ,EAAE;AADP;AADE;AAT4B,CAAhC;AAiBP,OAAO,MAAMC,SAA2B,GAAG,EACzC,GAAGZ,eADsC;AAEzCa,EAAAA,KAAK,EAAE,OAAOC,WAAP,EAAoBL,OAApB,KAAgCZ,QAAQ,CAACiB,WAAD,EAAcL,OAAd,CAFN;AAGzCM,EAAAA,SAAS,EAAElB,QAH8B;AAIzCmB,EAAAA,cAAc,EAAElB;AAJyB,CAApC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"file":"dbf-loader.js"}
+ {"version":3,"file":"dbf-loader.js","names":["parseDBF","parseDBFInBatches","VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseInBatches"],"sources":["../../src/dbf-loader.ts"],"sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"mappings":"AACA,SAAQA,QAAQ,EAAEC,iBAAiB,QAAO,yBAAyB;;AAInE,MAAMC,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;;AAK3E,OAAO,MAAMC,eAAuB,GAAG;EACrCC,IAAI,EAAE,KAAK;EACXC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEL,OAAO;EAChBM,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,mBAAmB,CAAC;EAChCC,OAAO,EAAE;IACPC,GAAG,EAAE;MACHC,QAAQ,EAAE;IACZ;EACF;AACF,CAAC;;AAGD,OAAO,MAAMC,SAA2B,GAAG;EACzC,GAAGZ,eAAe;EAClBa,KAAK,EAAE,OAAOC,WAAW,EAAEL,OAAO,KAAKZ,QAAQ,CAACiB,WAAW,EAAEL,OAAO,CAAC;EACrEM,SAAS,EAAElB,QAAQ;EACnBmB,cAAc,EAAElB;AAClB,CAAC"}
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"],"names":["ShapefileLoader","DBFLoader","DBFWorkerLoader","SHPLoader","SHPWorkerLoader"],"mappings":"AAAA,SAAQA,eAAR,QAA8B,oBAA9B;AACA,SAAQC,SAAR,EAAmBC,eAAnB,QAAyC,cAAzC;AACA,SAAQC,SAAR,EAAmBC,eAAnB,QAAyC,cAAzC","sourcesContent":["export {ShapefileLoader} from './shapefile-loader';\nexport {DBFLoader, DBFWorkerLoader} from './dbf-loader';\nexport {SHPLoader, SHPWorkerLoader} from './shp-loader';\n"],"file":"index.js"}
+ {"version":3,"file":"index.js","names":["ShapefileLoader","DBFLoader","DBFWorkerLoader","SHPLoader","SHPWorkerLoader"],"sources":["../../src/index.ts"],"sourcesContent":["export {ShapefileLoader} from './shapefile-loader';\nexport {DBFLoader, DBFWorkerLoader} from './dbf-loader';\nexport {SHPLoader, SHPWorkerLoader} from './shp-loader';\n"],"mappings":"AAAA,SAAQA,eAAe,QAAO,oBAAoB;AAClD,SAAQC,SAAS,EAAEC,eAAe,QAAO,cAAc;AACvD,SAAQC,SAAS,EAAEC,eAAe,QAAO,cAAc"}
@@ -4,7 +4,6 @@ import BinaryChunkReader from '../streaming/binary-chunk-reader';
  const LITTLE_ENDIAN = true;
  const DBF_HEADER_SIZE = 32;
  var STATE;
-
  (function (STATE) {
  STATE[STATE["START"] = 0] = "START";
  STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
@@ -12,42 +11,36 @@ var STATE;
  STATE[STATE["END"] = 3] = "END";
  STATE[STATE["ERROR"] = 4] = "ERROR";
  })(STATE || (STATE = {}));
-
  class DBFParser {
  constructor(options) {
  _defineProperty(this, "binaryReader", new BinaryChunkReader());
-
  _defineProperty(this, "textDecoder", void 0);
-
  _defineProperty(this, "state", STATE.START);
-
  _defineProperty(this, "result", {
  data: []
  });
-
  this.textDecoder = new TextDecoder(options.encoding);
  }

  write(arrayBuffer) {
  this.binaryReader.write(arrayBuffer);
  this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
+
  }

  end() {
  this.binaryReader.end();
  this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-
  if (this.state !== STATE.END) {
  this.state = STATE.ERROR;
  this.result.error = 'DBF incomplete file';
  }
  }
-
  }

- export function parseDBF(arrayBuffer, options = {}) {
+ export function parseDBF(arrayBuffer) {
  var _options$tables, _options$dbf;
-
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  const {
  encoding = 'latin1'
  } = options.dbf || {};
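Note: the visible change in parseDBF's signature is not semantic; the newer Babel lowers the options = {} default parameter through arguments instead of keeping it in the parameter list. Illustration (not package code):

// Both compiled forms are equivalent to the TypeScript source
//   export function parseDBF(arrayBuffer: ArrayBuffer, options: DBFLoaderOptions = {}) { ... }
function lowered(arrayBuffer) {
  // reads the optional second argument exactly like the '+' line above
  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  return {arrayBuffer, options};
}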
@@ -61,7 +54,6 @@ export function parseDBF(arrayBuffer, options = {}) {
  schema
  } = dbfParser.result;
  const shape = (options === null || options === void 0 ? void 0 : (_options$tables = options.tables) === null || _options$tables === void 0 ? void 0 : _options$tables.format) || (options === null || options === void 0 ? void 0 : (_options$dbf = options.dbf) === null || _options$dbf === void 0 ? void 0 : _options$dbf.shape);
-
  switch (shape) {
  case 'object-row-table':
  {
@@ -72,19 +64,18 @@ export function parseDBF(arrayBuffer, options = {}) {
  };
  return table;
  }
-
  case 'table':
  return {
  schema,
  rows: data
  };
-
  case 'rows':
  default:
  return data;
  }
  }
- export async function* parseDBFInBatches(asyncIterator, options = {}) {
+ export async function* parseDBFInBatches(asyncIterator) {
+ let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  const {
  encoding = 'latin1'
  } = options.dbf || {};
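Note: parseDBFInBatches gets the same default-parameter lowering. A batch-usage sketch (assumption, not from this diff); per the generator body below, the DBF header is yielded first, then arrays of rows:

// Sketch: feeding ArrayBuffer chunks to the loader's parseInBatches.
import {DBFLoader} from '@loaders.gl/shapefile';

async function streamRows(chunks: AsyncIterable<ArrayBuffer>) {
  for await (const batch of DBFLoader.parseInBatches(chunks, {dbf: {encoding: 'latin1'}})) {
    console.log(batch); // DBFHeader first, then row batches
  }
}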
@@ -92,28 +83,22 @@ export async function* parseDBFInBatches(asyncIterator, options = {}) {
  encoding
  });
  let headerReturned = false;
-
  for await (const arrayBuffer of asyncIterator) {
  parser.write(arrayBuffer);
-
  if (!headerReturned && parser.result.dbfHeader) {
  headerReturned = true;
  yield parser.result.dbfHeader;
  }
-
  if (parser.result.data.length > 0) {
  yield parser.result.data;
  parser.result.data = [];
  }
  }
-
  parser.end();
-
  if (parser.result.data.length > 0) {
  yield parser.result.data;
  }
  }
-
  function parseState(state, result, binaryReader, textDecoder) {
  while (true) {
  try {
@@ -121,14 +106,11 @@ function parseState(state, result, binaryReader, textDecoder) {
  case STATE.ERROR:
  case STATE.END:
  return state;
-
  case STATE.START:
  const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
-
  if (!dataView) {
  return state;
  }
-
  result.dbfHeader = parseDBFHeader(dataView);
  result.progress = {
  bytesUsed: 0,
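Note: STATE.START reads the fixed 32-byte DBF header via getDataView. A standalone sketch of that read, following the parseDBFHeader layout visible in the embedded sourcesContent (multi-byte fields little-endian):

function readDbfHeader(view: DataView) {
  const LITTLE_ENDIAN = true;
  return {
    year: view.getUint8(1) + 1900, // last-updated date
    month: view.getUint8(2),
    day: view.getUint8(3),
    nRecords: view.getUint32(4, LITTLE_ENDIAN), // record count
    headerLength: view.getUint16(8, LITTLE_ENDIAN), // bytes before the first record
    recordLength: view.getUint16(10, LITTLE_ENDIAN), // bytes per record
    languageDriver: view.getUint8(29)
  };
}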
@@ -137,42 +119,36 @@ function parseState(state, result, binaryReader, textDecoder) {
  };
  state = STATE.FIELD_DESCRIPTORS;
  break;
-
  case STATE.FIELD_DESCRIPTORS:
- const fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE);
-
+ const fieldDescriptorView = binaryReader.getDataView(
+ result.dbfHeader.headerLength - DBF_HEADER_SIZE);
  if (!fieldDescriptorView) {
  return state;
  }
-
  result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
  result.schema = new Schema(result.dbfFields.map(dbfField => makeField(dbfField)));
  state = STATE.FIELD_PROPERTIES;
+
  binaryReader.skip(1);
  break;
-
  case STATE.FIELD_PROPERTIES:
  const {
  recordLength = 0,
  nRecords = 0
  } = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {};
-
  while (result.data.length < nRecords) {
  const recordView = binaryReader.getDataView(recordLength - 1);
-
  if (!recordView) {
  return state;
  }
-
  binaryReader.skip(1);
+
  const row = parseRow(recordView, result.dbfFields, textDecoder);
  result.data.push(row);
  result.progress.rows = result.data.length;
  }
-
  state = STATE.END;
  break;
-
  default:
  state = STATE.ERROR;
  result.error = "illegal parser state ".concat(state);
@@ -202,9 +178,9 @@ function parseFieldDescriptors(view, textDecoder) {
  const nFields = (view.byteLength - 1) / 32;
  const fields = [];
  let offset = 0;
-
  for (let i = 0; i < nFields; i++) {
- const name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11)).replace(/\u0000/g, '');
+ const name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))
+ .replace(/\u0000/g, '');
  fields.push({
  name,
  dataType: String.fromCharCode(view.getUint8(offset + 11)),
@@ -213,20 +189,17 @@ function parseFieldDescriptors(view, textDecoder) {
  });
  offset += 32;
  }
-
  return fields;
  }

  function parseRow(view, fields, textDecoder) {
  const out = {};
  let offset = 0;
-
  for (const field of fields) {
  const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
  out[field.name] = parseField(text, field.dataType);
  offset += field.fieldLength;
  }
-
  return out;
  }

@@ -234,25 +207,18 @@ function parseField(text, dataType) {
  switch (dataType) {
  case 'B':
  return parseNumber(text);
-
  case 'C':
  return parseCharacter(text);
-
  case 'F':
  return parseNumber(text);
-
  case 'N':
  return parseNumber(text);
-
  case 'O':
  return parseNumber(text);
-
  case 'D':
  return parseDate(text);
-
  case 'L':
  return parseBoolean(text);
-
  default:
  throw new Error('Unsupported data type');
  }
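Note: parseField above maps dBASE type codes onto JS values (and makeField, further down, maps the same codes onto Arrow-style field types). A compact summary sketch derived from the switch statements in this file:

// 'B' | 'F' | 'N' | 'O' -> number via parseNumber (NaN becomes null); Float64 field
// 'C'                   -> trimmed string via parseCharacter ('' becomes null); Utf8 field
// 'D'                   -> epoch milliseconds via parseDate (Date.UTC); TimestampMillisecond field
// 'L'                   -> boolean via parseBoolean (Y/T true, N/F false, else null); Bool field
// anything else         -> throws 'Unsupported data type'
const DBF_TYPE_SUMMARY: Record<string, string> = {
  B: 'number', F: 'number', N: 'number', O: 'number',
  C: 'string', D: 'timestamp-ms', L: 'boolean'
};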
@@ -275,34 +241,28 @@ function parseCharacter(text) {
  return text.trim() || null;
  }

- function makeField({
- name,
- dataType,
- fieldLength,
- decimal
- }) {
+ function makeField(_ref) {
+ let {
+ name,
+ dataType,
+ fieldLength,
+ decimal
+ } = _ref;
  switch (dataType) {
  case 'B':
  return new Field(name, new Float64(), true);
-
  case 'C':
  return new Field(name, new Utf8(), true);
-
  case 'F':
  return new Field(name, new Float64(), true);
-
  case 'N':
  return new Field(name, new Float64(), true);
-
  case 'O':
  return new Field(name, new Float64(), true);
-
  case 'D':
  return new Field(name, new TimestampMillisecond(), true);
-
  case 'L':
  return new Field(name, new Bool(), true);
-
  default:
  throw new Error('Unsupported data type');
  }
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/lib/parsers/parse-dbf.ts"],"names":["Schema","Field","Bool","Utf8","Float64","TimestampMillisecond","BinaryChunkReader","LITTLE_ENDIAN","DBF_HEADER_SIZE","STATE","DBFParser","constructor","options","START","data","textDecoder","TextDecoder","encoding","write","arrayBuffer","binaryReader","state","parseState","result","end","END","ERROR","error","parseDBF","dbf","dbfParser","schema","shape","tables","format","table","rows","parseDBFInBatches","asyncIterator","parser","headerReturned","dbfHeader","length","dataView","getDataView","parseDBFHeader","progress","bytesUsed","rowsTotal","nRecords","FIELD_DESCRIPTORS","fieldDescriptorView","headerLength","dbfFields","parseFieldDescriptors","map","dbfField","makeField","FIELD_PROPERTIES","skip","recordLength","recordView","row","parseRow","push","message","headerView","year","getUint8","month","day","getUint32","getUint16","languageDriver","view","nFields","byteLength","fields","offset","i","name","decode","Uint8Array","buffer","byteOffset","replace","dataType","String","fromCharCode","fieldLength","decimal","out","field","text","parseField","parseNumber","parseCharacter","parseDate","parseBoolean","Error","str","Date","UTC","slice","parseInt","value","test","number","parseFloat","isNaN","trim"],"mappings":";AAAA,SACEA,MADF,EAEEC,KAFF,EAGEC,IAHF,EAIEC,IAJF,EAKEC,OALF,EAMEC,oBANF,QAQO,oBARP;AASA,OAAOC,iBAAP,MAA8B,kCAA9B;AAUA,MAAMC,aAAa,GAAG,IAAtB;AACA,MAAMC,eAAe,GAAG,EAAxB;IAEKC,K;;WAAAA,K;AAAAA,EAAAA,K,CAAAA,K;AAAAA,EAAAA,K,CAAAA,K;AAAAA,EAAAA,K,CAAAA,K;AAAAA,EAAAA,K,CAAAA,K;AAAAA,EAAAA,K,CAAAA,K;GAAAA,K,KAAAA,K;;AAQL,MAAMC,SAAN,CAAgB;AAQdC,EAAAA,WAAW,CAACC,OAAD,EAA8B;AAAA,0CAP1B,IAAIN,iBAAJ,EAO0B;;AAAA;;AAAA,mCALjCG,KAAK,CAACI,KAK2B;;AAAA,oCAJrB;AAClBC,MAAAA,IAAI,EAAE;AADY,KAIqB;;AACvC,SAAKC,WAAL,GAAmB,IAAIC,WAAJ,CAAgBJ,OAAO,CAACK,QAAxB,CAAnB;AACD;;AAKDC,EAAAA,KAAK,CAACC,WAAD,EAAiC;AACpC,SAAKC,YAAL,CAAkBF,KAAlB,CAAwBC,WAAxB;AACA,SAAKE,KAAL,GAAaC,UAAU,CAAC,KAAKD,KAAN,EAAa,KAAKE,MAAlB,EAA0B,KAAKH,YAA/B,EAA6C,KAAKL,WAAlD,CAAvB;AAOD;;AAEDS,EAAAA,GAAG,GAAS;AACV,SAAKJ,YAAL,CAAkBI,GAAlB;AACA,SAAKH,KAAL,GAAaC,UAAU,CAAC,KAAKD,KAAN,EAAa,KAAKE,MAAlB,EAA0B,KAAKH,YAA/B,EAA6C,KAAKL,WAAlD,CAAvB;;AAEA,QAAI,KAAKM,KAAL,KAAeZ,KAAK,CAACgB,GAAzB,EAA8B;AAC5B,WAAKJ,KAAL,GAAaZ,KAAK,CAACiB,KAAnB;AACA,WAAKH,MAAL,CAAYI,KAAZ,GAAoB,qBAApB;AACD;AACF;;AAlCa;;AA0ChB,OAAO,SAASC,QAAT,CACLT,WADK,EAELP,OAAyB,GAAG,EAFvB,EAG4C;AAAA;;AACjD,QAAM;AAACK,IAAAA,QAAQ,GAAG;AAAZ,MAAwBL,OAAO,CAACiB,GAAR,IAAe,EAA7C;AAEA,QAAMC,SAAS,GAAG,IAAIpB,SAAJ,CAAc;AAACO,IAAAA;AAAD,GAAd,CAAlB;AACAa,EAAAA,SAAS,CAACZ,KAAV,CAAgBC,WAAhB;AACAW,EAAAA,SAAS,CAACN,GAAV;AAEA,QAAM;AAACV,IAAAA,IAAD;AAAOiB,IAAAA;AAAP,MAAiBD,SAAS,CAACP,MAAjC;AACA,QAAMS,KAAK,GAAG,CAAApB,OAAO,SAAP,IAAAA,OAAO,WAAP,+BAAAA,OAAO,CAAEqB,MAAT,oEAAiBC,MAAjB,MAA2BtB,OAA3B,aAA2BA,OAA3B,uCAA2BA,OAAO,CAAEiB,GAApC,iDAA2B,aAAcG,KAAzC,CAAd;;AACA,UAAQA,KAAR;AACE,SAAK,kBAAL;AAAyB;AACvB,cAAMG,KAAqB,GAAG;AAC5BH,UAAAA,KAAK,EAAE,kBADqB;AAE5BD,UAAAA,MAF4B;AAG5BjB,UAAAA;AAH4B,SAA9B;AAKA,eAAOqB,KAAP;AACD;;AACD,SAAK,OAAL;AACE,aAAO;AAACJ,QAAAA,MAAD;AAASK,QAAAA,IAAI,EAAEtB;AAAf,OAAP;;AACF,SAAK,MAAL;AACA;AACE,aAAOA,IAAP;AAbJ;AAeD;AAKD,OAAO,gBAAgBuB,iBAAhB,CACLC,aADK,EAEL1B,OAAyB,GAAG,EAFvB,EAGsD;AAC3D,QAAM;AAACK,IAAAA,QAAQ,GAAG;AAAZ,MAAwBL,OAAO,CAACiB,GAAR,IAAe,EAA7C;AAEA,QAAMU,MAAM,GAAG,IAAI7B,SAAJ,CAAc;AAACO,IAAAA;AAAD,GAAd,CAAf;AACA,MAAIuB,cAAc,GAAG,KAArB;;AACA,aAAW,MAAMrB,WAAjB,IAAgCmB,aAAhC,EAA+C;AAC7CC,IAAAA,MAAM,CAACrB,KAAP,CAAaC,WAAb;;AACA,QAAI,CAACqB,cAAD,IAAmBD,MAAM,CAAChB,MAAP,CAAckB,SAArC,EAAgD;AAC9CD,MAAAA,cAAc,GA
AG,IAAjB;AACA,YAAMD,MAAM,CAAChB,MAAP,CAAckB,SAApB;AACD;;AAED,QAAIF,MAAM,CAAChB,MAAP,CAAcT,IAAd,CAAmB4B,MAAnB,GAA4B,CAAhC,EAAmC;AACjC,YAAMH,MAAM,CAAChB,MAAP,CAAcT,IAApB;AACAyB,MAAAA,MAAM,CAAChB,MAAP,CAAcT,IAAd,GAAqB,EAArB;AACD;AACF;;AACDyB,EAAAA,MAAM,CAACf,GAAP;;AACA,MAAIe,MAAM,CAAChB,MAAP,CAAcT,IAAd,CAAmB4B,MAAnB,GAA4B,CAAhC,EAAmC;AACjC,UAAMH,MAAM,CAAChB,MAAP,CAAcT,IAApB;AACD;AACF;;AAUD,SAASQ,UAAT,CACED,KADF,EAEEE,MAFF,EAGEH,YAHF,EAIEL,WAJF,EAKS;AAEP,SAAO,IAAP,EAAa;AACX,QAAI;AACF,cAAQM,KAAR;AACE,aAAKZ,KAAK,CAACiB,KAAX;AACA,aAAKjB,KAAK,CAACgB,GAAX;AACE,iBAAOJ,KAAP;;AAEF,aAAKZ,KAAK,CAACI,KAAX;AAGE,gBAAM8B,QAAQ,GAAGvB,YAAY,CAACwB,WAAb,CAAyBpC,eAAzB,CAAjB;;AACA,cAAI,CAACmC,QAAL,EAAe;AACb,mBAAOtB,KAAP;AACD;;AACDE,UAAAA,MAAM,CAACkB,SAAP,GAAmBI,cAAc,CAACF,QAAD,CAAjC;AACApB,UAAAA,MAAM,CAACuB,QAAP,GAAkB;AAChBC,YAAAA,SAAS,EAAE,CADK;AAEhBC,YAAAA,SAAS,EAAEzB,MAAM,CAACkB,SAAP,CAAiBQ,QAFZ;AAGhBb,YAAAA,IAAI,EAAE;AAHU,WAAlB;AAKAf,UAAAA,KAAK,GAAGZ,KAAK,CAACyC,iBAAd;AACA;;AAEF,aAAKzC,KAAK,CAACyC,iBAAX;AAEE,gBAAMC,mBAAmB,GAAG/B,YAAY,CAACwB,WAAb,CAE1BrB,MAAM,CAACkB,SAAP,CAAiBW,YAAjB,GAAgC5C,eAFN,CAA5B;;AAIA,cAAI,CAAC2C,mBAAL,EAA0B;AACxB,mBAAO9B,KAAP;AACD;;AAEDE,UAAAA,MAAM,CAAC8B,SAAP,GAAmBC,qBAAqB,CAACH,mBAAD,EAAsBpC,WAAtB,CAAxC;AACAQ,UAAAA,MAAM,CAACQ,MAAP,GAAgB,IAAI/B,MAAJ,CAAWuB,MAAM,CAAC8B,SAAP,CAAiBE,GAAjB,CAAsBC,QAAD,IAAcC,SAAS,CAACD,QAAD,CAA5C,CAAX,CAAhB;AAEAnC,UAAAA,KAAK,GAAGZ,KAAK,CAACiD,gBAAd;AAIAtC,UAAAA,YAAY,CAACuC,IAAb,CAAkB,CAAlB;AACA;;AAEF,aAAKlD,KAAK,CAACiD,gBAAX;AACE,gBAAM;AAACE,YAAAA,YAAY,GAAG,CAAhB;AAAmBX,YAAAA,QAAQ,GAAG;AAA9B,cAAmC,CAAA1B,MAAM,SAAN,IAAAA,MAAM,WAAN,YAAAA,MAAM,CAAEkB,SAAR,KAAqB,EAA9D;;AACA,iBAAOlB,MAAM,CAACT,IAAP,CAAY4B,MAAZ,GAAqBO,QAA5B,EAAsC;AACpC,kBAAMY,UAAU,GAAGzC,YAAY,CAACwB,WAAb,CAAyBgB,YAAY,GAAG,CAAxC,CAAnB;;AACA,gBAAI,CAACC,UAAL,EAAiB;AACf,qBAAOxC,KAAP;AACD;;AAEDD,YAAAA,YAAY,CAACuC,IAAb,CAAkB,CAAlB;AAGA,kBAAMG,GAAG,GAAGC,QAAQ,CAACF,UAAD,EAAatC,MAAM,CAAC8B,SAApB,EAA+BtC,WAA/B,CAApB;AACAQ,YAAAA,MAAM,CAACT,IAAP,CAAYkD,IAAZ,CAAiBF,GAAjB;AAEAvC,YAAAA,MAAM,CAACuB,QAAP,CAAgBV,IAAhB,GAAuBb,MAAM,CAACT,IAAP,CAAY4B,MAAnC;AACD;;AACDrB,UAAAA,KAAK,GAAGZ,KAAK,CAACgB,GAAd;AACA;;AAEF;AACEJ,UAAAA,KAAK,GAAGZ,KAAK,CAACiB,KAAd;AACAH,UAAAA,MAAM,CAACI,KAAP,kCAAuCN,KAAvC;AACA,iBAAOA,KAAP;AA/DJ;AAiED,KAlED,CAkEE,OAAOM,KAAP,EAAc;AACdN,MAAAA,KAAK,GAAGZ,KAAK,CAACiB,KAAd;AACAH,MAAAA,MAAM,CAACI,KAAP,iCAAuCA,KAAD,CAAiBsC,OAAvD;AACA,aAAO5C,KAAP;AACD;AACF;AACF;;AAKD,SAASwB,cAAT,CAAwBqB,UAAxB,EAAyD;AACvD,SAAO;AAELC,IAAAA,IAAI,EAAED,UAAU,CAACE,QAAX,CAAoB,CAApB,IAAyB,IAF1B;AAGLC,IAAAA,KAAK,EAAEH,UAAU,CAACE,QAAX,CAAoB,CAApB,CAHF;AAILE,IAAAA,GAAG,EAAEJ,UAAU,CAACE,QAAX,CAAoB,CAApB,CAJA;AAMLnB,IAAAA,QAAQ,EAAEiB,UAAU,CAACK,SAAX,CAAqB,CAArB,EAAwBhE,aAAxB,CANL;AAQL6C,IAAAA,YAAY,EAAEc,UAAU,CAACM,SAAX,CAAqB,CAArB,EAAwBjE,aAAxB,CART;AAULqD,IAAAA,YAAY,EAAEM,UAAU,CAACM,SAAX,CAAqB,EAArB,EAAyBjE,aAAzB,CAVT;AAYLkE,IAAAA,cAAc,EAAEP,UAAU,CAACE,QAAX,CAAoB,EAApB;AAZX,GAAP;AAcD;;AAKD,SAASd,qBAAT,CAA+BoB,IAA/B,EAA+C3D,WAA/C,EAAqF;AAGnF,QAAM4D,OAAO,GAAG,CAACD,IAAI,CAACE,UAAL,GAAkB,CAAnB,IAAwB,EAAxC;AACA,QAAMC,MAAkB,GAAG,EAA3B;AACA,MAAIC,MAAM,GAAG,CAAb;;AACA,OAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGJ,OAApB,EAA6BI,CAAC,EAA9B,EAAkC;AAChC,UAAMC,IAAI,GAAGjE,WAAW,CACrBkE,MADU,CACH,IAAIC,UAAJ,CAAeR,IAAI,CAACS,MAApB,EAA4BT,IAAI,CAACU,UAAL,GAAkBN,MAA9C,EAAsD,EAAtD,CADG,EAGVO,OAHU,CAGF,SAHE,EAGS,EAHT,CAAb;AAKAR,IAAAA,MAAM,CAACb,IAAP,CAAY;AACVgB,MAAAA,IADU;AAEVM,MAAAA,QAAQ,EAAEC,MAAM,CAACC,YAAP,CAAoBd,IAAI,CAACN,QAAL,CAAcU,MAAM,GAAG,EAAvB,CAApB,CAFA;AAGVW,MAAAA,WAAW,EAAEf,IAAI,CAACN,QAAL,CAAcU,MAAM,GAAG,EAAvB,CAHH;AAIVY,MAAAA,OAAO,EAAE
hB,IAAI,CAACN,QAAL,CAAcU,MAAM,GAAG,EAAvB;AAJC,KAAZ;AAMAA,IAAAA,MAAM,IAAI,EAAV;AACD;;AACD,SAAOD,MAAP;AACD;;AAuBD,SAASd,QAAT,CACEW,IADF,EAEEG,MAFF,EAGE9D,WAHF,EAIwB;AACtB,QAAM4E,GAAG,GAAG,EAAZ;AACA,MAAIb,MAAM,GAAG,CAAb;;AACA,OAAK,MAAMc,KAAX,IAAoBf,MAApB,EAA4B;AAC1B,UAAMgB,IAAI,GAAG9E,WAAW,CAACkE,MAAZ,CACX,IAAIC,UAAJ,CAAeR,IAAI,CAACS,MAApB,EAA4BT,IAAI,CAACU,UAAL,GAAkBN,MAA9C,EAAsDc,KAAK,CAACH,WAA5D,CADW,CAAb;AAGAE,IAAAA,GAAG,CAACC,KAAK,CAACZ,IAAP,CAAH,GAAkBc,UAAU,CAACD,IAAD,EAAOD,KAAK,CAACN,QAAb,CAA5B;AACAR,IAAAA,MAAM,IAAIc,KAAK,CAACH,WAAhB;AACD;;AAED,SAAOE,GAAP;AACD;;AAQD,SAASG,UAAT,CAAoBD,IAApB,EAAkCP,QAAlC,EAAsF;AACpF,UAAQA,QAAR;AACE,SAAK,GAAL;AACE,aAAOS,WAAW,CAACF,IAAD,CAAlB;;AACF,SAAK,GAAL;AACE,aAAOG,cAAc,CAACH,IAAD,CAArB;;AACF,SAAK,GAAL;AACE,aAAOE,WAAW,CAACF,IAAD,CAAlB;;AACF,SAAK,GAAL;AACE,aAAOE,WAAW,CAACF,IAAD,CAAlB;;AACF,SAAK,GAAL;AACE,aAAOE,WAAW,CAACF,IAAD,CAAlB;;AACF,SAAK,GAAL;AACE,aAAOI,SAAS,CAACJ,IAAD,CAAhB;;AACF,SAAK,GAAL;AACE,aAAOK,YAAY,CAACL,IAAD,CAAnB;;AACF;AACE,YAAM,IAAIM,KAAJ,CAAU,uBAAV,CAAN;AAhBJ;AAkBD;;AAOD,SAASF,SAAT,CAAmBG,GAAnB,EAAqC;AACnC,SAAOC,IAAI,CAACC,GAAL,CAASF,GAAG,CAACG,KAAJ,CAAU,CAAV,EAAa,CAAb,CAAT,EAA0BC,QAAQ,CAACJ,GAAG,CAACG,KAAJ,CAAU,CAAV,EAAa,CAAb,CAAD,EAAkB,EAAlB,CAAR,GAAgC,CAA1D,EAA6DH,GAAG,CAACG,KAAJ,CAAU,CAAV,EAAa,CAAb,CAA7D,CAAP;AACD;;AAUD,SAASL,YAAT,CAAsBO,KAAtB,EAAqD;AACnD,SAAO,UAAUC,IAAV,CAAeD,KAAf,IAAwB,KAAxB,GAAgC,UAAUC,IAAV,CAAeD,KAAf,IAAwB,IAAxB,GAA+B,IAAtE;AACD;;AAOD,SAASV,WAAT,CAAqBF,IAArB,EAAkD;AAChD,QAAMc,MAAM,GAAGC,UAAU,CAACf,IAAD,CAAzB;AACA,SAAOgB,KAAK,CAACF,MAAD,CAAL,GAAgB,IAAhB,GAAuBA,MAA9B;AACD;;AAOD,SAASX,cAAT,CAAwBH,IAAxB,EAAqD;AACnD,SAAOA,IAAI,CAACiB,IAAL,MAAe,IAAtB;AACD;;AASD,SAASrD,SAAT,CAAmB;AAACuB,EAAAA,IAAD;AAAOM,EAAAA,QAAP;AAAiBG,EAAAA,WAAjB;AAA8BC,EAAAA;AAA9B,CAAnB,EAA4E;AAC1E,UAAQJ,QAAR;AACE,SAAK,GAAL;AACE,aAAO,IAAIrF,KAAJ,CAAU+E,IAAV,EAAgB,IAAI5E,OAAJ,EAAhB,EAA+B,IAA/B,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIH,KAAJ,CAAU+E,IAAV,EAAgB,IAAI7E,IAAJ,EAAhB,EAA4B,IAA5B,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIF,KAAJ,CAAU+E,IAAV,EAAgB,IAAI5E,OAAJ,EAAhB,EAA+B,IAA/B,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIH,KAAJ,CAAU+E,IAAV,EAAgB,IAAI5E,OAAJ,EAAhB,EAA+B,IAA/B,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIH,KAAJ,CAAU+E,IAAV,EAAgB,IAAI5E,OAAJ,EAAhB,EAA+B,IAA/B,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIH,KAAJ,CAAU+E,IAAV,EAAgB,IAAI3E,oBAAJ,EAAhB,EAA4C,IAA5C,CAAP;;AACF,SAAK,GAAL;AACE,aAAO,IAAIJ,KAAJ,CAAU+E,IAAV,EAAgB,IAAI9E,IAAJ,EAAhB,EAA4B,IAA5B,CAAP;;AACF;AACE,YAAM,IAAIiG,KAAJ,CAAU,uBAAV,CAAN;AAhBJ;AAkBD","sourcesContent":["import {\n Schema,\n Field,\n Bool,\n Utf8,\n Float64,\n TimestampMillisecond,\n ObjectRowTable\n} from '@loaders.gl/schema';\nimport BinaryChunkReader from '../streaming/binary-chunk-reader';\nimport {\n DBFLoaderOptions,\n DBFResult,\n DBFTableOutput,\n DBFHeader,\n DBFRowsOutput,\n DBFField\n} from './types';\n\nconst LITTLE_ENDIAN = true;\nconst DBF_HEADER_SIZE = 32;\n\nenum STATE {\n START = 0, // Expecting header\n FIELD_DESCRIPTORS = 1,\n FIELD_PROPERTIES = 2,\n END = 3,\n ERROR = 4\n}\n\nclass DBFParser {\n binaryReader = new BinaryChunkReader();\n textDecoder: TextDecoder;\n state = STATE.START;\n result: DBFResult = {\n data: []\n };\n\n constructor(options: {encoding: string}) {\n this.textDecoder = new TextDecoder(options.encoding);\n }\n\n /**\n * @param arrayBuffer\n */\n write(arrayBuffer: ArrayBuffer): void {\n this.binaryReader.write(arrayBuffer);\n this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);\n // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();\n\n // important events:\n // - 
schema available\n // - first rows available\n // - all rows available\n }\n\n end(): void {\n this.binaryReader.end();\n this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);\n // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();\n if (this.state !== STATE.END) {\n this.state = STATE.ERROR;\n this.result.error = 'DBF incomplete file';\n }\n }\n}\n\n/**\n * @param arrayBuffer\n * @param options\n * @returns DBFTable or rows\n */\nexport function parseDBF(\n arrayBuffer: ArrayBuffer,\n options: DBFLoaderOptions = {}\n): DBFRowsOutput | DBFTableOutput | ObjectRowTable {\n const {encoding = 'latin1'} = options.dbf || {};\n\n const dbfParser = new DBFParser({encoding});\n dbfParser.write(arrayBuffer);\n dbfParser.end();\n\n const {data, schema} = dbfParser.result;\n const shape = options?.tables?.format || options?.dbf?.shape;\n switch (shape) {\n case 'object-row-table': {\n const table: ObjectRowTable = {\n shape: 'object-row-table',\n schema,\n data\n };\n return table;\n }\n case 'table':\n return {schema, rows: data};\n case 'rows':\n default:\n return data;\n }\n}\n/**\n * @param asyncIterator\n * @param options\n */\nexport async function* parseDBFInBatches(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: DBFLoaderOptions = {}\n): AsyncIterable<DBFHeader | DBFRowsOutput | DBFTableOutput> {\n const {encoding = 'latin1'} = options.dbf || {};\n\n const parser = new DBFParser({encoding});\n let headerReturned = false;\n for await (const arrayBuffer of asyncIterator) {\n parser.write(arrayBuffer);\n if (!headerReturned && parser.result.dbfHeader) {\n headerReturned = true;\n yield parser.result.dbfHeader;\n }\n\n if (parser.result.data.length > 0) {\n yield parser.result.data;\n parser.result.data = [];\n }\n }\n parser.end();\n if (parser.result.data.length > 0) {\n yield parser.result.data;\n }\n}\n/**\n * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm\n * @param state\n * @param result\n * @param binaryReader\n * @param textDecoder\n * @returns\n */\n/* eslint-disable complexity, max-depth */\nfunction parseState(\n state: STATE,\n result: DBFResult,\n binaryReader: BinaryChunkReader,\n textDecoder: TextDecoder\n): STATE {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n try {\n switch (state) {\n case STATE.ERROR:\n case STATE.END:\n return state;\n\n case STATE.START:\n // Parse initial file header\n // DBF Header\n const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);\n if (!dataView) {\n return state;\n }\n result.dbfHeader = parseDBFHeader(dataView);\n result.progress = {\n bytesUsed: 0,\n rowsTotal: result.dbfHeader.nRecords,\n rows: 0\n };\n state = STATE.FIELD_DESCRIPTORS;\n break;\n\n case STATE.FIELD_DESCRIPTORS:\n // Parse DBF field descriptors (schema)\n const fieldDescriptorView = binaryReader.getDataView(\n // @ts-ignore\n result.dbfHeader.headerLength - DBF_HEADER_SIZE\n );\n if (!fieldDescriptorView) {\n return state;\n }\n\n result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);\n result.schema = new Schema(result.dbfFields.map((dbfField) => makeField(dbfField)));\n\n state = STATE.FIELD_PROPERTIES;\n\n // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?\n // parsedbf uses ((fields.length + 1) << 5) + 2;\n binaryReader.skip(1);\n break;\n\n case STATE.FIELD_PROPERTIES:\n const {recordLength = 0, nRecords = 0} = result?.dbfHeader || {};\n while (result.data.length < nRecords) {\n const recordView = 
binaryReader.getDataView(recordLength - 1);\n if (!recordView) {\n return state;\n }\n // Note: Avoid actually reading the last byte, which may not be present\n binaryReader.skip(1);\n\n // @ts-ignore\n const row = parseRow(recordView, result.dbfFields, textDecoder);\n result.data.push(row);\n // @ts-ignore\n result.progress.rows = result.data.length;\n }\n state = STATE.END;\n break;\n\n default:\n state = STATE.ERROR;\n result.error = `illegal parser state ${state}`;\n return state;\n }\n } catch (error) {\n state = STATE.ERROR;\n result.error = `DBF parsing failed: ${(error as Error).message}`;\n return state;\n }\n }\n}\n\n/**\n * @param headerView\n */\nfunction parseDBFHeader(headerView: DataView): DBFHeader {\n return {\n // Last updated date\n year: headerView.getUint8(1) + 1900,\n month: headerView.getUint8(2),\n day: headerView.getUint8(3),\n // Number of records in data file\n nRecords: headerView.getUint32(4, LITTLE_ENDIAN),\n // Length of header in bytes\n headerLength: headerView.getUint16(8, LITTLE_ENDIAN),\n // Length of each record\n recordLength: headerView.getUint16(10, LITTLE_ENDIAN),\n // Not sure if this is usually set\n languageDriver: headerView.getUint8(29)\n };\n}\n\n/**\n * @param view\n */\nfunction parseFieldDescriptors(view: DataView, textDecoder: TextDecoder): DBFField[] {\n // NOTE: this might overestimate the number of fields if the \"Database\n // Container\" container exists and is included in the headerLength\n const nFields = (view.byteLength - 1) / 32;\n const fields: DBFField[] = [];\n let offset = 0;\n for (let i = 0; i < nFields; i++) {\n const name = textDecoder\n .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))\n // eslint-disable-next-line no-control-regex\n .replace(/\\u0000/g, '');\n\n fields.push({\n name,\n dataType: String.fromCharCode(view.getUint8(offset + 11)),\n fieldLength: view.getUint8(offset + 16),\n decimal: view.getUint8(offset + 17)\n });\n offset += 32;\n }\n return fields;\n}\n\n/*\n * @param {BinaryChunkReader} binaryReader\nfunction parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {\n const rows = [];\n for (let i = 0; i < nRecords; i++) {\n const recordView = binaryReader.getDataView(recordLength - 1);\n binaryReader.skip(1);\n // @ts-ignore\n rows.push(parseRow(recordView, fields, textDecoder));\n }\n return rows;\n}\n */\n\n/**\n *\n * @param view\n * @param fields\n * @param textDecoder\n * @returns\n */\nfunction parseRow(\n view: DataView,\n fields: DBFField[],\n textDecoder: TextDecoder\n): {[key: string]: any} {\n const out = {};\n let offset = 0;\n for (const field of fields) {\n const text = textDecoder.decode(\n new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength)\n );\n out[field.name] = parseField(text, field.dataType);\n offset += field.fieldLength;\n }\n\n return out;\n}\n\n/**\n * Should NaN be coerced to null?\n * @param text\n * @param dataType\n * @returns Field depends on a type of the data\n */\nfunction parseField(text: string, dataType: string): string | number | boolean | null {\n switch (dataType) {\n case 'B':\n return parseNumber(text);\n case 'C':\n return parseCharacter(text);\n case 'F':\n return parseNumber(text);\n case 'N':\n return parseNumber(text);\n case 'O':\n return parseNumber(text);\n case 'D':\n return parseDate(text);\n case 'L':\n return parseBoolean(text);\n default:\n throw new Error('Unsupported data type');\n }\n}\n\n/**\n * Parse YYYYMMDD to date in milliseconds\n * @param str YYYYMMDD\n * @returns new Date as a number\n 
*/\nfunction parseDate(str: any): number {\n return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));\n}\n\n/**\n * Read boolean value\n * any of Y, y, T, t coerce to true\n * any of N, n, F, f coerce to false\n * otherwise null\n * @param value\n * @returns boolean | null\n */\nfunction parseBoolean(value: string): boolean | null {\n return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;\n}\n\n/**\n * Return null instead of NaN\n * @param text\n * @returns number | null\n */\nfunction parseNumber(text: string): number | null {\n const number = parseFloat(text);\n return isNaN(number) ? null : number;\n}\n\n/**\n *\n * @param text\n * @returns string | null\n */\nfunction parseCharacter(text: string): string | null {\n return text.trim() || null;\n}\n\n/**\n * Create a standard Arrow-style `Field` from field descriptor.\n * TODO - use `fieldLength` and `decimal` to generate smaller types?\n * @param param0\n * @returns Field\n */\n// eslint-disable\nfunction makeField({name, dataType, fieldLength, decimal}: DBFField): Field {\n switch (dataType) {\n case 'B':\n return new Field(name, new Float64(), true);\n case 'C':\n return new Field(name, new Utf8(), true);\n case 'F':\n return new Field(name, new Float64(), true);\n case 'N':\n return new Field(name, new Float64(), true);\n case 'O':\n return new Field(name, new Float64(), true);\n case 'D':\n return new Field(name, new TimestampMillisecond(), true);\n case 'L':\n return new Field(name, new Bool(), true);\n default:\n throw new Error('Unsupported data type');\n }\n}\n"],"file":"parse-dbf.js"}
+ {"version":3,"file":"parse-dbf.js","names":["Schema","Field","Bool","Utf8","Float64","TimestampMillisecond","BinaryChunkReader","LITTLE_ENDIAN","DBF_HEADER_SIZE","STATE","DBFParser","constructor","options","START","data","textDecoder","TextDecoder","encoding","write","arrayBuffer","binaryReader","state","parseState","result","end","END","ERROR","error","parseDBF","dbf","dbfParser","schema","shape","tables","format","table","rows","parseDBFInBatches","asyncIterator","parser","headerReturned","dbfHeader","length","dataView","getDataView","parseDBFHeader","progress","bytesUsed","rowsTotal","nRecords","FIELD_DESCRIPTORS","fieldDescriptorView","headerLength","dbfFields","parseFieldDescriptors","map","dbfField","makeField","FIELD_PROPERTIES","skip","recordLength","recordView","row","parseRow","push","message","headerView","year","getUint8","month","day","getUint32","getUint16","languageDriver","view","nFields","byteLength","fields","offset","i","name","decode","Uint8Array","buffer","byteOffset","replace","dataType","String","fromCharCode","fieldLength","decimal","out","field","text","parseField","parseNumber","parseCharacter","parseDate","parseBoolean","Error","str","Date","UTC","slice","parseInt","value","test","number","parseFloat","isNaN","trim"],"sources":["../../../../src/lib/parsers/parse-dbf.ts"],"sourcesContent":["import {\n Schema,\n Field,\n Bool,\n Utf8,\n Float64,\n TimestampMillisecond,\n ObjectRowTable\n} from '@loaders.gl/schema';\nimport BinaryChunkReader from '../streaming/binary-chunk-reader';\nimport {\n DBFLoaderOptions,\n DBFResult,\n DBFTableOutput,\n DBFHeader,\n DBFRowsOutput,\n DBFField\n} from './types';\n\nconst LITTLE_ENDIAN = true;\nconst DBF_HEADER_SIZE = 32;\n\nenum STATE {\n START = 0, // Expecting header\n FIELD_DESCRIPTORS = 1,\n FIELD_PROPERTIES = 2,\n END = 3,\n ERROR = 4\n}\n\nclass DBFParser {\n binaryReader = new BinaryChunkReader();\n textDecoder: TextDecoder;\n state = STATE.START;\n result: DBFResult = {\n data: []\n };\n\n constructor(options: {encoding: string}) {\n this.textDecoder = new TextDecoder(options.encoding);\n }\n\n /**\n * @param arrayBuffer\n */\n write(arrayBuffer: ArrayBuffer): void {\n this.binaryReader.write(arrayBuffer);\n this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);\n // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();\n\n // important events:\n // - schema available\n // - first rows available\n // - all rows available\n }\n\n end(): void {\n this.binaryReader.end();\n this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);\n // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();\n if (this.state !== STATE.END) {\n this.state = STATE.ERROR;\n this.result.error = 'DBF incomplete file';\n }\n }\n}\n\n/**\n * @param arrayBuffer\n * @param options\n * @returns DBFTable or rows\n */\nexport function parseDBF(\n arrayBuffer: ArrayBuffer,\n options: DBFLoaderOptions = {}\n): DBFRowsOutput | DBFTableOutput | ObjectRowTable {\n const {encoding = 'latin1'} = options.dbf || {};\n\n const dbfParser = new DBFParser({encoding});\n dbfParser.write(arrayBuffer);\n dbfParser.end();\n\n const {data, schema} = dbfParser.result;\n const shape = options?.tables?.format || options?.dbf?.shape;\n switch (shape) {\n case 'object-row-table': {\n const table: ObjectRowTable = {\n shape: 'object-row-table',\n schema,\n data\n };\n return table;\n }\n case 'table':\n return {schema, rows: data};\n case 'rows':\n default:\n return data;\n }\n}\n/**\n * @param 
asyncIterator\n * @param options\n */\nexport async function* parseDBFInBatches(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: DBFLoaderOptions = {}\n): AsyncIterable<DBFHeader | DBFRowsOutput | DBFTableOutput> {\n const {encoding = 'latin1'} = options.dbf || {};\n\n const parser = new DBFParser({encoding});\n let headerReturned = false;\n for await (const arrayBuffer of asyncIterator) {\n parser.write(arrayBuffer);\n if (!headerReturned && parser.result.dbfHeader) {\n headerReturned = true;\n yield parser.result.dbfHeader;\n }\n\n if (parser.result.data.length > 0) {\n yield parser.result.data;\n parser.result.data = [];\n }\n }\n parser.end();\n if (parser.result.data.length > 0) {\n yield parser.result.data;\n }\n}\n/**\n * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm\n * @param state\n * @param result\n * @param binaryReader\n * @param textDecoder\n * @returns\n */\n/* eslint-disable complexity, max-depth */\nfunction parseState(\n state: STATE,\n result: DBFResult,\n binaryReader: BinaryChunkReader,\n textDecoder: TextDecoder\n): STATE {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n try {\n switch (state) {\n case STATE.ERROR:\n case STATE.END:\n return state;\n\n case STATE.START:\n // Parse initial file header\n // DBF Header\n const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);\n if (!dataView) {\n return state;\n }\n result.dbfHeader = parseDBFHeader(dataView);\n result.progress = {\n bytesUsed: 0,\n rowsTotal: result.dbfHeader.nRecords,\n rows: 0\n };\n state = STATE.FIELD_DESCRIPTORS;\n break;\n\n case STATE.FIELD_DESCRIPTORS:\n // Parse DBF field descriptors (schema)\n const fieldDescriptorView = binaryReader.getDataView(\n // @ts-ignore\n result.dbfHeader.headerLength - DBF_HEADER_SIZE\n );\n if (!fieldDescriptorView) {\n return state;\n }\n\n result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);\n result.schema = new Schema(result.dbfFields.map((dbfField) => makeField(dbfField)));\n\n state = STATE.FIELD_PROPERTIES;\n\n // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?\n // parsedbf uses ((fields.length + 1) << 5) + 2;\n binaryReader.skip(1);\n break;\n\n case STATE.FIELD_PROPERTIES:\n const {recordLength = 0, nRecords = 0} = result?.dbfHeader || {};\n while (result.data.length < nRecords) {\n const recordView = binaryReader.getDataView(recordLength - 1);\n if (!recordView) {\n return state;\n }\n // Note: Avoid actually reading the last byte, which may not be present\n binaryReader.skip(1);\n\n // @ts-ignore\n const row = parseRow(recordView, result.dbfFields, textDecoder);\n result.data.push(row);\n // @ts-ignore\n result.progress.rows = result.data.length;\n }\n state = STATE.END;\n break;\n\n default:\n state = STATE.ERROR;\n result.error = `illegal parser state ${state}`;\n return state;\n }\n } catch (error) {\n state = STATE.ERROR;\n result.error = `DBF parsing failed: ${(error as Error).message}`;\n return state;\n }\n }\n}\n\n/**\n * @param headerView\n */\nfunction parseDBFHeader(headerView: DataView): DBFHeader {\n return {\n // Last updated date\n year: headerView.getUint8(1) + 1900,\n month: headerView.getUint8(2),\n day: headerView.getUint8(3),\n // Number of records in data file\n nRecords: headerView.getUint32(4, LITTLE_ENDIAN),\n // Length of header in bytes\n headerLength: headerView.getUint16(8, LITTLE_ENDIAN),\n // Length of each record\n recordLength: headerView.getUint16(10, LITTLE_ENDIAN),\n // Not sure if this is usually 
set\n languageDriver: headerView.getUint8(29)\n };\n}\n\n/**\n * @param view\n */\nfunction parseFieldDescriptors(view: DataView, textDecoder: TextDecoder): DBFField[] {\n // NOTE: this might overestimate the number of fields if the \"Database\n // Container\" container exists and is included in the headerLength\n const nFields = (view.byteLength - 1) / 32;\n const fields: DBFField[] = [];\n let offset = 0;\n for (let i = 0; i < nFields; i++) {\n const name = textDecoder\n .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))\n // eslint-disable-next-line no-control-regex\n .replace(/\\u0000/g, '');\n\n fields.push({\n name,\n dataType: String.fromCharCode(view.getUint8(offset + 11)),\n fieldLength: view.getUint8(offset + 16),\n decimal: view.getUint8(offset + 17)\n });\n offset += 32;\n }\n return fields;\n}\n\n/*\n * @param {BinaryChunkReader} binaryReader\nfunction parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {\n const rows = [];\n for (let i = 0; i < nRecords; i++) {\n const recordView = binaryReader.getDataView(recordLength - 1);\n binaryReader.skip(1);\n // @ts-ignore\n rows.push(parseRow(recordView, fields, textDecoder));\n }\n return rows;\n}\n */\n\n/**\n *\n * @param view\n * @param fields\n * @param textDecoder\n * @returns\n */\nfunction parseRow(\n view: DataView,\n fields: DBFField[],\n textDecoder: TextDecoder\n): {[key: string]: any} {\n const out: {[key: string]: string | number | boolean | null} = {};\n let offset = 0;\n for (const field of fields) {\n const text = textDecoder.decode(\n new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength)\n );\n out[field.name] = parseField(text, field.dataType);\n offset += field.fieldLength;\n }\n\n return out;\n}\n\n/**\n * Should NaN be coerced to null?\n * @param text\n * @param dataType\n * @returns Field depends on a type of the data\n */\nfunction parseField(text: string, dataType: string): string | number | boolean | null {\n switch (dataType) {\n case 'B':\n return parseNumber(text);\n case 'C':\n return parseCharacter(text);\n case 'F':\n return parseNumber(text);\n case 'N':\n return parseNumber(text);\n case 'O':\n return parseNumber(text);\n case 'D':\n return parseDate(text);\n case 'L':\n return parseBoolean(text);\n default:\n throw new Error('Unsupported data type');\n }\n}\n\n/**\n * Parse YYYYMMDD to date in milliseconds\n * @param str YYYYMMDD\n * @returns new Date as a number\n */\nfunction parseDate(str: any): number {\n return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));\n}\n\n/**\n * Read boolean value\n * any of Y, y, T, t coerce to true\n * any of N, n, F, f coerce to false\n * otherwise null\n * @param value\n * @returns boolean | null\n */\nfunction parseBoolean(value: string): boolean | null {\n return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;\n}\n\n/**\n * Return null instead of NaN\n * @param text\n * @returns number | null\n */\nfunction parseNumber(text: string): number | null {\n const number = parseFloat(text);\n return isNaN(number) ? 
/**
 *
 * @param text
 * @returns string | null
 */
function parseCharacter(text: string): string | null {
  return text.trim() || null;
}

/**
 * Create a standard Arrow-style `Field` from field descriptor.
 * TODO - use `fieldLength` and `decimal` to generate smaller types?
 * @param param0
 * @returns Field
 */
// eslint-disable
function makeField({name, dataType, fieldLength, decimal}: DBFField): Field {
  switch (dataType) {
    case 'B':
      return new Field(name, new Float64(), true);
    case 'C':
      return new Field(name, new Utf8(), true);
    case 'F':
      return new Field(name, new Float64(), true);
    case 'N':
      return new Field(name, new Float64(), true);
    case 'O':
      return new Field(name, new Float64(), true);
    case 'D':
      return new Field(name, new TimestampMillisecond(), true);
    case 'L':
      return new Field(name, new Bool(), true);
    default:
      throw new Error('Unsupported data type');
  }
}
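
The batched parser above yields the DBF header first and then arrays of parsed rows as chunks arrive. The sketch below shows one way to drive parseDBFInBatches from a streamed HTTP response; the import path, the fetch/ReadableStream plumbing, and the helper names are illustrative assumptions, not part of the source shown here.

// Minimal consumption sketch for parseDBFInBatches (hypothetical import path).
import {parseDBFInBatches} from './lib/parsers/parse-dbf';

// Adapt a fetch() body into the AsyncIterable<ArrayBuffer> the parser expects.
async function* arrayBufferChunks(url: string): AsyncIterable<ArrayBuffer> {
  const response = await fetch(url);
  const reader = response.body!.getReader();
  while (true) {
    const {done, value} = await reader.read();
    if (done) {
      return;
    }
    // Copy each Uint8Array chunk into a standalone ArrayBuffer
    yield value.buffer.slice(value.byteOffset, value.byteOffset + value.byteLength);
  }
}

async function logDBFBatches(url: string): Promise<void> {
  const options = {dbf: {encoding: 'latin1'}}; // 'latin1' is the default per the source above
  for await (const batch of parseDBFInBatches(arrayBufferChunks(url), options)) {
    if (Array.isArray(batch)) {
      // Subsequent yields are arrays of row objects (one object per DBF record)
      console.log(`batch with ${batch.length} rows`);
    } else {
      // The first yield is the parsed DBFHeader
      console.log('header', batch);
    }
  }
}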
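
The per-value decoders (parseNumber, parseCharacter, parseDate, parseBoolean) are module-private. The following standalone sketch restates the same rules with local re-declarations so their behavior on typical fixed-width DBF field text can be seen in isolation; nothing below is exported by the package, and parseInt is used explicitly where the original relies on implicit coercion.

// Standalone restatement of the value-level DBF decoding rules shown above.
function decodeNumber(text: string): number | null {
  const value = parseFloat(text);
  return isNaN(value) ? null : value; // NaN becomes null, as in parseNumber
}

function decodeCharacter(text: string): string | null {
  return text.trim() || null; // empty / whitespace-only fields become null
}

function decodeDate(text: string): number {
  // 'YYYYMMDD' -> milliseconds since the UNIX epoch (UTC), as in parseDate
  return Date.UTC(
    parseInt(text.slice(0, 4), 10),
    parseInt(text.slice(4, 6), 10) - 1,
    parseInt(text.slice(6, 8), 10)
  );
}

function decodeBoolean(text: string): boolean | null {
  return /^[nf]$/i.test(text) ? false : /^[yt]$/i.test(text) ? true : null;
}

// Example values as they might appear in fixed-width DBF fields:
decodeNumber('   12.50'); // 12.5
decodeCharacter('   ');   // null
decodeDate('20210215');   // same as Date.UTC(2021, 1, 15)
decodeBoolean('T');       // true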