@loaders.gl/shapefile 3.0.13 → 3.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39):
  1. package/dist/dbf-worker.js +1 -1
  2. package/dist/dbf-worker.js.map +1 -1
  3. package/dist/dist.es5.min.js +1 -1
  4. package/dist/dist.es5.min.js.map +1 -1
  5. package/dist/dist.min.js +1 -1
  6. package/dist/dist.min.js.map +1 -1
  7. package/dist/es5/bundle.js +2 -2
  8. package/dist/es5/bundle.js.map +1 -1
  9. package/dist/es5/dbf-loader.js +40 -5
  10. package/dist/es5/dbf-loader.js.map +1 -1
  11. package/dist/es5/index.js +5 -5
  12. package/dist/es5/lib/parsers/parse-dbf.js +231 -87
  13. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  14. package/dist/es5/lib/parsers/parse-shapefile.js +404 -151
  15. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  16. package/dist/es5/lib/parsers/parse-shp-geometry.js +96 -49
  17. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  18. package/dist/es5/lib/parsers/parse-shp-header.js +4 -4
  19. package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -1
  20. package/dist/es5/lib/parsers/parse-shp.js +165 -47
  21. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  22. package/dist/es5/lib/parsers/parse-shx.js +11 -11
  23. package/dist/es5/lib/parsers/parse-shx.js.map +1 -1
  24. package/dist/es5/lib/streaming/binary-chunk-reader.js +172 -99
  25. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  26. package/dist/es5/lib/streaming/binary-reader.js +35 -24
  27. package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
  28. package/dist/es5/lib/streaming/zip-batch-iterators.js +96 -37
  29. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  30. package/dist/es5/shapefile-loader.js +3 -3
  31. package/dist/es5/shapefile-loader.js.map +1 -1
  32. package/dist/es5/shp-loader.js +41 -6
  33. package/dist/es5/shp-loader.js.map +1 -1
  34. package/dist/esm/dbf-loader.js +1 -1
  35. package/dist/esm/shapefile-loader.js +1 -1
  36. package/dist/esm/shp-loader.js +1 -1
  37. package/dist/shp-worker.js +1 -1
  38. package/dist/shp-worker.js.map +1 -1
  39. package/package.json +5 -5
@@ -1,8 +1,8 @@
1
1
  "use strict";
2
2
 
3
- const moduleExports = require('./index');
3
+ var moduleExports = require('./index');
4
4
 
5
- const _global = typeof window === 'undefined' ? global : window;
5
+ var _global = typeof window === 'undefined' ? global : window;
6
6
 
7
7
  _global.loaders = _global.loaders || {};
8
8
  module.exports = Object.assign(_global.loaders, moduleExports);
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","_global","window","global","loaders","module","exports","Object","assign"],"mappings":";;AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACA,MAAMC,OAAO,GAAG,OAAOC,MAAP,KAAkB,WAAlB,GAAgCC,MAAhC,GAAyCD,MAAzD;;AACAD,OAAO,CAACG,OAAR,GAAkBH,OAAO,CAACG,OAAR,IAAmB,EAArC;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcP,OAAO,CAACG,OAAtB,EAA+BL,aAA/B,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nconst _global = typeof window === 'undefined' ? global : window;\n_global.loaders = _global.loaders || {};\nmodule.exports = Object.assign(_global.loaders, moduleExports);\n"],"file":"bundle.js"}
1
+ {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","_global","window","global","loaders","module","exports","Object","assign"],"mappings":";;AACA,IAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACA,IAAMC,OAAO,GAAG,OAAOC,MAAP,KAAkB,WAAlB,GAAgCC,MAAhC,GAAyCD,MAAzD;;AACAD,OAAO,CAACG,OAAR,GAAkBH,OAAO,CAACG,OAAR,IAAmB,EAArC;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcP,OAAO,CAACG,OAAtB,EAA+BL,aAA/B,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nconst _global = typeof window === 'undefined' ? global : window;\n_global.loaders = _global.loaders || {};\nmodule.exports = Object.assign(_global.loaders, moduleExports);\n"],"file":"bundle.js"}
@@ -1,14 +1,26 @@
1
1
  "use strict";
2
2
 
3
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
4
+
3
5
  Object.defineProperty(exports, "__esModule", {
4
6
  value: true
5
7
  });
6
8
  exports.DBFLoader = exports.DBFWorkerLoader = void 0;
7
9
 
10
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
11
+
12
+ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
13
+
14
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
15
+
8
16
  var _parseDbf = require("./lib/parsers/parse-dbf");
9
17
 
10
- const VERSION = typeof "3.0.13" !== 'undefined' ? "3.0.13" : 'latest';
11
- const DBFWorkerLoader = {
18
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
19
+
20
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
21
+
22
+ var VERSION = typeof "3.0.14" !== 'undefined' ? "3.0.14" : 'latest';
23
+ var DBFWorkerLoader = {
12
24
  name: 'DBF',
13
25
  id: 'dbf',
14
26
  module: 'shapefile',
@@ -24,10 +36,33 @@ const DBFWorkerLoader = {
24
36
  }
25
37
  };
26
38
  exports.DBFWorkerLoader = DBFWorkerLoader;
27
- const DBFLoader = { ...DBFWorkerLoader,
28
- parse: async (arrayBuffer, options) => (0, _parseDbf.parseDBF)(arrayBuffer, options),
39
+
40
+ var DBFLoader = _objectSpread(_objectSpread({}, DBFWorkerLoader), {}, {
41
+ parse: function () {
42
+ var _parse = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(arrayBuffer, options) {
43
+ return _regenerator.default.wrap(function _callee$(_context) {
44
+ while (1) {
45
+ switch (_context.prev = _context.next) {
46
+ case 0:
47
+ return _context.abrupt("return", (0, _parseDbf.parseDBF)(arrayBuffer, options));
48
+
49
+ case 1:
50
+ case "end":
51
+ return _context.stop();
52
+ }
53
+ }
54
+ }, _callee);
55
+ }));
56
+
57
+ function parse(_x, _x2) {
58
+ return _parse.apply(this, arguments);
59
+ }
60
+
61
+ return parse;
62
+ }(),
29
63
  parseSync: _parseDbf.parseDBF,
30
64
  parseInBatches: _parseDbf.parseDBFInBatches
31
- };
65
+ });
66
+
32
67
  exports.DBFLoader = DBFLoader;
33
68
  //# sourceMappingURL=dbf-loader.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/dbf-loader.ts"],"names":["VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseDBF","parseInBatches","parseDBFInBatches"],"mappings":";;;;;;;AACA;;AAIA,MAAMA,OAAO,GAAG,oBAAuB,WAAvB,cAAmD,QAAnE;AAKO,MAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEL,OAJ4B;AAKrCM,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,OAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,mBAAD,CAR0B;AASrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,QAAQ,EAAE;AADP;AADE;AAT4B,CAAhC;;AAiBA,MAAMC,SAA2B,GAAG,EACzC,GAAGZ,eADsC;AAEzCa,EAAAA,KAAK,EAAE,OAAOC,WAAP,EAAoBL,OAApB,KAAgC,wBAASK,WAAT,EAAsBL,OAAtB,CAFE;AAGzCM,EAAAA,SAAS,EAAEC,kBAH8B;AAIzCC,EAAAA,cAAc,EAAEC;AAJyB,CAApC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"file":"dbf-loader.js"}
1
+ {"version":3,"sources":["../../src/dbf-loader.ts"],"names":["VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseDBF","parseInBatches","parseDBFInBatches"],"mappings":";;;;;;;;;;;;;;;AACA;;;;;;AAIA,IAAMA,OAAO,GAAG,oBAAuB,WAAvB,cAAmD,QAAnE;AAKO,IAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEL,OAJ4B;AAKrCM,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,OAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,mBAAD,CAR0B;AASrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,QAAQ,EAAE;AADP;AADE;AAT4B,CAAhC;;;AAiBA,IAAMC,SAA2B,mCACnCZ,eADmC;AAEtCa,EAAAA,KAAK;AAAA,2EAAE,iBAAOC,WAAP,EAAoBL,OAApB;AAAA;AAAA;AAAA;AAAA;AAAA,+CAAgC,wBAASK,WAAT,EAAsBL,OAAtB,CAAhC;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAAF;;AAAA;AAAA;AAAA;;AAAA;AAAA,KAFiC;AAGtCM,EAAAA,SAAS,EAAEC,kBAH2B;AAItCC,EAAAA,cAAc,EAAEC;AAJsB,EAAjC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"file":"dbf-loader.js"}
package/dist/es5/index.js CHANGED
@@ -5,31 +5,31 @@ Object.defineProperty(exports, "__esModule", {
5
5
  });
6
6
  Object.defineProperty(exports, "ShapefileLoader", {
7
7
  enumerable: true,
8
- get: function () {
8
+ get: function get() {
9
9
  return _shapefileLoader.ShapefileLoader;
10
10
  }
11
11
  });
12
12
  Object.defineProperty(exports, "DBFLoader", {
13
13
  enumerable: true,
14
- get: function () {
14
+ get: function get() {
15
15
  return _dbfLoader.DBFLoader;
16
16
  }
17
17
  });
18
18
  Object.defineProperty(exports, "DBFWorkerLoader", {
19
19
  enumerable: true,
20
- get: function () {
20
+ get: function get() {
21
21
  return _dbfLoader.DBFWorkerLoader;
22
22
  }
23
23
  });
24
24
  Object.defineProperty(exports, "SHPLoader", {
25
25
  enumerable: true,
26
- get: function () {
26
+ get: function get() {
27
27
  return _shpLoader.SHPLoader;
28
28
  }
29
29
  });
30
30
  Object.defineProperty(exports, "SHPWorkerLoader", {
31
31
  enumerable: true,
32
- get: function () {
32
+ get: function get() {
33
33
  return _shpLoader.SHPWorkerLoader;
34
34
  }
35
35
  });
@@ -8,14 +8,32 @@ Object.defineProperty(exports, "__esModule", {
8
8
  exports.parseDBF = parseDBF;
9
9
  exports.parseDBFInBatches = parseDBFInBatches;
10
10
 
11
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
12
+
13
+ var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
14
+
15
+ var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
16
+
11
17
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
12
18
 
19
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
20
+
21
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
22
+
23
+ var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
24
+
13
25
  var _schema = require("@loaders.gl/schema");
14
26
 
15
27
  var _binaryChunkReader = _interopRequireDefault(require("../streaming/binary-chunk-reader"));
16
28
 
17
- const LITTLE_ENDIAN = true;
18
- const DBF_HEADER_SIZE = 32;
29
+ function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
30
+
31
+ function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
32
+
33
+ function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
34
+
35
+ var LITTLE_ENDIAN = true;
36
+ var DBF_HEADER_SIZE = 32;
19
37
  var STATE;
20
38
 
21
39
  (function (STATE) {
@@ -26,10 +44,10 @@ var STATE;
26
44
  STATE[STATE["ERROR"] = 4] = "ERROR";
27
45
  })(STATE || (STATE = {}));
28
46
 
29
- class DBFParser {
30
- constructor({
31
- encoding
32
- }) {
47
+ var DBFParser = function () {
48
+ function DBFParser(_ref) {
49
+ var encoding = _ref.encoding;
50
+ (0, _classCallCheck2.default)(this, DBFParser);
33
51
  (0, _defineProperty2.default)(this, "binaryReader", new _binaryChunkReader.default());
34
52
  (0, _defineProperty2.default)(this, "textDecoder", void 0);
35
53
  (0, _defineProperty2.default)(this, "state", STATE.START);
@@ -39,42 +57,44 @@ class DBFParser {
39
57
  this.textDecoder = new TextDecoder(encoding);
40
58
  }
41
59
 
42
- write(arrayBuffer) {
43
- this.binaryReader.write(arrayBuffer);
44
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
45
- }
46
-
47
- end() {
48
- this.binaryReader.end();
49
- this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
50
-
51
- if (this.state !== STATE.END) {
52
- this.state = STATE.ERROR;
53
- this.result.error = 'DBF incomplete file';
60
+ (0, _createClass2.default)(DBFParser, [{
61
+ key: "write",
62
+ value: function write(arrayBuffer) {
63
+ this.binaryReader.write(arrayBuffer);
64
+ this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
54
65
  }
55
- }
56
-
57
- }
58
-
59
- function parseDBF(arrayBuffer, options = {}) {
60
- const loaderOptions = options.dbf || {};
61
- const {
62
- encoding
63
- } = loaderOptions;
64
- const dbfParser = new DBFParser({
65
- encoding
66
+ }, {
67
+ key: "end",
68
+ value: function end() {
69
+ this.binaryReader.end();
70
+ this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
71
+
72
+ if (this.state !== STATE.END) {
73
+ this.state = STATE.ERROR;
74
+ this.result.error = 'DBF incomplete file';
75
+ }
76
+ }
77
+ }]);
78
+ return DBFParser;
79
+ }();
80
+
81
+ function parseDBF(arrayBuffer) {
82
+ var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
83
+ var loaderOptions = options.dbf || {};
84
+ var encoding = loaderOptions.encoding;
85
+ var dbfParser = new DBFParser({
86
+ encoding: encoding
66
87
  });
67
88
  dbfParser.write(arrayBuffer);
68
89
  dbfParser.end();
69
- const {
70
- data,
71
- schema
72
- } = dbfParser.result;
90
+ var _dbfParser$result = dbfParser.result,
91
+ data = _dbfParser$result.data,
92
+ schema = _dbfParser$result.schema;
73
93
 
74
94
  switch (options.tables && options.tables.format) {
75
95
  case 'table':
76
96
  return {
77
- schema,
97
+ schema: schema,
78
98
  rows: data
79
99
  };
80
100
 
@@ -84,35 +104,146 @@ function parseDBF(arrayBuffer, options = {}) {
84
104
  }
85
105
  }
86
106
 
87
- async function* parseDBFInBatches(asyncIterator, options = {}) {
88
- const loaderOptions = options.dbf || {};
89
- const {
90
- encoding
91
- } = loaderOptions;
92
- const parser = new DBFParser({
93
- encoding
94
- });
95
- let headerReturned = false;
107
+ function parseDBFInBatches(_x) {
108
+ return _parseDBFInBatches.apply(this, arguments);
109
+ }
96
110
 
97
- for await (const arrayBuffer of asyncIterator) {
98
- parser.write(arrayBuffer);
111
+ function _parseDBFInBatches() {
112
+ _parseDBFInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(asyncIterator) {
113
+ var options,
114
+ loaderOptions,
115
+ encoding,
116
+ parser,
117
+ headerReturned,
118
+ _iteratorNormalCompletion,
119
+ _didIteratorError,
120
+ _iteratorError,
121
+ _iterator,
122
+ _step,
123
+ _value,
124
+ arrayBuffer,
125
+ _args = arguments;
126
+
127
+ return _regenerator.default.wrap(function _callee$(_context) {
128
+ while (1) {
129
+ switch (_context.prev = _context.next) {
130
+ case 0:
131
+ options = _args.length > 1 && _args[1] !== undefined ? _args[1] : {};
132
+ loaderOptions = options.dbf || {};
133
+ encoding = loaderOptions.encoding;
134
+ parser = new DBFParser({
135
+ encoding: encoding
136
+ });
137
+ headerReturned = false;
138
+ _iteratorNormalCompletion = true;
139
+ _didIteratorError = false;
140
+ _context.prev = 7;
141
+ _iterator = (0, _asyncIterator2.default)(asyncIterator);
142
+
143
+ case 9:
144
+ _context.next = 11;
145
+ return (0, _awaitAsyncGenerator2.default)(_iterator.next());
146
+
147
+ case 11:
148
+ _step = _context.sent;
149
+ _iteratorNormalCompletion = _step.done;
150
+ _context.next = 15;
151
+ return (0, _awaitAsyncGenerator2.default)(_step.value);
152
+
153
+ case 15:
154
+ _value = _context.sent;
155
+
156
+ if (_iteratorNormalCompletion) {
157
+ _context.next = 30;
158
+ break;
159
+ }
99
160
 
100
- if (!headerReturned && parser.result.dbfHeader) {
101
- headerReturned = true;
102
- yield parser.result.dbfHeader;
103
- }
161
+ arrayBuffer = _value;
162
+ parser.write(arrayBuffer);
104
163
 
105
- if (parser.result.data.length > 0) {
106
- yield parser.result.data;
107
- parser.result.data = [];
108
- }
109
- }
164
+ if (!(!headerReturned && parser.result.dbfHeader)) {
165
+ _context.next = 23;
166
+ break;
167
+ }
110
168
 
111
- parser.end();
169
+ headerReturned = true;
170
+ _context.next = 23;
171
+ return parser.result.dbfHeader;
112
172
 
113
- if (parser.result.data.length > 0) {
114
- yield parser.result.data;
115
- }
173
+ case 23:
174
+ if (!(parser.result.data.length > 0)) {
175
+ _context.next = 27;
176
+ break;
177
+ }
178
+
179
+ _context.next = 26;
180
+ return parser.result.data;
181
+
182
+ case 26:
183
+ parser.result.data = [];
184
+
185
+ case 27:
186
+ _iteratorNormalCompletion = true;
187
+ _context.next = 9;
188
+ break;
189
+
190
+ case 30:
191
+ _context.next = 36;
192
+ break;
193
+
194
+ case 32:
195
+ _context.prev = 32;
196
+ _context.t0 = _context["catch"](7);
197
+ _didIteratorError = true;
198
+ _iteratorError = _context.t0;
199
+
200
+ case 36:
201
+ _context.prev = 36;
202
+ _context.prev = 37;
203
+
204
+ if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
205
+ _context.next = 41;
206
+ break;
207
+ }
208
+
209
+ _context.next = 41;
210
+ return (0, _awaitAsyncGenerator2.default)(_iterator.return());
211
+
212
+ case 41:
213
+ _context.prev = 41;
214
+
215
+ if (!_didIteratorError) {
216
+ _context.next = 44;
217
+ break;
218
+ }
219
+
220
+ throw _iteratorError;
221
+
222
+ case 44:
223
+ return _context.finish(41);
224
+
225
+ case 45:
226
+ return _context.finish(36);
227
+
228
+ case 46:
229
+ parser.end();
230
+
231
+ if (!(parser.result.data.length > 0)) {
232
+ _context.next = 50;
233
+ break;
234
+ }
235
+
236
+ _context.next = 50;
237
+ return parser.result.data;
238
+
239
+ case 50:
240
+ case "end":
241
+ return _context.stop();
242
+ }
243
+ }
244
+ }, _callee, null, [[7, 32, 36, 46], [37,, 41, 45]]);
245
+ }));
246
+ return _parseDBFInBatches.apply(this, arguments);
116
247
  }
117
248
 
118
249
  function parseState(state, result, binaryReader, textDecoder) {
@@ -124,7 +255,7 @@ function parseState(state, result, binaryReader, textDecoder) {
124
255
  return state;
125
256
 
126
257
  case STATE.START:
127
- const dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
258
+ var dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
128
259
 
129
260
  if (!dataView) {
130
261
  return state;
@@ -140,33 +271,36 @@ function parseState(state, result, binaryReader, textDecoder) {
140
271
  break;
141
272
 
142
273
  case STATE.FIELD_DESCRIPTORS:
143
- const fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE, 'DBF field descriptors');
274
+ var fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE, 'DBF field descriptors');
144
275
 
145
276
  if (!fieldDescriptorView) {
146
277
  return state;
147
278
  }
148
279
 
149
280
  result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
150
- result.schema = new _schema.Schema(result.dbfFields.map(dbfField => makeField(dbfField)));
281
+ result.schema = new _schema.Schema(result.dbfFields.map(function (dbfField) {
282
+ return makeField(dbfField);
283
+ }));
151
284
  state = STATE.FIELD_PROPERTIES;
152
285
  binaryReader.skip(1);
153
286
  break;
154
287
 
155
288
  case STATE.FIELD_PROPERTIES:
156
- const {
157
- recordLength = 0,
158
- nRecords = 0
159
- } = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {};
289
+ var _ref2 = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {},
290
+ _ref2$recordLength = _ref2.recordLength,
291
+ recordLength = _ref2$recordLength === void 0 ? 0 : _ref2$recordLength,
292
+ _ref2$nRecords = _ref2.nRecords,
293
+ nRecords = _ref2$nRecords === void 0 ? 0 : _ref2$nRecords;
160
294
 
161
295
  while (result.data.length < nRecords) {
162
- const recordView = binaryReader.getDataView(recordLength - 1);
296
+ var recordView = binaryReader.getDataView(recordLength - 1);
163
297
 
164
298
  if (!recordView) {
165
299
  return state;
166
300
  }
167
301
 
168
302
  binaryReader.skip(1);
169
- const row = parseRow(recordView, result.dbfFields, textDecoder);
303
+ var row = parseRow(recordView, result.dbfFields, textDecoder);
170
304
  result.data.push(row);
171
305
  result.progress.rows = result.data.length;
172
306
  }
@@ -200,14 +334,14 @@ function parseDBFHeader(headerView) {
200
334
  }
201
335
 
202
336
  function parseFieldDescriptors(view, textDecoder) {
203
- const nFields = (view.byteLength - 1) / 32;
204
- const fields = [];
205
- let offset = 0;
337
+ var nFields = (view.byteLength - 1) / 32;
338
+ var fields = [];
339
+ var offset = 0;
206
340
 
207
- for (let i = 0; i < nFields; i++) {
208
- const name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11)).replace(/\u0000/g, '');
341
+ for (var i = 0; i < nFields; i++) {
342
+ var name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11)).replace(/\u0000/g, '');
209
343
  fields.push({
210
- name,
344
+ name: name,
211
345
  dataType: String.fromCharCode(view.getUint8(offset + 11)),
212
346
  fieldLength: view.getUint8(offset + 16),
213
347
  decimal: view.getUint8(offset + 17)
@@ -219,13 +353,23 @@ function parseFieldDescriptors(view, textDecoder) {
219
353
  }
220
354
 
221
355
  function parseRow(view, fields, textDecoder) {
222
- const out = {};
223
- let offset = 0;
224
-
225
- for (const field of fields) {
226
- const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
227
- out[field.name] = parseField(text, field.dataType);
228
- offset += field.fieldLength;
356
+ var out = {};
357
+ var offset = 0;
358
+
359
+ var _iterator2 = _createForOfIteratorHelper(fields),
360
+ _step2;
361
+
362
+ try {
363
+ for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
364
+ var field = _step2.value;
365
+ var text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
366
+ out[field.name] = parseField(text, field.dataType);
367
+ offset += field.fieldLength;
368
+ }
369
+ } catch (err) {
370
+ _iterator2.e(err);
371
+ } finally {
372
+ _iterator2.f();
229
373
  }
230
374
 
231
375
  return out;
@@ -268,7 +412,7 @@ function parseBoolean(value) {
268
412
  }
269
413
 
270
414
  function parseNumber(text) {
271
- const number = parseFloat(text);
415
+ var number = parseFloat(text);
272
416
  return isNaN(number) ? null : number;
273
417
  }
274
418
 
@@ -276,12 +420,12 @@ function parseCharacter(text) {
276
420
  return text.trim() || null;
277
421
  }
278
422
 
279
- function makeField({
280
- name,
281
- dataType,
282
- fieldLength,
283
- decimal
284
- }) {
423
+ function makeField(_ref3) {
424
+ var name = _ref3.name,
425
+ dataType = _ref3.dataType,
426
+ fieldLength = _ref3.fieldLength,
427
+ decimal = _ref3.decimal;
428
+
285
429
  switch (dataType) {
286
430
  case 'B':
287
431
  return new _schema.Field(name, new _schema.Float64(), true);