@loaders.gl/json 3.1.0-beta.3 → 3.1.1

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
Files changed (47)
  1. package/dist/bundle.js +885 -181
  2. package/dist/es5/bundle.js +1 -1
  3. package/dist/es5/bundle.js.map +1 -1
  4. package/dist/es5/geojson-loader.js +143 -29
  5. package/dist/es5/geojson-loader.js.map +1 -1
  6. package/dist/es5/index.js +14 -6
  7. package/dist/es5/index.js.map +1 -1
  8. package/dist/es5/json-loader.js +44 -18
  9. package/dist/es5/json-loader.js.map +1 -1
  10. package/dist/es5/lib/clarinet/clarinet.js +359 -335
  11. package/dist/es5/lib/clarinet/clarinet.js.map +1 -1
  12. package/dist/es5/lib/jsonpath/jsonpath.js +101 -54
  13. package/dist/es5/lib/jsonpath/jsonpath.js.map +1 -1
  14. package/dist/es5/lib/parse-json-in-batches.js +251 -72
  15. package/dist/es5/lib/parse-json-in-batches.js.map +1 -1
  16. package/dist/es5/lib/parse-json.js +9 -4
  17. package/dist/es5/lib/parse-json.js.map +1 -1
  18. package/dist/es5/lib/parse-ndjson-in-batches.js +143 -31
  19. package/dist/es5/lib/parse-ndjson-in-batches.js.map +1 -1
  20. package/dist/es5/lib/parse-ndjson.js +2 -2
  21. package/dist/es5/lib/parse-ndjson.js.map +1 -1
  22. package/dist/es5/lib/parser/json-parser.js +117 -95
  23. package/dist/es5/lib/parser/json-parser.js.map +1 -1
  24. package/dist/es5/lib/parser/streaming-json-parser.js +115 -59
  25. package/dist/es5/lib/parser/streaming-json-parser.js.map +1 -1
  26. package/dist/es5/ndjson-loader.js +30 -8
  27. package/dist/es5/ndjson-loader.js.map +1 -1
  28. package/dist/esm/geojson-loader.js +1 -1
  29. package/dist/esm/geojson-loader.js.map +1 -1
  30. package/dist/esm/index.js +1 -0
  31. package/dist/esm/index.js.map +1 -1
  32. package/dist/esm/json-loader.js +1 -1
  33. package/dist/esm/json-loader.js.map +1 -1
  34. package/dist/esm/lib/parse-json-in-batches.js +18 -1
  35. package/dist/esm/lib/parse-json-in-batches.js.map +1 -1
  36. package/dist/esm/ndjson-loader.js +1 -1
  37. package/dist/esm/ndjson-loader.js.map +1 -1
  38. package/dist/geojson-worker.js +781 -180
  39. package/dist/index.d.ts +1 -0
  40. package/dist/index.d.ts.map +1 -1
  41. package/dist/index.js +3 -1
  42. package/dist/lib/parse-json-in-batches.d.ts +1 -0
  43. package/dist/lib/parse-json-in-batches.d.ts.map +1 -1
  44. package/dist/lib/parse-json-in-batches.js +20 -0
  45. package/package.json +5 -5
  46. package/src/index.ts +2 -0
  47. package/src/lib/parse-json-in-batches.ts +23 -1
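
Note: the substantive change in this release is a new rebuildJsonObject helper in lib/parse-json-in-batches, which stitches rows streamed out of a nested array back into the file's top-level object using the jsonpath recorded on the final batch; the one-line additions to src/index.ts and dist/index.d.ts suggest it is also re-exported from the package root. Most of the remaining churn in the es5 and worker bundles comes from the build now routing async generators through @babel/runtime regenerator helpers instead of emitting them natively. A minimal usage sketch for the new helper, assuming the standard parseInBatches API from @loaders.gl/core and that the root export is named rebuildJsonObject (verify against index.d.ts):

import {parseInBatches} from '@loaders.gl/core';
// Export name assumed; the helper itself lives in dist/lib/parse-json-in-batches.
import {JSONLoader, rebuildJsonObject} from '@loaders.gl/json';

async function loadWholeJsonObject(url: string): Promise<unknown> {
  // metadata: true makes the loader emit the 'partial-result' and
  // 'final-result' wrapper batches that rebuildJsonObject relies on.
  const batches = await parseInBatches(fetch(url), JSONLoader, {metadata: true});

  let rows: unknown[] = [];
  let finalBatch: any = null;
  for await (const batch of batches) {
    if (batch.batchType === 'final-result') {
      finalBatch = batch; // carries the parsed container and the streamed jsonpath
    } else if (batch.batchType !== 'partial-result') {
      rows = rows.concat(batch.data); // ordinary row batches
    }
  }

  // jsonpath '$' means the file itself was a top-level array and the rows are
  // returned as-is; otherwise rebuildJsonObject sets them back into the
  // container at the streamed path (see the parse-json-in-batches.js diff below).
  return finalBatch ? rebuildJsonObject(finalBatch, rows) : rows;
}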
@@ -6,6 +6,17 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = parseJSONInBatches;
+ exports.rebuildJsonObject = rebuildJsonObject;
+
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
+
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
+
+ var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));

  var _schema = require("@loaders.gl/schema");

@@ -13,87 +24,255 @@ var _loaderUtils = require("@loaders.gl/loader-utils");

  var _streamingJsonParser = _interopRequireDefault(require("./parser/streaming-json-parser"));

- async function* parseJSONInBatches(binaryAsyncIterator, options) {
- var _options$json;
-
- const asyncIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
- const {
- metadata
- } = options;
- const {
- jsonpaths
- } = options.json || {};
- let isFirstChunk = true;
- const schema = null;
- const shape = (options === null || options === void 0 ? void 0 : (_options$json = options.json) === null || _options$json === void 0 ? void 0 : _options$json.shape) || 'row-table';
- const tableBatchBuilder = new _schema.TableBatchBuilder(schema, { ...options,
- shape
- });
- const parser = new _streamingJsonParser.default({
- jsonpaths
- });
-
- for await (const chunk of asyncIterator) {
- const rows = parser.write(chunk);
- const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
-
- if (rows.length > 0 && isFirstChunk) {
- if (metadata) {
- const initialBatch = {
- shape,
- batchType: 'partial-result',
- data: [],
- length: 0,
- bytesUsed: 0,
- container: parser.getPartialResult(),
- jsonpath
- };
- yield initialBatch;
- }
+ var _jsonpath2 = _interopRequireDefault(require("./jsonpath/jsonpath"));

- isFirstChunk = false;
- }
+ function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

- for (const row of rows) {
- tableBatchBuilder.addRow(row);
- const batch = tableBatchBuilder.getFullBatch({
- jsonpath
- });
+ function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }

- if (batch) {
- yield batch;
- }
- }
+ function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }

- tableBatchBuilder.chunkComplete(chunk);
- const batch = tableBatchBuilder.getFullBatch({
- jsonpath
- });
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }

- if (batch) {
- yield batch;
- }
- }
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+
+ function parseJSONInBatches(_x, _x2) {
+ return _parseJSONInBatches.apply(this, arguments);
+ }
+
+ function _parseJSONInBatches() {
+ _parseJSONInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(binaryAsyncIterator, options) {
+ var _options$json;
+
+ var asyncIterator, metadata, _ref, jsonpaths, isFirstChunk, schema, shape, tableBatchBuilder, parser, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, chunk, rows, _jsonpath, initialBatch, _iterator2, _step2, row, _batch2, _batch, jsonpath, batch, finalBatch;
+
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ asyncIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+ metadata = options.metadata;
+ _ref = options.json || {}, jsonpaths = _ref.jsonpaths;
+ isFirstChunk = true;
+ schema = null;
+ shape = (options === null || options === void 0 ? void 0 : (_options$json = options.json) === null || _options$json === void 0 ? void 0 : _options$json.shape) || 'row-table';
+ tableBatchBuilder = new _schema.TableBatchBuilder(schema, _objectSpread(_objectSpread({}, options), {}, {
+ shape: shape
+ }));
+ parser = new _streamingJsonParser.default({
+ jsonpaths: jsonpaths
+ });
+ _iteratorNormalCompletion = true;
+ _didIteratorError = false;
+ _context.prev = 10;
+ _iterator = (0, _asyncIterator2.default)(asyncIterator);
+
+ case 12:
+ _context.next = 14;
+ return (0, _awaitAsyncGenerator2.default)(_iterator.next());
+
+ case 14:
+ _step = _context.sent;
+ _iteratorNormalCompletion = _step.done;
+ _context.next = 18;
+ return (0, _awaitAsyncGenerator2.default)(_step.value);
+
+ case 18:
+ _value = _context.sent;
+
+ if (_iteratorNormalCompletion) {
+ _context.next = 57;
+ break;
+ }
+
+ chunk = _value;
+ rows = parser.write(chunk);
+ _jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
+
+ if (!(rows.length > 0 && isFirstChunk)) {
+ _context.next = 29;
+ break;
+ }
+
+ if (!metadata) {
+ _context.next = 28;
+ break;
+ }
+
+ initialBatch = {
+ shape: shape,
+ batchType: 'partial-result',
+ data: [],
+ length: 0,
+ bytesUsed: 0,
+ container: parser.getPartialResult(),
+ jsonpath: _jsonpath
+ };
+ _context.next = 28;
+ return initialBatch;
+
+ case 28:
+ isFirstChunk = false;
+
+ case 29:
+ _iterator2 = _createForOfIteratorHelper(rows);
+ _context.prev = 30;
+
+ _iterator2.s();
+
+ case 32:
+ if ((_step2 = _iterator2.n()).done) {
+ _context.next = 41;
+ break;
+ }
+
+ row = _step2.value;
+ tableBatchBuilder.addRow(row);
+ _batch2 = tableBatchBuilder.getFullBatch({
+ jsonpath: _jsonpath
+ });
+
+ if (!_batch2) {
+ _context.next = 39;
+ break;
+ }
+
+ _context.next = 39;
+ return _batch2;
+
+ case 39:
+ _context.next = 32;
+ break;

- const jsonpath = parser.getStreamingJsonPathAsString();
- const batch = tableBatchBuilder.getFinalBatch({
- jsonpath
- });
+ case 41:
+ _context.next = 46;
+ break;

- if (batch) {
- yield batch;
+ case 43:
+ _context.prev = 43;
+ _context.t0 = _context["catch"](30);
+
+ _iterator2.e(_context.t0);
+
+ case 46:
+ _context.prev = 46;
+
+ _iterator2.f();
+
+ return _context.finish(46);
+
+ case 49:
+ tableBatchBuilder.chunkComplete(chunk);
+ _batch = tableBatchBuilder.getFullBatch({
+ jsonpath: _jsonpath
+ });
+
+ if (!_batch) {
+ _context.next = 54;
+ break;
+ }
+
+ _context.next = 54;
+ return _batch;
+
+ case 54:
+ _iteratorNormalCompletion = true;
+ _context.next = 12;
+ break;
+
+ case 57:
+ _context.next = 63;
+ break;
+
+ case 59:
+ _context.prev = 59;
+ _context.t1 = _context["catch"](10);
+ _didIteratorError = true;
+ _iteratorError = _context.t1;
+
+ case 63:
+ _context.prev = 63;
+ _context.prev = 64;
+
+ if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
+ _context.next = 68;
+ break;
+ }
+
+ _context.next = 68;
+ return (0, _awaitAsyncGenerator2.default)(_iterator.return());
+
+ case 68:
+ _context.prev = 68;
+
+ if (!_didIteratorError) {
+ _context.next = 71;
+ break;
+ }
+
+ throw _iteratorError;
+
+ case 71:
+ return _context.finish(68);
+
+ case 72:
+ return _context.finish(63);
+
+ case 73:
+ jsonpath = parser.getStreamingJsonPathAsString();
+ batch = tableBatchBuilder.getFinalBatch({
+ jsonpath: jsonpath
+ });
+
+ if (!batch) {
+ _context.next = 78;
+ break;
+ }
+
+ _context.next = 78;
+ return batch;
+
+ case 78:
+ if (!metadata) {
+ _context.next = 82;
+ break;
+ }
+
+ finalBatch = {
+ shape: shape,
+ batchType: 'final-result',
+ container: parser.getPartialResult(),
+ jsonpath: parser.getStreamingJsonPathAsString(),
+ data: [],
+ length: 0
+ };
+ _context.next = 82;
+ return finalBatch;
+
+ case 82:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, null, [[10, 59, 63, 73], [30, 43, 46, 49], [64,, 68, 72]]);
+ }));
+ return _parseJSONInBatches.apply(this, arguments);
+ }
+
+ function rebuildJsonObject(batch, data) {
+ (0, _loaderUtils.assert)(batch.batchType === 'final-result');
+
+ if (batch.jsonpath === '$') {
+ return data;
  }

- if (metadata) {
- const finalBatch = {
- shape,
- batchType: 'final-result',
- container: parser.getPartialResult(),
- jsonpath: parser.getStreamingJsonPathAsString(),
- data: [],
- length: 0
- };
- yield finalBatch;
+ if (batch.jsonpath && batch.jsonpath.length > 1) {
+ var topLevelObject = batch.container;
+ var streamingPath = new _jsonpath2.default(batch.jsonpath);
+ streamingPath.setFieldAtPath(topLevelObject, data);
+ return topLevelObject;
  }
+
+ return batch.container;
  }
  //# sourceMappingURL=parse-json-in-batches.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-json-in-batches.ts"],"names":["parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","jsonpaths","json","isFirstChunk","schema","shape","tableBatchBuilder","TableBatchBuilder","parser","StreamingJSONParser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","initialBatch","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch"],"mappings":";;;;;;;;;AAEA;;AACA;;AACA;;AAIe,gBAAgBA,kBAAhB,CACbC,mBADa,EAEbC,OAFa,EAGS;AAAA;;AACtB,QAAMC,aAAa,GAAG,0CAAwBF,mBAAxB,CAAtB;AAEA,QAAM;AAACG,IAAAA;AAAD,MAAaF,OAAnB;AACA,QAAM;AAACG,IAAAA;AAAD,MAAcH,OAAO,CAACI,IAAR,IAAgB,EAApC;AAEA,MAAIC,YAAqB,GAAG,IAA5B;AAGA,QAAMC,MAAM,GAAG,IAAf;AACA,QAAMC,KAAK,GAAG,CAAAP,OAAO,SAAP,IAAAA,OAAO,WAAP,6BAAAA,OAAO,CAAEI,IAAT,gEAAeG,KAAf,KAAwB,WAAtC;AAEA,QAAMC,iBAAiB,GAAG,IAAIC,yBAAJ,CAAsBH,MAAtB,EAA8B,EACtD,GAAGN,OADmD;AAEtDO,IAAAA;AAFsD,GAA9B,CAA1B;AAKA,QAAMG,MAAM,GAAG,IAAIC,4BAAJ,CAAwB;AAACR,IAAAA;AAAD,GAAxB,CAAf;;AAEA,aAAW,MAAMS,KAAjB,IAA0BX,aAA1B,EAAyC;AACvC,UAAMY,IAAI,GAAGH,MAAM,CAACI,KAAP,CAAaF,KAAb,CAAb;AAEA,UAAMG,QAAQ,GAAGF,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBN,MAAM,CAACO,4BAAP,EAApC;;AAEA,QAAIJ,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBX,YAAvB,EAAqC;AACnC,UAAIH,QAAJ,EAAc;AACZ,cAAMgB,YAAmB,GAAG;AAE1BX,UAAAA,KAF0B;AAG1BY,UAAAA,SAAS,EAAE,gBAHe;AAI1BC,UAAAA,IAAI,EAAE,EAJoB;AAK1BJ,UAAAA,MAAM,EAAE,CALkB;AAM1BK,UAAAA,SAAS,EAAE,CANe;AAQ1BC,UAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EARe;AAS1BR,UAAAA;AAT0B,SAA5B;AAWA,cAAMG,YAAN;AACD;;AACDb,MAAAA,YAAY,GAAG,KAAf;AAED;;AAGD,SAAK,MAAMmB,GAAX,IAAkBX,IAAlB,EAAwB;AACtBL,MAAAA,iBAAiB,CAACiB,MAAlB,CAAyBD,GAAzB;AAEA,YAAME,KAAK,GAAGlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,QAAAA;AAAD,OAA/B,CAAd;;AACA,UAAIW,KAAJ,EAAW;AACT,cAAMA,KAAN;AACD;AACF;;AAEDlB,IAAAA,iBAAiB,CAACoB,aAAlB,CAAgChB,KAAhC;AACA,UAAMc,KAAK,GAAGlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,MAAAA;AAAD,KAA/B,CAAd;;AACA,QAAIW,KAAJ,EAAW;AACT,YAAMA,KAAN;AACD;AACF;;AAGD,QAAMX,QAAQ,GAAGL,MAAM,CAACO,4BAAP,EAAjB;AACA,QAAMS,KAAK,GAAGlB,iBAAiB,CAACqB,aAAlB,CAAgC;AAACd,IAAAA;AAAD,GAAhC,CAAd;;AACA,MAAIW,KAAJ,EAAW;AACT,UAAMA,KAAN;AACD;;AAED,MAAIxB,QAAJ,EAAc;AACZ,UAAM4B,UAAiB,GAAG;AACxBvB,MAAAA,KADwB;AAExBY,MAAAA,SAAS,EAAE,cAFa;AAGxBG,MAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EAHa;AAIxBR,MAAAA,QAAQ,EAAEL,MAAM,CAACO,4BAAP,EAJc;AAKxBG,MAAAA,IAAI,EAAE,EALkB;AAMxBJ,MAAAA,MAAM,EAAE;AANgB,KAA1B;AASA,UAAMc,UAAN;AACD;AACF","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions} from '../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from './parser/streaming-json-parser';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport default async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<Batch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // TODO fix Schema deduction\n const schema = null; // new Schema([]);\n const shape = options?.json?.shape || 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of 
asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: Batch = {\n // Common fields\n shape,\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: Batch = {\n shape,\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n"],"file":"parse-json-in-batches.js"}
+ {"version":3,"sources":["../../../src/lib/parse-json-in-batches.ts"],"names":["parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","json","jsonpaths","isFirstChunk","schema","shape","tableBatchBuilder","TableBatchBuilder","parser","StreamingJSONParser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","initialBatch","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch","rebuildJsonObject","topLevelObject","streamingPath","JSONPath","setFieldAtPath"],"mappings":";;;;;;;;;;;;;;;;;;;;AAEA;;AACA;;AACA;;AACA;;;;;;;;;;;;SAI+BA,kB;;;;;oFAAhB,iBACbC,mBADa,EAEbC,OAFa;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAIPC,YAAAA,aAJO,GAIS,0CAAwBF,mBAAxB,CAJT;AAMNG,YAAAA,QANM,GAMMF,OANN,CAMNE,QANM;AAAA,mBAOOF,OAAO,CAACG,IAAR,IAAgB,EAPvB,EAONC,SAPM,QAONA,SAPM;AASTC,YAAAA,YATS,GASe,IATf;AAYPC,YAAAA,MAZO,GAYE,IAZF;AAaPC,YAAAA,KAbO,GAaC,CAAAP,OAAO,SAAP,IAAAA,OAAO,WAAP,6BAAAA,OAAO,CAAEG,IAAT,gEAAeI,KAAf,KAAwB,WAbzB;AAePC,YAAAA,iBAfO,GAea,IAAIC,yBAAJ,CAAsBH,MAAtB,kCACrBN,OADqB;AAExBO,cAAAA,KAAK,EAALA;AAFwB,eAfb;AAoBPG,YAAAA,MApBO,GAoBE,IAAIC,4BAAJ,CAAwB;AAACP,cAAAA,SAAS,EAATA;AAAD,aAAxB,CApBF;AAAA;AAAA;AAAA;AAAA,qDAsBaH,aAtBb;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAsBIW,YAAAA,KAtBJ;AAuBLC,YAAAA,IAvBK,GAuBEH,MAAM,CAACI,KAAP,CAAaF,KAAb,CAvBF;AAyBLG,YAAAA,SAzBK,GAyBMF,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBN,MAAM,CAACO,4BAAP,EAzBzB;;AAAA,kBA2BPJ,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBX,YA3BZ;AAAA;AAAA;AAAA;;AAAA,iBA4BLH,QA5BK;AAAA;AAAA;AAAA;;AA6BDgB,YAAAA,YA7BC,GA6BqB;AAE1BX,cAAAA,KAAK,EAALA,KAF0B;AAG1BY,cAAAA,SAAS,EAAE,gBAHe;AAI1BC,cAAAA,IAAI,EAAE,EAJoB;AAK1BJ,cAAAA,MAAM,EAAE,CALkB;AAM1BK,cAAAA,SAAS,EAAE,CANe;AAQ1BC,cAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EARe;AAS1BR,cAAAA,QAAQ,EAARA;AAT0B,aA7BrB;AAAA;AAwCP,mBAAMG,YAAN;;AAxCO;AA0CTb,YAAAA,YAAY,GAAG,KAAf;;AA1CS;AAAA,oDA+COQ,IA/CP;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AA+CAW,YAAAA,GA/CA;AAgDThB,YAAAA,iBAAiB,CAACiB,MAAlB,CAAyBD,GAAzB;AAEME,YAAAA,OAlDG,GAkDKlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,cAAAA,QAAQ,EAARA;AAAD,aAA/B,CAlDL;;AAAA,iBAmDLW,OAnDK;AAAA;AAAA;AAAA;;AAAA;AAoDP,mBAAMA,OAAN;;AApDO;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAwDXlB,YAAAA,iBAAiB,CAACoB,aAAlB,CAAgChB,KAAhC;AACMc,YAAAA,MAzDK,GAyDGlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,cAAAA,QAAQ,EAARA;AAAD,aAA/B,CAzDH;;AAAA,iBA0DPW,MA1DO;AAAA;AAAA;AAAA;;AAAA;AA2DT,mBAAMA,MAAN;;AA3DS;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAgEPX,YAAAA,QAhEO,GAgEIL,MAAM,CAACO,4BAAP,EAhEJ;AAiEPS,YAAAA,KAjEO,GAiEClB,iBAAiB,CAACqB,aAAlB,CAAgC;AAACd,cAAAA,QAAQ,EAARA;AAAD,aAAhC,CAjED;;AAAA,iBAkETW,KAlES;AAAA;AAAA;AAAA;;AAAA;AAmEX,mBAAMA,KAAN;;AAnEW;AAAA,iBAsETxB,QAtES;AAAA;AAAA;AAAA;;AAuEL4B,YAAAA,UAvEK,GAuEe;AACxBvB,cAAAA,KAAK,EAALA,KADwB;AAExBY,cAAAA,SAAS,EAAE,cAFa;AAGxBG,cAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EAHa;AAIxBR,cAAAA,QAAQ,EAAEL,MAAM,CAACO,4BAAP,EAJc;AAKxBG,cAAAA,IAAI,EAAE,EALkB;AAMxBJ,cAAAA,MAAM,EAAE;AANgB,aAvEf;AAAA;AAgFX,mBAAMc,UAAN;;AAhFW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G;;;;AAoFR,SAASC,iBAAT,CAA2BL,KAA3B,EAAkCN,IAAlC,EAAwC;AAE7C,2BAAOM,KAAK,CAACP,SAAN,KAAoB,cAA3B;;AAGA,MAAIO,KAAK,CAACX,QAAN,KAAmB,GAAvB,EAA4B;AAC1B,WAAOK,IAAP;AACD;;AAGD,MAAIM,KAAK,CAACX,QAAN,IAAkBW,KAAK,CAACX,QAAN,CAAeC,MAAf,GAAwB,CAA9C,EAAiD;AAC/C,QAAMgB,cAAc,GAAGN,KAAK,CAACJ,SAA7B;AACA,QAAMW,aAAa,GAAG,IAAIC,kBAAJ,CAAaR,KAAK,CAACX,QAAnB,CAAtB;AACAkB,IAAAA,aA
Aa,CAACE,cAAd,CAA6BH,cAA7B,EAA6CZ,IAA7C;AACA,WAAOY,cAAP;AACD;;AAGD,SAAON,KAAK,CAACJ,SAAb;AACD","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions} from '../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from './parser/streaming-json-parser';\nimport JSONPath from './jsonpath/jsonpath';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport default async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<Batch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // TODO fix Schema deduction\n const schema = null; // new Schema([]);\n const shape = options?.json?.shape || 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: Batch = {\n // Common fields\n shape,\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: Batch = {\n shape,\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n\nexport function rebuildJsonObject(batch, data) {\n // Last batch will have this special type and will provide all the root object of the parsed file\n assert(batch.batchType === 'final-result');\n\n // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects\n if (batch.jsonpath === '$') {\n return data;\n }\n\n // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object\n if (batch.jsonpath && batch.jsonpath.length > 1) {\n const topLevelObject = batch.container;\n const streamingPath = new JSONPath(batch.jsonpath);\n streamingPath.setFieldAtPath(topLevelObject, data);\n return topLevelObject;\n }\n\n // No jsonpath, in this case nothing was streamed.\n return batch.container;\n}\n"],"file":"parse-json-in-batches.js"}
@@ -1,15 +1,19 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = parseJSONSync;

+ var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
+
  function parseJSONSync(jsonText, options) {
  try {
  var _options$json;

- const json = JSON.parse(jsonText);
+ var json = JSON.parse(jsonText);

  if ((_options$json = options.json) !== null && _options$json !== void 0 && _options$json.table) {
  return getFirstArray(json) || json;
@@ -26,9 +30,10 @@ function getFirstArray(json) {
  return json;
  }

- if (json && typeof json === 'object') {
- for (const value of Object.values(json)) {
- const array = getFirstArray(value);
+ if (json && (0, _typeof2.default)(json) === 'object') {
+ for (var _i = 0, _Object$values = Object.values(json); _i < _Object$values.length; _i++) {
+ var value = _Object$values[_i];
+ var array = getFirstArray(value);

  if (array) {
  return array;
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-json.ts"],"names":["parseJSONSync","jsonText","options","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","value","Object","values","array"],"mappings":";;;;;;;AAEe,SAASA,aAAT,CAAuBC,QAAvB,EAAyCC,OAAzC,EAAqE;AAClF,MAAI;AAAA;;AACF,UAAMC,IAAI,GAAGC,IAAI,CAACC,KAAL,CAAWJ,QAAX,CAAb;;AACA,yBAAIC,OAAO,CAACC,IAAZ,0CAAI,cAAcG,KAAlB,EAAyB;AACvB,aAAOC,aAAa,CAACJ,IAAD,CAAb,IAAuBA,IAA9B;AACD;;AACD,WAAOA,IAAP;AACD,GAND,CAME,OAAOK,KAAP,EAAc;AACd,UAAM,IAAIC,KAAJ,CAAU,kCAAV,CAAN;AACD;AACF;;AAED,SAASF,aAAT,CAAuBJ,IAAvB,EAA6B;AAC3B,MAAIO,KAAK,CAACC,OAAN,CAAcR,IAAd,CAAJ,EAAyB;AACvB,WAAOA,IAAP;AACD;;AACD,MAAIA,IAAI,IAAI,OAAOA,IAAP,KAAgB,QAA5B,EAAsC;AACpC,SAAK,MAAMS,KAAX,IAAoBC,MAAM,CAACC,MAAP,CAAcX,IAAd,CAApB,EAAyC;AACvC,YAAMY,KAAK,GAAGR,aAAa,CAACK,KAAD,CAA3B;;AACA,UAAIG,KAAJ,EAAW;AACT,eAAOA,KAAP;AACD;AACF;AACF;;AACD,SAAO,IAAP;AACD","sourcesContent":["import type {JSONLoaderOptions} from '../json-loader';\n\nexport default function parseJSONSync(jsonText: string, options: JSONLoaderOptions) {\n try {\n const json = JSON.parse(jsonText);\n if (options.json?.table) {\n return getFirstArray(json) || json;\n }\n return json;\n } catch (error) {\n throw new Error('JSONLoader: failed to parse JSON');\n }\n}\n\nfunction getFirstArray(json) {\n if (Array.isArray(json)) {\n return json;\n }\n if (json && typeof json === 'object') {\n for (const value of Object.values(json)) {\n const array = getFirstArray(value);\n if (array) {\n return array;\n }\n }\n }\n return null;\n}\n"],"file":"parse-json.js"}
+ {"version":3,"sources":["../../../src/lib/parse-json.ts"],"names":["parseJSONSync","jsonText","options","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","Object","values","value","array"],"mappings":";;;;;;;;;;;AAEe,SAASA,aAAT,CAAuBC,QAAvB,EAAyCC,OAAzC,EAAqE;AAClF,MAAI;AAAA;;AACF,QAAMC,IAAI,GAAGC,IAAI,CAACC,KAAL,CAAWJ,QAAX,CAAb;;AACA,yBAAIC,OAAO,CAACC,IAAZ,0CAAI,cAAcG,KAAlB,EAAyB;AACvB,aAAOC,aAAa,CAACJ,IAAD,CAAb,IAAuBA,IAA9B;AACD;;AACD,WAAOA,IAAP;AACD,GAND,CAME,OAAOK,KAAP,EAAc;AACd,UAAM,IAAIC,KAAJ,CAAU,kCAAV,CAAN;AACD;AACF;;AAED,SAASF,aAAT,CAAuBJ,IAAvB,EAA6B;AAC3B,MAAIO,KAAK,CAACC,OAAN,CAAcR,IAAd,CAAJ,EAAyB;AACvB,WAAOA,IAAP;AACD;;AACD,MAAIA,IAAI,IAAI,sBAAOA,IAAP,MAAgB,QAA5B,EAAsC;AACpC,sCAAoBS,MAAM,CAACC,MAAP,CAAcV,IAAd,CAApB,oCAAyC;AAApC,UAAMW,KAAK,qBAAX;AACH,UAAMC,KAAK,GAAGR,aAAa,CAACO,KAAD,CAA3B;;AACA,UAAIC,KAAJ,EAAW;AACT,eAAOA,KAAP;AACD;AACF;AACF;;AACD,SAAO,IAAP;AACD","sourcesContent":["import type {JSONLoaderOptions} from '../json-loader';\n\nexport default function parseJSONSync(jsonText: string, options: JSONLoaderOptions) {\n try {\n const json = JSON.parse(jsonText);\n if (options.json?.table) {\n return getFirstArray(json) || json;\n }\n return json;\n } catch (error) {\n throw new Error('JSONLoader: failed to parse JSON');\n }\n}\n\nfunction getFirstArray(json) {\n if (Array.isArray(json)) {\n return json;\n }\n if (json && typeof json === 'object') {\n for (const value of Object.values(json)) {\n const array = getFirstArray(value);\n if (array) {\n return array;\n }\n }\n }\n return null;\n}\n"],"file":"parse-json.js"}
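
Aside from the _typeof helper, the parse-json.js changes above are transpilation-only; the behavior backing the json.table option (return the first nested array found by getFirstArray instead of the wrapping object) is unchanged. A short sketch of that option, assuming parseSync from @loaders.gl/core:

import {parseSync} from '@loaders.gl/core';
import {JSONLoader} from '@loaders.gl/json';

const text = '{"type":"FeatureCollection","features":[{"id":1},{"id":2}]}';
// With json.table set, parseJSONSync returns getFirstArray(json) || json,
// i.e. the embedded rows rather than the wrapping object.
const rows = parseSync(text, JSONLoader, {json: {table: true}});
// rows === [{id: 1}, {id: 2}]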
@@ -1,46 +1,158 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = parseNDJSONInBatches;

+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
+
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
+
+ var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
+
  var _schema = require("@loaders.gl/schema");

  var _loaderUtils = require("@loaders.gl/loader-utils");

- async function* parseNDJSONInBatches(binaryAsyncIterator, options) {
- const textIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
- const lineIterator = (0, _loaderUtils.makeLineIterator)(textIterator);
- const numberedLineIterator = (0, _loaderUtils.makeNumberedLineIterator)(lineIterator);
- const schema = null;
- const shape = 'row-table';
- const tableBatchBuilder = new _schema.TableBatchBuilder(schema, { ...options,
- shape
- });
-
- for await (const {
- counter,
- line
- } of numberedLineIterator) {
- try {
- const row = JSON.parse(line);
- tableBatchBuilder.addRow(row);
- tableBatchBuilder.chunkComplete(line);
- const batch = tableBatchBuilder.getFullBatch();
-
- if (batch) {
- yield batch;
- }
- } catch (error) {
- throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
- }
- }
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
+
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+
+ function parseNDJSONInBatches(_x, _x2) {
+ return _parseNDJSONInBatches.apply(this, arguments);
+ }
+
+ function _parseNDJSONInBatches() {
+ _parseNDJSONInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(binaryAsyncIterator, options) {
+ var textIterator, lineIterator, numberedLineIterator, schema, shape, tableBatchBuilder, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, _value2, counter, line, row, _batch, batch;
+
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ textIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+ lineIterator = (0, _loaderUtils.makeLineIterator)(textIterator);
+ numberedLineIterator = (0, _loaderUtils.makeNumberedLineIterator)(lineIterator);
+ schema = null;
+ shape = 'row-table';
+ tableBatchBuilder = new _schema.TableBatchBuilder(schema, _objectSpread(_objectSpread({}, options), {}, {
+ shape: shape
+ }));
+ _iteratorNormalCompletion = true;
+ _didIteratorError = false;
+ _context.prev = 8;
+ _iterator = (0, _asyncIterator2.default)(numberedLineIterator);
+
+ case 10:
+ _context.next = 12;
+ return (0, _awaitAsyncGenerator2.default)(_iterator.next());
+
+ case 12:
+ _step = _context.sent;
+ _iteratorNormalCompletion = _step.done;
+ _context.next = 16;
+ return (0, _awaitAsyncGenerator2.default)(_step.value);
+
+ case 16:
+ _value = _context.sent;
+
+ if (_iteratorNormalCompletion) {
+ _context.next = 35;
+ break;
+ }
+
+ _value2 = _value, counter = _value2.counter, line = _value2.line;
+ _context.prev = 19;
+ row = JSON.parse(line);
+ tableBatchBuilder.addRow(row);
+ tableBatchBuilder.chunkComplete(line);
+ _batch = tableBatchBuilder.getFullBatch();
+
+ if (!_batch) {
+ _context.next = 27;
+ break;
+ }

- const batch = tableBatchBuilder.getFinalBatch();
+ _context.next = 27;
+ return _batch;

- if (batch) {
- yield batch;
- }
+ case 27:
+ _context.next = 32;
+ break;
+
+ case 29:
+ _context.prev = 29;
+ _context.t0 = _context["catch"](19);
+ throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
+
+ case 32:
+ _iteratorNormalCompletion = true;
+ _context.next = 10;
+ break;
+
+ case 35:
+ _context.next = 41;
+ break;
+
+ case 37:
+ _context.prev = 37;
+ _context.t1 = _context["catch"](8);
+ _didIteratorError = true;
+ _iteratorError = _context.t1;
+
+ case 41:
+ _context.prev = 41;
+ _context.prev = 42;
+
+ if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
+ _context.next = 46;
+ break;
+ }
+
+ _context.next = 46;
+ return (0, _awaitAsyncGenerator2.default)(_iterator.return());
+
+ case 46:
+ _context.prev = 46;
+
+ if (!_didIteratorError) {
+ _context.next = 49;
+ break;
+ }
+
+ throw _iteratorError;
+
+ case 49:
+ return _context.finish(46);
+
+ case 50:
+ return _context.finish(41);
+
+ case 51:
+ batch = tableBatchBuilder.getFinalBatch();
+
+ if (!batch) {
+ _context.next = 55;
+ break;
+ }
+
+ _context.next = 55;
+ return batch;
+
+ case 55:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, null, [[8, 37, 41, 51], [19, 29], [42,, 46, 50]]);
+ }));
+ return _parseNDJSONInBatches.apply(this, arguments);
  }
  //# sourceMappingURL=parse-ndjson-in-batches.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-ndjson-in-batches.ts"],"names":["parseNDJSONInBatches","binaryAsyncIterator","options","textIterator","lineIterator","numberedLineIterator","schema","shape","tableBatchBuilder","TableBatchBuilder","counter","line","row","JSON","parse","addRow","chunkComplete","batch","getFullBatch","error","Error","getFinalBatch"],"mappings":";;;;;;;AACA;;AACA;;AAOe,gBAAgBA,oBAAhB,CACbC,mBADa,EAEbC,OAFa,EAGS;AACtB,QAAMC,YAAY,GAAG,0CAAwBF,mBAAxB,CAArB;AACA,QAAMG,YAAY,GAAG,mCAAiBD,YAAjB,CAArB;AACA,QAAME,oBAAoB,GAAG,2CAAyBD,YAAzB,CAA7B;AAEA,QAAME,MAAM,GAAG,IAAf;AACA,QAAMC,KAAK,GAAG,WAAd;AAEA,QAAMC,iBAAiB,GAAG,IAAIC,yBAAJ,CAAsBH,MAAtB,EAA8B,EACtD,GAAGJ,OADmD;AAEtDK,IAAAA;AAFsD,GAA9B,CAA1B;;AAKA,aAAW,MAAM;AAACG,IAAAA,OAAD;AAAUC,IAAAA;AAAV,GAAjB,IAAoCN,oBAApC,EAA0D;AACxD,QAAI;AACF,YAAMO,GAAG,GAAGC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAAZ;AACAH,MAAAA,iBAAiB,CAACO,MAAlB,CAAyBH,GAAzB;AACAJ,MAAAA,iBAAiB,CAACQ,aAAlB,CAAgCL,IAAhC;AACA,YAAMM,KAAK,GAAGT,iBAAiB,CAACU,YAAlB,EAAd;;AACA,UAAID,KAAJ,EAAW;AACT,cAAMA,KAAN;AACD;AACF,KARD,CAQE,OAAOE,KAAP,EAAc;AACd,YAAM,IAAIC,KAAJ,sDAAwDV,OAAxD,EAAN;AACD;AACF;;AAED,QAAMO,KAAK,GAAGT,iBAAiB,CAACa,aAAlB,EAAd;;AACA,MAAIJ,KAAJ,EAAW;AACT,UAAMA,KAAN;AACD;AACF","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {\n LoaderOptions,\n makeLineIterator,\n makeNumberedLineIterator,\n makeTextDecoderIterator\n} from '@loaders.gl/loader-utils';\n\nexport default async function* parseNDJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options?: LoaderOptions\n): AsyncIterable<Batch> {\n const textIterator = makeTextDecoderIterator(binaryAsyncIterator);\n const lineIterator = makeLineIterator(textIterator);\n const numberedLineIterator = makeNumberedLineIterator(lineIterator);\n\n const schema = null;\n const shape = 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n for await (const {counter, line} of numberedLineIterator) {\n try {\n const row = JSON.parse(line);\n tableBatchBuilder.addRow(row);\n tableBatchBuilder.chunkComplete(line);\n const batch = tableBatchBuilder.getFullBatch();\n if (batch) {\n yield batch;\n }\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter}`);\n }\n }\n\n const batch = tableBatchBuilder.getFinalBatch();\n if (batch) {\n yield batch;\n }\n}\n"],"file":"parse-ndjson-in-batches.js"}
+ {"version":3,"sources":["../../../src/lib/parse-ndjson-in-batches.ts"],"names":["parseNDJSONInBatches","binaryAsyncIterator","options","textIterator","lineIterator","numberedLineIterator","schema","shape","tableBatchBuilder","TableBatchBuilder","counter","line","row","JSON","parse","addRow","chunkComplete","batch","getFullBatch","Error","getFinalBatch"],"mappings":";;;;;;;;;;;;;;;;;;;AACA;;AACA;;;;;;SAO+BA,oB;;;;;sFAAhB,iBACbC,mBADa,EAEbC,OAFa;AAAA;;AAAA;AAAA;AAAA;AAAA;AAIPC,YAAAA,YAJO,GAIQ,0CAAwBF,mBAAxB,CAJR;AAKPG,YAAAA,YALO,GAKQ,mCAAiBD,YAAjB,CALR;AAMPE,YAAAA,oBANO,GAMgB,2CAAyBD,YAAzB,CANhB;AAQPE,YAAAA,MARO,GAQE,IARF;AASPC,YAAAA,KATO,GASC,WATD;AAWPC,YAAAA,iBAXO,GAWa,IAAIC,yBAAJ,CAAsBH,MAAtB,kCACrBJ,OADqB;AAExBK,cAAAA,KAAK,EAALA;AAFwB,eAXb;AAAA;AAAA;AAAA;AAAA,qDAgBuBF,oBAhBvB;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA,8BAgBKK,OAhBL,WAgBKA,OAhBL,EAgBcC,IAhBd,WAgBcA,IAhBd;AAAA;AAkBHC,YAAAA,GAlBG,GAkBGC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAlBH;AAmBTH,YAAAA,iBAAiB,CAACO,MAAlB,CAAyBH,GAAzB;AACAJ,YAAAA,iBAAiB,CAACQ,aAAlB,CAAgCL,IAAhC;AACMM,YAAAA,MArBG,GAqBKT,iBAAiB,CAACU,YAAlB,EArBL;;AAAA,iBAsBLD,MAtBK;AAAA;AAAA;AAAA;;AAAA;AAuBP,mBAAMA,MAAN;;AAvBO;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA,kBA0BH,IAAIE,KAAJ,sDAAwDT,OAAxD,EA1BG;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AA8BPO,YAAAA,KA9BO,GA8BCT,iBAAiB,CAACY,aAAlB,EA9BD;;AAAA,iBA+BTH,KA/BS;AAAA;AAAA;AAAA;;AAAA;AAgCX,mBAAMA,KAAN;;AAhCW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {\n LoaderOptions,\n makeLineIterator,\n makeNumberedLineIterator,\n makeTextDecoderIterator\n} from '@loaders.gl/loader-utils';\n\nexport default async function* parseNDJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options?: LoaderOptions\n): AsyncIterable<Batch> {\n const textIterator = makeTextDecoderIterator(binaryAsyncIterator);\n const lineIterator = makeLineIterator(textIterator);\n const numberedLineIterator = makeNumberedLineIterator(lineIterator);\n\n const schema = null;\n const shape = 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n for await (const {counter, line} of numberedLineIterator) {\n try {\n const row = JSON.parse(line);\n tableBatchBuilder.addRow(row);\n tableBatchBuilder.chunkComplete(line);\n const batch = tableBatchBuilder.getFullBatch();\n if (batch) {\n yield batch;\n }\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter}`);\n }\n }\n\n const batch = tableBatchBuilder.getFinalBatch();\n if (batch) {\n yield batch;\n }\n}\n"],"file":"parse-ndjson-in-batches.js"}
@@ -6,8 +6,8 @@ Object.defineProperty(exports, "__esModule", {
  exports.default = parseNDJSONSync;

  function parseNDJSONSync(ndjsonText) {
- const lines = ndjsonText.trim().split('\n');
- return lines.map((line, counter) => {
+ var lines = ndjsonText.trim().split('\n');
+ return lines.map(function (line, counter) {
  try {
  return JSON.parse(line);
  } catch (error) {
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-ndjson.ts"],"names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error"],"mappings":";;;;;;;AAAe,SAASA,eAAT,CAAyBC,UAAzB,EAA6C;AAC1D,QAAMC,KAAK,GAAGD,UAAU,CAACE,IAAX,GAAkBC,KAAlB,CAAwB,IAAxB,CAAd;AACA,SAAOF,KAAK,CAACG,GAAN,CAAU,CAACC,IAAD,EAAOC,OAAP,KAAmB;AAClC,QAAI;AACF,aAAOC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAAP;AACD,KAFD,CAEE,OAAOI,KAAP,EAAc;AACd,YAAM,IAAIC,KAAJ,sDAAwDJ,OAAO,GAAG,CAAlE,EAAN;AACD;AACF,GANM,CAAP;AAOD","sourcesContent":["export default function parseNDJSONSync(ndjsonText: string) {\n const lines = ndjsonText.trim().split('\\n');\n return lines.map((line, counter) => {\n try {\n return JSON.parse(line);\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);\n }\n });\n}\n"],"file":"parse-ndjson.js"}
+ {"version":3,"sources":["../../../src/lib/parse-ndjson.ts"],"names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error"],"mappings":";;;;;;;AAAe,SAASA,eAAT,CAAyBC,UAAzB,EAA6C;AAC1D,MAAMC,KAAK,GAAGD,UAAU,CAACE,IAAX,GAAkBC,KAAlB,CAAwB,IAAxB,CAAd;AACA,SAAOF,KAAK,CAACG,GAAN,CAAU,UAACC,IAAD,EAAOC,OAAP,EAAmB;AAClC,QAAI;AACF,aAAOC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAAP;AACD,KAFD,CAEE,OAAOI,KAAP,EAAc;AACd,YAAM,IAAIC,KAAJ,sDAAwDJ,OAAO,GAAG,CAAlE,EAAN;AACD;AACF,GANM,CAAP;AAOD","sourcesContent":["export default function parseNDJSONSync(ndjsonText: string) {\n const lines = ndjsonText.trim().split('\\n');\n return lines.map((line, counter) => {\n try {\n return JSON.parse(line);\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);\n }\n });\n}\n"],"file":"parse-ndjson.js"}
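
The NDJSON changes are likewise transpilation-only: parse-ndjson-in-batches.js now drives its for-await loop through the regenerator state machine, and parse-ndjson.js trades const/arrow syntax for ES5 equivalents while keeping the 1-based line number (counter + 1) in its parse errors. A sketch of the synchronous path, assuming parseSync from @loaders.gl/core and the package's NDJSONLoader export:

import {parseSync} from '@loaders.gl/core';
import {NDJSONLoader} from '@loaders.gl/json';

// parseNDJSONSync trims the text, splits on '\n' and JSON.parse()s each line;
// a malformed line throws "NDJSONLoader: failed to parse JSON on line N".
const rows = parseSync('{"a":1}\n{"a":2}', NDJSONLoader);
// rows === [{a: 1}, {a: 2}]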