@loaders.gl/json 3.1.0-alpha.4 → 3.1.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +1839 -0
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/geojson-loader.js +29 -143
- package/dist/es5/geojson-loader.js.map +1 -1
- package/dist/es5/index.js +6 -6
- package/dist/es5/json-loader.js +18 -44
- package/dist/es5/json-loader.js.map +1 -1
- package/dist/es5/lib/clarinet/clarinet.js +406 -481
- package/dist/es5/lib/clarinet/clarinet.js.map +1 -1
- package/dist/es5/lib/jsonpath/jsonpath.js +54 -101
- package/dist/es5/lib/jsonpath/jsonpath.js.map +1 -1
- package/dist/es5/lib/parse-json-in-batches.js +78 -237
- package/dist/es5/lib/parse-json-in-batches.js.map +1 -1
- package/dist/es5/lib/parse-json.js +4 -9
- package/dist/es5/lib/parse-json.js.map +1 -1
- package/dist/es5/lib/parse-ndjson-in-batches.js +31 -143
- package/dist/es5/lib/parse-ndjson-in-batches.js.map +1 -1
- package/dist/es5/lib/parse-ndjson.js +2 -2
- package/dist/es5/lib/parse-ndjson.js.map +1 -1
- package/dist/es5/lib/parser/json-parser.js +114 -137
- package/dist/es5/lib/parser/json-parser.js.map +1 -1
- package/dist/es5/lib/parser/streaming-json-parser.js +63 -128
- package/dist/es5/lib/parser/streaming-json-parser.js.map +1 -1
- package/dist/es5/ndjson-loader.js +11 -30
- package/dist/es5/ndjson-loader.js.map +1 -1
- package/dist/esm/geojson-loader.js +1 -1
- package/dist/esm/geojson-loader.js.map +1 -1
- package/dist/esm/json-loader.js +1 -1
- package/dist/esm/json-loader.js.map +1 -1
- package/dist/esm/lib/clarinet/clarinet.js +222 -260
- package/dist/esm/lib/clarinet/clarinet.js.map +1 -1
- package/dist/esm/lib/parse-ndjson-in-batches.js +1 -1
- package/dist/esm/lib/parse-ndjson-in-batches.js.map +1 -1
- package/dist/esm/lib/parse-ndjson.js +1 -1
- package/dist/esm/lib/parse-ndjson.js.map +1 -1
- package/dist/esm/lib/parser/json-parser.js +48 -47
- package/dist/esm/lib/parser/json-parser.js.map +1 -1
- package/dist/esm/lib/parser/streaming-json-parser.js +29 -34
- package/dist/esm/lib/parser/streaming-json-parser.js.map +1 -1
- package/dist/esm/ndjson-loader.js +3 -1
- package/dist/esm/ndjson-loader.js.map +1 -1
- package/dist/geojson-loader.d.ts +16 -0
- package/dist/geojson-loader.d.ts.map +1 -0
- package/dist/geojson-loader.js +78 -0
- package/dist/geojson-worker.js +1702 -2
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +17 -0
- package/dist/json-loader.d.ts +14 -0
- package/dist/json-loader.d.ts.map +1 -0
- package/dist/json-loader.js +57 -0
- package/dist/jsonl-loader.d.ts +1 -0
- package/dist/jsonl-loader.d.ts.map +1 -0
- package/dist/jsonl-loader.js +54 -0
- package/dist/lib/clarinet/clarinet.d.ts +74 -0
- package/dist/lib/clarinet/clarinet.d.ts.map +1 -0
- package/dist/lib/clarinet/clarinet.js +535 -0
- package/dist/lib/jsonpath/jsonpath.d.ts +32 -0
- package/dist/lib/jsonpath/jsonpath.d.ts.map +1 -0
- package/dist/lib/jsonpath/jsonpath.js +89 -0
- package/dist/lib/parse-json-in-batches.d.ts +4 -0
- package/dist/lib/parse-json-in-batches.d.ts.map +1 -0
- package/dist/lib/parse-json-in-batches.js +80 -0
- package/dist/lib/parse-json.d.ts +3 -0
- package/dist/lib/parse-json.d.ts.map +1 -0
- package/dist/lib/parse-json.js +29 -0
- package/dist/lib/parse-ndjson-in-batches.d.ts +4 -0
- package/dist/lib/parse-ndjson-in-batches.d.ts.map +1 -0
- package/dist/lib/parse-ndjson-in-batches.js +35 -0
- package/dist/lib/parse-ndjson.d.ts +2 -0
- package/dist/lib/parse-ndjson.d.ts.map +1 -0
- package/dist/lib/parse-ndjson.js +14 -0
- package/dist/lib/parser/json-parser.d.ts +22 -0
- package/dist/lib/parser/json-parser.d.ts.map +1 -0
- package/dist/lib/parser/json-parser.js +98 -0
- package/dist/lib/parser/streaming-json-parser.d.ts +37 -0
- package/dist/lib/parser/streaming-json-parser.d.ts.map +1 -0
- package/dist/lib/parser/streaming-json-parser.js +100 -0
- package/dist/ndjson-loader.d.ts +22 -0
- package/dist/ndjson-loader.d.ts.map +1 -0
- package/dist/ndjson-loader.js +35 -0
- package/dist/workers/geojson-worker.d.ts +2 -0
- package/dist/workers/geojson-worker.d.ts.map +1 -0
- package/dist/workers/geojson-worker.js +5 -0
- package/package.json +8 -8
- package/src/lib/clarinet/clarinet.ts +539 -0
- package/src/lib/parser/json-parser.ts +52 -55
- package/src/lib/parser/streaming-json-parser.ts +28 -32
- package/src/ndjson-loader.ts +3 -1
- package/dist/dist.min.js +0 -2
- package/dist/dist.min.js.map +0 -1
- package/dist/geojson-worker.js.map +0 -1
- package/src/lib/clarinet/clarinet.js +0 -578
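
Reading the diffs below: the recurring change in the es5 and esm builds is that Babel's regenerator and async-iterator helpers (the removed @babel/runtime requires and the _context.next/_context.prev state machines) are gone, replaced by native async generators that 3.1.0-beta.3 now ships untranspiled; the new package/dist tree also adds TypeScript declaration files built from the .ts sources (note the vendored clarinet parser moving from clarinet.js to clarinet.ts, and dist.min.js being dropped). As a rough illustration of the transformation, not code from this package, a native async generator like

    // What the "+" side of the diffs below ships directly. Babel's regenerator
    // previously expanded each suspension point into a numbered case of a
    // switch over _context state, visible on the "-" side.
    async function* example(source: AsyncIterable<string>) {
      for await (const chunk of source) {
        yield chunk;
      }
    }

is what the removed lines spell out manually as a state machine.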

package/dist/es5/lib/parse-json-in-batches.js
@@ -7,252 +7,93 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.default = parseJSONInBatches;
 
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
-
-var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
-
 var _schema = require("@loaders.gl/schema");
 
 var _loaderUtils = require("@loaders.gl/loader-utils");
 
 var _streamingJsonParser = _interopRequireDefault(require("./parser/streaming-json-parser"));
 
[old lines 26-62, removed regenerator wrapper boilerplate, not captured in the extracted diff view]
-_didIteratorError = false;
-_context.prev = 10;
-_iterator = (0, _asyncIterator2.default)(asyncIterator);
-
-case 12:
-_context.next = 14;
-return (0, _awaitAsyncGenerator2.default)(_iterator.next());
-
-case 14:
-_step = _context.sent;
-_iteratorNormalCompletion = _step.done;
-_context.next = 18;
-return (0, _awaitAsyncGenerator2.default)(_step.value);
-
-case 18:
-_value = _context.sent;
-
-if (_iteratorNormalCompletion) {
-_context.next = 57;
-break;
-}
-
-chunk = _value;
-rows = parser.write(chunk);
-_jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
-
-if (!(rows.length > 0 && isFirstChunk)) {
-_context.next = 29;
-break;
-}
-
-if (!metadata) {
-_context.next = 28;
-break;
-}
-
-initialBatch = {
-shape: shape,
-batchType: 'partial-result',
-data: [],
-length: 0,
-bytesUsed: 0,
-container: parser.getPartialResult(),
-jsonpath: _jsonpath
-};
-_context.next = 28;
-return initialBatch;
-
-case 28:
-isFirstChunk = false;
-
-case 29:
-_iterator2 = _createForOfIteratorHelper(rows);
-_context.prev = 30;
-
-_iterator2.s();
-
-case 32:
-if ((_step2 = _iterator2.n()).done) {
-_context.next = 41;
-break;
-}
-
-row = _step2.value;
-tableBatchBuilder.addRow(row);
-_batch2 = tableBatchBuilder.getFullBatch({
-jsonpath: _jsonpath
-});
-
-if (!_batch2) {
-_context.next = 39;
-break;
-}
-
-_context.next = 39;
-return _batch2;
-
-case 39:
-_context.next = 32;
-break;
-
-case 41:
-_context.next = 46;
-break;
-
-case 43:
-_context.prev = 43;
-_context.t0 = _context["catch"](30);
-
-_iterator2.e(_context.t0);
-
-case 46:
-_context.prev = 46;
-
-_iterator2.f();
-
-return _context.finish(46);
-
-case 49:
-tableBatchBuilder.chunkComplete(chunk);
-_batch = tableBatchBuilder.getFullBatch({
-jsonpath: _jsonpath
-});
-
-if (!_batch) {
-_context.next = 54;
-break;
-}
-
-_context.next = 54;
-return _batch;
-
-case 54:
-_iteratorNormalCompletion = true;
-_context.next = 12;
-break;
-
-case 57:
-_context.next = 63;
-break;
-
-case 59:
-_context.prev = 59;
-_context.t1 = _context["catch"](10);
-_didIteratorError = true;
-_iteratorError = _context.t1;
-
-case 63:
-_context.prev = 63;
-_context.prev = 64;
-
-if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
-_context.next = 68;
-break;
-}
-
-_context.next = 68;
-return (0, _awaitAsyncGenerator2.default)(_iterator.return());
-
-case 68:
-_context.prev = 68;
-
-if (!_didIteratorError) {
-_context.next = 71;
-break;
-}
-
-throw _iteratorError;
-
-case 71:
-return _context.finish(68);
-
-case 72:
-return _context.finish(63);
-
-case 73:
-jsonpath = parser.getStreamingJsonPathAsString();
-batch = tableBatchBuilder.getFinalBatch({
-jsonpath: jsonpath
-});
-
-if (!batch) {
-_context.next = 78;
-break;
-}
-
-_context.next = 78;
-return batch;
+async function* parseJSONInBatches(binaryAsyncIterator, options) {
+var _options$json;
+
+const asyncIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+const {
+metadata
+} = options;
+const {
+jsonpaths
+} = options.json || {};
+let isFirstChunk = true;
+const schema = null;
+const shape = (options === null || options === void 0 ? void 0 : (_options$json = options.json) === null || _options$json === void 0 ? void 0 : _options$json.shape) || 'row-table';
+const tableBatchBuilder = new _schema.TableBatchBuilder(schema, { ...options,
+shape
+});
+const parser = new _streamingJsonParser.default({
+jsonpaths
+});
+
+for await (const chunk of asyncIterator) {
+const rows = parser.write(chunk);
+const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
+
+if (rows.length > 0 && isFirstChunk) {
+if (metadata) {
+const initialBatch = {
+shape,
+batchType: 'partial-result',
+data: [],
+length: 0,
+bytesUsed: 0,
+container: parser.getPartialResult(),
+jsonpath
+};
+yield initialBatch;
+}
 
[old lines 232-233 not captured in the extracted diff view]
-_context.next = 82;
-break;
-}
+isFirstChunk = false;
+}
 
[old lines 238-242 not captured in the extracted diff view]
-data: [],
-length: 0
-};
-_context.next = 82;
-return finalBatch;
+for (const row of rows) {
+tableBatchBuilder.addRow(row);
+const batch = tableBatchBuilder.getFullBatch({
+jsonpath
+});
 
[old lines 249-250 not captured in the extracted diff view]
-return _context.stop();
-}
+if (batch) {
+yield batch;
 }
-}
[old lines 255-256 not captured in the extracted diff view]
+}
+
+tableBatchBuilder.chunkComplete(chunk);
+const batch = tableBatchBuilder.getFullBatch({
+jsonpath
+});
+
+if (batch) {
+yield batch;
+}
+}
+
+const jsonpath = parser.getStreamingJsonPathAsString();
+const batch = tableBatchBuilder.getFinalBatch({
+jsonpath
+});
+
+if (batch) {
+yield batch;
+}
+
+if (metadata) {
+const finalBatch = {
+shape,
+batchType: 'final-result',
+container: parser.getPartialResult(),
+jsonpath: parser.getStreamingJsonPathAsString(),
+data: [],
+length: 0
+};
+yield finalBatch;
+}
 }
 //# sourceMappingURL=parse-json-in-batches.js.map

package/dist/es5/lib/parse-json-in-batches.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/lib/parse-json-in-batches.ts"],"names":["parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","… [rest of the old line truncated in the extracted diff view]
+{"version":3,"sources":["../../../src/lib/parse-json-in-batches.ts"],"names":["parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","jsonpaths","json","isFirstChunk","schema","shape","tableBatchBuilder","TableBatchBuilder","parser","StreamingJSONParser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","initialBatch","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch"],"mappings":";;;;;;;;;AAEA;;AACA;;AACA;;AAIe,gBAAgBA,kBAAhB,CACbC,mBADa,EAEbC,OAFa,EAGS;AAAA;;AACtB,QAAMC,aAAa,GAAG,0CAAwBF,mBAAxB,CAAtB;AAEA,QAAM;AAACG,IAAAA;AAAD,MAAaF,OAAnB;AACA,QAAM;AAACG,IAAAA;AAAD,MAAcH,OAAO,CAACI,IAAR,IAAgB,EAApC;AAEA,MAAIC,YAAqB,GAAG,IAA5B;AAGA,QAAMC,MAAM,GAAG,IAAf;AACA,QAAMC,KAAK,GAAG,CAAAP,OAAO,SAAP,IAAAA,OAAO,WAAP,6BAAAA,OAAO,CAAEI,IAAT,gEAAeG,KAAf,KAAwB,WAAtC;AAEA,QAAMC,iBAAiB,GAAG,IAAIC,yBAAJ,CAAsBH,MAAtB,EAA8B,EACtD,GAAGN,OADmD;AAEtDO,IAAAA;AAFsD,GAA9B,CAA1B;AAKA,QAAMG,MAAM,GAAG,IAAIC,4BAAJ,CAAwB;AAACR,IAAAA;AAAD,GAAxB,CAAf;;AAEA,aAAW,MAAMS,KAAjB,IAA0BX,aAA1B,EAAyC;AACvC,UAAMY,IAAI,GAAGH,MAAM,CAACI,KAAP,CAAaF,KAAb,CAAb;AAEA,UAAMG,QAAQ,GAAGF,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBN,MAAM,CAACO,4BAAP,EAApC;;AAEA,QAAIJ,IAAI,CAACG,MAAL,GAAc,CAAd,IAAmBX,YAAvB,EAAqC;AACnC,UAAIH,QAAJ,EAAc;AACZ,cAAMgB,YAAmB,GAAG;AAE1BX,UAAAA,KAF0B;AAG1BY,UAAAA,SAAS,EAAE,gBAHe;AAI1BC,UAAAA,IAAI,EAAE,EAJoB;AAK1BJ,UAAAA,MAAM,EAAE,CALkB;AAM1BK,UAAAA,SAAS,EAAE,CANe;AAQ1BC,UAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EARe;AAS1BR,UAAAA;AAT0B,SAA5B;AAWA,cAAMG,YAAN;AACD;;AACDb,MAAAA,YAAY,GAAG,KAAf;AAED;;AAGD,SAAK,MAAMmB,GAAX,IAAkBX,IAAlB,EAAwB;AACtBL,MAAAA,iBAAiB,CAACiB,MAAlB,CAAyBD,GAAzB;AAEA,YAAME,KAAK,GAAGlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,QAAAA;AAAD,OAA/B,CAAd;;AACA,UAAIW,KAAJ,EAAW;AACT,cAAMA,KAAN;AACD;AACF;;AAEDlB,IAAAA,iBAAiB,CAACoB,aAAlB,CAAgChB,KAAhC;AACA,UAAMc,KAAK,GAAGlB,iBAAiB,CAACmB,YAAlB,CAA+B;AAACZ,MAAAA;AAAD,KAA/B,CAAd;;AACA,QAAIW,KAAJ,EAAW;AACT,YAAMA,KAAN;AACD;AACF;;AAGD,QAAMX,QAAQ,GAAGL,MAAM,CAACO,4BAAP,EAAjB;AACA,QAAMS,KAAK,GAAGlB,iBAAiB,CAACqB,aAAlB,CAAgC;AAACd,IAAAA;AAAD,GAAhC,CAAd;;AACA,MAAIW,KAAJ,EAAW;AACT,UAAMA,KAAN;AACD;;AAED,MAAIxB,QAAJ,EAAc;AACZ,UAAM4B,UAAiB,GAAG;AACxBvB,MAAAA,KADwB;AAExBY,MAAAA,SAAS,EAAE,cAFa;AAGxBG,MAAAA,SAAS,EAAEZ,MAAM,CAACa,gBAAP,EAHa;AAIxBR,MAAAA,QAAQ,EAAEL,MAAM,CAACO,4BAAP,EAJc;AAKxBG,MAAAA,IAAI,EAAE,EALkB;AAMxBJ,MAAAA,MAAM,EAAE;AANgB,KAA1B;AASA,UAAMc,UAAN;AACD;AACF","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions} from '../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from './parser/streaming-json-parser';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport default async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<Batch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // TODO fix Schema deduction\n const schema = null; // new Schema([]);\n const shape = options?.json?.shape || 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: Batch = {\n // Common fields\n shape,\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: Batch = {\n shape,\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n"],"file":"parse-json-in-batches.js"}
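
For orientation, the rewritten parseJSONInBatches above is what drives the loader's batched parsing. A minimal consumption sketch, where the file name and jsonpath are placeholder assumptions, not values taken from this diff:

    import {loadInBatches} from '@loaders.gl/core';
    import {JSONLoader} from '@loaders.gl/json';

    // Stream a large JSON file; each batch is a table of rows cut by
    // TableBatchBuilder. With metadata: true, the 'partial-result' and
    // 'final-result' wrapper batches constructed above are emitted as well.
    const batches = await loadInBatches('data.json', JSONLoader, {
      metadata: true,
      json: {jsonpaths: ['$.features']}
    });
    for await (const batch of batches) {
      console.log(batch.batchType, batch.length);
    }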

package/dist/es5/lib/parse-json.js
@@ -1,19 +1,15 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.default = parseJSONSync;
 
-var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
-
 function parseJSONSync(jsonText, options) {
 try {
 var _options$json;
 
[old line 16 not captured in the extracted diff view]
+const json = JSON.parse(jsonText);
 
 if ((_options$json = options.json) !== null && _options$json !== void 0 && _options$json.table) {
 return getFirstArray(json) || json;
@@ -30,10 +26,9 @@ function getFirstArray(json) {
 return json;
 }
 
-if (json && …
-for (…
[old line 35 not captured in the extracted diff view]
-var array = getFirstArray(value);
+if (json && typeof json === 'object') {
+for (const value of Object.values(json)) {
+const array = getFirstArray(value);
 
 if (array) {
 return array;

package/dist/es5/lib/parse-json.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/lib/parse-json.ts"],"names":["parseJSONSync","jsonText","options","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","… [rest of the old line truncated in the extracted diff view]
+{"version":3,"sources":["../../../src/lib/parse-json.ts"],"names":["parseJSONSync","jsonText","options","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","value","Object","values","array"],"mappings":";;;;;;;AAEe,SAASA,aAAT,CAAuBC,QAAvB,EAAyCC,OAAzC,EAAqE;AAClF,MAAI;AAAA;;AACF,UAAMC,IAAI,GAAGC,IAAI,CAACC,KAAL,CAAWJ,QAAX,CAAb;;AACA,yBAAIC,OAAO,CAACC,IAAZ,0CAAI,cAAcG,KAAlB,EAAyB;AACvB,aAAOC,aAAa,CAACJ,IAAD,CAAb,IAAuBA,IAA9B;AACD;;AACD,WAAOA,IAAP;AACD,GAND,CAME,OAAOK,KAAP,EAAc;AACd,UAAM,IAAIC,KAAJ,CAAU,kCAAV,CAAN;AACD;AACF;;AAED,SAASF,aAAT,CAAuBJ,IAAvB,EAA6B;AAC3B,MAAIO,KAAK,CAACC,OAAN,CAAcR,IAAd,CAAJ,EAAyB;AACvB,WAAOA,IAAP;AACD;;AACD,MAAIA,IAAI,IAAI,OAAOA,IAAP,KAAgB,QAA5B,EAAsC;AACpC,SAAK,MAAMS,KAAX,IAAoBC,MAAM,CAACC,MAAP,CAAcX,IAAd,CAApB,EAAyC;AACvC,YAAMY,KAAK,GAAGR,aAAa,CAACK,KAAD,CAA3B;;AACA,UAAIG,KAAJ,EAAW;AACT,eAAOA,KAAP;AACD;AACF;AACF;;AACD,SAAO,IAAP;AACD","sourcesContent":["import type {JSONLoaderOptions} from '../json-loader';\n\nexport default function parseJSONSync(jsonText: string, options: JSONLoaderOptions) {\n try {\n const json = JSON.parse(jsonText);\n if (options.json?.table) {\n return getFirstArray(json) || json;\n }\n return json;\n } catch (error) {\n throw new Error('JSONLoader: failed to parse JSON');\n }\n}\n\nfunction getFirstArray(json) {\n if (Array.isArray(json)) {\n return json;\n }\n if (json && typeof json === 'object') {\n for (const value of Object.values(json)) {\n const array = getFirstArray(value);\n if (array) {\n return array;\n }\n }\n }\n return null;\n}\n"],"file":"parse-json.js"}
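
The parse-json.js rewrite above is the synchronous path: per the sourcesContent embedded in the map, parseJSONSync parses with JSON.parse and, when options.json.table is set, uses getFirstArray's depth-first walk over Object.values to return the first nested array as the table. A usage sketch with made-up inline data:

    import {parse} from '@loaders.gl/core';
    import {JSONLoader} from '@loaders.gl/json';

    // json.table makes the loader return the first embedded array
    // (here `features`) instead of the wrapping object.
    const rows = await parse(
      '{"type": "FeatureCollection", "features": [{"id": 1}, {"id": 2}]}',
      JSONLoader,
      {json: {table: true}}
    );
    // rows -> [{id: 1}, {id: 2}]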

package/dist/es5/lib/parse-ndjson-in-batches.js
@@ -1,158 +1,46 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.default = parseNDJSONInBatches;
 
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
-
-var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
-
 var _schema = require("@loaders.gl/schema");
 
 var _loaderUtils = require("@loaders.gl/loader-utils");
 
-function …
[old lines 25-50, removed regenerator wrapper boilerplate, not captured in the extracted diff view]
-_iterator = (0, _asyncIterator2.default)(numberedLineIterator);
-
-case 10:
-_context.next = 12;
-return (0, _awaitAsyncGenerator2.default)(_iterator.next());
-
-case 12:
-_step = _context.sent;
-_iteratorNormalCompletion = _step.done;
-_context.next = 16;
-return (0, _awaitAsyncGenerator2.default)(_step.value);
-
-case 16:
-_value = _context.sent;
-
-if (_iteratorNormalCompletion) {
-_context.next = 35;
-break;
-}
-
-_value2 = _value, counter = _value2.counter, line = _value2.line;
-_context.prev = 19;
-row = JSON.parse(line);
-tableBatchBuilder.addRow(row);
-tableBatchBuilder.chunkComplete(line);
-_batch = tableBatchBuilder.getFullBatch();
-
-if (!_batch) {
-_context.next = 27;
-break;
-}
-
-_context.next = 27;
-return _batch;
-
-case 27:
-_context.next = 32;
-break;
-
-case 29:
-_context.prev = 29;
-_context.t0 = _context["catch"](19);
-throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
-
-case 32:
-_iteratorNormalCompletion = true;
-_context.next = 10;
-break;
-
-case 35:
-_context.next = 41;
-break;
-
-case 37:
-_context.prev = 37;
-_context.t1 = _context["catch"](8);
-_didIteratorError = true;
-_iteratorError = _context.t1;
-
-case 41:
-_context.prev = 41;
-_context.prev = 42;
-
-if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
-_context.next = 46;
-break;
-}
-
-_context.next = 46;
-return (0, _awaitAsyncGenerator2.default)(_iterator.return());
-
-case 46:
-_context.prev = 46;
-
-if (!_didIteratorError) {
-_context.next = 49;
-break;
-}
-
-throw _iteratorError;
-
-case 49:
-return _context.finish(46);
-
-case 50:
-return _context.finish(41);
-
-case 51:
-batch = tableBatchBuilder.getFinalBatch();
-
-if (!batch) {
-_context.next = 55;
-break;
-}
+async function* parseNDJSONInBatches(binaryAsyncIterator, options) {
+const textIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+const lineIterator = (0, _loaderUtils.makeLineIterator)(textIterator);
+const numberedLineIterator = (0, _loaderUtils.makeNumberedLineIterator)(lineIterator);
+const schema = null;
+const shape = 'row-table';
+const tableBatchBuilder = new _schema.TableBatchBuilder(schema, { ...options,
+shape
+});
+
+for await (const {
+counter,
+line
+} of numberedLineIterator) {
+try {
+const row = JSON.parse(line);
+tableBatchBuilder.addRow(row);
+tableBatchBuilder.chunkComplete(line);
+const batch = tableBatchBuilder.getFullBatch();
+
+if (batch) {
+yield batch;
+}
+} catch (error) {
+throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
+}
+}
 
-
-return batch;
+const batch = tableBatchBuilder.getFinalBatch();
 
[old lines 149-151 not captured in the extracted diff view]
-}
-}
-}, _callee, null, [[8, 37, 41, 51], [19, 29], [42,, 46, 50]]);
-}));
-return _parseNDJSONInBatches.apply(this, arguments);
+if (batch) {
+yield batch;
+}
 }
 //# sourceMappingURL=parse-ndjson-in-batches.js.map

package/dist/es5/lib/parse-ndjson-in-batches.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/lib/parse-ndjson-in-batches.ts"],"names":["parseNDJSONInBatches","binaryAsyncIterator","options","textIterator","lineIterator","numberedLineIterator","schema","shape","tableBatchBuilder","TableBatchBuilder","counter","line","row","JSON","parse","addRow","chunkComplete","batch","getFullBatch","Error","getFinalBatch"],"mappings":"… [rest of the old line truncated in the extracted diff view]
+{"version":3,"sources":["../../../src/lib/parse-ndjson-in-batches.ts"],"names":["parseNDJSONInBatches","binaryAsyncIterator","options","textIterator","lineIterator","numberedLineIterator","schema","shape","tableBatchBuilder","TableBatchBuilder","counter","line","row","JSON","parse","addRow","chunkComplete","batch","getFullBatch","error","Error","getFinalBatch"],"mappings":";;;;;;;AACA;;AACA;;AAOe,gBAAgBA,oBAAhB,CACbC,mBADa,EAEbC,OAFa,EAGS;AACtB,QAAMC,YAAY,GAAG,0CAAwBF,mBAAxB,CAArB;AACA,QAAMG,YAAY,GAAG,mCAAiBD,YAAjB,CAArB;AACA,QAAME,oBAAoB,GAAG,2CAAyBD,YAAzB,CAA7B;AAEA,QAAME,MAAM,GAAG,IAAf;AACA,QAAMC,KAAK,GAAG,WAAd;AAEA,QAAMC,iBAAiB,GAAG,IAAIC,yBAAJ,CAAsBH,MAAtB,EAA8B,EACtD,GAAGJ,OADmD;AAEtDK,IAAAA;AAFsD,GAA9B,CAA1B;;AAKA,aAAW,MAAM;AAACG,IAAAA,OAAD;AAAUC,IAAAA;AAAV,GAAjB,IAAoCN,oBAApC,EAA0D;AACxD,QAAI;AACF,YAAMO,GAAG,GAAGC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAAZ;AACAH,MAAAA,iBAAiB,CAACO,MAAlB,CAAyBH,GAAzB;AACAJ,MAAAA,iBAAiB,CAACQ,aAAlB,CAAgCL,IAAhC;AACA,YAAMM,KAAK,GAAGT,iBAAiB,CAACU,YAAlB,EAAd;;AACA,UAAID,KAAJ,EAAW;AACT,cAAMA,KAAN;AACD;AACF,KARD,CAQE,OAAOE,KAAP,EAAc;AACd,YAAM,IAAIC,KAAJ,sDAAwDV,OAAxD,EAAN;AACD;AACF;;AAED,QAAMO,KAAK,GAAGT,iBAAiB,CAACa,aAAlB,EAAd;;AACA,MAAIJ,KAAJ,EAAW;AACT,UAAMA,KAAN;AACD;AACF","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {\n LoaderOptions,\n makeLineIterator,\n makeNumberedLineIterator,\n makeTextDecoderIterator\n} from '@loaders.gl/loader-utils';\n\nexport default async function* parseNDJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options?: LoaderOptions\n): AsyncIterable<Batch> {\n const textIterator = makeTextDecoderIterator(binaryAsyncIterator);\n const lineIterator = makeLineIterator(textIterator);\n const numberedLineIterator = makeNumberedLineIterator(lineIterator);\n\n const schema = null;\n const shape = 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n for await (const {counter, line} of numberedLineIterator) {\n try {\n const row = JSON.parse(line);\n tableBatchBuilder.addRow(row);\n tableBatchBuilder.chunkComplete(line);\n const batch = tableBatchBuilder.getFullBatch();\n if (batch) {\n yield batch;\n }\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter}`);\n }\n }\n\n const batch = tableBatchBuilder.getFinalBatch();\n if (batch) {\n yield batch;\n }\n}\n"],"file":"parse-ndjson-in-batches.js"}
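
The NDJSON batch path above mirrors the JSON one minus the streaming parser: each line is one JSON.parse call and one row, and makeNumberedLineIterator supplies the counter used in the parse-error message. A minimal consumption sketch (the file name is a placeholder assumption):

    import {loadInBatches} from '@loaders.gl/core';
    import {NDJSONLoader} from '@loaders.gl/json';

    // One row per newline-delimited JSON record; batches are cut by
    // TableBatchBuilder as chunks complete.
    for await (const batch of await loadInBatches('events.ndjson', NDJSONLoader)) {
      console.log(batch.length);
    }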

package/dist/es5/lib/parse-ndjson.js
@@ -6,8 +6,8 @@ Object.defineProperty(exports, "__esModule", {
 exports.default = parseNDJSONSync;
 
 function parseNDJSONSync(ndjsonText) {
[old line 9 not captured in the extracted diff view]
-return lines.map(…
+const lines = ndjsonText.trim().split('\n');
+return lines.map((line, counter) => {
 try {
 return JSON.parse(line);
 } catch (error) {

package/dist/es5/lib/parse-ndjson.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/lib/parse-ndjson.ts"],"names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error"],"mappings":";;;;;;;AAAe,SAASA,eAAT,CAAyBC,UAAzB,EAA6C;AAC1D,… [rest of the old line truncated in the extracted diff view]
+{"version":3,"sources":["../../../src/lib/parse-ndjson.ts"],"names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error"],"mappings":";;;;;;;AAAe,SAASA,eAAT,CAAyBC,UAAzB,EAA6C;AAC1D,QAAMC,KAAK,GAAGD,UAAU,CAACE,IAAX,GAAkBC,KAAlB,CAAwB,IAAxB,CAAd;AACA,SAAOF,KAAK,CAACG,GAAN,CAAU,CAACC,IAAD,EAAOC,OAAP,KAAmB;AAClC,QAAI;AACF,aAAOC,IAAI,CAACC,KAAL,CAAWH,IAAX,CAAP;AACD,KAFD,CAEE,OAAOI,KAAP,EAAc;AACd,YAAM,IAAIC,KAAJ,sDAAwDJ,OAAO,GAAG,CAAlE,EAAN;AACD;AACF,GANM,CAAP;AAOD","sourcesContent":["export default function parseNDJSONSync(ndjsonText: string) {\n const lines = ndjsonText.trim().split('\\n');\n return lines.map((line, counter) => {\n try {\n return JSON.parse(line);\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);\n }\n });\n}\n"],"file":"parse-ndjson.js"}
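
One detail in the synchronous parse-ndjson.js rewrite above: the map callback's counter is a zero-based array index, so the error message reports counter + 1 to give a one-based line number (the batched variant passes the iterator's counter through unchanged). Restated from the sourcesContent embedded in the map:

    export default function parseNDJSONSync(ndjsonText: string) {
      const lines = ndjsonText.trim().split('\n');
      return lines.map((line, counter) => {
        try {
          return JSON.parse(line);
        } catch (error) {
          // counter is zero-based; report a one-based line number
          throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);
        }
      });
    }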