@loaders.gl/json 3.4.13 → 3.4.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/geojson-loader.js +34 -103
- package/dist/es5/geojson-loader.js.map +1 -1
- package/dist/es5/index.js +7 -7
- package/dist/es5/json-loader.js +22 -33
- package/dist/es5/json-loader.js.map +1 -1
- package/dist/es5/lib/clarinet/clarinet.js +295 -318
- package/dist/es5/lib/clarinet/clarinet.js.map +1 -1
- package/dist/es5/lib/jsonpath/jsonpath.js +44 -88
- package/dist/es5/lib/jsonpath/jsonpath.js.map +1 -1
- package/dist/es5/lib/parse-json-in-batches.js +74 -181
- package/dist/es5/lib/parse-json-in-batches.js.map +1 -1
- package/dist/es5/lib/parse-json.js +4 -7
- package/dist/es5/lib/parse-json.js.map +1 -1
- package/dist/es5/lib/parse-ndjson-in-batches.js +29 -103
- package/dist/es5/lib/parse-ndjson-in-batches.js.map +1 -1
- package/dist/es5/lib/parse-ndjson.js +2 -2
- package/dist/es5/lib/parse-ndjson.js.map +1 -1
- package/dist/es5/lib/parser/json-parser.js +84 -106
- package/dist/es5/lib/parser/json-parser.js.map +1 -1
- package/dist/es5/lib/parser/streaming-json-parser.js +52 -95
- package/dist/es5/lib/parser/streaming-json-parser.js.map +1 -1
- package/dist/es5/ndgeoson-loader.js +5 -23
- package/dist/es5/ndgeoson-loader.js.map +1 -1
- package/dist/es5/ndjson-loader.js +3 -21
- package/dist/es5/ndjson-loader.js.map +1 -1
- package/dist/esm/geojson-loader.js +1 -1
- package/dist/esm/json-loader.js +1 -1
- package/dist/esm/ndgeoson-loader.js +1 -1
- package/dist/esm/ndjson-loader.js +1 -1
- package/dist/geojson-worker.js +1 -1
- package/package.json +5 -5

package/dist/es5/lib/parse-json-in-batches.js
@@ -6,189 +6,82 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.default = parseJSONInBatches;
 exports.rebuildJsonObject = rebuildJsonObject;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
 var _schema = require("@loaders.gl/schema");
 var _loaderUtils = require("@loaders.gl/loader-utils");
 var _streamingJsonParser = _interopRequireDefault(require("./parser/streaming-json-parser"));
-var
-[old lines 17-60 truncated in this rendering]
-          if (!(rows.length > 0 && isFirstChunk)) {
-            _context.next = 24;
-            break;
-          }
-          if (!metadata) {
-            _context.next = 23;
-            break;
-          }
-          initialBatch = {
-            shape: shape,
-            batchType: 'partial-result',
-            data: [],
-            length: 0,
-            bytesUsed: 0,
-            container: parser.getPartialResult(),
-            jsonpath: _jsonpath
-          };
-          _context.next = 23;
-          return initialBatch;
-        case 23:
-          isFirstChunk = false;
-        case 24:
-          _iterator2 = _createForOfIteratorHelper(rows);
-          _context.prev = 25;
-          _iterator2.s();
-        case 27:
-          if ((_step2 = _iterator2.n()).done) {
-            _context.next = 36;
-            break;
-          }
-          row = _step2.value;
-          tableBatchBuilder.addRow(row);
-          _batch2 = tableBatchBuilder.getFullBatch({
-            jsonpath: _jsonpath
-          });
-          if (!_batch2) {
-            _context.next = 34;
-            break;
-          }
-          _context.next = 34;
-          return _batch2;
-        case 34:
-          _context.next = 27;
-          break;
-        case 36:
-          _context.next = 41;
-          break;
-        case 38:
-          _context.prev = 38;
-          _context.t0 = _context["catch"](25);
-          _iterator2.e(_context.t0);
-        case 41:
-          _context.prev = 41;
-          _iterator2.f();
-          return _context.finish(41);
-        case 44:
-          tableBatchBuilder.chunkComplete(chunk);
-          _batch = tableBatchBuilder.getFullBatch({
-            jsonpath: _jsonpath
-          });
-          if (!_batch) {
-            _context.next = 49;
-            break;
-          }
-          _context.next = 49;
-          return _batch;
-        case 49:
-          _iteratorAbruptCompletion = false;
-          _context.next = 12;
-          break;
-        case 52:
-          _context.next = 58;
-          break;
-        case 54:
-          _context.prev = 54;
-          _context.t1 = _context["catch"](10);
-          _didIteratorError = true;
-          _iteratorError = _context.t1;
-        case 58:
-          _context.prev = 58;
-          _context.prev = 59;
-          if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
-            _context.next = 63;
-            break;
-          }
-          _context.next = 63;
-          return (0, _awaitAsyncGenerator2.default)(_iterator.return());
-        case 63:
-          _context.prev = 63;
-          if (!_didIteratorError) {
-            _context.next = 66;
-            break;
-          }
-          throw _iteratorError;
-        case 66:
-          return _context.finish(63);
-        case 67:
-          return _context.finish(58);
-        case 68:
-          jsonpath = parser.getStreamingJsonPathAsString();
-          batch = tableBatchBuilder.getFinalBatch({
-            jsonpath: jsonpath
-          });
-          if (!batch) {
-            _context.next = 73;
-            break;
-          }
-          _context.next = 73;
-          return batch;
-        case 73:
-          if (!metadata) {
-            _context.next = 77;
-            break;
-          }
-          finalBatch = {
-            shape: shape,
-            batchType: 'final-result',
-            container: parser.getPartialResult(),
-            jsonpath: parser.getStreamingJsonPathAsString(),
-            data: [],
-            length: 0
-          };
-          _context.next = 77;
-          return finalBatch;
-        case 77:
-        case "end":
-          return _context.stop();
+var _jsonpath = _interopRequireDefault(require("./jsonpath/jsonpath"));
+async function* parseJSONInBatches(binaryAsyncIterator, options) {
+  var _options$json;
+  const asyncIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+  const {
+    metadata
+  } = options;
+  const {
+    jsonpaths
+  } = options.json || {};
+  let isFirstChunk = true;
+  const schema = null;
+  const shape = (options === null || options === void 0 ? void 0 : (_options$json = options.json) === null || _options$json === void 0 ? void 0 : _options$json.shape) || 'row-table';
+  const tableBatchBuilder = new _schema.TableBatchBuilder(schema, {
+    ...options,
+    shape
+  });
+  const parser = new _streamingJsonParser.default({
+    jsonpaths
+  });
+  for await (const chunk of asyncIterator) {
+    const rows = parser.write(chunk);
+    const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
+    if (rows.length > 0 && isFirstChunk) {
+      if (metadata) {
+        const initialBatch = {
+          shape,
+          batchType: 'partial-result',
+          data: [],
+          length: 0,
+          bytesUsed: 0,
+          container: parser.getPartialResult(),
+          jsonpath
+        };
+        yield initialBatch;
+      }
+      isFirstChunk = false;
+    }
+    for (const row of rows) {
+      tableBatchBuilder.addRow(row);
+      const batch = tableBatchBuilder.getFullBatch({
+        jsonpath
+      });
+      if (batch) {
+        yield batch;
       }
-}
-[old lines 190-191 truncated in this rendering]
+    }
+    tableBatchBuilder.chunkComplete(chunk);
+    const batch = tableBatchBuilder.getFullBatch({
+      jsonpath
+    });
+    if (batch) {
+      yield batch;
+    }
+  }
+  const jsonpath = parser.getStreamingJsonPathAsString();
+  const batch = tableBatchBuilder.getFinalBatch({
+    jsonpath
+  });
+  if (batch) {
+    yield batch;
+  }
+  if (metadata) {
+    const finalBatch = {
+      shape,
+      batchType: 'final-result',
+      container: parser.getPartialResult(),
+      jsonpath: parser.getStreamingJsonPathAsString(),
+      data: [],
+      length: 0
+    };
+    yield finalBatch;
+  }
 }
 function rebuildJsonObject(batch, data) {
   (0, _loaderUtils.assert)(batch.batchType === 'final-result');
@@ -196,8 +89,8 @@ function rebuildJsonObject(batch, data) {
     return data;
   }
   if (batch.jsonpath && batch.jsonpath.length > 1) {
-[old lines 199-200 truncated in this rendering]
+    const topLevelObject = batch.container;
+    const streamingPath = new _jsonpath.default(batch.jsonpath);
     streamingPath.setFieldAtPath(topLevelObject, data);
     return topLevelObject;
   }
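
The rewritten module keeps the same batch protocol as 3.4.13: row batches built by TableBatchBuilder, plus optional 'partial-result' / 'final-result' wrapper batches (emitted only when options.metadata is set) whose container and jsonpath fields feed rebuildJsonObject. A minimal consumption sketch, assuming the standard @loaders.gl/core parseInBatches entry point and a hypothetical '$.features' jsonpath (neither is part of this diff):

    import {parseInBatches} from '@loaders.gl/core';
    import {JSONLoader} from '@loaders.gl/json';

    async function collectRows(response: Response) {
      // Stream the response; the loader yields row-table batches as rows complete.
      const batches = await parseInBatches(response, JSONLoader, {
        json: {jsonpaths: ['$.features']} // hypothetical path into the streamed document
      });
      const rows: unknown[] = [];
      for await (const batch of batches) {
        rows.push(...batch.data);
      }
      return rows;
    }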

package/dist/es5/lib/parse-json-in-batches.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-json-in-batches.js","names":["_schema","require","_loaderUtils","_streamingJsonParser","_interopRequireDefault","_jsonpath2","_createForOfIteratorHelper","o","allowArrayLike","it","Symbol","iterator","Array","isArray","_unsupportedIterableToArray","length","i","F","s","n","done","value","e","_e","f","TypeError","normalCompletion","didErr","err","call","step","next","_e2","return","minLen","_arrayLikeToArray","Object","prototype","toString","slice","constructor","name","from","test","arr","len","arr2","ownKeys","object","enumerableOnly","keys","getOwnPropertySymbols","symbols","filter","sym","getOwnPropertyDescriptor","enumerable","push","apply","_objectSpread","target","arguments","source","forEach","key","_defineProperty2","default","getOwnPropertyDescriptors","defineProperties","defineProperty","_asyncIterator","iterable","method","async","sync","retry","asyncIterator","AsyncFromSyncIterator","AsyncFromSyncIteratorContinuation","r","Promise","reject","resolve","then","_return","ret","throw","_throw","thr","parseJSONInBatches","_x","_x2","_parseJSONInBatches","_wrapAsyncGenerator2","_regenerator","mark","_callee","binaryAsyncIterator","options","_options$json","metadata","_ref","jsonpaths","isFirstChunk","schema","shape","tableBatchBuilder","parser","_iteratorAbruptCompletion","_didIteratorError","_iteratorError","_iterator","_step","chunk","rows","_jsonpath","initialBatch","_iterator2","_step2","row","_batch2","_batch","jsonpath","batch","finalBatch","wrap","_callee$","_context","prev","makeTextDecoderIterator","json","TableBatchBuilder","StreamingJSONParser","_awaitAsyncGenerator2","sent","write","getStreamingJsonPathAsString","batchType","data","bytesUsed","container","getPartialResult","addRow","getFullBatch","t0","finish","chunkComplete","t1","getFinalBatch","stop","rebuildJsonObject","assert","topLevelObject","streamingPath","JSONPath","setFieldAtPath"],"sources":["../../../src/lib/parse-json-in-batches.ts"],"sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions} from '../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from './parser/streaming-json-parser';\nimport JSONPath from './jsonpath/jsonpath';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport default async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<Batch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // TODO fix Schema deduction\n const schema = null; // new Schema([]);\n const shape = options?.json?.shape || 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: Batch = {\n // Common fields\n shape,\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield 
initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: Batch = {\n shape,\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n\nexport function rebuildJsonObject(batch, data) {\n // Last batch will have this special type and will provide all the root object of the parsed file\n assert(batch.batchType === 'final-result');\n\n // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects\n if (batch.jsonpath === '$') {\n return data;\n }\n\n // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object\n if (batch.jsonpath && batch.jsonpath.length > 1) {\n const topLevelObject = batch.container;\n const streamingPath = new JSONPath(batch.jsonpath);\n streamingPath.setFieldAtPath(topLevelObject, data);\n return topLevelObject;\n }\n\n // No jsonpath, in this case nothing was streamed.\n return batch.container;\n}\n"],"mappings":";;;;;;;;;;;;AAEA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AACA,IAAAE,oBAAA,GAAAC,sBAAA,CAAAH,OAAA;AACA,IAAAI,UAAA,GAAAD,sBAAA,CAAAH,OAAA;AAA2C,SAAAK,2BAAAC,CAAA,EAAAC,cAAA,QAAAC,EAAA,UAAAC,MAAA,oBAAAH,CAAA,CAAAG,MAAA,CAAAC,QAAA,KAAAJ,CAAA,qBAAAE,EAAA,QAAAG,KAAA,CAAAC,OAAA,CAAAN,CAAA,MAAAE,EAAA,GAAAK,2BAAA,CAAAP,CAAA,MAAAC,cAAA,IAAAD,CAAA,WAAAA,CAAA,CAAAQ,MAAA,qBAAAN,EAAA,EAAAF,CAAA,GAAAE,EAAA,MAAAO,CAAA,UAAAC,CAAA,YAAAA,EAAA,eAAAC,CAAA,EAAAD,CAAA,EAAAE,CAAA,WAAAA,EAAA,QAAAH,CAAA,IAAAT,CAAA,CAAAQ,MAAA,WAAAK,IAAA,mBAAAA,IAAA,SAAAC,KAAA,EAAAd,CAAA,CAAAS,CAAA,UAAAM,CAAA,WAAAA,EAAAC,EAAA,UAAAA,EAAA,KAAAC,CAAA,EAAAP,CAAA,gBAAAQ,SAAA,iJAAAC,gBAAA,SAAAC,MAAA,UAAAC,GAAA,WAAAV,CAAA,WAAAA,EAAA,IAAAT,EAAA,GAAAA,EAAA,CAAAoB,IAAA,CAAAtB,CAAA,MAAAY,CAAA,WAAAA,EAAA,QAAAW,IAAA,GAAArB,EAAA,CAAAsB,IAAA,IAAAL,gBAAA,GAAAI,IAAA,CAAAV,IAAA,SAAAU,IAAA,KAAAR,CAAA,WAAAA,EAAAU,GAAA,IAAAL,MAAA,SAAAC,GAAA,GAAAI,GAAA,KAAAR,CAAA,WAAAA,EAAA,eAAAE,gBAAA,IAAAjB,EAAA,CAAAwB,MAAA,UAAAxB,EAAA,CAAAwB,MAAA,oBAAAN,MAAA,QAAAC,GAAA;AAAA,SAAAd,4BAAAP,CAAA,EAAA2B,MAAA,SAAA3B,CAAA,qBAAAA,CAAA,sBAAA4B,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA,OAAAf,CAAA,GAAAiB,MAAA,CAAAC,SAAA,CAAAC,QAAA,CAAAT,IAAA,CAAAtB,CAAA,EAAAgC,KAAA,aAAApB,CAAA,iBAAAZ,CAAA,CAAAiC,WAAA,EAAArB,CAAA,GAAAZ,CAAA,CAAAiC,WAAA,CAAAC,IAAA,MAAAtB,CAAA,cAAAA,CAAA,mBAAAP,KAAA,CAAA8B,IAAA,CAAAnC,CAAA,OAAAY,CAAA,+DAAAwB,IAAA,CAAAxB,CAAA,UAAAgB,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA;AAAA,SAAAC,kBAAAS,GAAA,EAAAC,GAAA,QAAAA,GAAA,YAAAA,GAAA,GAAAD,GAAA,CAAA7B,MAAA,EAAA8B,GAAA,GAAAD,GAAA,CAAA7B,MAAA,WAAAC,CAAA,MAAA8B,IAAA,OAAAlC,KAAA,CAAAiC,GAAA,GAAA7B,CAAA,GAAA6B,GAAA,EAAA7B,CAAA,IAAA8B,IAAA,CAAA9B,CAAA,IAAA4B,GAAA,CAAA5B,CAAA,UAAA8B,IAAA;AAAA,SAAAC,QAAAC,MAAA,EAAAC,cAAA,QAAAC,IAAA,GAAAd,MAAA,CAAAc,IAAA,CAAAF,MAAA,OAAAZ,MAAA,CAAAe,qBAAA,QAAAC,OAAA,GAAAhB,MAAA,CAAAe,qBAAA,CAAAH,MAAA,GAAAC,cAAA,KAAAG,OAAA,GAAAA,OAAA,CAAAC,MAAA,WAAAC,GAAA,WAAAlB,MAAA,CAA
AmB,wBAAA,CAAAP,MAAA,EAAAM,GAAA,EAAAE,UAAA,OAAAN,IAAA,CAAAO,IAAA,CAAAC,KAAA,CAAAR,IAAA,EAAAE,OAAA,YAAAF,IAAA;AAAA,SAAAS,cAAAC,MAAA,aAAA5C,CAAA,MAAAA,CAAA,GAAA6C,SAAA,CAAA9C,MAAA,EAAAC,CAAA,UAAA8C,MAAA,WAAAD,SAAA,CAAA7C,CAAA,IAAA6C,SAAA,CAAA7C,CAAA,QAAAA,CAAA,OAAA+B,OAAA,CAAAX,MAAA,CAAA0B,MAAA,OAAAC,OAAA,WAAAC,GAAA,QAAAC,gBAAA,CAAAC,OAAA,EAAAN,MAAA,EAAAI,GAAA,EAAAF,MAAA,CAAAE,GAAA,SAAA5B,MAAA,CAAA+B,yBAAA,GAAA/B,MAAA,CAAAgC,gBAAA,CAAAR,MAAA,EAAAxB,MAAA,CAAA+B,yBAAA,CAAAL,MAAA,KAAAf,OAAA,CAAAX,MAAA,CAAA0B,MAAA,GAAAC,OAAA,WAAAC,GAAA,IAAA5B,MAAA,CAAAiC,cAAA,CAAAT,MAAA,EAAAI,GAAA,EAAA5B,MAAA,CAAAmB,wBAAA,CAAAO,MAAA,EAAAE,GAAA,iBAAAJ,MAAA;AAAA,SAAAU,eAAAC,QAAA,QAAAC,MAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,KAAA,iCAAAjE,MAAA,KAAA+D,KAAA,GAAA/D,MAAA,CAAAkE,aAAA,EAAAF,IAAA,GAAAhE,MAAA,CAAAC,QAAA,GAAAgE,KAAA,WAAAF,KAAA,aAAAD,MAAA,GAAAD,QAAA,CAAAE,KAAA,WAAAD,MAAA,CAAA3C,IAAA,CAAA0C,QAAA,OAAAG,IAAA,aAAAF,MAAA,GAAAD,QAAA,CAAAG,IAAA,eAAAG,qBAAA,CAAAL,MAAA,CAAA3C,IAAA,CAAA0C,QAAA,IAAAE,KAAA,sBAAAC,IAAA,6BAAAjD,SAAA;AAAA,SAAAoD,sBAAA3D,CAAA,aAAA4D,kCAAAC,CAAA,QAAA3C,MAAA,CAAA2C,CAAA,MAAAA,CAAA,SAAAC,OAAA,CAAAC,MAAA,KAAAxD,SAAA,CAAAsD,CAAA,+BAAA3D,IAAA,GAAA2D,CAAA,CAAA3D,IAAA,SAAA4D,OAAA,CAAAE,OAAA,CAAAH,CAAA,CAAA1D,KAAA,EAAA8D,IAAA,WAAA9D,KAAA,aAAAA,KAAA,EAAAA,KAAA,EAAAD,IAAA,EAAAA,IAAA,iBAAAyD,qBAAA,YAAAA,sBAAA3D,CAAA,SAAAA,CAAA,GAAAA,CAAA,OAAAC,CAAA,GAAAD,CAAA,CAAAa,IAAA,KAAA8C,qBAAA,CAAAxC,SAAA,KAAAnB,CAAA,QAAAC,CAAA,QAAAY,IAAA,WAAAA,KAAA,WAAA+C,iCAAA,MAAA3D,CAAA,CAAAuC,KAAA,MAAAxC,CAAA,EAAA2C,SAAA,OAAA5B,MAAA,WAAAmD,QAAA/D,KAAA,QAAAgE,GAAA,QAAAnE,CAAA,CAAAe,MAAA,oBAAAoD,GAAA,GAAAL,OAAA,CAAAE,OAAA,GAAA7D,KAAA,EAAAA,KAAA,EAAAD,IAAA,UAAA0D,iCAAA,CAAAO,GAAA,CAAA3B,KAAA,MAAAxC,CAAA,EAAA2C,SAAA,OAAAyB,KAAA,WAAAC,OAAAlE,KAAA,QAAAmE,GAAA,QAAAtE,CAAA,CAAAe,MAAA,oBAAAuD,GAAA,GAAAR,OAAA,CAAAC,MAAA,CAAA5D,KAAA,IAAAyD,iCAAA,CAAAU,GAAA,CAAA9B,KAAA,MAAAxC,CAAA,EAAA2C,SAAA,aAAAgB,qBAAA,CAAA3D,CAAA;AAAA,SAIZuE,kBAAkBA,CAAAC,EAAA,EAAAC,GAAA;EAAA,OAAAC,mBAAA,CAAAlC,KAAA,OAAAG,SAAA;AAAA;AAAA,SAAA+B,oBAAA;EAAAA,mBAAA,OAAAC,oBAAA,CAAA3B,OAAA,EAAA4B,YAAA,CAAA5B,OAAA,CAAA6B,IAAA,CAAlC,SAAAC,QACbC,mBAAuE,EACvEC,OAA0B;IAAA,IAAAC,aAAA;IAAA,IAAAvB,aAAA,EAAAwB,QAAA,EAAAC,IAAA,EAAAC,SAAA,EAAAC,YAAA,EAAAC,MAAA,EAAAC,KAAA,EAAAC,iBAAA,EAAAC,MAAA,EAAAC,yBAAA,EAAAC,iBAAA,EAAAC,cAAA,EAAAC,SAAA,EAAAC,KAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,SAAA,EAAAC,YAAA,EAAAC,UAAA,EAAAC,MAAA,EAAAC,GAAA,EAAAC,OAAA,EAAAC,MAAA,EAAAC,QAAA,EAAAC,KAAA,EAAAC,UAAA;IAAA,OAAA9B,YAAA,CAAA5B,OAAA,CAAA2D,IAAA,UAAAC,SAAAC,QAAA;MAAA,kBAAAA,QAAA,CAAAC,IAAA,GAAAD,QAAA,CAAAhG,IAAA;QAAA;UAEpB6C,aAAa,GAAG,IAAAqD,oCAAuB,EAAChC,mBAAmB,CAAC;UAE3DG,QAAQ,GAAIF,OAAO,CAAnBE,QAAQ;UAAAC,IAAA,GACKH,OAAO,CAACgC,IAAI,IAAI,CAAC,CAAC,EAA/B5B,SAAS,GAAAD,IAAA,CAATC,SAAS;UAEZC,YAAqB,GAAG,IAAI;UAG1BC,MAAM,GAAG,IAAI;UACbC,KAAK,GAAG,CAAAP,OAAO,aAAPA,OAAO,wBAAAC,aAAA,GAAPD,OAAO,CAAEgC,IAAI,cAAA/B,aAAA,uBAAbA,aAAA,CAAeM,KAAK,KAAI,WAAW;UAE3CC,iBAAiB,GAAG,IAAIyB,yBAAiB,CAAC3B,MAAM,EAAA7C,aAAA,CAAAA,aAAA,KACjDuC,OAAO;YACVO,KAAK,EAALA;UAAK,EACN,CAAC;UAEIE,MAAM,GAAG,IAAIyB,4BAAmB,CAAC;YAAC9B,SAAS,EAATA;UAAS,CAAC,CAAC;UAAAM,yBAAA;UAAAC,iBAAA;UAAAkB,QAAA,CAAAC,IAAA;UAAAjB,SAAA,GAAAzC,cAAA,CAEzBM,aAAa;QAAA;UAAAmD,QAAA,CAAAhG,IAAA;UAAA,WAAAsG,qBAAA,CAAAnE,OAAA,EAAA6C,SAAA,CAAAhF,IAAA;QAAA;UAAA,MAAA6E,yBAAA,KAAAI,KAAA,GAAAe,QAAA,CAAAO,IAAA,EAAAlH,IAAA;YAAA2G,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAtBkF,KAAK,GAAAD,KAAA,CAAA3F,KAAA;UACd6F,IAAI,GAAGP,MAAM,CAAC4B,KAAK,CAACtB,KAAK,CAAC;UAE1BS,SAAQ,GAAGR,IAAI,CAACnG,MAAM,GAAG,CAAC,IAAI4F,MAAM,CAAC6B,4BAA4B,CAAC,CAAC;UAAA,MAErEtB,IAAI,CAACnG,MAAM,GAAG,CAAC,IAAIwF,YAAY;YAAAwB,QAAA,CAAAh
G,IAAA;YAAA;UAAA;UAAA,KAC7BqE,QAAQ;YAAA2B,QAAA,CAAAhG,IAAA;YAAA;UAAA;UACJqF,YAAmB,GAAG;YAE1BX,KAAK,EAALA,KAAK;YACLgC,SAAS,EAAE,gBAAgB;YAC3BC,IAAI,EAAE,EAAE;YACR3H,MAAM,EAAE,CAAC;YACT4H,SAAS,EAAE,CAAC;YAEZC,SAAS,EAAEjC,MAAM,CAACkC,gBAAgB,CAAC,CAAC;YACpCnB,QAAQ,EAARA;UACF,CAAC;UAAAK,QAAA,CAAAhG,IAAA;UACD,OAAMqF,YAAY;QAAA;UAEpBb,YAAY,GAAG,KAAK;QAAC;UAAAc,UAAA,GAAA/G,0BAAA,CAKL4G,IAAI;UAAAa,QAAA,CAAAC,IAAA;UAAAX,UAAA,CAAAnG,CAAA;QAAA;UAAA,KAAAoG,MAAA,GAAAD,UAAA,CAAAlG,CAAA,IAAAC,IAAA;YAAA2G,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAXwF,GAAG,GAAAD,MAAA,CAAAjG,KAAA;UACZqF,iBAAiB,CAACoC,MAAM,CAACvB,GAAG,CAAC;UAEvBI,OAAK,GAAGjB,iBAAiB,CAACqC,YAAY,CAAC;YAACrB,QAAQ,EAARA;UAAQ,CAAC,CAAC;UAAA,KACpDC,OAAK;YAAAI,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAAgG,QAAA,CAAAhG,IAAA;UACP,OAAM4F,OAAK;QAAA;UAAAI,QAAA,CAAAhG,IAAA;UAAA;QAAA;UAAAgG,QAAA,CAAAhG,IAAA;UAAA;QAAA;UAAAgG,QAAA,CAAAC,IAAA;UAAAD,QAAA,CAAAiB,EAAA,GAAAjB,QAAA;UAAAV,UAAA,CAAA/F,CAAA,CAAAyG,QAAA,CAAAiB,EAAA;QAAA;UAAAjB,QAAA,CAAAC,IAAA;UAAAX,UAAA,CAAA7F,CAAA;UAAA,OAAAuG,QAAA,CAAAkB,MAAA;QAAA;UAIfvC,iBAAiB,CAACwC,aAAa,CAACjC,KAAK,CAAC;UAChCU,MAAK,GAAGjB,iBAAiB,CAACqC,YAAY,CAAC;YAACrB,QAAQ,EAARA;UAAQ,CAAC,CAAC;UAAA,KACpDC,MAAK;YAAAI,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAAgG,QAAA,CAAAhG,IAAA;UACP,OAAM4F,MAAK;QAAA;UAAAf,yBAAA;UAAAmB,QAAA,CAAAhG,IAAA;UAAA;QAAA;UAAAgG,QAAA,CAAAhG,IAAA;UAAA;QAAA;UAAAgG,QAAA,CAAAC,IAAA;UAAAD,QAAA,CAAAoB,EAAA,GAAApB,QAAA;UAAAlB,iBAAA;UAAAC,cAAA,GAAAiB,QAAA,CAAAoB,EAAA;QAAA;UAAApB,QAAA,CAAAC,IAAA;UAAAD,QAAA,CAAAC,IAAA;UAAA,MAAApB,yBAAA,IAAAG,SAAA,CAAA9E,MAAA;YAAA8F,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAAgG,QAAA,CAAAhG,IAAA;UAAA,WAAAsG,qBAAA,CAAAnE,OAAA,EAAA6C,SAAA,CAAA9E,MAAA;QAAA;UAAA8F,QAAA,CAAAC,IAAA;UAAA,KAAAnB,iBAAA;YAAAkB,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAA,MAAA+E,cAAA;QAAA;UAAA,OAAAiB,QAAA,CAAAkB,MAAA;QAAA;UAAA,OAAAlB,QAAA,CAAAkB,MAAA;QAAA;UAKTvB,QAAQ,GAAGf,MAAM,CAAC6B,4BAA4B,CAAC,CAAC;UAChDb,KAAK,GAAGjB,iBAAiB,CAAC0C,aAAa,CAAC;YAAC1B,QAAQ,EAARA;UAAQ,CAAC,CAAC;UAAA,KACrDC,KAAK;YAAAI,QAAA,CAAAhG,IAAA;YAAA;UAAA;UAAAgG,QAAA,CAAAhG,IAAA;UACP,OAAM4F,KAAK;QAAA;UAAA,KAGTvB,QAAQ;YAAA2B,QAAA,CAAAhG,IAAA;YAAA;UAAA;UACJ6F,UAAiB,GAAG;YACxBnB,KAAK,EAALA,KAAK;YACLgC,SAAS,EAAE,cAAc;YACzBG,SAAS,EAAEjC,MAAM,CAACkC,gBAAgB,CAAC,CAAC;YACpCnB,QAAQ,EAAEf,MAAM,CAAC6B,4BAA4B,CAAC,CAAC;YAC/CE,IAAI,EAAE,EAAE;YACR3H,MAAM,EAAE;UAEV,CAAC;UAAAgH,QAAA,CAAAhG,IAAA;UACD,OAAM6F,UAAU;QAAA;QAAA;UAAA,OAAAG,QAAA,CAAAsB,IAAA;MAAA;IAAA,GAAArD,OAAA;EAAA,CAEnB;EAAA,OAAAJ,mBAAA,CAAAlC,KAAA,OAAAG,SAAA;AAAA;AAEM,SAASyF,iBAAiBA,CAAC3B,KAAK,EAAEe,IAAI,EAAE;EAE7C,IAAAa,mBAAM,EAAC5B,KAAK,CAACc,SAAS,KAAK,cAAc,CAAC;EAG1C,IAAId,KAAK,CAACD,QAAQ,KAAK,GAAG,EAAE;IAC1B,OAAOgB,IAAI;EACb;EAGA,IAAIf,KAAK,CAACD,QAAQ,IAAIC,KAAK,CAACD,QAAQ,CAAC3G,MAAM,GAAG,CAAC,EAAE;IAC/C,IAAMyI,cAAc,GAAG7B,KAAK,CAACiB,SAAS;IACtC,IAAMa,aAAa,GAAG,IAAIC,kBAAQ,CAAC/B,KAAK,CAACD,QAAQ,CAAC;IAClD+B,aAAa,CAACE,cAAc,CAACH,cAAc,EAAEd,IAAI,CAAC;IAClD,OAAOc,cAAc;EACvB;EAGA,OAAO7B,KAAK,CAACiB,SAAS;AACxB"}
+
{"version":3,"file":"parse-json-in-batches.js","names":["_schema","require","_loaderUtils","_streamingJsonParser","_interopRequireDefault","_jsonpath","parseJSONInBatches","binaryAsyncIterator","options","_options$json","asyncIterator","makeTextDecoderIterator","metadata","jsonpaths","json","isFirstChunk","schema","shape","tableBatchBuilder","TableBatchBuilder","parser","StreamingJSONParser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","initialBatch","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch","rebuildJsonObject","assert","topLevelObject","streamingPath","JSONPath","setFieldAtPath"],"sources":["../../../src/lib/parse-json-in-batches.ts"],"sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions} from '../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from './parser/streaming-json-parser';\nimport JSONPath from './jsonpath/jsonpath';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport default async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<Batch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // TODO fix Schema deduction\n const schema = null; // new Schema([]);\n const shape = options?.json?.shape || 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: Batch = {\n // Common fields\n shape,\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: Batch = {\n shape,\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n\nexport function rebuildJsonObject(batch, data) {\n // Last batch will have this special type and will provide all the root object of the parsed file\n assert(batch.batchType === 'final-result');\n\n // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects\n if (batch.jsonpath === 
'$') {\n return data;\n }\n\n // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object\n if (batch.jsonpath && batch.jsonpath.length > 1) {\n const topLevelObject = batch.container;\n const streamingPath = new JSONPath(batch.jsonpath);\n streamingPath.setFieldAtPath(topLevelObject, data);\n return topLevelObject;\n }\n\n // No jsonpath, in this case nothing was streamed.\n return batch.container;\n}\n"],"mappings":";;;;;;;;AAEA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AACA,IAAAE,oBAAA,GAAAC,sBAAA,CAAAH,OAAA;AACA,IAAAI,SAAA,GAAAD,sBAAA,CAAAH,OAAA;AAIe,gBAAgBK,kBAAkBA,CAC/CC,mBAAuE,EACvEC,OAA0B,EACJ;EAAA,IAAAC,aAAA;EACtB,MAAMC,aAAa,GAAG,IAAAC,oCAAuB,EAACJ,mBAAmB,CAAC;EAElE,MAAM;IAACK;EAAQ,CAAC,GAAGJ,OAAO;EAC1B,MAAM;IAACK;EAAS,CAAC,GAAGL,OAAO,CAACM,IAAI,IAAI,CAAC,CAAC;EAEtC,IAAIC,YAAqB,GAAG,IAAI;EAGhC,MAAMC,MAAM,GAAG,IAAI;EACnB,MAAMC,KAAK,GAAG,CAAAT,OAAO,aAAPA,OAAO,wBAAAC,aAAA,GAAPD,OAAO,CAAEM,IAAI,cAAAL,aAAA,uBAAbA,aAAA,CAAeQ,KAAK,KAAI,WAAW;EAEjD,MAAMC,iBAAiB,GAAG,IAAIC,yBAAiB,CAACH,MAAM,EAAE;IACtD,GAAGR,OAAO;IACVS;EACF,CAAC,CAAC;EAEF,MAAMG,MAAM,GAAG,IAAIC,4BAAmB,CAAC;IAACR;EAAS,CAAC,CAAC;EAEnD,WAAW,MAAMS,KAAK,IAAIZ,aAAa,EAAE;IACvC,MAAMa,IAAI,GAAGH,MAAM,CAACI,KAAK,CAACF,KAAK,CAAC;IAEhC,MAAMG,QAAQ,GAAGF,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIN,MAAM,CAACO,4BAA4B,CAAC,CAAC;IAEzE,IAAIJ,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIX,YAAY,EAAE;MACnC,IAAIH,QAAQ,EAAE;QACZ,MAAMgB,YAAmB,GAAG;UAE1BX,KAAK;UACLY,SAAS,EAAE,gBAAgB;UAC3BC,IAAI,EAAE,EAAE;UACRJ,MAAM,EAAE,CAAC;UACTK,SAAS,EAAE,CAAC;UAEZC,SAAS,EAAEZ,MAAM,CAACa,gBAAgB,CAAC,CAAC;UACpCR;QACF,CAAC;QACD,MAAMG,YAAY;MACpB;MACAb,YAAY,GAAG,KAAK;IAEtB;IAGA,KAAK,MAAMmB,GAAG,IAAIX,IAAI,EAAE;MACtBL,iBAAiB,CAACiB,MAAM,CAACD,GAAG,CAAC;MAE7B,MAAME,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;QAACZ;MAAQ,CAAC,CAAC;MACxD,IAAIW,KAAK,EAAE;QACT,MAAMA,KAAK;MACb;IACF;IAEAlB,iBAAiB,CAACoB,aAAa,CAAChB,KAAK,CAAC;IACtC,MAAMc,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;MAACZ;IAAQ,CAAC,CAAC;IACxD,IAAIW,KAAK,EAAE;MACT,MAAMA,KAAK;IACb;EACF;EAGA,MAAMX,QAAQ,GAAGL,MAAM,CAACO,4BAA4B,CAAC,CAAC;EACtD,MAAMS,KAAK,GAAGlB,iBAAiB,CAACqB,aAAa,CAAC;IAACd;EAAQ,CAAC,CAAC;EACzD,IAAIW,KAAK,EAAE;IACT,MAAMA,KAAK;EACb;EAEA,IAAIxB,QAAQ,EAAE;IACZ,MAAM4B,UAAiB,GAAG;MACxBvB,KAAK;MACLY,SAAS,EAAE,cAAc;MACzBG,SAAS,EAAEZ,MAAM,CAACa,gBAAgB,CAAC,CAAC;MACpCR,QAAQ,EAAEL,MAAM,CAACO,4BAA4B,CAAC,CAAC;MAC/CG,IAAI,EAAE,EAAE;MACRJ,MAAM,EAAE;IAEV,CAAC;IACD,MAAMc,UAAU;EAClB;AACF;AAEO,SAASC,iBAAiBA,CAACL,KAAK,EAAEN,IAAI,EAAE;EAE7C,IAAAY,mBAAM,EAACN,KAAK,CAACP,SAAS,KAAK,cAAc,CAAC;EAG1C,IAAIO,KAAK,CAACX,QAAQ,KAAK,GAAG,EAAE;IAC1B,OAAOK,IAAI;EACb;EAGA,IAAIM,KAAK,CAACX,QAAQ,IAAIW,KAAK,CAACX,QAAQ,CAACC,MAAM,GAAG,CAAC,EAAE;IAC/C,MAAMiB,cAAc,GAAGP,KAAK,CAACJ,SAAS;IACtC,MAAMY,aAAa,GAAG,IAAIC,iBAAQ,CAACT,KAAK,CAACX,QAAQ,CAAC;IAClDmB,aAAa,CAACE,cAAc,CAACH,cAAc,EAAEb,IAAI,CAAC;IAClD,OAAOa,cAAc;EACvB;EAGA,OAAOP,KAAK,CAACJ,SAAS;AACxB"}

package/dist/es5/lib/parse-json.js
@@ -1,15 +1,13 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = parseJSONSync;
-var _typeof2 = _interopRequireDefault(require("@babel/runtime/helpers/typeof"));
 function parseJSONSync(jsonText, options) {
   try {
     var _options$json;
-[old line 12 truncated in this rendering]
+    const json = JSON.parse(jsonText);
     if ((_options$json = options.json) !== null && _options$json !== void 0 && _options$json.table) {
       return getFirstArray(json) || json;
     }
@@ -22,10 +20,9 @@ function getFirstArray(json) {
   if (Array.isArray(json)) {
     return json;
   }
-  if (json &&
-  for (
-[old line 27 truncated in this rendering]
-      var array = getFirstArray(value);
+  if (json && typeof json === 'object') {
+    for (const value of Object.values(json)) {
+      const array = getFirstArray(value);
       if (array) {
         return array;
       }
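
getFirstArray above walks the parsed object depth-first over Object.values and returns the first array it finds; that is what the json.table option relies on to treat a wrapped document as a table. A small usage sketch, assuming the standard @loaders.gl/core parse entry point honors this option (not part of this diff):

    import {parse} from '@loaders.gl/core';
    import {JSONLoader} from '@loaders.gl/json';

    // With json.table set, the first nested array is returned instead of the wrapper object.
    const rows = await parse('{"meta": {"count": 2}, "rows": [{"a": 1}, {"a": 2}]}', JSONLoader, {
      json: {table: true}
    });
    // rows is [{a: 1}, {a: 2}]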

package/dist/es5/lib/parse-json.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-json.js","names":["parseJSONSync","jsonText","options","_options$json","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","
+
{"version":3,"file":"parse-json.js","names":["parseJSONSync","jsonText","options","_options$json","json","JSON","parse","table","getFirstArray","error","Error","Array","isArray","value","Object","values","array"],"sources":["../../../src/lib/parse-json.ts"],"sourcesContent":["import type {JSONLoaderOptions} from '../json-loader';\n\nexport default function parseJSONSync(jsonText: string, options: JSONLoaderOptions) {\n try {\n const json = JSON.parse(jsonText);\n if (options.json?.table) {\n return getFirstArray(json) || json;\n }\n return json;\n } catch (error) {\n throw new Error('JSONLoader: failed to parse JSON');\n }\n}\n\nfunction getFirstArray(json) {\n if (Array.isArray(json)) {\n return json;\n }\n if (json && typeof json === 'object') {\n for (const value of Object.values(json)) {\n const array = getFirstArray(value);\n if (array) {\n return array;\n }\n }\n }\n return null;\n}\n"],"mappings":";;;;;;AAEe,SAASA,aAAaA,CAACC,QAAgB,EAAEC,OAA0B,EAAE;EAClF,IAAI;IAAA,IAAAC,aAAA;IACF,MAAMC,IAAI,GAAGC,IAAI,CAACC,KAAK,CAACL,QAAQ,CAAC;IACjC,KAAAE,aAAA,GAAID,OAAO,CAACE,IAAI,cAAAD,aAAA,eAAZA,aAAA,CAAcI,KAAK,EAAE;MACvB,OAAOC,aAAa,CAACJ,IAAI,CAAC,IAAIA,IAAI;IACpC;IACA,OAAOA,IAAI;EACb,CAAC,CAAC,OAAOK,KAAK,EAAE;IACd,MAAM,IAAIC,KAAK,CAAC,kCAAkC,CAAC;EACrD;AACF;AAEA,SAASF,aAAaA,CAACJ,IAAI,EAAE;EAC3B,IAAIO,KAAK,CAACC,OAAO,CAACR,IAAI,CAAC,EAAE;IACvB,OAAOA,IAAI;EACb;EACA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IACpC,KAAK,MAAMS,KAAK,IAAIC,MAAM,CAACC,MAAM,CAACX,IAAI,CAAC,EAAE;MACvC,MAAMY,KAAK,GAAGR,aAAa,CAACK,KAAK,CAAC;MAClC,IAAIG,KAAK,EAAE;QACT,OAAOA,KAAK;MACd;IACF;EACF;EACA,OAAO,IAAI;AACb"}

package/dist/es5/lib/parse-ndjson-in-batches.js
@@ -1,114 +1,40 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = parseNDJSONInBatches;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
 var _schema = require("@loaders.gl/schema");
 var _loaderUtils = require("@loaders.gl/loader-utils");
-function
-[old lines 15-34 truncated in this rendering]
-          _iteratorAbruptCompletion = false;
-          _didIteratorError = false;
-          _context.prev = 8;
-          _iterator = _asyncIterator(numberedLineIterator);
-        case 10:
-          _context.next = 12;
-          return (0, _awaitAsyncGenerator2.default)(_iterator.next());
-        case 12:
-          if (!(_iteratorAbruptCompletion = !(_step = _context.sent).done)) {
-            _context.next = 30;
-            break;
-          }
-          _step$value = _step.value, counter = _step$value.counter, line = _step$value.line;
-          _context.prev = 14;
-          row = JSON.parse(line);
-          tableBatchBuilder.addRow(row);
-          tableBatchBuilder.chunkComplete(line);
-          _batch = tableBatchBuilder.getFullBatch();
-          if (!_batch) {
-            _context.next = 22;
-            break;
-          }
-          _context.next = 22;
-          return _batch;
-        case 22:
-          _context.next = 27;
-          break;
-        case 24:
-          _context.prev = 24;
-          _context.t0 = _context["catch"](14);
-          throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
-        case 27:
-          _iteratorAbruptCompletion = false;
-          _context.next = 10;
-          break;
-        case 30:
-          _context.next = 36;
-          break;
-        case 32:
-          _context.prev = 32;
-          _context.t1 = _context["catch"](8);
-          _didIteratorError = true;
-          _iteratorError = _context.t1;
-        case 36:
-          _context.prev = 36;
-          _context.prev = 37;
-          if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
-            _context.next = 41;
-            break;
-          }
-          _context.next = 41;
-          return (0, _awaitAsyncGenerator2.default)(_iterator.return());
-        case 41:
-          _context.prev = 41;
-          if (!_didIteratorError) {
-            _context.next = 44;
-            break;
-          }
-          throw _iteratorError;
-        case 44:
-          return _context.finish(41);
-        case 45:
-          return _context.finish(36);
-        case 46:
-          batch = tableBatchBuilder.getFinalBatch();
-          if (!batch) {
-            _context.next = 50;
-            break;
-          }
-          _context.next = 50;
-          return batch;
-        case 50:
-        case "end":
-          return _context.stop();
+async function* parseNDJSONInBatches(binaryAsyncIterator, options) {
+  const textIterator = (0, _loaderUtils.makeTextDecoderIterator)(binaryAsyncIterator);
+  const lineIterator = (0, _loaderUtils.makeLineIterator)(textIterator);
+  const numberedLineIterator = (0, _loaderUtils.makeNumberedLineIterator)(lineIterator);
+  const schema = null;
+  const shape = 'row-table';
+  const tableBatchBuilder = new _schema.TableBatchBuilder(schema, {
+    ...options,
+    shape
+  });
+  for await (const {
+    counter,
+    line
+  } of numberedLineIterator) {
+    try {
+      const row = JSON.parse(line);
+      tableBatchBuilder.addRow(row);
+      tableBatchBuilder.chunkComplete(line);
+      const batch = tableBatchBuilder.getFullBatch();
+      if (batch) {
+        yield batch;
      }
-}
-[old lines 111-112 truncated in this rendering]
+    } catch (error) {
+      throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
+    }
+  }
+  const batch = tableBatchBuilder.getFinalBatch();
+  if (batch) {
+    yield batch;
+  }
 }
 //# sourceMappingURL=parse-ndjson-in-batches.js.map
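
The NDJSON batch parser now composes makeTextDecoderIterator, makeLineIterator and makeNumberedLineIterator from @loaders.gl/loader-utils and yields plain row-table batches, one JSON.parse per input line. A minimal consumption sketch, assuming the standard @loaders.gl/core parseInBatches entry point and a hypothetical data.ndjson URL (neither is part of this diff):

    import {parseInBatches} from '@loaders.gl/core';
    import {NDJSONLoader} from '@loaders.gl/json';

    async function countRows(url: string): Promise<number> {
      // Each line of the response is parsed as one row and grouped into batches.
      const batches = await parseInBatches(await fetch(url), NDJSONLoader);
      let count = 0;
      for await (const batch of batches) {
        count += batch.length; // batch.data holds the rows of a 'row-table' batch
      }
      return count;
    }

    // countRows('https://example.com/data.ndjson');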

package/dist/es5/lib/parse-ndjson-in-batches.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-ndjson-in-batches.js","names":["_schema","require","_loaderUtils","
+
{"version":3,"file":"parse-ndjson-in-batches.js","names":["_schema","require","_loaderUtils","parseNDJSONInBatches","binaryAsyncIterator","options","textIterator","makeTextDecoderIterator","lineIterator","makeLineIterator","numberedLineIterator","makeNumberedLineIterator","schema","shape","tableBatchBuilder","TableBatchBuilder","counter","line","row","JSON","parse","addRow","chunkComplete","batch","getFullBatch","error","Error","concat","getFinalBatch"],"sources":["../../../src/lib/parse-ndjson-in-batches.ts"],"sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {\n LoaderOptions,\n makeLineIterator,\n makeNumberedLineIterator,\n makeTextDecoderIterator\n} from '@loaders.gl/loader-utils';\n\nexport default async function* parseNDJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options?: LoaderOptions\n): AsyncIterable<Batch> {\n const textIterator = makeTextDecoderIterator(binaryAsyncIterator);\n const lineIterator = makeLineIterator(textIterator);\n const numberedLineIterator = makeNumberedLineIterator(lineIterator);\n\n const schema = null;\n const shape = 'row-table';\n // @ts-ignore\n const tableBatchBuilder = new TableBatchBuilder(schema, {\n ...options,\n shape\n });\n\n for await (const {counter, line} of numberedLineIterator) {\n try {\n const row = JSON.parse(line);\n tableBatchBuilder.addRow(row);\n tableBatchBuilder.chunkComplete(line);\n const batch = tableBatchBuilder.getFullBatch();\n if (batch) {\n yield batch;\n }\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter}`);\n }\n }\n\n const batch = tableBatchBuilder.getFinalBatch();\n if (batch) {\n yield batch;\n }\n}\n"],"mappings":";;;;;;AACA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAOe,gBAAgBE,oBAAoBA,CACjDC,mBAAuE,EACvEC,OAAuB,EACD;EACtB,MAAMC,YAAY,GAAG,IAAAC,oCAAuB,EAACH,mBAAmB,CAAC;EACjE,MAAMI,YAAY,GAAG,IAAAC,6BAAgB,EAACH,YAAY,CAAC;EACnD,MAAMI,oBAAoB,GAAG,IAAAC,qCAAwB,EAACH,YAAY,CAAC;EAEnE,MAAMI,MAAM,GAAG,IAAI;EACnB,MAAMC,KAAK,GAAG,WAAW;EAEzB,MAAMC,iBAAiB,GAAG,IAAIC,yBAAiB,CAACH,MAAM,EAAE;IACtD,GAAGP,OAAO;IACVQ;EACF,CAAC,CAAC;EAEF,WAAW,MAAM;IAACG,OAAO;IAAEC;EAAI,CAAC,IAAIP,oBAAoB,EAAE;IACxD,IAAI;MACF,MAAMQ,GAAG,GAAGC,IAAI,CAACC,KAAK,CAACH,IAAI,CAAC;MAC5BH,iBAAiB,CAACO,MAAM,CAACH,GAAG,CAAC;MAC7BJ,iBAAiB,CAACQ,aAAa,CAACL,IAAI,CAAC;MACrC,MAAMM,KAAK,GAAGT,iBAAiB,CAACU,YAAY,CAAC,CAAC;MAC9C,IAAID,KAAK,EAAE;QACT,MAAMA,KAAK;MACb;IACF,CAAC,CAAC,OAAOE,KAAK,EAAE;MACd,MAAM,IAAIC,KAAK,+CAAAC,MAAA,CAA+CX,OAAO,CAAE,CAAC;IAC1E;EACF;EAEA,MAAMO,KAAK,GAAGT,iBAAiB,CAACc,aAAa,CAAC,CAAC;EAC/C,IAAIL,KAAK,EAAE;IACT,MAAMA,KAAK;EACb;AACF"}

package/dist/es5/lib/parse-ndjson.js
@@ -5,8 +5,8 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.default = parseNDJSONSync;
 function parseNDJSONSync(ndjsonText) {
-[old line 8 truncated in this rendering]
-  return lines.map(
+  const lines = ndjsonText.trim().split('\n');
+  return lines.map((line, counter) => {
     try {
       return JSON.parse(line);
     } catch (error) {

package/dist/es5/lib/parse-ndjson.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-ndjson.js","names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error","concat"],"sources":["../../../src/lib/parse-ndjson.ts"],"sourcesContent":["export default function parseNDJSONSync(ndjsonText: string) {\n const lines = ndjsonText.trim().split('\\n');\n return lines.map((line, counter) => {\n try {\n return JSON.parse(line);\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);\n }\n });\n}\n"],"mappings":";;;;;;AAAe,SAASA,eAAeA,CAACC,UAAkB,EAAE;EAC1D,
+
{"version":3,"file":"parse-ndjson.js","names":["parseNDJSONSync","ndjsonText","lines","trim","split","map","line","counter","JSON","parse","error","Error","concat"],"sources":["../../../src/lib/parse-ndjson.ts"],"sourcesContent":["export default function parseNDJSONSync(ndjsonText: string) {\n const lines = ndjsonText.trim().split('\\n');\n return lines.map((line, counter) => {\n try {\n return JSON.parse(line);\n } catch (error) {\n throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);\n }\n });\n}\n"],"mappings":";;;;;;AAAe,SAASA,eAAeA,CAACC,UAAkB,EAAE;EAC1D,MAAMC,KAAK,GAAGD,UAAU,CAACE,IAAI,CAAC,CAAC,CAACC,KAAK,CAAC,IAAI,CAAC;EAC3C,OAAOF,KAAK,CAACG,GAAG,CAAC,CAACC,IAAI,EAAEC,OAAO,KAAK;IAClC,IAAI;MACF,OAAOC,IAAI,CAACC,KAAK,CAACH,IAAI,CAAC;IACzB,CAAC,CAAC,OAAOI,KAAK,EAAE;MACd,MAAM,IAAIC,KAAK,+CAAAC,MAAA,CAA+CL,OAAO,GAAG,CAAC,CAAE,CAAC;IAC9E;EACF,CAAC,CAAC;AACJ"}