@loaders.gl/json 4.0.0-alpha.1 → 4.0.0-alpha.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +2 -2
- package/dist/dist.min.js +3094 -0
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/geojson-loader.js +157 -0
- package/dist/es5/geojson-loader.js.map +1 -0
- package/dist/es5/geojson-writer.js +27 -0
- package/dist/es5/geojson-writer.js.map +1 -0
- package/dist/es5/index.js +69 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/json-loader.js +67 -0
- package/dist/es5/json-loader.js.map +1 -0
- package/dist/es5/json-writer.js +42 -0
- package/dist/es5/json-writer.js.map +1 -0
- package/dist/es5/lib/clarinet/clarinet.js +446 -0
- package/dist/es5/lib/clarinet/clarinet.js.map +1 -0
- package/dist/es5/lib/encoders/encode-utils.js +42 -0
- package/dist/es5/lib/encoders/encode-utils.js.map +1 -0
- package/dist/es5/lib/encoders/geojson-encoder.js +178 -0
- package/dist/es5/lib/encoders/geojson-encoder.js.map +1 -0
- package/dist/es5/lib/encoders/json-encoder.js +30 -0
- package/dist/es5/lib/encoders/json-encoder.js.map +1 -0
- package/dist/es5/lib/encoders/utf8-encoder.js +54 -0
- package/dist/es5/lib/encoders/utf8-encoder.js.map +1 -0
- package/dist/es5/lib/json-parser/json-parser.js +140 -0
- package/dist/es5/lib/json-parser/json-parser.js.map +1 -0
- package/dist/es5/lib/json-parser/streaming-json-parser.js +123 -0
- package/dist/es5/lib/json-parser/streaming-json-parser.js.map +1 -0
- package/dist/es5/lib/jsonpath/jsonpath.js +119 -0
- package/dist/es5/lib/jsonpath/jsonpath.js.map +1 -0
- package/dist/es5/lib/parsers/parse-json-in-batches.js +206 -0
- package/dist/es5/lib/parsers/parse-json-in-batches.js.map +1 -0
- package/dist/es5/lib/parsers/parse-json.js +38 -0
- package/dist/es5/lib/parsers/parse-json.js.map +1 -0
- package/dist/es5/lib/parsers/parse-ndjson-in-batches.js +114 -0
- package/dist/es5/lib/parsers/parse-ndjson-in-batches.js.map +1 -0
- package/dist/es5/lib/parsers/parse-ndjson.js +19 -0
- package/dist/es5/lib/parsers/parse-ndjson.js.map +1 -0
- package/dist/es5/ndgeoson-loader.js +54 -0
- package/dist/es5/ndgeoson-loader.js.map +1 -0
- package/dist/es5/ndjson-loader.js +44 -0
- package/dist/es5/ndjson-loader.js.map +1 -0
- package/dist/es5/workers/geojson-worker.js +6 -0
- package/dist/es5/workers/geojson-worker.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/geojson-loader.js +79 -0
- package/dist/esm/geojson-loader.js.map +1 -0
- package/dist/esm/geojson-writer.js +18 -0
- package/dist/esm/geojson-writer.js.map +1 -0
- package/dist/esm/index.js +9 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/json-loader.js +48 -0
- package/dist/esm/json-loader.js.map +1 -0
- package/dist/esm/json-writer.js +14 -0
- package/dist/esm/json-writer.js.map +1 -0
- package/dist/esm/lib/clarinet/LICENSE +28 -0
- package/dist/esm/lib/clarinet/clarinet.js +415 -0
- package/dist/esm/lib/clarinet/clarinet.js.map +1 -0
- package/dist/esm/lib/encoders/encode-utils.js +31 -0
- package/dist/esm/lib/encoders/encode-utils.js.map +1 -0
- package/dist/esm/lib/encoders/geojson-encoder.js +98 -0
- package/dist/esm/lib/encoders/geojson-encoder.js.map +1 -0
- package/dist/esm/lib/encoders/json-encoder.js +12 -0
- package/dist/esm/lib/encoders/json-encoder.js.map +1 -0
- package/dist/esm/lib/encoders/utf8-encoder.js +32 -0
- package/dist/esm/lib/encoders/utf8-encoder.js.map +1 -0
- package/dist/{lib/parser → esm/lib/json-parser}/json-parser.js +4 -22
- package/dist/esm/lib/json-parser/json-parser.js.map +1 -0
- package/dist/{lib/parser → esm/lib/json-parser}/streaming-json-parser.js +2 -23
- package/dist/esm/lib/json-parser/streaming-json-parser.js.map +1 -0
- package/dist/esm/lib/jsonpath/jsonpath.js +67 -0
- package/dist/esm/lib/jsonpath/jsonpath.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-json-in-batches.js +19 -15
- package/dist/esm/lib/parsers/parse-json-in-batches.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-json.js +4 -9
- package/dist/esm/lib/parsers/parse-json.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-ndjson-in-batches.js +3 -6
- package/dist/esm/lib/parsers/parse-ndjson-in-batches.js.map +1 -0
- package/dist/esm/lib/parsers/parse-ndjson.js +13 -0
- package/dist/esm/lib/parsers/parse-ndjson.js.map +1 -0
- package/dist/esm/ndgeoson-loader.js +27 -0
- package/dist/esm/ndgeoson-loader.js.map +1 -0
- package/dist/esm/ndjson-loader.js +18 -0
- package/dist/esm/ndjson-loader.js.map +1 -0
- package/dist/esm/workers/geojson-worker.js +4 -0
- package/dist/esm/workers/geojson-worker.js.map +1 -0
- package/dist/geojson-loader.d.ts +16 -0
- package/dist/geojson-loader.d.ts.map +1 -0
- package/dist/geojson-loader.js +65 -69
- package/dist/geojson-worker.js +1016 -232
- package/dist/geojson-writer.d.ts +6 -0
- package/dist/geojson-writer.d.ts.map +1 -0
- package/dist/geojson-writer.js +22 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +24 -6
- package/dist/json-loader.d.ts +17 -0
- package/dist/json-loader.d.ts.map +1 -0
- package/dist/json-loader.js +33 -38
- package/dist/json-writer.d.ts +6 -0
- package/dist/json-writer.d.ts.map +1 -0
- package/dist/json-writer.js +18 -0
- package/dist/lib/clarinet/clarinet.d.ts +74 -0
- package/dist/lib/clarinet/clarinet.d.ts.map +1 -0
- package/dist/lib/clarinet/clarinet.js +510 -493
- package/dist/lib/encoders/encode-utils.d.ts +19 -0
- package/dist/lib/encoders/encode-utils.d.ts.map +1 -0
- package/dist/lib/encoders/encode-utils.js +47 -0
- package/dist/lib/encoders/geojson-encoder.d.ts +14 -0
- package/dist/lib/encoders/geojson-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/geojson-encoder.js +104 -0
- package/dist/lib/encoders/json-encoder.d.ts +16 -0
- package/dist/lib/encoders/json-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/json-encoder.js +22 -0
- package/dist/lib/encoders/utf8-encoder.d.ts +12 -0
- package/dist/lib/encoders/utf8-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/utf8-encoder.js +32 -0
- package/dist/lib/json-parser/json-parser.d.ts +22 -0
- package/dist/lib/json-parser/json-parser.d.ts.map +1 -0
- package/dist/lib/json-parser/json-parser.js +98 -0
- package/dist/lib/json-parser/streaming-json-parser.d.ts +37 -0
- package/dist/lib/json-parser/streaming-json-parser.d.ts.map +1 -0
- package/dist/lib/json-parser/streaming-json-parser.js +100 -0
- package/dist/lib/jsonpath/jsonpath.d.ts +32 -0
- package/dist/lib/jsonpath/jsonpath.d.ts.map +1 -0
- package/dist/lib/jsonpath/jsonpath.js +81 -78
- package/dist/lib/parsers/parse-json-in-batches.d.ts +5 -0
- package/dist/lib/parsers/parse-json-in-batches.d.ts.map +1 -0
- package/dist/lib/parsers/parse-json-in-batches.js +100 -0
- package/dist/lib/parsers/parse-json.d.ts +4 -0
- package/dist/lib/parsers/parse-json.d.ts.map +1 -0
- package/dist/lib/parsers/parse-json.js +32 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.d.ts +4 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.d.ts.map +1 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.js +36 -0
- package/dist/lib/parsers/parse-ndjson.d.ts +3 -0
- package/dist/lib/parsers/parse-ndjson.d.ts.map +1 -0
- package/dist/lib/parsers/parse-ndjson.js +17 -0
- package/dist/ndgeoson-loader.d.ts +34 -0
- package/dist/ndgeoson-loader.d.ts.map +1 -0
- package/dist/ndgeoson-loader.js +37 -0
- package/dist/ndjson-loader.d.ts +4 -0
- package/dist/ndjson-loader.d.ts.map +1 -0
- package/dist/ndjson-loader.js +26 -31
- package/dist/workers/geojson-worker.d.ts +2 -0
- package/dist/workers/geojson-worker.d.ts.map +1 -0
- package/dist/workers/geojson-worker.js +5 -4
- package/package.json +10 -10
- package/src/geojson-loader.ts +10 -6
- package/src/geojson-writer.ts +27 -0
- package/src/index.ts +10 -0
- package/src/json-loader.ts +15 -24
- package/src/json-writer.ts +24 -0
- package/src/lib/encoders/encode-utils.ts +54 -0
- package/src/lib/encoders/geojson-encoder.ts +139 -0
- package/src/lib/encoders/json-encoder.ts +30 -0
- package/src/lib/encoders/utf8-encoder.ts +35 -0
- package/src/lib/{parse-json-in-batches.ts → parsers/parse-json-in-batches.ts} +30 -8
- package/src/lib/{parse-json.ts → parsers/parse-json.ts} +7 -3
- package/src/lib/{parse-ndjson-in-batches.ts → parsers/parse-ndjson-in-batches.ts} +1 -1
- package/src/lib/parsers/parse-ndjson.ts +15 -0
- package/src/ndgeoson-loader.ts +48 -0
- package/src/ndjson-loader.ts +20 -27
- package/dist/bundle.js.map +0 -1
- package/dist/geojson-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/json-loader.js.map +0 -1
- package/dist/jsonl-loader.js +0 -2
- package/dist/jsonl-loader.js.map +0 -1
- package/dist/lib/clarinet/clarinet.js.map +0 -1
- package/dist/lib/jsonpath/jsonpath.js.map +0 -1
- package/dist/lib/parse-json-in-batches.js.map +0 -1
- package/dist/lib/parse-json.js.map +0 -1
- package/dist/lib/parse-ndjson-in-batches.js.map +0 -1
- package/dist/lib/parse-ndjson.js +0 -11
- package/dist/lib/parse-ndjson.js.map +0 -1
- package/dist/lib/parser/json-parser.js.map +0 -1
- package/dist/lib/parser/streaming-json-parser.js.map +0 -1
- package/dist/ndjson-loader.js.map +0 -1
- package/dist/workers/geojson-worker.js.map +0 -1
- package/src/jsonl-loader.ts +0 -53
- package/src/lib/parse-ndjson.ts +0 -10
- /package/dist/{lib → es5/lib}/clarinet/LICENSE +0 -0
- /package/src/lib/{parser → json-parser}/json-parser.ts +0 -0
- /package/src/lib/{parser → json-parser}/streaming-json-parser.ts +0 -0
@@ -0,0 +1,12 @@
+import { makeRowIterator } from '@loaders.gl/schema';
+export function encodeTableAsJSON(table) {
+  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  const shape = options.shape || 'object-row-table';
+  const strings = [];
+  const rowIterator = makeRowIterator(table, shape);
+  for (const row of rowIterator) {
+    strings.push(JSON.stringify(row));
+  }
+  return "[".concat(strings.join(','), "]");
+}
+//# sourceMappingURL=json-encoder.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"json-encoder.js","sources":["../../../../src/lib/encoders/json-encoder.ts"], … generated mappings and inlined sourcesContent omitted …}
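For reference, a minimal sketch of how the new encoder above might be called. The table contents, the import path, and the expected output string are assumptions for illustration; only encodeTableAsJSON() and its shape option come from the diff.

```ts
// Sketch only: assumes makeTableFromData() builds a table that makeRowIterator() accepts,
// and that the encoder is reachable via this (hypothetical) deep import.
import {makeTableFromData} from '@loaders.gl/schema';
import {encodeTableAsJSON} from '@loaders.gl/json/dist/esm/lib/encoders/json-encoder';

const table = makeTableFromData([
  {id: 1, name: 'alpha'},
  {id: 2, name: 'beta'}
]);

// 'object-row-table' serializes each row as a JSON object; 'array-row-table' would emit arrays.
const json = encodeTableAsJSON(table, {shape: 'object-row-table'});
// Roughly: '[{"id":1,"name":"alpha"},{"id":2,"name":"beta"}]'
```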
@@ -0,0 +1,32 @@
+import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+export class Utf8ArrayBufferEncoder {
+  constructor(chunkSize) {
+    _defineProperty(this, "chunkSize", void 0);
+    _defineProperty(this, "strings", []);
+    _defineProperty(this, "totalLength", 0);
+    _defineProperty(this, "textEncoder", new TextEncoder());
+    this.chunkSize = chunkSize;
+  }
+  push() {
+    for (var _len = arguments.length, strings = new Array(_len), _key = 0; _key < _len; _key++) {
+      strings[_key] = arguments[_key];
+    }
+    for (const string of strings) {
+      this.strings.push(string);
+      this.totalLength += string.length;
+    }
+  }
+  isFull() {
+    return this.totalLength >= this.chunkSize;
+  }
+  getArrayBufferBatch() {
+    return this.textEncoder.encode(this.getStringBatch()).buffer;
+  }
+  getStringBatch() {
+    const stringChunk = this.strings.join('');
+    this.strings = [];
+    this.totalLength = 0;
+    return stringChunk;
+  }
+}
+//# sourceMappingURL=utf8-encoder.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"utf8-encoder.js","sources":["../../../../src/lib/encoders/utf8-encoder.ts"], … generated mappings and inlined sourcesContent omitted …}
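A usage sketch for the Utf8ArrayBufferEncoder added above, as its API reads from this hunk; the chunk size and the pushed strings are invented.

```ts
// Sketch: buffer encoded rows and flush them as UTF-8 ArrayBuffer batches of ~64 KB.
import {Utf8ArrayBufferEncoder} from '@loaders.gl/json/dist/esm/lib/encoders/utf8-encoder'; // hypothetical deep import

const encoder = new Utf8ArrayBufferEncoder(64 * 1024);
for (const row of ['{"id":1}\n', '{"id":2}\n']) {
  encoder.push(row);
  if (encoder.isFull()) {
    const buffer = encoder.getArrayBufferBatch(); // joins, UTF-8 encodes, and resets the buffer
    // ...emit buffer as one batch
  }
}
const tail = encoder.getStringBatch(); // remaining strings, if any, as one string
```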
@@ -4,18 +4,13 @@ import JSONPath from '../jsonpath/jsonpath';
 export default class JSONParser {
   constructor(options) {
     _defineProperty(this, "parser", void 0);
-
     _defineProperty(this, "result", undefined);
-
     _defineProperty(this, "previousStates", []);
-
     _defineProperty(this, "currentState", Object.freeze({
       container: [],
       key: null
     }));
-
     _defineProperty(this, "jsonpath", new JSONPath());
-
     this.reset();
     this.parser = new ClarinetParser({
       onready: () => {
@@ -25,7 +20,6 @@ export default class JSONParser {
       },
       onopenobject: name => {
         this._openObject({});
-
         if (typeof name !== 'undefined') {
           this.parser.emit('onkey', name);
         }
@@ -55,7 +49,6 @@ export default class JSONParser {
       ...options
     });
   }
-
   reset() {
     this.result = undefined;
     this.previousStates = [];
@@ -65,21 +58,17 @@ export default class JSONParser {
     });
     this.jsonpath = new JSONPath();
   }
-
   write(chunk) {
     this.parser.write(chunk);
   }
-
   close() {
     this.parser.close();
   }
-
   _pushOrSet(value) {
     const {
       container,
       key
     } = this.currentState;
-
     if (key !== null) {
       container[key] = value;
       this.currentState.key = null;
@@ -87,12 +76,10 @@ export default class JSONParser {
       container.push(value);
     }
   }
-
-
+  _openArray() {
+    let newContainer = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : [];
     this.jsonpath.push(null);
-
     this._pushOrSet(newContainer);
-
     this.previousStates.push(this.currentState);
     this.currentState = {
       container: newContainer,
@@ -100,17 +87,14 @@ export default class JSONParser {
       key: null
     };
   }
-
   _closeArray() {
     this.jsonpath.pop();
     this.currentState = this.previousStates.pop();
   }
-
-
+  _openObject() {
+    let newContainer = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
     this.jsonpath.push(null);
-
     this._pushOrSet(newContainer);
-
     this.previousStates.push(this.currentState);
     this.currentState = {
       container: newContainer,
@@ -118,11 +102,9 @@ export default class JSONParser {
       key: null
     };
   }
-
   _closeObject() {
     this.jsonpath.pop();
     this.currentState = this.previousStates.pop();
   }
-
 }
 //# sourceMappingURL=json-parser.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"json-parser.js","sources":["../../../../src/lib/json-parser/json-parser.ts"], … generated mappings and inlined sourcesContent omitted …}
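To keep the reshuffled class readable, here is a rough sketch of the write/close/result flow it implements; the JSON chunks are invented and the empty options object simply falls back to the Clarinet defaults.

```ts
// Sketch: JSONParser assembles a JavaScript value from Clarinet events as text chunks arrive.
import JSONParser from '@loaders.gl/json/dist/esm/lib/json-parser/json-parser'; // hypothetical deep import

const parser = new JSONParser({});
parser.write('{"type":"FeatureCollection",');
parser.write('"features":[]}');
parser.close();
// After the onend callback fires, the assembled document sits on parser.result:
// {type: 'FeatureCollection', features: []}
```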
@@ -2,92 +2,71 @@ import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
 import { default as JSONParser } from './json-parser';
 import JSONPath from '../jsonpath/jsonpath';
 export default class StreamingJSONParser extends JSONParser {
-  constructor(
+  constructor() {
+    let options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
     super({
       onopenarray: () => {
         if (!this.streamingArray) {
           if (this._matchJSONPath()) {
             this.streamingJsonPath = this.getJsonPath().clone();
             this.streamingArray = [];
-
             this._openArray(this.streamingArray);
-
             return;
           }
         }
-
         this._openArray();
       },
       onopenobject: name => {
         if (!this.topLevelObject) {
           this.topLevelObject = {};
-
           this._openObject(this.topLevelObject);
         } else {
           this._openObject({});
         }
-
         if (typeof name !== 'undefined') {
           this.parser.emit('onkey', name);
         }
       }
     });
-
     _defineProperty(this, "jsonPaths", void 0);
-
     _defineProperty(this, "streamingJsonPath", null);
-
     _defineProperty(this, "streamingArray", null);
-
     _defineProperty(this, "topLevelObject", null);
-
     const jsonpaths = options.jsonpaths || [];
     this.jsonPaths = jsonpaths.map(jsonpath => new JSONPath(jsonpath));
   }
-
   write(chunk) {
     super.write(chunk);
     let array = [];
-
     if (this.streamingArray) {
       array = [...this.streamingArray];
       this.streamingArray.length = 0;
     }
-
     return array;
   }
-
   getPartialResult() {
     return this.topLevelObject;
   }
-
   getStreamingJsonPath() {
     return this.streamingJsonPath;
   }
-
   getStreamingJsonPathAsString() {
     return this.streamingJsonPath && this.streamingJsonPath.toString();
   }
-
   getJsonPath() {
     return this.jsonpath;
   }
-
   _matchJSONPath() {
     const currentPath = this.getJsonPath();
-
     if (this.jsonPaths.length === 0) {
       return true;
     }
-
     for (const jsonPath of this.jsonPaths) {
       if (jsonPath.equals(currentPath)) {
         return true;
       }
     }
-
     return false;
   }
-
 }
 //# sourceMappingURL=streaming-json-parser.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"streaming-json-parser.js","sources":["../../../../src/lib/json-parser/streaming-json-parser.ts"], … generated mappings and inlined sourcesContent omitted …}
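The streaming subclass above is easier to follow with a sketch of how it is driven by parseJSONInBatches(): each write() drains whatever rows completed inside the first array matching one of the configured jsonpaths. The path and chunks below are invented.

```ts
import StreamingJSONParser from '@loaders.gl/json/dist/esm/lib/json-parser/streaming-json-parser'; // hypothetical deep import

const parser = new StreamingJSONParser({jsonpaths: ['$.features']});

let rows = parser.write('{"type":"FeatureCollection","features":[{"id":1},');
// rows ≈ [{id: 1}] — completed array elements are drained on every write()
rows = parser.write('{"id":2}]}');
// rows ≈ [{id: 2}]
parser.close();

parser.getStreamingJsonPathAsString(); // '$.features'
parser.getPartialResult();             // the wrapper object, with the streamed array left empty
```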
@@ -0,0 +1,67 @@
+import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+export default class JSONPath {
+  constructor() {
+    let path = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
+    _defineProperty(this, "path", void 0);
+    this.path = ['$'];
+    if (path instanceof JSONPath) {
+      this.path = [...path.path];
+      return;
+    }
+    if (Array.isArray(path)) {
+      this.path.push(...path);
+      return;
+    }
+    if (typeof path === 'string') {
+      this.path = path.split('.');
+      if (this.path[0] !== '$') {
+        throw new Error('JSONPaths must start with $');
+      }
+    }
+  }
+  clone() {
+    return new JSONPath(this);
+  }
+  toString() {
+    return this.path.join('.');
+  }
+  push(name) {
+    this.path.push(name);
+  }
+  pop() {
+    return this.path.pop();
+  }
+  set(name) {
+    this.path[this.path.length - 1] = name;
+  }
+  equals(other) {
+    if (!this || !other || this.path.length !== other.path.length) {
+      return false;
+    }
+    for (let i = 0; i < this.path.length; ++i) {
+      if (this.path[i] !== other.path[i]) {
+        return false;
+      }
+    }
+    return true;
+  }
+  setFieldAtPath(object, value) {
+    const path = [...this.path];
+    path.shift();
+    const field = path.pop();
+    for (const component of path) {
+      object = object[component];
+    }
+    object[field] = value;
+  }
+  getFieldAtPath(object) {
+    const path = [...this.path];
+    path.shift();
+    const field = path.pop();
+    for (const component of path) {
+      object = object[component];
+    }
+    return object[field];
+  }
+}
+//# sourceMappingURL=jsonpath.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"jsonpath.js","sources":["../../../../src/lib/jsonpath/jsonpath.ts"], … generated mappings and inlined sourcesContent omitted …}
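A short sketch of the minimal JSONPath subset the class above supports ('$'-rooted dot paths only); the objects are invented.

```ts
import JSONPath from '@loaders.gl/json/dist/esm/lib/jsonpath/jsonpath'; // hypothetical deep import

const path = new JSONPath('$.features');
path.toString();                          // '$.features'
path.equals(new JSONPath(['features']));  // true — array form is appended after the '$' root

// setFieldAtPath()/getFieldAtPath() walk one property per component below the root:
const container: Record<string, unknown> = {features: []};
path.setFieldAtPath(container, [{id: 1}]);
path.getFieldAtPath(container);           // [{id: 1}]
```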
@@ -1,9 +1,9 @@
 import { TableBatchBuilder } from '@loaders.gl/schema';
-import { makeTextDecoderIterator } from '@loaders.gl/loader-utils';
-import StreamingJSONParser from '
-
+import { assert, makeTextDecoderIterator } from '@loaders.gl/loader-utils';
+import StreamingJSONParser from '../json-parser/streaming-json-parser';
+import JSONPath from '../jsonpath/jsonpath';
+export async function* parseJSONInBatches(binaryAsyncIterator, options) {
   var _options$json;
-
   const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);
   const {
     metadata
@@ -14,17 +14,16 @@ export default async function* parseJSONInBatches(binaryAsyncIterator, options)
   let isFirstChunk = true;
   const schema = null;
   const shape = (options === null || options === void 0 ? void 0 : (_options$json = options.json) === null || _options$json === void 0 ? void 0 : _options$json.shape) || 'row-table';
-  const tableBatchBuilder = new TableBatchBuilder(schema, {
+  const tableBatchBuilder = new TableBatchBuilder(schema, {
+    ...options,
     shape
   });
   const parser = new StreamingJSONParser({
     jsonpaths
   });
-
   for await (const chunk of asyncIterator) {
     const rows = parser.write(chunk);
     const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
-
     if (rows.length > 0 && isFirstChunk) {
       if (metadata) {
         const initialBatch = {
@@ -38,40 +37,32 @@ export default async function* parseJSONInBatches(binaryAsyncIterator, options)
         };
         yield initialBatch;
       }
-
       isFirstChunk = false;
     }
-
     for (const row of rows) {
       tableBatchBuilder.addRow(row);
       const batch = tableBatchBuilder.getFullBatch({
         jsonpath
       });
-
       if (batch) {
         yield batch;
       }
     }
-
     tableBatchBuilder.chunkComplete(chunk);
     const batch = tableBatchBuilder.getFullBatch({
       jsonpath
     });
-
     if (batch) {
       yield batch;
     }
   }
-
   const jsonpath = parser.getStreamingJsonPathAsString();
   const batch = tableBatchBuilder.getFinalBatch({
     jsonpath
   });
-
   if (batch) {
     yield batch;
   }
-
   if (metadata) {
     const finalBatch = {
       shape,
@@ -84,4 +75,17 @@ export default async function* parseJSONInBatches(binaryAsyncIterator, options)
     yield finalBatch;
   }
 }
+export function rebuildJsonObject(batch, data) {
+  assert(batch.batchType === 'final-result');
+  if (batch.jsonpath === '$') {
+    return data;
+  }
+  if (batch.jsonpath && batch.jsonpath.length > 1) {
+    const topLevelObject = batch.container;
+    const streamingPath = new JSONPath(batch.jsonpath);
+    streamingPath.setFieldAtPath(topLevelObject, data);
+    return topLevelObject;
+  }
+  return batch.container;
+}
 //# sourceMappingURL=parse-json-in-batches.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"parse-json-in-batches.js","sources":["../../../../src/lib/parsers/parse-json-in-batches.ts"], … generated mappings and inlined sourcesContent omitted …}
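Putting the batched parser and the new rebuildJsonObject() helper together, a hedged consumption sketch. The input iterable, jsonpath, and option values are invented; metadata: true is what makes the 'final-result' batch appear at all.

```ts
async function readDocument(chunks: AsyncIterable<ArrayBuffer>) {
  const rows: unknown[] = [];
  let finalBatch: any;
  const options = {metadata: true, json: {jsonpaths: ['$.features']}};
  for await (const batch of parseJSONInBatches(chunks, options)) {
    if (batch.batchType === 'final-result') {
      finalBatch = batch;                // carries the wrapper object and the streamed jsonpath
    } else {
      rows.push(...(batch.data || []));  // regular table batches carry the rows
    }
  }
  // Stitch the streamed rows back into the wrapper; for a top-level array this is a no-op.
  return finalBatch ? rebuildJsonObject(finalBatch, rows) : rows;
}
```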
@@ -1,34 +1,29 @@
-
+import { makeTableFromData } from '@loaders.gl/schema';
+export function parseJSONSync(jsonText, options) {
   try {
     var _options$json;
-
     const json = JSON.parse(jsonText);
-
     if ((_options$json = options.json) !== null && _options$json !== void 0 && _options$json.table) {
-
+      const data = getFirstArray(json) || json;
+      return makeTableFromData(data);
     }
-
     return json;
   } catch (error) {
     throw new Error('JSONLoader: failed to parse JSON');
   }
 }
-
 function getFirstArray(json) {
   if (Array.isArray(json)) {
     return json;
   }
-
   if (json && typeof json === 'object') {
     for (const value of Object.values(json)) {
       const array = getFirstArray(value);
-
       if (array) {
         return array;
       }
     }
   }
-
   return null;
 }
 //# sourceMappingURL=parse-json.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"parse-json.js","sources":["../../../../src/lib/parsers/parse-json.ts"], … generated mappings and inlined sourcesContent omitted …}
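The two behaviors of the reworked parseJSONSync(), sketched with invented inputs:

```ts
// Default: plain JSON.parse() result when options.json.table is not set.
parseJSONSync('{"a": 1}', {});                                            // {a: 1}

// With json.table: the first (recursively found) array becomes a row table.
parseJSONSync('{"rows": [{"a": 1}, {"a": 2}]}', {json: {table: true}});
// → makeTableFromData([{a: 1}, {a: 2}])
```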
@@ -1,15 +1,15 @@
 import { TableBatchBuilder } from '@loaders.gl/schema';
 import { makeLineIterator, makeNumberedLineIterator, makeTextDecoderIterator } from '@loaders.gl/loader-utils';
-export
+export async function* parseNDJSONInBatches(binaryAsyncIterator, options) {
   const textIterator = makeTextDecoderIterator(binaryAsyncIterator);
   const lineIterator = makeLineIterator(textIterator);
   const numberedLineIterator = makeNumberedLineIterator(lineIterator);
   const schema = null;
   const shape = 'row-table';
-  const tableBatchBuilder = new TableBatchBuilder(schema, {
+  const tableBatchBuilder = new TableBatchBuilder(schema, {
+    ...options,
     shape
   });
-
   for await (const {
     counter,
     line
@@ -19,7 +19,6 @@ export default async function* parseNDJSONInBatches(binaryAsyncIterator, options
       tableBatchBuilder.addRow(row);
       tableBatchBuilder.chunkComplete(line);
       const batch = tableBatchBuilder.getFullBatch();
-
       if (batch) {
         yield batch;
       }
@@ -27,9 +26,7 @@ export default async function* parseNDJSONInBatches(binaryAsyncIterator, options
       throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter));
     }
   }
-
   const batch = tableBatchBuilder.getFinalBatch();
-
   if (batch) {
     yield batch;
   }

@@ -0,0 +1 @@
+{"version":3,"file":"parse-ndjson-in-batches.js","sources":["../../../../src/lib/parsers/parse-ndjson-in-batches.ts"], … generated mappings and inlined sourcesContent omitted …}
@@ -0,0 +1,13 @@
+import { makeTableFromData } from '@loaders.gl/schema';
+export function parseNDJSONSync(ndjsonText) {
+  const lines = ndjsonText.trim().split('\n');
+  const parsedLines = lines.map((line, counter) => {
+    try {
+      return JSON.parse(line);
+    } catch (error) {
+      throw new Error("NDJSONLoader: failed to parse JSON on line ".concat(counter + 1));
+    }
+  });
+  return makeTableFromData(parsedLines);
+}
+//# sourceMappingURL=parse-ndjson.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"parse-ndjson.js","sources":["../../../../src/lib/parsers/parse-ndjson.ts"], … generated mappings and inlined sourcesContent omitted …}
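A one-line sketch of the new synchronous NDJSON parser above (input invented; error messages report 1-based line numbers):

```ts
const table = parseNDJSONSync('{"id":1}\n{"id":2}\n');
// → makeTableFromData([{id: 1}, {id: 2}])
```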
@@ -0,0 +1,27 @@
+import { parseNDJSONSync } from './lib/parsers/parse-ndjson';
+import { parseNDJSONInBatches } from './lib/parsers/parse-ndjson-in-batches';
+const VERSION = typeof "4.0.0-alpha.11" !== 'undefined' ? "4.0.0-alpha.11" : 'latest';
+const DEFAULT_NDGEOJSON_LOADER_OPTIONS = {
+  geojson: {
+    shape: 'object-row-table'
+  },
+  gis: {
+    format: 'geojson'
+  }
+};
+export const NDJSONLoader = {
+  name: 'NDJSON',
+  id: 'ndjson',
+  module: 'json',
+  version: VERSION,
+  extensions: ['ndjson', 'ndgeojson'],
+  mimeTypes: ['application/geo+x-ndjson', 'application/geo+x-ldjson', 'application/jsonlines', 'application/geo+json-seq', 'application/x-ndjson'],
+  category: 'table',
+  text: true,
+  parse: async arrayBuffer => parseNDJSONSync(new TextDecoder().decode(arrayBuffer)),
+  parseTextSync: parseNDJSONSync,
+  parseInBatches: parseNDJSONInBatches,
+  options: DEFAULT_NDGEOJSON_LOADER_OPTIONS
+};
+export const _typecheckNDJSONLoader = NDJSONLoader;
+//# sourceMappingURL=ndgeoson-loader.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"ndgeoson-loader.js","sources":["../../src/ndgeoson-loader.ts"], … generated mappings and inlined sourcesContent omitted …}
@@ -0,0 +1,18 @@
+import { parseNDJSONSync } from './lib/parsers/parse-ndjson';
+import { parseNDJSONInBatches } from './lib/parsers/parse-ndjson-in-batches';
+const VERSION = typeof "4.0.0-alpha.11" !== 'undefined' ? "4.0.0-alpha.11" : 'latest';
+export const NDJSONLoader = {
+  name: 'NDJSON',
+  id: 'ndjson',
+  module: 'json',
+  version: VERSION,
+  extensions: ['ndjson', 'jsonl'],
+  mimeTypes: ['application/x-ndjson', 'application/jsonlines', 'application/json-seq'],
+  category: 'table',
+  text: true,
+  parse: async arrayBuffer => parseNDJSONSync(new TextDecoder().decode(arrayBuffer)),
+  parseTextSync: parseNDJSONSync,
+  parseInBatches: parseNDJSONInBatches,
+  options: {}
+};
+//# sourceMappingURL=ndjson-loader.js.map
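Finally, a hedged sketch of how the rebuilt NDJSONLoader is typically consumed through @loaders.gl/core; the URL is made up, and it assumes the updated src/index.ts re-exports the loader from '@loaders.gl/json'.

```ts
import {load, loadInBatches} from '@loaders.gl/core';
import {NDJSONLoader} from '@loaders.gl/json';

// Whole-file parse: resolves to the row table produced by parseNDJSONSync().
const table = await load('https://example.com/data.ndjson', NDJSONLoader);

// Batched parse, backed by parseNDJSONInBatches(), for large files.
for await (const batch of await loadInBatches('https://example.com/data.ndjson', NDJSONLoader)) {
  console.log(batch.length, 'rows in this batch');
}
```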