@loaders.gl/json 3.4.11 → 3.4.13

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
@@ -1,100 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.rebuildJsonObject = void 0;
- const schema_1 = require("@loaders.gl/schema");
- const loader_utils_1 = require("@loaders.gl/loader-utils");
- const streaming_json_parser_1 = __importDefault(require("./parser/streaming-json-parser"));
- const jsonpath_1 = __importDefault(require("./jsonpath/jsonpath"));
- // TODO - support batch size 0 = no batching/single batch?
- // eslint-disable-next-line max-statements, complexity
- async function* parseJSONInBatches(binaryAsyncIterator, options) {
-     const asyncIterator = (0, loader_utils_1.makeTextDecoderIterator)(binaryAsyncIterator);
-     const { metadata } = options;
-     const { jsonpaths } = options.json || {};
-     let isFirstChunk = true;
-     // TODO fix Schema deduction
-     const schema = null; // new Schema([]);
-     const shape = options?.json?.shape || 'row-table';
-     // @ts-ignore
-     const tableBatchBuilder = new schema_1.TableBatchBuilder(schema, {
-         ...options,
-         shape
-     });
-     const parser = new streaming_json_parser_1.default({ jsonpaths });
-     for await (const chunk of asyncIterator) {
-         const rows = parser.write(chunk);
-         const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();
-         if (rows.length > 0 && isFirstChunk) {
-             if (metadata) {
-                 const initialBatch = {
-                     // Common fields
-                     shape,
-                     batchType: 'partial-result',
-                     data: [],
-                     length: 0,
-                     bytesUsed: 0,
-                     // JSON additions
-                     container: parser.getPartialResult(),
-                     jsonpath
-                 };
-                 yield initialBatch;
-             }
-             isFirstChunk = false;
-             // schema = deduceSchema(rows);
-         }
-         // Add the row
-         for (const row of rows) {
-             tableBatchBuilder.addRow(row);
-             // If a batch has been completed, emit it
-             const batch = tableBatchBuilder.getFullBatch({ jsonpath });
-             if (batch) {
-                 yield batch;
-             }
-         }
-         tableBatchBuilder.chunkComplete(chunk);
-         const batch = tableBatchBuilder.getFullBatch({ jsonpath });
-         if (batch) {
-             yield batch;
-         }
-     }
-     // yield final batch
-     const jsonpath = parser.getStreamingJsonPathAsString();
-     const batch = tableBatchBuilder.getFinalBatch({ jsonpath });
-     if (batch) {
-         yield batch;
-     }
-     if (metadata) {
-         const finalBatch = {
-             shape,
-             batchType: 'final-result',
-             container: parser.getPartialResult(),
-             jsonpath: parser.getStreamingJsonPathAsString(),
-             data: [],
-             length: 0
-             // schema: null
-         };
-         yield finalBatch;
-     }
- }
- exports.default = parseJSONInBatches;
- function rebuildJsonObject(batch, data) {
-     // Last batch will have this special type and will provide all the root object of the parsed file
-     (0, loader_utils_1.assert)(batch.batchType === 'final-result');
-     // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects
-     if (batch.jsonpath === '$') {
-         return data;
-     }
-     // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object
-     if (batch.jsonpath && batch.jsonpath.length > 1) {
-         const topLevelObject = batch.container;
-         const streamingPath = new jsonpath_1.default(batch.jsonpath);
-         streamingPath.setFieldAtPath(topLevelObject, data);
-         return topLevelObject;
-     }
-     // No jsonpath, in this case nothing was streamed.
-     return batch.container;
- }
- exports.rebuildJsonObject = rebuildJsonObject;
@@ -1,29 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- function parseJSONSync(jsonText, options) {
-     try {
-         const json = JSON.parse(jsonText);
-         if (options.json?.table) {
-             return getFirstArray(json) || json;
-         }
-         return json;
-     }
-     catch (error) {
-         throw new Error('JSONLoader: failed to parse JSON');
-     }
- }
- exports.default = parseJSONSync;
- function getFirstArray(json) {
-     if (Array.isArray(json)) {
-         return json;
-     }
-     if (json && typeof json === 'object') {
-         for (const value of Object.values(json)) {
-             const array = getFirstArray(value);
-             if (array) {
-                 return array;
-             }
-         }
-     }
-     return null;
- }
@@ -1,35 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const schema_1 = require("@loaders.gl/schema");
- const loader_utils_1 = require("@loaders.gl/loader-utils");
- async function* parseNDJSONInBatches(binaryAsyncIterator, options) {
-     const textIterator = (0, loader_utils_1.makeTextDecoderIterator)(binaryAsyncIterator);
-     const lineIterator = (0, loader_utils_1.makeLineIterator)(textIterator);
-     const numberedLineIterator = (0, loader_utils_1.makeNumberedLineIterator)(lineIterator);
-     const schema = null;
-     const shape = 'row-table';
-     // @ts-ignore
-     const tableBatchBuilder = new schema_1.TableBatchBuilder(schema, {
-         ...options,
-         shape
-     });
-     for await (const { counter, line } of numberedLineIterator) {
-         try {
-             const row = JSON.parse(line);
-             tableBatchBuilder.addRow(row);
-             tableBatchBuilder.chunkComplete(line);
-             const batch = tableBatchBuilder.getFullBatch();
-             if (batch) {
-                 yield batch;
-             }
-         }
-         catch (error) {
-             throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter}`);
-         }
-     }
-     const batch = tableBatchBuilder.getFinalBatch();
-     if (batch) {
-         yield batch;
-     }
- }
- exports.default = parseNDJSONInBatches;
@@ -1,14 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- function parseNDJSONSync(ndjsonText) {
-     const lines = ndjsonText.trim().split('\n');
-     return lines.map((line, counter) => {
-         try {
-             return JSON.parse(line);
-         }
-         catch (error) {
-             throw new Error(`NDJSONLoader: failed to parse JSON on line ${counter + 1}`);
-         }
-     });
- }
- exports.default = parseNDJSONSync;
@@ -1,98 +0,0 @@
- "use strict";
- // @ts-nocheck
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- const clarinet_1 = __importDefault(require("../clarinet/clarinet"));
- const jsonpath_1 = __importDefault(require("../jsonpath/jsonpath"));
- // JSONParser builds a JSON object using the events emitted by the Clarinet parser
- class JSONParser {
-     constructor(options) {
-         this.result = undefined;
-         this.previousStates = [];
-         this.currentState = Object.freeze({ container: [], key: null });
-         this.jsonpath = new jsonpath_1.default();
-         this.reset();
-         this.parser = new clarinet_1.default({
-             onready: () => {
-                 this.jsonpath = new jsonpath_1.default();
-                 this.previousStates.length = 0;
-                 this.currentState.container.length = 0;
-             },
-             onopenobject: (name) => {
-                 this._openObject({});
-                 if (typeof name !== 'undefined') {
-                     this.parser.emit('onkey', name);
-                 }
-             },
-             onkey: (name) => {
-                 this.jsonpath.set(name);
-                 this.currentState.key = name;
-             },
-             oncloseobject: () => {
-                 this._closeObject();
-             },
-             onopenarray: () => {
-                 this._openArray();
-             },
-             onclosearray: () => {
-                 this._closeArray();
-             },
-             onvalue: (value) => {
-                 this._pushOrSet(value);
-             },
-             onerror: (error) => {
-                 throw error;
-             },
-             onend: () => {
-                 this.result = this.currentState.container.pop();
-             },
-             ...options
-         });
-     }
-     reset() {
-         this.result = undefined;
-         this.previousStates = [];
-         this.currentState = Object.freeze({ container: [], key: null });
-         this.jsonpath = new jsonpath_1.default();
-     }
-     write(chunk) {
-         this.parser.write(chunk);
-     }
-     close() {
-         this.parser.close();
-     }
-     // PRIVATE METHODS
-     _pushOrSet(value) {
-         const { container, key } = this.currentState;
-         if (key !== null) {
-             container[key] = value;
-             this.currentState.key = null;
-         }
-         else {
-             container.push(value);
-         }
-     }
-     _openArray(newContainer = []) {
-         this.jsonpath.push(null);
-         this._pushOrSet(newContainer);
-         this.previousStates.push(this.currentState);
-         this.currentState = { container: newContainer, isArray: true, key: null };
-     }
-     _closeArray() {
-         this.jsonpath.pop();
-         this.currentState = this.previousStates.pop();
-     }
-     _openObject(newContainer = {}) {
-         this.jsonpath.push(null);
-         this._pushOrSet(newContainer);
-         this.previousStates.push(this.currentState);
-         this.currentState = { container: newContainer, isArray: false, key: null };
-     }
-     _closeObject() {
-         this.jsonpath.pop();
-         this.currentState = this.previousStates.pop();
-     }
- }
- exports.default = JSONParser;
@@ -1,100 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- const json_parser_1 = __importDefault(require("./json-parser"));
- const jsonpath_1 = __importDefault(require("../jsonpath/jsonpath"));
- /**
-  * The `StreamingJSONParser` looks for the first array in the JSON structure.
-  * and emits an array of chunks
-  */
- class StreamingJSONParser extends json_parser_1.default {
-     constructor(options = {}) {
-         super({
-             onopenarray: () => {
-                 if (!this.streamingArray) {
-                     if (this._matchJSONPath()) {
-                         // @ts-ignore
-                         this.streamingJsonPath = this.getJsonPath().clone();
-                         this.streamingArray = [];
-                         this._openArray(this.streamingArray);
-                         return;
-                     }
-                 }
-                 this._openArray();
-             },
-             // Redefine onopenarray to inject value for top-level object
-             onopenobject: (name) => {
-                 if (!this.topLevelObject) {
-                     this.topLevelObject = {};
-                     this._openObject(this.topLevelObject);
-                 }
-                 else {
-                     this._openObject({});
-                 }
-                 if (typeof name !== 'undefined') {
-                     this.parser.emit('onkey', name);
-                 }
-             }
-         });
-         this.streamingJsonPath = null;
-         this.streamingArray = null;
-         this.topLevelObject = null;
-         const jsonpaths = options.jsonpaths || [];
-         this.jsonPaths = jsonpaths.map((jsonpath) => new jsonpath_1.default(jsonpath));
-     }
-     /**
-      * write REDEFINITION
-      * - super.write() chunk to parser
-      * - get the contents (so far) of "topmost-level" array as batch of rows
-      * - clear top-level array
-      * - return the batch of rows\
-      */
-     write(chunk) {
-         super.write(chunk);
-         let array = [];
-         if (this.streamingArray) {
-             array = [...this.streamingArray];
-             this.streamingArray.length = 0;
-         }
-         return array;
-     }
-     /**
-      * Returns a partially formed result object
-      * Useful for returning the "wrapper" object when array is not top level
-      * e.g. GeoJSON
-      */
-     getPartialResult() {
-         return this.topLevelObject;
-     }
-     getStreamingJsonPath() {
-         return this.streamingJsonPath;
-     }
-     getStreamingJsonPathAsString() {
-         return this.streamingJsonPath && this.streamingJsonPath.toString();
-     }
-     getJsonPath() {
-         return this.jsonpath;
-     }
-     // PRIVATE METHODS
-     /**
-      * Checks is this.getJsonPath matches the jsonpaths provided in options
-      */
-     _matchJSONPath() {
-         const currentPath = this.getJsonPath();
-         // console.debug(`Testing JSONPath`, currentPath);
-         // Backwards compatibility, match any array
-         // TODO implement using wildcard once that is supported
-         if (this.jsonPaths.length === 0) {
-             return true;
-         }
-         for (const jsonPath of this.jsonPaths) {
-             if (jsonPath.equals(currentPath)) {
-                 return true;
-             }
-         }
-         return false;
-     }
- }
- exports.default = StreamingJSONParser;
@@ -1,40 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports._typecheckNDJSONLoader = exports.NDJSONLoader = void 0;
- const parse_ndjson_1 = __importDefault(require("./lib/parse-ndjson"));
- const parse_ndjson_in_batches_1 = __importDefault(require("./lib/parse-ndjson-in-batches"));
- // __VERSION__ is injected by babel-plugin-version-inline
- // @ts-ignore TS2304: Cannot find name '__VERSION__'.
- const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
- const DEFAULT_NDGEOJSON_LOADER_OPTIONS = {
-     geojson: {
-         shape: 'object-row-table'
-     },
-     gis: {
-         format: 'geojson'
-     }
- };
- exports.NDJSONLoader = {
-     name: 'NDJSON',
-     id: 'ndjson',
-     module: 'json',
-     version: VERSION,
-     extensions: ['ndjson', 'ndgeojson'],
-     mimeTypes: [
-         'application/geo+x-ndjson',
-         'application/geo+x-ldjson',
-         'application/jsonlines',
-         'application/geo+json-seq',
-         'application/x-ndjson'
-     ],
-     category: 'table',
-     text: true,
-     parse: async (arrayBuffer) => (0, parse_ndjson_1.default)(new TextDecoder().decode(arrayBuffer)),
-     parseTextSync: parse_ndjson_1.default,
-     parseInBatches: parse_ndjson_in_batches_1.default,
-     options: DEFAULT_NDGEOJSON_LOADER_OPTIONS
- };
- exports._typecheckNDJSONLoader = exports.NDJSONLoader;
@@ -1,29 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.NDJSONLoader = void 0;
- const parse_ndjson_1 = __importDefault(require("./lib/parse-ndjson"));
- const parse_ndjson_in_batches_1 = __importDefault(require("./lib/parse-ndjson-in-batches"));
- // __VERSION__ is injected by babel-plugin-version-inline
- // @ts-ignore TS2304: Cannot find name '__VERSION__'.
- const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
- exports.NDJSONLoader = {
-     name: 'NDJSON',
-     id: 'ndjson',
-     module: 'json',
-     version: VERSION,
-     extensions: ['ndjson', 'jsonl'],
-     mimeTypes: [
-         'application/x-ndjson',
-         'application/jsonlines',
-         'application/json-seq'
-     ],
-     category: 'table',
-     text: true,
-     parse: async (arrayBuffer) => (0, parse_ndjson_1.default)(new TextDecoder().decode(arrayBuffer)),
-     parseTextSync: parse_ndjson_1.default,
-     parseInBatches: parse_ndjson_in_batches_1.default,
-     options: {}
- };
@@ -1,5 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const loader_utils_1 = require("@loaders.gl/loader-utils");
- const geojson_loader_1 = require("../geojson-loader");
- (0, loader_utils_1.createLoaderWorker)(geojson_loader_1.GeoJSONLoader);