@loaders.gl/json 4.0.0-alpha.4 → 4.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/bundle.js +2 -2
- package/dist/dist.min.js +3094 -0
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/geojson-loader.js +157 -0
- package/dist/es5/geojson-loader.js.map +1 -0
- package/dist/es5/geojson-writer.js +27 -0
- package/dist/es5/geojson-writer.js.map +1 -0
- package/dist/es5/index.js +69 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/json-loader.js +67 -0
- package/dist/es5/json-loader.js.map +1 -0
- package/dist/es5/json-writer.js +42 -0
- package/dist/es5/json-writer.js.map +1 -0
- package/dist/es5/lib/clarinet/clarinet.js +446 -0
- package/dist/es5/lib/clarinet/clarinet.js.map +1 -0
- package/dist/es5/lib/encoders/encode-utils.js +42 -0
- package/dist/es5/lib/encoders/encode-utils.js.map +1 -0
- package/dist/es5/lib/encoders/geojson-encoder.js +178 -0
- package/dist/es5/lib/encoders/geojson-encoder.js.map +1 -0
- package/dist/es5/lib/encoders/json-encoder.js +30 -0
- package/dist/es5/lib/encoders/json-encoder.js.map +1 -0
- package/dist/es5/lib/encoders/utf8-encoder.js +54 -0
- package/dist/es5/lib/encoders/utf8-encoder.js.map +1 -0
- package/dist/es5/lib/json-parser/json-parser.js +140 -0
- package/dist/es5/lib/json-parser/json-parser.js.map +1 -0
- package/dist/es5/lib/json-parser/streaming-json-parser.js +123 -0
- package/dist/es5/lib/json-parser/streaming-json-parser.js.map +1 -0
- package/dist/es5/lib/jsonpath/jsonpath.js +119 -0
- package/dist/es5/lib/jsonpath/jsonpath.js.map +1 -0
- package/dist/es5/lib/parsers/parse-json-in-batches.js +206 -0
- package/dist/es5/lib/parsers/parse-json-in-batches.js.map +1 -0
- package/dist/es5/lib/parsers/parse-json.js +38 -0
- package/dist/es5/lib/parsers/parse-json.js.map +1 -0
- package/dist/es5/lib/parsers/parse-ndjson-in-batches.js +114 -0
- package/dist/es5/lib/parsers/parse-ndjson-in-batches.js.map +1 -0
- package/dist/es5/lib/parsers/parse-ndjson.js +19 -0
- package/dist/es5/lib/parsers/parse-ndjson.js.map +1 -0
- package/dist/es5/ndgeoson-loader.js +54 -0
- package/dist/es5/ndgeoson-loader.js.map +1 -0
- package/dist/es5/ndjson-loader.js +44 -0
- package/dist/es5/ndjson-loader.js.map +1 -0
- package/dist/es5/workers/geojson-worker.js +6 -0
- package/dist/es5/workers/geojson-worker.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/geojson-loader.js +79 -0
- package/dist/esm/geojson-loader.js.map +1 -0
- package/dist/esm/geojson-writer.js +18 -0
- package/dist/esm/geojson-writer.js.map +1 -0
- package/dist/esm/index.js +9 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/json-loader.js +48 -0
- package/dist/esm/json-loader.js.map +1 -0
- package/dist/esm/json-writer.js +14 -0
- package/dist/esm/json-writer.js.map +1 -0
- package/dist/esm/lib/clarinet/LICENSE +28 -0
- package/dist/esm/lib/clarinet/clarinet.js +415 -0
- package/dist/esm/lib/clarinet/clarinet.js.map +1 -0
- package/dist/esm/lib/encoders/encode-utils.js +31 -0
- package/dist/esm/lib/encoders/encode-utils.js.map +1 -0
- package/dist/esm/lib/encoders/geojson-encoder.js +98 -0
- package/dist/esm/lib/encoders/geojson-encoder.js.map +1 -0
- package/dist/esm/lib/encoders/json-encoder.js +12 -0
- package/dist/esm/lib/encoders/json-encoder.js.map +1 -0
- package/dist/esm/lib/encoders/utf8-encoder.js +32 -0
- package/dist/esm/lib/encoders/utf8-encoder.js.map +1 -0
- package/dist/{lib/parser → esm/lib/json-parser}/json-parser.js +4 -22
- package/dist/esm/lib/json-parser/json-parser.js.map +1 -0
- package/dist/{lib/parser → esm/lib/json-parser}/streaming-json-parser.js +2 -23
- package/dist/esm/lib/json-parser/streaming-json-parser.js.map +1 -0
- package/dist/esm/lib/jsonpath/jsonpath.js +67 -0
- package/dist/esm/lib/jsonpath/jsonpath.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-json-in-batches.js +19 -15
- package/dist/esm/lib/parsers/parse-json-in-batches.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-json.js +4 -9
- package/dist/esm/lib/parsers/parse-json.js.map +1 -0
- package/dist/{lib → esm/lib/parsers}/parse-ndjson-in-batches.js +3 -6
- package/dist/esm/lib/parsers/parse-ndjson-in-batches.js.map +1 -0
- package/dist/esm/lib/parsers/parse-ndjson.js +13 -0
- package/dist/esm/lib/parsers/parse-ndjson.js.map +1 -0
- package/dist/esm/ndgeoson-loader.js +27 -0
- package/dist/esm/ndgeoson-loader.js.map +1 -0
- package/dist/esm/ndjson-loader.js +18 -0
- package/dist/esm/ndjson-loader.js.map +1 -0
- package/dist/esm/workers/geojson-worker.js +4 -0
- package/dist/esm/workers/geojson-worker.js.map +1 -0
- package/dist/geojson-loader.d.ts +16 -0
- package/dist/geojson-loader.d.ts.map +1 -0
- package/dist/geojson-loader.js +65 -69
- package/dist/geojson-worker.js +1016 -232
- package/dist/geojson-writer.d.ts +6 -0
- package/dist/geojson-writer.d.ts.map +1 -0
- package/dist/geojson-writer.js +22 -0
- package/dist/index.d.ts +13 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +24 -6
- package/dist/json-loader.d.ts +17 -0
- package/dist/json-loader.d.ts.map +1 -0
- package/dist/json-loader.js +33 -38
- package/dist/json-writer.d.ts +6 -0
- package/dist/json-writer.d.ts.map +1 -0
- package/dist/json-writer.js +18 -0
- package/dist/lib/clarinet/clarinet.d.ts +74 -0
- package/dist/lib/clarinet/clarinet.d.ts.map +1 -0
- package/dist/lib/clarinet/clarinet.js +510 -493
- package/dist/lib/encoders/encode-utils.d.ts +19 -0
- package/dist/lib/encoders/encode-utils.d.ts.map +1 -0
- package/dist/lib/encoders/encode-utils.js +47 -0
- package/dist/lib/encoders/geojson-encoder.d.ts +14 -0
- package/dist/lib/encoders/geojson-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/geojson-encoder.js +104 -0
- package/dist/lib/encoders/json-encoder.d.ts +16 -0
- package/dist/lib/encoders/json-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/json-encoder.js +22 -0
- package/dist/lib/encoders/utf8-encoder.d.ts +12 -0
- package/dist/lib/encoders/utf8-encoder.d.ts.map +1 -0
- package/dist/lib/encoders/utf8-encoder.js +32 -0
- package/dist/lib/json-parser/json-parser.d.ts +22 -0
- package/dist/lib/json-parser/json-parser.d.ts.map +1 -0
- package/dist/lib/json-parser/json-parser.js +98 -0
- package/dist/lib/json-parser/streaming-json-parser.d.ts +37 -0
- package/dist/lib/json-parser/streaming-json-parser.d.ts.map +1 -0
- package/dist/lib/json-parser/streaming-json-parser.js +100 -0
- package/dist/lib/jsonpath/jsonpath.d.ts +32 -0
- package/dist/lib/jsonpath/jsonpath.d.ts.map +1 -0
- package/dist/lib/jsonpath/jsonpath.js +81 -78
- package/dist/lib/parsers/parse-json-in-batches.d.ts +5 -0
- package/dist/lib/parsers/parse-json-in-batches.d.ts.map +1 -0
- package/dist/lib/parsers/parse-json-in-batches.js +100 -0
- package/dist/lib/parsers/parse-json.d.ts +4 -0
- package/dist/lib/parsers/parse-json.d.ts.map +1 -0
- package/dist/lib/parsers/parse-json.js +32 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.d.ts +4 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.d.ts.map +1 -0
- package/dist/lib/parsers/parse-ndjson-in-batches.js +36 -0
- package/dist/lib/parsers/parse-ndjson.d.ts +3 -0
- package/dist/lib/parsers/parse-ndjson.d.ts.map +1 -0
- package/dist/lib/parsers/parse-ndjson.js +17 -0
- package/dist/ndgeoson-loader.d.ts +34 -0
- package/dist/ndgeoson-loader.d.ts.map +1 -0
- package/dist/ndgeoson-loader.js +37 -0
- package/dist/ndjson-loader.d.ts +4 -0
- package/dist/ndjson-loader.d.ts.map +1 -0
- package/dist/ndjson-loader.js +26 -31
- package/dist/workers/geojson-worker.d.ts +2 -0
- package/dist/workers/geojson-worker.d.ts.map +1 -0
- package/dist/workers/geojson-worker.js +5 -4
- package/package.json +10 -10
- package/src/geojson-loader.ts +10 -6
- package/src/geojson-writer.ts +27 -0
- package/src/index.ts +10 -0
- package/src/json-loader.ts +15 -24
- package/src/json-writer.ts +24 -0
- package/src/lib/encoders/encode-utils.ts +54 -0
- package/src/lib/encoders/geojson-encoder.ts +139 -0
- package/src/lib/encoders/json-encoder.ts +30 -0
- package/src/lib/encoders/utf8-encoder.ts +35 -0
- package/src/lib/{parse-json-in-batches.ts → parsers/parse-json-in-batches.ts} +30 -8
- package/src/lib/{parse-json.ts → parsers/parse-json.ts} +7 -3
- package/src/lib/{parse-ndjson-in-batches.ts → parsers/parse-ndjson-in-batches.ts} +1 -1
- package/src/lib/parsers/parse-ndjson.ts +15 -0
- package/src/ndgeoson-loader.ts +48 -0
- package/src/ndjson-loader.ts +20 -27
- package/dist/bundle.js.map +0 -1
- package/dist/geojson-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/json-loader.js.map +0 -1
- package/dist/jsonl-loader.js +0 -2
- package/dist/jsonl-loader.js.map +0 -1
- package/dist/lib/clarinet/clarinet.js.map +0 -1
- package/dist/lib/jsonpath/jsonpath.js.map +0 -1
- package/dist/lib/parse-json-in-batches.js.map +0 -1
- package/dist/lib/parse-json.js.map +0 -1
- package/dist/lib/parse-ndjson-in-batches.js.map +0 -1
- package/dist/lib/parse-ndjson.js +0 -11
- package/dist/lib/parse-ndjson.js.map +0 -1
- package/dist/lib/parser/json-parser.js.map +0 -1
- package/dist/lib/parser/streaming-json-parser.js.map +0 -1
- package/dist/ndjson-loader.js.map +0 -1
- package/dist/workers/geojson-worker.js.map +0 -1
- package/src/jsonl-loader.ts +0 -53
- package/src/lib/parse-ndjson.ts +0 -10
- /package/dist/{lib → es5/lib}/clarinet/LICENSE +0 -0
- /package/src/lib/{parser → json-parser}/json-parser.ts +0 -0
- /package/src/lib/{parser → json-parser}/streaming-json-parser.ts +0 -0
package/dist/lib/encoders/encode-utils.d.ts
@@ -0,0 +1,19 @@
+import { Table } from '@loaders.gl/schema';
+type Row = {
+    [key: string]: unknown;
+};
+/**
+ * Attempts to identify which column contains geometry
+ * Currently just returns name (key) of first object-valued column
+ * @todo look for hints in schema metadata
+ * @todo look for WKB
+ */
+export declare function detectGeometryColumnIndex(table: Table): number;
+/**
+ * Return a row as a property (key/value) object, excluding selected columns
+ */
+export declare function getRowPropertyObject(table: Table, row: Row, excludeColumnIndices?: number[]): {
+    [columnName: string]: unknown;
+};
+export {};
+//# sourceMappingURL=encode-utils.d.ts.map
package/dist/lib/encoders/encode-utils.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"encode-utils.d.ts","sourceRoot":"","sources":["../../../src/lib/encoders/encode-utils.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,KAAK,EAAsD,MAAM,oBAAoB,CAAC;AAE9F,KAAK,GAAG,GAAG;IAAC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CAAC,CAAC;AAEpC;;;;;GAKG;AACH,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAsB9D;AAED;;GAEG;AACH,wBAAgB,oBAAoB,CAClC,KAAK,EAAE,KAAK,EACZ,GAAG,EAAE,GAAG,EACR,oBAAoB,GAAE,MAAM,EAAO,GAClC;IAAC,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAA;CAAC,CASjC"}
package/dist/lib/encoders/encode-utils.js
@@ -0,0 +1,47 @@
+"use strict";
+// loaders.gl, MIT license
+// Copyright 2022 Foursquare Labs, Inc.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getRowPropertyObject = exports.detectGeometryColumnIndex = void 0;
+const schema_1 = require("@loaders.gl/schema");
+/**
+ * Attempts to identify which column contains geometry
+ * Currently just returns name (key) of first object-valued column
+ * @todo look for hints in schema metadata
+ * @todo look for WKB
+ */
+function detectGeometryColumnIndex(table) {
+    // TODO - look for hints in schema metadata
+    // look for a column named geometry
+    const geometryIndex = table.schema?.fields.findIndex((field) => field.name === 'geometry') ?? -1;
+    if (geometryIndex > -1) {
+        return geometryIndex;
+    }
+    // look at the data
+    // TODO - this drags in the indices
+    if ((0, schema_1.getTableLength)(table) > 0) {
+        const row = (0, schema_1.getTableRowAsArray)(table, 0);
+        for (let columnIndex = 0; columnIndex < (0, schema_1.getTableNumCols)(table); columnIndex++) {
+            const value = row?.[columnIndex];
+            if (value && typeof value === 'object') {
+                return columnIndex;
+            }
+        }
+    }
+    throw new Error('Failed to detect geometry column');
+}
+exports.detectGeometryColumnIndex = detectGeometryColumnIndex;
+/**
+ * Return a row as a property (key/value) object, excluding selected columns
+ */
+function getRowPropertyObject(table, row, excludeColumnIndices = []) {
+    const properties = {};
+    for (let columnIndex = 0; columnIndex < (0, schema_1.getTableNumCols)(table); ++columnIndex) {
+        const columnName = table.schema?.fields[columnIndex].name;
+        if (columnName && !excludeColumnIndices.includes(columnIndex)) {
+            properties[columnName] = row[columnName];
+        }
+    }
+    return properties;
+}
+exports.getRowPropertyObject = getRowPropertyObject;
package/dist/lib/encoders/geojson-encoder.d.ts
@@ -0,0 +1,14 @@
+import { TableBatch } from '@loaders.gl/schema';
+export type GeoJSONWriterOptions = {
+    geojson?: {
+        featureArray?: boolean;
+        geometryColumn?: number | null;
+    };
+    chunkSize?: number;
+};
+/**
+ * Encode a table as GeoJSON
+ */
+export declare function encodeTableAsGeojsonInBatches(batchIterator: AsyncIterable<TableBatch>, // | Iterable<TableBatch>,
+inputOpts?: GeoJSONWriterOptions): AsyncIterable<ArrayBuffer>;
+//# sourceMappingURL=geojson-encoder.d.ts.map
package/dist/lib/encoders/geojson-encoder.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"geojson-encoder.d.ts","sourceRoot":"","sources":["../../../src/lib/encoders/geojson-encoder.ts"],"names":[],"mappings":"AAIA,OAAO,EAAQ,UAAU,EAAsB,MAAM,oBAAoB,CAAC;AAM1E,MAAM,MAAM,oBAAoB,GAAG;IACjC,OAAO,CAAC,EAAE;QACR,YAAY,CAAC,EAAE,OAAO,CAAC;QACvB,cAAc,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;KAChC,CAAC;IACF,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF;;GAEG;AAEH,wBAAuB,6BAA6B,CAClD,aAAa,EAAE,aAAa,CAAC,UAAU,CAAC,EAAE,0BAA0B;AACpE,SAAS,GAAE,oBAAyB,GACnC,aAAa,CAAC,WAAW,CAAC,CAqD5B"}
package/dist/lib/encoders/geojson-encoder.js
@@ -0,0 +1,104 @@
+"use strict";
+// loaders.gl, MIT license
+// Copyright 2022 Foursquare Labs, Inc.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeTableAsGeojsonInBatches = void 0;
+const schema_1 = require("@loaders.gl/schema");
+const schema_2 = require("@loaders.gl/schema");
+const encode_utils_1 = require("./encode-utils");
+const utf8_encoder_1 = require("./utf8-encoder");
+/**
+ * Encode a table as GeoJSON
+ */
+// eslint-disable-next-line max-statements
+async function* encodeTableAsGeojsonInBatches(batchIterator, // | Iterable<TableBatch>,
+inputOpts = {}) {
+    const options = { geojson: {}, chunkSize: 10000, ...inputOpts };
+    const utf8Encoder = new utf8_encoder_1.Utf8ArrayBufferEncoder(options.chunkSize);
+    if (!options.geojson.featureArray) {
+        utf8Encoder.push('{\n', '"type": "FeatureCollection",\n', '"features":\n');
+    }
+    utf8Encoder.push('['); // Note no newline
+    let geometryColumn = options.geojson.geometryColumn;
+    let isFirstLine = true;
+    for await (const batch of batchIterator) {
+        const { table, start, end = (0, schema_1.getTableLength)(batch.table) - start } = batch;
+        // Deduce geometry column if not already done
+        if (!geometryColumn) {
+            geometryColumn = geometryColumn || (0, encode_utils_1.detectGeometryColumnIndex)(table);
+        }
+        for (let rowIndex = start; rowIndex < end; ++rowIndex) {
+            // Add a comma except on final feature
+            if (!isFirstLine) {
+                utf8Encoder.push(',');
+            }
+            utf8Encoder.push('\n');
+            isFirstLine = false;
+            encodeRow(table, rowIndex, geometryColumn, utf8Encoder);
+            // eslint-disable-next-line max-depth
+            if (utf8Encoder.isFull()) {
+                yield utf8Encoder.getArrayBufferBatch();
+            }
+        }
+        const arrayBufferBatch = utf8Encoder.getArrayBufferBatch();
+        if (arrayBufferBatch.byteLength > 0) {
+            yield arrayBufferBatch;
+        }
+    }
+    utf8Encoder.push('\n');
+    // Add completing rows and emit final batch
+    utf8Encoder.push(']\n');
+    if (!options.geojson.featureArray) {
+        utf8Encoder.push('}');
+    }
+    // Note: Since we pushed a few final lines, the last batch will always exist, no need to check first
+    yield utf8Encoder.getArrayBufferBatch();
+}
+exports.encodeTableAsGeojsonInBatches = encodeTableAsGeojsonInBatches;
+// Helpers
+/**
+ * Encode a row. Currently this ignores properties in the geometry column.
+ */
+function encodeRow(table, rowIndex, geometryColumnIndex, utf8Encoder) {
+    const row = (0, schema_2.getTableRowAsObject)(table, rowIndex);
+    if (!row)
+        return;
+    const featureWithProperties = getFeatureFromRow(table, row, geometryColumnIndex);
+    const featureString = JSON.stringify(featureWithProperties);
+    utf8Encoder.push(featureString);
+}
+/**
+ * Encode a row as a Feature. Currently this ignores properties objects in the geometry column.
+ */
+function getFeatureFromRow(table, row, geometryColumnIndex) {
+    // Extract non-feature/geometry properties
+    const properties = (0, encode_utils_1.getRowPropertyObject)(table, row, [geometryColumnIndex]);
+    // Extract geometry feature
+    const columnName = table.schema?.fields[geometryColumnIndex].name;
+    let featureOrGeometry = columnName && row[columnName];
+    // GeoJSON support null geometries
+    if (!featureOrGeometry) {
+        // @ts-ignore Feature type does not support null geometries
+        return { type: 'Feature', geometry: null, properties };
+    }
+    // Support string geometries?
+    // TODO: This assumes GeoJSON strings, which may not be the correct format
+    // (could be WKT, encoded WKB...)
+    if (typeof featureOrGeometry === 'string') {
+        try {
+            featureOrGeometry = JSON.parse(featureOrGeometry);
+        }
+        catch (err) {
+            throw new Error('Invalid string geometry');
+        }
+    }
+    if (typeof featureOrGeometry !== 'object' || typeof featureOrGeometry?.type !== 'string') {
+        throw new Error('invalid geometry column value');
+    }
+    if (featureOrGeometry?.type === 'Feature') {
+        // @ts-ignore Feature type does not support null geometries
+        return { ...featureOrGeometry, properties };
+    }
+    // @ts-ignore Feature type does not support null geometries
+    return { type: 'Feature', geometry: featureOrGeometry, properties };
+}
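
The encoder above is an async generator that yields UTF-8 `ArrayBuffer` chunks of a growing FeatureCollection rather than a single string. A minimal consumption sketch in TypeScript, assuming a `TableBatch` async iterable produced elsewhere and a deep import into this build output (neither is a documented entry point of this package):

```ts
import type {TableBatch} from '@loaders.gl/schema';
// Assumption: deep import into the file added above; not necessarily a supported entry point.
import {encodeTableAsGeojsonInBatches} from '@loaders.gl/json/dist/lib/encoders/geojson-encoder';

/** Collect the encoder's ArrayBuffer chunks into a single GeoJSON string. */
export async function tableBatchesToGeoJSON(
  batches: AsyncIterable<TableBatch> // e.g. produced by a batched table loader
): Promise<string> {
  const decoder = new TextDecoder();
  let text = '';
  for await (const chunk of encodeTableAsGeojsonInBatches(batches, {chunkSize: 10000})) {
    // Each chunk is a UTF-8 slice of the FeatureCollection being assembled.
    text += decoder.decode(chunk, {stream: true});
  }
  return text + decoder.decode(); // flush any buffered bytes
}
```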
package/dist/lib/encoders/json-encoder.d.ts
@@ -0,0 +1,16 @@
+import { Table } from '@loaders.gl/schema';
+type RowArray = unknown[];
+type RowObject = {
+    [key: string]: unknown;
+};
+type TableJSON = RowArray[] | RowObject[];
+export type JSONWriterOptions = {
+    shape?: 'object-row-table' | 'array-row-table';
+    wrapper?: (table: TableJSON) => unknown;
+};
+/**
+ * Encode a table as a JSON string
+ */
+export declare function encodeTableAsJSON(table: Table, options?: JSONWriterOptions): string;
+export {};
+//# sourceMappingURL=json-encoder.d.ts.map
package/dist/lib/encoders/json-encoder.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"json-encoder.d.ts","sourceRoot":"","sources":["../../../src/lib/encoders/json-encoder.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,KAAK,EAAkB,MAAM,oBAAoB,CAAC;AAE1D,KAAK,QAAQ,GAAG,OAAO,EAAE,CAAC;AAC1B,KAAK,SAAS,GAAG;IAAC,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;CAAC,CAAC;AAC1C,KAAK,SAAS,GAAG,QAAQ,EAAE,GAAG,SAAS,EAAE,CAAC;AAE1C,MAAM,MAAM,iBAAiB,GAAG;IAC9B,KAAK,CAAC,EAAE,kBAAkB,GAAG,iBAAiB,CAAC;IAC/C,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CACzC,CAAC;AAEF;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,GAAE,iBAAsB,GAAG,MAAM,CAYvF"}
package/dist/lib/encoders/json-encoder.js
@@ -0,0 +1,22 @@
+"use strict";
+// loaders.gl, MIT license
+// Copyright 2022 Foursquare Labs, Inc.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeTableAsJSON = void 0;
+const schema_1 = require("@loaders.gl/schema");
+/**
+ * Encode a table as a JSON string
+ */
+function encodeTableAsJSON(table, options = {}) {
+    const shape = options.shape || 'object-row-table';
+    const strings = [];
+    const rowIterator = (0, schema_1.makeRowIterator)(table, shape);
+    for (const row of rowIterator) {
+        // Round elements etc
+        // processRow(wrappedRow, table.schema);
+        // const wrappedRow = options.wrapper ? options.wrapper(row) : row;
+        strings.push(JSON.stringify(row));
+    }
+    return `[${strings.join(',')}]`;
+}
+exports.encodeTableAsJSON = encodeTableAsJSON;
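
For the non-streaming JSON encoder above, a small call sketch, assuming an `object-row-table` shaped table (schema omitted, since `encodeTableAsJSON` only iterates rows) and the same kind of deep import:

```ts
import type {Table} from '@loaders.gl/schema';
// Assumption: deep import into the file added above; not necessarily a supported entry point.
import {encodeTableAsJSON} from '@loaders.gl/json/dist/lib/encoders/json-encoder';

// A tiny object-row table; the 'array-row-table' shape would emit rows as arrays instead.
const table: Table = {
  shape: 'object-row-table',
  data: [
    {id: 1, name: 'first'},
    {id: 2, name: 'second'}
  ]
};

const json = encodeTableAsJSON(table, {shape: 'object-row-table'});
// expected: '[{"id":1,"name":"first"},{"id":2,"name":"second"}]'
```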
package/dist/lib/encoders/utf8-encoder.d.ts
@@ -0,0 +1,12 @@
+export declare class Utf8ArrayBufferEncoder {
+    private readonly chunkSize;
+    private strings;
+    private totalLength;
+    private textEncoder;
+    constructor(chunkSize: number);
+    push(...strings: string[]): void;
+    isFull(): boolean;
+    getArrayBufferBatch(): ArrayBufferLike;
+    getStringBatch(): string;
+}
+//# sourceMappingURL=utf8-encoder.d.ts.map
package/dist/lib/encoders/utf8-encoder.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"utf8-encoder.d.ts","sourceRoot":"","sources":["../../../src/lib/encoders/utf8-encoder.ts"],"names":[],"mappings":"AAGA,qBAAa,sBAAsB;IACjC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAS;IACnC,OAAO,CAAC,OAAO,CAAgB;IAC/B,OAAO,CAAC,WAAW,CAAK;IACxB,OAAO,CAAC,WAAW,CAAkC;gBAEzC,SAAS,EAAE,MAAM;IAI7B,IAAI,CAAC,GAAG,OAAO,EAAE,MAAM,EAAE,GAAG,IAAI;IAOhC,MAAM,IAAI,OAAO;IAIjB,mBAAmB,IAAI,eAAe;IAItC,cAAc,IAAI,MAAM;CAMzB"}
package/dist/lib/encoders/utf8-encoder.js
@@ -0,0 +1,32 @@
+"use strict";
+// loaders.gl, MIT License
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Utf8ArrayBufferEncoder = void 0;
+/* global TextEncoder */
+class Utf8ArrayBufferEncoder {
+    constructor(chunkSize) {
+        this.strings = [];
+        this.totalLength = 0;
+        this.textEncoder = new TextEncoder();
+        this.chunkSize = chunkSize;
+    }
+    push(...strings) {
+        for (const string of strings) {
+            this.strings.push(string);
+            this.totalLength += string.length;
+        }
+    }
+    isFull() {
+        return this.totalLength >= this.chunkSize;
+    }
+    getArrayBufferBatch() {
+        return this.textEncoder.encode(this.getStringBatch()).buffer;
+    }
+    getStringBatch() {
+        const stringChunk = this.strings.join('');
+        this.strings = [];
+        this.totalLength = 0;
+        return stringChunk;
+    }
+}
+exports.Utf8ArrayBufferEncoder = Utf8ArrayBufferEncoder;
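
Both encoders rely on the chunking helper above: pushed strings are buffered until roughly `chunkSize` characters have accumulated and are then flushed as one UTF-8 `ArrayBuffer`. A usage sketch (deep import path assumed):

```ts
// Assumption: deep import into the file added above; not necessarily a supported entry point.
import {Utf8ArrayBufferEncoder} from '@loaders.gl/json/dist/lib/encoders/utf8-encoder';

/** Encode lines of text as a sequence of roughly chunkSize-character UTF-8 batches. */
function* encodeLines(lines: string[], chunkSize = 1024): Generator<ArrayBufferLike> {
  const encoder = new Utf8ArrayBufferEncoder(chunkSize);
  for (const line of lines) {
    encoder.push(line, '\n'); // strings are buffered; joining is deferred
    if (encoder.isFull()) {
      yield encoder.getArrayBufferBatch(); // join + TextEncoder.encode, then reset the buffer
    }
  }
  const tail = encoder.getArrayBufferBatch();
  if (tail.byteLength > 0) {
    yield tail; // flush whatever is left
  }
}
```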
package/dist/lib/json-parser/json-parser.d.ts
@@ -0,0 +1,22 @@
+import ClarinetParser, { ClarinetParserOptions } from '../clarinet/clarinet';
+import JSONPath from '../jsonpath/jsonpath';
+export default class JSONParser {
+    readonly parser: ClarinetParser;
+    result: undefined;
+    previousStates: never[];
+    currentState: Readonly<{
+        container: never[];
+        key: null;
+    }>;
+    jsonpath: JSONPath;
+    constructor(options: ClarinetParserOptions);
+    reset(): void;
+    write(chunk: any): void;
+    close(): void;
+    _pushOrSet(value: any): void;
+    _openArray(newContainer?: never[]): void;
+    _closeArray(): void;
+    _openObject(newContainer?: {}): void;
+    _closeObject(): void;
+}
+//# sourceMappingURL=json-parser.d.ts.map
package/dist/lib/json-parser/json-parser.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"json-parser.d.ts","sourceRoot":"","sources":["../../../src/lib/json-parser/json-parser.ts"],"names":[],"mappings":"AAEA,OAAO,cAAc,EAAE,EAAC,qBAAqB,EAAC,MAAM,sBAAsB,CAAC;AAC3E,OAAO,QAAQ,MAAM,sBAAsB,CAAC;AAI5C,MAAM,CAAC,OAAO,OAAO,UAAU;IAC7B,QAAQ,CAAC,MAAM,EAAE,cAAc,CAAC;IAChC,MAAM,YAAa;IACnB,cAAc,UAAM;IACpB,YAAY;;;OAA6C;IACzD,QAAQ,EAAE,QAAQ,CAAkB;gBAExB,OAAO,EAAE,qBAAqB;IAiD1C,KAAK,IAAI,IAAI;IAOb,KAAK,CAAC,KAAK,KAAA,GAAG,IAAI;IAIlB,KAAK,IAAI,IAAI;IAMb,UAAU,CAAC,KAAK,KAAA,GAAG,IAAI;IAUvB,UAAU,CAAC,YAAY,UAAK,GAAG,IAAI;IAOnC,WAAW,IAAI,IAAI;IAKnB,WAAW,CAAC,YAAY,KAAK,GAAG,IAAI;IAOpC,YAAY,IAAI,IAAI;CAIrB"}
package/dist/lib/json-parser/json-parser.js
@@ -0,0 +1,98 @@
+"use strict";
+// @ts-nocheck
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const clarinet_1 = __importDefault(require("../clarinet/clarinet"));
+const jsonpath_1 = __importDefault(require("../jsonpath/jsonpath"));
+// JSONParser builds a JSON object using the events emitted by the Clarinet parser
+class JSONParser {
+    constructor(options) {
+        this.result = undefined;
+        this.previousStates = [];
+        this.currentState = Object.freeze({ container: [], key: null });
+        this.jsonpath = new jsonpath_1.default();
+        this.reset();
+        this.parser = new clarinet_1.default({
+            onready: () => {
+                this.jsonpath = new jsonpath_1.default();
+                this.previousStates.length = 0;
+                this.currentState.container.length = 0;
+            },
+            onopenobject: (name) => {
+                this._openObject({});
+                if (typeof name !== 'undefined') {
+                    this.parser.emit('onkey', name);
+                }
+            },
+            onkey: (name) => {
+                this.jsonpath.set(name);
+                this.currentState.key = name;
+            },
+            oncloseobject: () => {
+                this._closeObject();
+            },
+            onopenarray: () => {
+                this._openArray();
+            },
+            onclosearray: () => {
+                this._closeArray();
+            },
+            onvalue: (value) => {
+                this._pushOrSet(value);
+            },
+            onerror: (error) => {
+                throw error;
+            },
+            onend: () => {
+                this.result = this.currentState.container.pop();
+            },
+            ...options
+        });
+    }
+    reset() {
+        this.result = undefined;
+        this.previousStates = [];
+        this.currentState = Object.freeze({ container: [], key: null });
+        this.jsonpath = new jsonpath_1.default();
+    }
+    write(chunk) {
+        this.parser.write(chunk);
+    }
+    close() {
+        this.parser.close();
+    }
+    // PRIVATE METHODS
+    _pushOrSet(value) {
+        const { container, key } = this.currentState;
+        if (key !== null) {
+            container[key] = value;
+            this.currentState.key = null;
+        }
+        else {
+            container.push(value);
+        }
+    }
+    _openArray(newContainer = []) {
+        this.jsonpath.push(null);
+        this._pushOrSet(newContainer);
+        this.previousStates.push(this.currentState);
+        this.currentState = { container: newContainer, isArray: true, key: null };
+    }
+    _closeArray() {
+        this.jsonpath.pop();
+        this.currentState = this.previousStates.pop();
+    }
+    _openObject(newContainer = {}) {
+        this.jsonpath.push(null);
+        this._pushOrSet(newContainer);
+        this.previousStates.push(this.currentState);
+        this.currentState = { container: newContainer, isArray: false, key: null };
+    }
+    _closeObject() {
+        this.jsonpath.pop();
+        this.currentState = this.previousStates.pop();
+    }
+}
+exports.default = JSONParser;
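
`JSONParser` rebuilds a plain JavaScript value from clarinet's SAX-style events; after `close()` the completed value is available on `result`. A minimal sketch (deep import path assumed; `result` is only loosely typed in the declaration file above):

```ts
// Assumption: deep import into the file added above; not necessarily a supported entry point.
import JSONParser from '@loaders.gl/json/dist/lib/json-parser/json-parser';

const parser = new JSONParser({});
// Chunks can be fed incrementally; clarinet keeps its tokenizer state between writes.
parser.write('{"type": "FeatureCollection", ');
parser.write('"features": []}');
parser.close();

// The onend handler pops the finished value into `result`.
console.log(parser.result); // expected: { type: 'FeatureCollection', features: [] }
```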
package/dist/lib/json-parser/streaming-json-parser.d.ts
@@ -0,0 +1,37 @@
+import { default as JSONParser } from './json-parser';
+import JSONPath from '../jsonpath/jsonpath';
+/**
+ * The `StreamingJSONParser` looks for the first array in the JSON structure.
+ * and emits an array of chunks
+ */
+export default class StreamingJSONParser extends JSONParser {
+    private jsonPaths;
+    private streamingJsonPath;
+    private streamingArray;
+    private topLevelObject;
+    constructor(options?: {
+        [key: string]: any;
+    });
+    /**
+     * write REDEFINITION
+     * - super.write() chunk to parser
+     * - get the contents (so far) of "topmost-level" array as batch of rows
+     * - clear top-level array
+     * - return the batch of rows\
+     */
+    write(chunk: any): any[];
+    /**
+     * Returns a partially formed result object
+     * Useful for returning the "wrapper" object when array is not top level
+     * e.g. GeoJSON
+     */
+    getPartialResult(): object | null;
+    getStreamingJsonPath(): JSONPath | null;
+    getStreamingJsonPathAsString(): string | null;
+    getJsonPath(): JSONPath;
+    /**
+     * Checks is this.getJsonPath matches the jsonpaths provided in options
+     */
+    _matchJSONPath(): boolean;
+}
+//# sourceMappingURL=streaming-json-parser.d.ts.map
package/dist/lib/json-parser/streaming-json-parser.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"streaming-json-parser.d.ts","sourceRoot":"","sources":["../../../src/lib/json-parser/streaming-json-parser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,OAAO,IAAI,UAAU,EAAC,MAAM,eAAe,CAAC;AACpD,OAAO,QAAQ,MAAM,sBAAsB,CAAC;AAE5C;;;GAGG;AACH,MAAM,CAAC,OAAO,OAAO,mBAAoB,SAAQ,UAAU;IACzD,OAAO,CAAC,SAAS,CAAa;IAC9B,OAAO,CAAC,iBAAiB,CAAyB;IAClD,OAAO,CAAC,cAAc,CAAsB;IAC5C,OAAO,CAAC,cAAc,CAAuB;gBAEjC,OAAO,GAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAM;IAiC9C;;;;;;OAMG;IACH,KAAK,CAAC,KAAK,KAAA;IAUX;;;;OAIG;IACH,gBAAgB;IAIhB,oBAAoB;IAIpB,4BAA4B;IAI5B,WAAW;IAMX;;OAEG;IACH,cAAc;CAkBf"}
package/dist/lib/json-parser/streaming-json-parser.js
@@ -0,0 +1,100 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const json_parser_1 = __importDefault(require("./json-parser"));
+const jsonpath_1 = __importDefault(require("../jsonpath/jsonpath"));
+/**
+ * The `StreamingJSONParser` looks for the first array in the JSON structure.
+ * and emits an array of chunks
+ */
+class StreamingJSONParser extends json_parser_1.default {
+    constructor(options = {}) {
+        super({
+            onopenarray: () => {
+                if (!this.streamingArray) {
+                    if (this._matchJSONPath()) {
+                        // @ts-ignore
+                        this.streamingJsonPath = this.getJsonPath().clone();
+                        this.streamingArray = [];
+                        this._openArray(this.streamingArray);
+                        return;
+                    }
+                }
+                this._openArray();
+            },
+            // Redefine onopenarray to inject value for top-level object
+            onopenobject: (name) => {
+                if (!this.topLevelObject) {
+                    this.topLevelObject = {};
+                    this._openObject(this.topLevelObject);
+                }
+                else {
+                    this._openObject({});
+                }
+                if (typeof name !== 'undefined') {
+                    this.parser.emit('onkey', name);
+                }
+            }
+        });
+        this.streamingJsonPath = null;
+        this.streamingArray = null;
+        this.topLevelObject = null;
+        const jsonpaths = options.jsonpaths || [];
+        this.jsonPaths = jsonpaths.map((jsonpath) => new jsonpath_1.default(jsonpath));
+    }
+    /**
+     * write REDEFINITION
+     * - super.write() chunk to parser
+     * - get the contents (so far) of "topmost-level" array as batch of rows
+     * - clear top-level array
+     * - return the batch of rows\
+     */
+    write(chunk) {
+        super.write(chunk);
+        let array = [];
+        if (this.streamingArray) {
+            array = [...this.streamingArray];
+            this.streamingArray.length = 0;
+        }
+        return array;
+    }
+    /**
+     * Returns a partially formed result object
+     * Useful for returning the "wrapper" object when array is not top level
+     * e.g. GeoJSON
+     */
+    getPartialResult() {
+        return this.topLevelObject;
+    }
+    getStreamingJsonPath() {
+        return this.streamingJsonPath;
+    }
+    getStreamingJsonPathAsString() {
+        return this.streamingJsonPath && this.streamingJsonPath.toString();
+    }
+    getJsonPath() {
+        return this.jsonpath;
+    }
+    // PRIVATE METHODS
+    /**
+     * Checks is this.getJsonPath matches the jsonpaths provided in options
+     */
+    _matchJSONPath() {
+        const currentPath = this.getJsonPath();
+        // console.debug(`Testing JSONPath`, currentPath);
+        // Backwards compatibility, match any array
+        // TODO implement using wildcard once that is supported
+        if (this.jsonPaths.length === 0) {
+            return true;
+        }
+        for (const jsonPath of this.jsonPaths) {
+            if (jsonPath.equals(currentPath)) {
+                return true;
+            }
+        }
+        return false;
+    }
+}
+exports.default = StreamingJSONParser;
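
`StreamingJSONParser` wraps the parser so that the first array matching one of the configured `jsonpaths` is drained on every `write()`, which is what the batched JSON/GeoJSON loaders build on. A sketch (deep import path assumed; the `jsonpaths` option name is taken from the code above, and the expected outputs are unverified assumptions about chunk boundaries):

```ts
// Assumption: deep import into the file added above; not necessarily a supported entry point.
import StreamingJSONParser from '@loaders.gl/json/dist/lib/json-parser/streaming-json-parser';

const parser = new StreamingJSONParser({jsonpaths: ['$.features']});

// Feed the document in chunks; each write() returns the rows of the matched
// array that this chunk completed, then clears them from the parser.
const chunks = ['{"type": "FeatureCollection", "features": [{"id": 1}, ', '{"id": 2}]}'];
for (const chunk of chunks) {
  const rows = parser.write(chunk);
  console.log(rows); // expected: [{id: 1}] after the first chunk, [{id: 2}] after the second
}
parser.close();

console.log(parser.getStreamingJsonPathAsString()); // expected: '$.features'
console.log(parser.getPartialResult()); // the wrapper object, with its streamed array drained
```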
package/dist/lib/jsonpath/jsonpath.d.ts
@@ -0,0 +1,32 @@
+/**
+ * A parser for a minimal subset of the jsonpath standard
+ * Full JSON path parsers for JS exist but are quite large (bundle size)
+ *
+ * Supports
+ *
+ * `$.component.component.component`
+ */
+export default class JSONPath {
+    path: string[];
+    constructor(path?: JSONPath | string[] | string | null);
+    clone(): JSONPath;
+    toString(): string;
+    push(name: string): void;
+    pop(): string | undefined;
+    set(name: string): void;
+    equals(other: JSONPath): boolean;
+    /**
+     * Sets the value pointed at by path
+     * TODO - handle root path
+     * @param object
+     * @param value
+     */
+    setFieldAtPath(object: any, value: any): void;
+    /**
+     * Gets the value pointed at by path
+     * TODO - handle root path
+     * @param object
+     */
+    getFieldAtPath(object: any): any;
+}
+//# sourceMappingURL=jsonpath.d.ts.map
package/dist/lib/jsonpath/jsonpath.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"jsonpath.d.ts","sourceRoot":"","sources":["../../../src/lib/jsonpath/jsonpath.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AACH,MAAM,CAAC,OAAO,OAAO,QAAQ;IAC3B,IAAI,EAAE,MAAM,EAAE,CAAC;gBAEH,IAAI,GAAE,QAAQ,GAAG,MAAM,EAAE,GAAG,MAAM,GAAG,IAAW;IAuB5D,KAAK,IAAI,QAAQ;IAIjB,QAAQ,IAAI,MAAM;IAIlB,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAIxB,GAAG;IAIH,GAAG,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAIvB,MAAM,CAAC,KAAK,EAAE,QAAQ,GAAG,OAAO;IAchC;;;;;OAKG;IACH,cAAc,CAAC,MAAM,KAAA,EAAE,KAAK,KAAA;IAW5B;;;;OAIG;IACH,cAAc,CAAC,MAAM,KAAA;CAUtB"}
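
The jsonpath helper declared above is what the streaming parser uses to track its position in the document and to match the configured paths. A short sketch of its operations (deep import path assumed; outputs are expectations based on the declaration, not verified behavior):

```ts
// Assumption: deep import into the build output; not necessarily a supported entry point.
import JSONPath from '@loaders.gl/json/dist/lib/jsonpath/jsonpath';

// Paths are built up incrementally as a parser descends into the document.
const path = new JSONPath('$.features');
path.push('geometry');
console.log(path.toString()); // expected: '$.features.geometry'
path.set('properties');       // replace the last component, as the parser does on each key
console.log(path.toString()); // expected: '$.features.properties'
path.pop();

// equals() is what StreamingJSONParser uses to match the configured jsonpaths.
console.log(path.equals(new JSONPath('$.features'))); // expected: true
```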