@loaders.gl/json 4.1.0-alpha.10 → 4.1.0-alpha.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
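At a glance: every published artifact bumps its embedded version string from 4.1.0-alpha.10 to 4.1.0-alpha.11, package.json moves its sibling @loaders.gl dependencies and gitHead in lockstep, and the one source-level change is a previously missing Schema type import in src/lib/parsers/parse-json-in-batches.ts (visible in the final hunk and in the regenerated source maps).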
package/dist/dist.dev.js CHANGED
@@ -1720,7 +1720,7 @@ Char: ${this.c}`;
  }

  // src/json-loader.ts
- var VERSION = true ? "4.1.0-alpha.10" : "latest";
+ var VERSION = true ? "4.1.0-alpha.11" : "latest";
  var JSONLoader = {
    name: "JSON",
    id: "json",
@@ -1812,7 +1812,7 @@ Char: ${this.c}`;
  }

  // src/ndjson-loader.ts
- var VERSION2 = true ? "4.1.0-alpha.10" : "latest";
+ var VERSION2 = true ? "4.1.0-alpha.11" : "latest";
  var NDJSONLoader = {
    name: "NDJSON",
    id: "ndjson",
@@ -2881,7 +2881,7 @@ Char: ${this.c}`;
  }

  // src/geojson-loader.ts
- var VERSION3 = true ? "4.1.0-alpha.10" : "latest";
+ var VERSION3 = true ? "4.1.0-alpha.11" : "latest";
  var GeoJSONWorkerLoader = {
    name: "GeoJSON",
    id: "geojson",
@@ -1,6 +1,6 @@
import { geojsonToBinary } from '@loaders.gl/gis';
import { parseJSONInBatches } from "./lib/parsers/parse-json-in-batches.js";
- const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
+ const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const GeoJSONWorkerLoader = {
  name: 'GeoJSON',
  id: 'geojson',
@@ -2430,7 +2430,7 @@ Char: ${this.c}`;
  }

  // src/geojson-loader.ts
- var VERSION = true ? "4.1.0-alpha.10" : "latest";
+ var VERSION = true ? "4.1.0-alpha.11" : "latest";
  var GeoJSONWorkerLoader = {
    name: "GeoJSON",
    id: "geojson",
@@ -1,6 +1,6 @@
import { parseJSONSync } from "./lib/parsers/parse-json.js";
import { parseJSONInBatches } from "./lib/parsers/parse-json-in-batches.js";
- const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
+ const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const JSONLoader = {
  name: 'JSON',
  id: 'json',
@@ -1 +1 @@
- {"version":3,"file":"parse-json-in-batches.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-json-in-batches.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,oBAAoB,CAAC;AACnD,OAAO,KAAK,EAAC,iBAAiB,EAAE,aAAa,EAAE,SAAS,EAAC,MAAM,mBAAmB,CAAC;AAQnF,wBAAuB,kBAAkB,CACvC,mBAAmB,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACvE,OAAO,EAAE,iBAAiB,GACzB,aAAa,CAAC,UAAU,GAAG,aAAa,GAAG,SAAS,CAAC,CA2EvD;AAED,wBAAgB,iBAAiB,CAAC,KAAK,KAAA,EAAE,IAAI,KAAA,OAmB5C"}
+ {"version":3,"file":"parse-json-in-batches.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-json-in-batches.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAS,UAAU,EAAC,MAAM,oBAAoB,CAAC;AAC3D,OAAO,KAAK,EAAC,iBAAiB,EAAE,aAAa,EAAE,SAAS,EAAC,MAAM,mBAAmB,CAAC;AAQnF,wBAAuB,kBAAkB,CACvC,mBAAmB,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACvE,OAAO,EAAE,iBAAiB,GACzB,aAAa,CAAC,UAAU,GAAG,aAAa,GAAG,SAAS,CAAC,CA2EvD;AAED,wBAAgB,iBAAiB,CAAC,KAAK,KAAA,EAAE,IAAI,KAAA,OAmB5C"}
@@ -1 +1 @@
- {"version":3,"file":"parse-json-in-batches.js","names":["TableBatchBuilder","assert","makeTextDecoderIterator","StreamingJSONParser","JSONPath","parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","jsonpaths","json","isFirstChunk","schema","tableBatchBuilder","parser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","_options$json","initialBatch","shape","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch","rebuildJsonObject","topLevelObject","streamingPath","setFieldAtPath"],"sources":["../../../src/lib/parsers/parse-json-in-batches.ts"],"sourcesContent":["import type {TableBatch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions, MetadataBatch, JSONBatch} from '../../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from '../json-parser/streaming-json-parser';\nimport JSONPath from '../jsonpath/jsonpath';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<TableBatch | MetadataBatch | JSONBatch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // @ts-expect-error TODO fix Schema deduction\n const schema: Schema = null;\n const tableBatchBuilder = new TableBatchBuilder(schema, options);\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: TableBatch = {\n // Common fields\n shape: options?.json?.shape || 'array-row-table',\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: JSONBatch = {\n shape: 'json',\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n /** Data Just to avoid crashing? 
*/\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n\nexport function rebuildJsonObject(batch, data) {\n // Last batch will have this special type and will provide all the root object of the parsed file\n assert(batch.batchType === 'final-result');\n\n // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects\n if (batch.jsonpath === '$') {\n return data;\n }\n\n // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object\n if (batch.jsonpath && batch.jsonpath.length > 1) {\n const topLevelObject = batch.container;\n const streamingPath = new JSONPath(batch.jsonpath);\n streamingPath.setFieldAtPath(topLevelObject, data);\n return topLevelObject;\n }\n\n // No jsonpath, in this case nothing was streamed.\n return batch.container;\n}\n"],"mappings":"AAEA,SAAQA,iBAAiB,QAAO,oBAAoB;AACpD,SAAQC,MAAM,EAAEC,uBAAuB,QAAO,0BAA0B;AAAC,OAClEC,mBAAmB;AAAA,OACnBC,QAAQ;AAIf,OAAO,gBAAgBC,kBAAkBA,CACvCC,mBAAuE,EACvEC,OAA0B,EAC6B;EACvD,MAAMC,aAAa,GAAGN,uBAAuB,CAACI,mBAAmB,CAAC;EAElE,MAAM;IAACG;EAAQ,CAAC,GAAGF,OAAO;EAC1B,MAAM;IAACG;EAAS,CAAC,GAAGH,OAAO,CAACI,IAAI,IAAI,CAAC,CAAC;EAEtC,IAAIC,YAAqB,GAAG,IAAI;EAGhC,MAAMC,MAAc,GAAG,IAAI;EAC3B,MAAMC,iBAAiB,GAAG,IAAId,iBAAiB,CAACa,MAAM,EAAEN,OAAO,CAAC;EAEhE,MAAMQ,MAAM,GAAG,IAAIZ,mBAAmB,CAAC;IAACO;EAAS,CAAC,CAAC;EAEnD,WAAW,MAAMM,KAAK,IAAIR,aAAa,EAAE;IACvC,MAAMS,IAAI,GAAGF,MAAM,CAACG,KAAK,CAACF,KAAK,CAAC;IAEhC,MAAMG,QAAQ,GAAGF,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIL,MAAM,CAACM,4BAA4B,CAAC,CAAC;IAEzE,IAAIJ,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIR,YAAY,EAAE;MACnC,IAAIH,QAAQ,EAAE;QAAA,IAAAa,aAAA;QACZ,MAAMC,YAAwB,GAAG;UAE/BC,KAAK,EAAE,CAAAjB,OAAO,aAAPA,OAAO,wBAAAe,aAAA,GAAPf,OAAO,CAAEI,IAAI,cAAAW,aAAA,uBAAbA,aAAA,CAAeE,KAAK,KAAI,iBAAiB;UAChDC,SAAS,EAAE,gBAAgB;UAC3BC,IAAI,EAAE,EAAE;UACRN,MAAM,EAAE,CAAC;UACTO,SAAS,EAAE,CAAC;UAEZC,SAAS,EAAEb,MAAM,CAACc,gBAAgB,CAAC,CAAC;UACpCV;QACF,CAAC;QACD,MAAMI,YAAY;MACpB;MACAX,YAAY,GAAG,KAAK;IAEtB;IAGA,KAAK,MAAMkB,GAAG,IAAIb,IAAI,EAAE;MACtBH,iBAAiB,CAACiB,MAAM,CAACD,GAAG,CAAC;MAE7B,MAAME,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;QAACd;MAAQ,CAAC,CAAC;MACxD,IAAIa,KAAK,EAAE;QACT,MAAMA,KAAK;MACb;IACF;IAEAlB,iBAAiB,CAACoB,aAAa,CAAClB,KAAK,CAAC;IACtC,MAAMgB,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;MAACd;IAAQ,CAAC,CAAC;IACxD,IAAIa,KAAK,EAAE;MACT,MAAMA,KAAK;IACb;EACF;EAGA,MAAMb,QAAQ,GAAGJ,MAAM,CAACM,4BAA4B,CAAC,CAAC;EACtD,MAAMW,KAAK,GAAGlB,iBAAiB,CAACqB,aAAa,CAAC;IAAChB;EAAQ,CAAC,CAAC;EACzD,IAAIa,KAAK,EAAE;IACT,MAAMA,KAAK;EACb;EAEA,IAAIvB,QAAQ,EAAE;IACZ,MAAM2B,UAAqB,GAAG;MAC5BZ,KAAK,EAAE,MAAM;MACbC,SAAS,EAAE,cAAc;MACzBG,SAAS,EAAEb,MAAM,CAACc,gBAAgB,CAAC,CAAC;MACpCV,QAAQ,EAAEJ,MAAM,CAACM,4BAA4B,CAAC,CAAC;MAE/CK,IAAI,EAAE,EAAE;MACRN,MAAM,EAAE;IAEV,CAAC;IACD,MAAMgB,UAAU;EAClB;AACF;AAEA,OAAO,SAASC,iBAAiBA,CAACL,KAAK,EAAEN,IAAI,EAAE;EAE7CzB,MAAM,CAAC+B,KAAK,CAACP,SAAS,KAAK,cAAc,CAAC;EAG1C,IAAIO,KAAK,CAACb,QAAQ,KAAK,GAAG,EAAE;IAC1B,OAAOO,IAAI;EACb;EAGA,IAAIM,KAAK,CAACb,QAAQ,IAAIa,KAAK,CAACb,QAAQ,CAACC,MAAM,GAAG,CAAC,EAAE;IAC/C,MAAMkB,cAAc,GAAGN,KAAK,CAACJ,SAAS;IACtC,MAAMW,aAAa,GAAG,IAAInC,QAAQ,CAAC4B,KAAK,CAACb,QAAQ,CAAC;IAClDoB,aAAa,CAACC,cAAc,CAACF,cAAc,EAAEZ,IAAI,CAAC;IAClD,OAAOY,cAAc;EACvB;EAGA,OAAON,KAAK,CAACJ,SAAS;AACxB"}
+ {"version":3,"file":"parse-json-in-batches.js","names":["TableBatchBuilder","assert","makeTextDecoderIterator","StreamingJSONParser","JSONPath","parseJSONInBatches","binaryAsyncIterator","options","asyncIterator","metadata","jsonpaths","json","isFirstChunk","schema","tableBatchBuilder","parser","chunk","rows","write","jsonpath","length","getStreamingJsonPathAsString","_options$json","initialBatch","shape","batchType","data","bytesUsed","container","getPartialResult","row","addRow","batch","getFullBatch","chunkComplete","getFinalBatch","finalBatch","rebuildJsonObject","topLevelObject","streamingPath","setFieldAtPath"],"sources":["../../../src/lib/parsers/parse-json-in-batches.ts"],"sourcesContent":["import type {Schema, TableBatch} from '@loaders.gl/schema';\nimport type {JSONLoaderOptions, MetadataBatch, JSONBatch} from '../../json-loader';\nimport {TableBatchBuilder} from '@loaders.gl/schema';\nimport {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';\nimport StreamingJSONParser from '../json-parser/streaming-json-parser';\nimport JSONPath from '../jsonpath/jsonpath';\n\n// TODO - support batch size 0 = no batching/single batch?\n// eslint-disable-next-line max-statements, complexity\nexport async function* parseJSONInBatches(\n binaryAsyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: JSONLoaderOptions\n): AsyncIterable<TableBatch | MetadataBatch | JSONBatch> {\n const asyncIterator = makeTextDecoderIterator(binaryAsyncIterator);\n\n const {metadata} = options;\n const {jsonpaths} = options.json || {};\n\n let isFirstChunk: boolean = true;\n\n // @ts-expect-error TODO fix Schema deduction\n const schema: Schema = null;\n const tableBatchBuilder = new TableBatchBuilder(schema, options);\n\n const parser = new StreamingJSONParser({jsonpaths});\n\n for await (const chunk of asyncIterator) {\n const rows = parser.write(chunk);\n\n const jsonpath = rows.length > 0 && parser.getStreamingJsonPathAsString();\n\n if (rows.length > 0 && isFirstChunk) {\n if (metadata) {\n const initialBatch: TableBatch = {\n // Common fields\n shape: options?.json?.shape || 'array-row-table',\n batchType: 'partial-result',\n data: [],\n length: 0,\n bytesUsed: 0,\n // JSON additions\n container: parser.getPartialResult(),\n jsonpath\n };\n yield initialBatch;\n }\n isFirstChunk = false;\n // schema = deduceSchema(rows);\n }\n\n // Add the row\n for (const row of rows) {\n tableBatchBuilder.addRow(row);\n // If a batch has been completed, emit it\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n tableBatchBuilder.chunkComplete(chunk);\n const batch = tableBatchBuilder.getFullBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n }\n\n // yield final batch\n const jsonpath = parser.getStreamingJsonPathAsString();\n const batch = tableBatchBuilder.getFinalBatch({jsonpath});\n if (batch) {\n yield batch;\n }\n\n if (metadata) {\n const finalBatch: JSONBatch = {\n shape: 'json',\n batchType: 'final-result',\n container: parser.getPartialResult(),\n jsonpath: parser.getStreamingJsonPathAsString(),\n /** Data Just to avoid crashing? 
*/\n data: [],\n length: 0\n // schema: null\n };\n yield finalBatch;\n }\n}\n\nexport function rebuildJsonObject(batch, data) {\n // Last batch will have this special type and will provide all the root object of the parsed file\n assert(batch.batchType === 'final-result');\n\n // The streamed JSON data is a top level array (jsonpath = '$'), just return the array of row objects\n if (batch.jsonpath === '$') {\n return data;\n }\n\n // (jsonpath !== '$') The streamed data is not a top level array, so stitch it back in to the top-level object\n if (batch.jsonpath && batch.jsonpath.length > 1) {\n const topLevelObject = batch.container;\n const streamingPath = new JSONPath(batch.jsonpath);\n streamingPath.setFieldAtPath(topLevelObject, data);\n return topLevelObject;\n }\n\n // No jsonpath, in this case nothing was streamed.\n return batch.container;\n}\n"],"mappings":"AAEA,SAAQA,iBAAiB,QAAO,oBAAoB;AACpD,SAAQC,MAAM,EAAEC,uBAAuB,QAAO,0BAA0B;AAAC,OAClEC,mBAAmB;AAAA,OACnBC,QAAQ;AAIf,OAAO,gBAAgBC,kBAAkBA,CACvCC,mBAAuE,EACvEC,OAA0B,EAC6B;EACvD,MAAMC,aAAa,GAAGN,uBAAuB,CAACI,mBAAmB,CAAC;EAElE,MAAM;IAACG;EAAQ,CAAC,GAAGF,OAAO;EAC1B,MAAM;IAACG;EAAS,CAAC,GAAGH,OAAO,CAACI,IAAI,IAAI,CAAC,CAAC;EAEtC,IAAIC,YAAqB,GAAG,IAAI;EAGhC,MAAMC,MAAc,GAAG,IAAI;EAC3B,MAAMC,iBAAiB,GAAG,IAAId,iBAAiB,CAACa,MAAM,EAAEN,OAAO,CAAC;EAEhE,MAAMQ,MAAM,GAAG,IAAIZ,mBAAmB,CAAC;IAACO;EAAS,CAAC,CAAC;EAEnD,WAAW,MAAMM,KAAK,IAAIR,aAAa,EAAE;IACvC,MAAMS,IAAI,GAAGF,MAAM,CAACG,KAAK,CAACF,KAAK,CAAC;IAEhC,MAAMG,QAAQ,GAAGF,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIL,MAAM,CAACM,4BAA4B,CAAC,CAAC;IAEzE,IAAIJ,IAAI,CAACG,MAAM,GAAG,CAAC,IAAIR,YAAY,EAAE;MACnC,IAAIH,QAAQ,EAAE;QAAA,IAAAa,aAAA;QACZ,MAAMC,YAAwB,GAAG;UAE/BC,KAAK,EAAE,CAAAjB,OAAO,aAAPA,OAAO,wBAAAe,aAAA,GAAPf,OAAO,CAAEI,IAAI,cAAAW,aAAA,uBAAbA,aAAA,CAAeE,KAAK,KAAI,iBAAiB;UAChDC,SAAS,EAAE,gBAAgB;UAC3BC,IAAI,EAAE,EAAE;UACRN,MAAM,EAAE,CAAC;UACTO,SAAS,EAAE,CAAC;UAEZC,SAAS,EAAEb,MAAM,CAACc,gBAAgB,CAAC,CAAC;UACpCV;QACF,CAAC;QACD,MAAMI,YAAY;MACpB;MACAX,YAAY,GAAG,KAAK;IAEtB;IAGA,KAAK,MAAMkB,GAAG,IAAIb,IAAI,EAAE;MACtBH,iBAAiB,CAACiB,MAAM,CAACD,GAAG,CAAC;MAE7B,MAAME,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;QAACd;MAAQ,CAAC,CAAC;MACxD,IAAIa,KAAK,EAAE;QACT,MAAMA,KAAK;MACb;IACF;IAEAlB,iBAAiB,CAACoB,aAAa,CAAClB,KAAK,CAAC;IACtC,MAAMgB,KAAK,GAAGlB,iBAAiB,CAACmB,YAAY,CAAC;MAACd;IAAQ,CAAC,CAAC;IACxD,IAAIa,KAAK,EAAE;MACT,MAAMA,KAAK;IACb;EACF;EAGA,MAAMb,QAAQ,GAAGJ,MAAM,CAACM,4BAA4B,CAAC,CAAC;EACtD,MAAMW,KAAK,GAAGlB,iBAAiB,CAACqB,aAAa,CAAC;IAAChB;EAAQ,CAAC,CAAC;EACzD,IAAIa,KAAK,EAAE;IACT,MAAMA,KAAK;EACb;EAEA,IAAIvB,QAAQ,EAAE;IACZ,MAAM2B,UAAqB,GAAG;MAC5BZ,KAAK,EAAE,MAAM;MACbC,SAAS,EAAE,cAAc;MACzBG,SAAS,EAAEb,MAAM,CAACc,gBAAgB,CAAC,CAAC;MACpCV,QAAQ,EAAEJ,MAAM,CAACM,4BAA4B,CAAC,CAAC;MAE/CK,IAAI,EAAE,EAAE;MACRN,MAAM,EAAE;IAEV,CAAC;IACD,MAAMgB,UAAU;EAClB;AACF;AAEA,OAAO,SAASC,iBAAiBA,CAACL,KAAK,EAAEN,IAAI,EAAE;EAE7CzB,MAAM,CAAC+B,KAAK,CAACP,SAAS,KAAK,cAAc,CAAC;EAG1C,IAAIO,KAAK,CAACb,QAAQ,KAAK,GAAG,EAAE;IAC1B,OAAOO,IAAI;EACb;EAGA,IAAIM,KAAK,CAACb,QAAQ,IAAIa,KAAK,CAACb,QAAQ,CAACC,MAAM,GAAG,CAAC,EAAE;IAC/C,MAAMkB,cAAc,GAAGN,KAAK,CAACJ,SAAS;IACtC,MAAMW,aAAa,GAAG,IAAInC,QAAQ,CAAC4B,KAAK,CAACb,QAAQ,CAAC;IAClDoB,aAAa,CAACC,cAAc,CAACF,cAAc,EAAEZ,IAAI,CAAC;IAClD,OAAOY,cAAc;EACvB;EAGA,OAAON,KAAK,CAACJ,SAAS;AACxB"}
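Both source-map hunks encode a single underlying change: sourcesContent now begins with import type {Schema, TableBatch} instead of import type {TableBatch}, and the leading segments of the mappings string shift to account for the longer import line; nothing else in the maps differs.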
@@ -1,6 +1,6 @@
import { parseNDJSONSync } from "./lib/parsers/parse-ndjson.js";
import { parseNDJSONInBatches } from "./lib/parsers/parse-ndjson-in-batches.js";
- const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
+ const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const NDJSONLoader = {
  name: 'NDJSON',
  id: 'ndjson',
@@ -1,6 +1,6 @@
import { parseNDJSONSync } from "./lib/parsers/parse-ndjson.js";
import { parseNDJSONInBatches } from "./lib/parsers/parse-ndjson-in-batches.js";
- const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
+ const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const NDJSONLoader = {
  name: 'NDJSON',
  id: 'ndjson',
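For orientation, a minimal usage sketch (not part of the diff) showing how the loaders whose version strings changed above are typically driven; it assumes @loaders.gl/core is installed alongside this package, and the URL is a placeholder:

import {parseInBatches} from '@loaders.gl/core';
import {NDJSONLoader} from '@loaders.gl/json';

// Streams the response and yields table batches; each batch exposes a
// `length` row count, so this counts rows without buffering the whole file.
async function countRows(url: string): Promise<number> {
  let rows = 0;
  const batches = await parseInBatches(fetch(url), NDJSONLoader);
  for await (const batch of batches) {
    rows += batch.length;
  }
  return rows;
}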
package/package.json CHANGED
@@ -1,6 +1,6 @@
{
  "name": "@loaders.gl/json",
- "version": "4.1.0-alpha.10",
+ "version": "4.1.0-alpha.11",
  "description": "Framework-independent loader for JSON and streaming JSON formats",
  "license": "MIT",
  "type": "module",
@@ -43,9 +43,9 @@
    "build-worker": "esbuild src/workers/geojson-worker.ts --bundle --outfile=dist/geojson-worker.js --define:__VERSION__=\\\"$npm_package_version\\\""
  },
  "dependencies": {
- "@loaders.gl/gis": "4.1.0-alpha.10",
- "@loaders.gl/loader-utils": "4.1.0-alpha.10",
- "@loaders.gl/schema": "4.1.0-alpha.10"
+ "@loaders.gl/gis": "4.1.0-alpha.11",
+ "@loaders.gl/loader-utils": "4.1.0-alpha.11",
+ "@loaders.gl/schema": "4.1.0-alpha.11"
  },
- "gitHead": "19f43c2d90d8b50860c3f8e487429779a386287d"
+ "gitHead": "5d3e23bf93762b48c8c1d6d926ede7a97fe43ab0"
}
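Note that the three @loaders.gl dependencies are pinned to the exact matching prerelease, reflecting the monorepo's lockstep versioning; gitHead simply records the commit the package was published from.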
@@ -1,4 +1,4 @@
- import type {TableBatch} from '@loaders.gl/schema';
+ import type {Schema, TableBatch} from '@loaders.gl/schema';
import type {JSONLoaderOptions, MetadataBatch, JSONBatch} from '../../json-loader';
import {TableBatchBuilder} from '@loaders.gl/schema';
import {assert, makeTextDecoderIterator} from '@loaders.gl/loader-utils';
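This last hunk is the release's only hand-written change: Schema was already used as a type annotation in parse-json-in-batches.ts (see the sourcesContent above) but was never imported, so it resolved to an undeclared name there, masked by the adjacent suppression comment. A small sketch of what the fix addresses, reconstructed from the hunk rather than copied from the repository:

// After (4.1.0-alpha.11): Schema is imported, so the annotation resolves.
import type {Schema} from '@loaders.gl/schema';

// Before (4.1.0-alpha.10), only TableBatch was imported, so `Schema` below
// was an undeclared name hidden by the suppression. The @ts-expect-error in
// the real source remains needed either way, since `null` is not assignable
// to `Schema` under strict null checks.
// @ts-expect-error TODO fix Schema deduction
const schema: Schema = null;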