@loaders.gl/core 4.0.1 → 4.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/dist.dev.js CHANGED
@@ -125,7 +125,7 @@ var __exports__ = (() => {
  }
 
  // ../worker-utils/src/lib/env-utils/version.ts
- var NPM_TAG = "beta";
+ var NPM_TAG = "latest";
  function getVersion() {
  if (!globalThis._loadersgl_?.version) {
  globalThis._loadersgl_ = globalThis._loadersgl_ || {};
@@ -591,7 +591,11 @@ var __exports__ = (() => {
  url = options.workerUrl;
  }
  if (options._workerType === "test") {
- url = `modules/${worker.module}/dist/${workerFile}`;
+ if (isBrowser2) {
+ url = `modules/${worker.module}/dist/${workerFile}`;
+ } else {
+ url = `modules/${worker.module}/src/workers/${worker.id}-worker-node.ts`;
+ }
  }
  if (!url) {
  let version = worker.version;
@@ -2993,6 +2997,52 @@ var __exports__ = (() => {
  throw new Error(`${loader.name} loader: 'parseSync' not supported by this loader, use 'parse' instead. ${context.url || ""}`);
  }
 
+ // ../schema/src/lib/table/simple-table/table-accessors.ts
+ function isTable(table) {
+ const shape = typeof table === "object" && table?.shape;
+ switch (shape) {
+ case "array-row-table":
+ case "object-row-table":
+ return Array.isArray(table.data);
+ case "geojson-table":
+ return Array.isArray(table.features);
+ case "columnar-table":
+ return table.data && typeof table.data === "object";
+ case "arrow-table":
+ return Boolean(table?.data?.numRows !== void 0);
+ default:
+ return false;
+ }
+ }
+ function getTableLength(table) {
+ switch (table.shape) {
+ case "array-row-table":
+ case "object-row-table":
+ return table.data.length;
+ case "geojson-table":
+ return table.features.length;
+ case "arrow-table":
+ const arrowTable = table.data;
+ return arrowTable.numRows;
+ case "columnar-table":
+ for (const column of Object.values(table.data)) {
+ return column.length || 0;
+ }
+ return 0;
+ default:
+ throw new Error("table");
+ }
+ }
+
+ // ../schema/src/lib/table/simple-table/make-table-from-batches.ts
+ function makeBatchFromTable(table) {
+ return {
+ ...table,
+ length: getTableLength(table),
+ batchType: "data"
+ };
+ }
+
  // src/lib/api/parse-in-batches.ts
  async function parseInBatches(data, loaders, options, context) {
  const loaderArray = Array.isArray(loaders) ? loaders : void 0;
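Illustrative sketch (not part of the package diff): the helpers bundled in above are the table accessors from @loaders.gl/schema. A minimal usage sketch of how they classify a parsed result, assuming isTable and getTableLength are importable from @loaders.gl/schema (the index.cjs hunk below requires that package at runtime); the sample table literal is hypothetical.

import {isTable, getTableLength} from '@loaders.gl/schema';

// Hypothetical object-row table, matching one of the shapes handled above.
const table = {
  shape: 'object-row-table' as const,
  data: [{id: 1}, {id: 2}, {id: 3}]
};

isTable(table);        // true: the shape is recognized and `data` is an array
getTableLength(table); // 3: row count for row-oriented shapes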
@@ -3043,22 +3093,26 @@ var __exports__ = (() => {
  if (loader.parseInBatches) {
  return loader.parseInBatches(transformedIterator, options, context);
  }
- async function* parseChunkInBatches() {
- const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
- const parsedData = await parse(arrayBuffer, loader, {
- ...options,
- mimeType: loader.mimeTypes[0]
- }, context);
- const batch = {
- mimeType: loader.mimeTypes[0],
- shape: Array.isArray(parsedData) ? "row-table" : "unknown",
- batchType: "data",
- data: parsedData,
- length: Array.isArray(parsedData) ? parsedData.length : 1
- };
- yield batch;
- }
- return parseChunkInBatches();
+ return parseChunkInBatches(transformedIterator, loader, options, context);
+ }
+ async function* parseChunkInBatches(transformedIterator, loader, options, context) {
+ const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
+ const parsedData = await parse(arrayBuffer, loader, {
+ ...options,
+ mimeType: loader.mimeTypes[0]
+ }, context);
+ const batch = convertDataToBatch(parsedData, loader);
+ yield batch;
+ }
+ function convertDataToBatch(parsedData, loader) {
+ const batch = isTable(parsedData) ? makeBatchFromTable(parsedData) : {
+ shape: "unknown",
+ batchType: "data",
+ data: parsedData,
+ length: Array.isArray(parsedData) ? parsedData.length : 1
+ };
+ batch.mimeType = loader.mimeTypes[0];
+ return batch;
  }
  async function applyInputTransforms(inputIterator, transforms = []) {
  let iteratorChain = inputIterator;
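Illustrative sketch (not part of the package diff): after the refactor above, loaders that only implement `parse` fall back to parseChunkInBatches, which yields a single batch built by convertDataToBatch, so a parsed table keeps its own shape and row count instead of the previous 'row-table'/'unknown' guess. A rough consumption sketch with a hypothetical atomic loader:

import {parseInBatches} from '@loaders.gl/core';
import type {LoaderWithParser} from '@loaders.gl/loader-utils';

// Hypothetical loader exposing `parse` only, which forces the fallback path.
const RowTableLoader = {
  name: 'row-table', id: 'row-table', module: 'row-table', version: '0.0.0',
  extensions: ['rows'], mimeTypes: ['application/x-rows'], options: {},
  parse: async () => ({shape: 'object-row-table', data: [{a: 1}, {a: 2}]})
} as unknown as LoaderWithParser;

async function run(data: ArrayBuffer) {
  for await (const batch of await parseInBatches(data, RowTableLoader)) {
    // A single batch: shape 'object-row-table', length 2, batchType 'data',
    // with mimeType 'application/x-rows' stamped from loader.mimeTypes[0].
    console.log(batch);
  }
}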
@@ -3140,7 +3194,7 @@ var __exports__ = (() => {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encodeTable(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
@@ -3210,7 +3264,7 @@ var __exports__ = (() => {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encode(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
package/dist/index.cjs CHANGED
@@ -1079,6 +1079,7 @@ function parseWithLoaderSync(loader, data, options, context) {
  }
 
  // src/lib/api/parse-in-batches.ts
+ var import_schema = require("@loaders.gl/schema");
  var import_loader_utils10 = require("@loaders.gl/loader-utils");
  async function parseInBatches(data, loaders, options, context) {
  const loaderArray = Array.isArray(loaders) ? loaders : void 0;
@@ -1129,25 +1130,29 @@ async function parseToOutputIterator(loader, data, options, context) {
  if (loader.parseInBatches) {
  return loader.parseInBatches(transformedIterator, options, context);
  }
- async function* parseChunkInBatches() {
- const arrayBuffer = await (0, import_loader_utils10.concatenateArrayBuffersAsync)(transformedIterator);
- const parsedData = await parse(
- arrayBuffer,
- loader,
- // TODO - Hack: supply loaders MIME type to ensure we match it
- { ...options, mimeType: loader.mimeTypes[0] },
- context
- );
- const batch = {
- mimeType: loader.mimeTypes[0],
- shape: Array.isArray(parsedData) ? "row-table" : "unknown",
- batchType: "data",
- data: parsedData,
- length: Array.isArray(parsedData) ? parsedData.length : 1
- };
- yield batch;
- }
- return parseChunkInBatches();
+ return parseChunkInBatches(transformedIterator, loader, options, context);
+ }
+ async function* parseChunkInBatches(transformedIterator, loader, options, context) {
+ const arrayBuffer = await (0, import_loader_utils10.concatenateArrayBuffersAsync)(transformedIterator);
+ const parsedData = await parse(
+ arrayBuffer,
+ loader,
+ // TODO - Hack: supply loaders MIME type to ensure we match it
+ { ...options, mimeType: loader.mimeTypes[0] },
+ context
+ );
+ const batch = convertDataToBatch(parsedData, loader);
+ yield batch;
+ }
+ function convertDataToBatch(parsedData, loader) {
+ const batch = (0, import_schema.isTable)(parsedData) ? (0, import_schema.makeBatchFromTable)(parsedData) : {
+ shape: "unknown",
+ batchType: "data",
+ data: parsedData,
+ length: Array.isArray(parsedData) ? parsedData.length : 1
+ };
+ batch.mimeType = loader.mimeTypes[0];
+ return batch;
  }
  async function applyInputTransforms(inputIterator, transforms = []) {
  let iteratorChain = inputIterator;
@@ -1232,7 +1237,7 @@ async function encodeTableAsText(data, writer, options) {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encodeTable(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
@@ -1304,7 +1309,7 @@ async function encodeText(data, writer, options) {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encode(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
@@ -1,6 +1,6 @@
- import { Writer, WriterOptionsType } from '@loaders.gl/loader-utils';
+ import { WriterOptionsType, WriterWithEncoder } from '@loaders.gl/loader-utils';
  import { Table } from '@loaders.gl/schema';
- export declare function encodeTable<WriterT extends Writer = Writer>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): Promise<ArrayBuffer>;
- export declare function encodeTableAsText<WriterT extends Writer = Writer>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): Promise<string>;
- export declare function encodeTableInBatches<WriterT extends Writer = Writer>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): AsyncIterable<ArrayBuffer>;
+ export declare function encodeTable<WriterT extends WriterWithEncoder = WriterWithEncoder>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): Promise<ArrayBuffer>;
+ export declare function encodeTableAsText<WriterT extends WriterWithEncoder = WriterWithEncoder>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): Promise<string>;
+ export declare function encodeTableInBatches<WriterT extends WriterWithEncoder = WriterWithEncoder>(data: Table, writer: WriterT, options?: WriterOptionsType<WriterT>): AsyncIterable<ArrayBuffer>;
  //# sourceMappingURL=encode-table.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"encode-table.d.ts","sourceRoot":"","sources":["../../../src/lib/api/encode-table.ts"],"names":[],"mappings":"AAKA,OAAO,EAA0B,MAAM,EAAE,iBAAiB,EAAC,MAAM,0BAA0B,CAAC;AAC5F,OAAO,EAAC,KAAK,EAAC,MAAM,oBAAoB,CAAC;AAEzC,wBAAsB,WAAW,CAAC,OAAO,SAAS,MAAM,GAAG,MAAM,EAC/D,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,OAAO,CAAC,WAAW,CAAC,CAwBtB;AAED,wBAAsB,iBAAiB,CAAC,OAAO,SAAS,MAAM,GAAG,MAAM,EACrE,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,OAAO,CAAC,MAAM,CAAC,CAUjB;AAED,wBAAgB,oBAAoB,CAAC,OAAO,SAAS,MAAM,GAAG,MAAM,EAClE,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,aAAa,CAAC,WAAW,CAAC,CAQ5B"}
+ {"version":3,"file":"encode-table.d.ts","sourceRoot":"","sources":["../../../src/lib/api/encode-table.ts"],"names":[],"mappings":"AAKA,OAAO,EAEL,iBAAiB,EACjB,iBAAiB,EAClB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAC,KAAK,EAAC,MAAM,oBAAoB,CAAC;AAEzC,wBAAsB,WAAW,CAAC,OAAO,SAAS,iBAAiB,GAAG,iBAAiB,EACrF,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,OAAO,CAAC,WAAW,CAAC,CAwBtB;AAED,wBAAsB,iBAAiB,CAAC,OAAO,SAAS,iBAAiB,GAAG,iBAAiB,EAC3F,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,OAAO,CAAC,MAAM,CAAC,CAUjB;AAED,wBAAgB,oBAAoB,CAAC,OAAO,SAAS,iBAAiB,GAAG,iBAAiB,EACxF,IAAI,EAAE,KAAK,EACX,MAAM,EAAE,OAAO,EACf,OAAO,CAAC,EAAE,iBAAiB,CAAC,OAAO,CAAC,GACnC,aAAa,CAAC,WAAW,CAAC,CAQ5B"}
@@ -21,7 +21,7 @@ export async function encodeTableAsText(data, writer, options) {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encodeTable(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
@@ -1 +1 @@
- {"version":3,"file":"encode-table.js","names":["concatenateArrayBuffers","encodeTable","data","writer","options","encode","encodeText","text","TextEncoder","encodeInBatches","batches","encodeTableInBatches","chunks","batch","push","Error","encodeTableAsText","arrayBuffer","TextDecoder","decode","name","dataIterator","getIterator","start","end","length"],"sources":["../../../src/lib/api/encode-table.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n// Copyright 2022 Foursquare Labs, Inc\n\n/* global TextEncoder, TextDecoder */\nimport {concatenateArrayBuffers, Writer, WriterOptionsType} from '@loaders.gl/loader-utils';\nimport {Table} from '@loaders.gl/schema';\n\nexport async function encodeTable<WriterT extends Writer = Writer>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): Promise<ArrayBuffer> {\n if (writer.encode) {\n return await writer.encode(data, options);\n }\n\n if (writer.encodeText) {\n const text = await writer.encodeText(data, options);\n return new TextEncoder().encode(text);\n }\n\n if (writer.encodeInBatches) {\n // Create an iterator representing the data\n // TODO - Assumes this is a table\n const batches = encodeTableInBatches(data, writer, options);\n\n // Concatenate the output\n const chunks: ArrayBuffer[] = [];\n for await (const batch of batches) {\n chunks.push(batch);\n }\n return concatenateArrayBuffers(...chunks);\n }\n\n throw new Error('Writer could not encode data');\n}\n\nexport async function encodeTableAsText<WriterT extends Writer = Writer>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): Promise<string> {\n if (writer.text && writer.encodeText) {\n return await writer.encodeText(data, options);\n }\n\n if (writer.text && (writer.encode || writer.encodeInBatches)) {\n const arrayBuffer = await encodeTable(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\nexport function encodeTableInBatches<WriterT extends Writer = Writer>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): AsyncIterable<ArrayBuffer> {\n if (writer.encodeInBatches) {\n const dataIterator = getIterator(data);\n // @ts-expect-error\n return writer.encodeInBatches(dataIterator, options);\n }\n // TODO -fall back to atomic encode?\n throw new Error('Writer could not encode data in batches');\n}\n\nfunction getIterator(data: any): Iterable<{start: number; end: number}> {\n const dataIterator = [{...data, start: 0, end: data.length}];\n return 
dataIterator;\n}\n"],"mappings":"AAKA,SAAQA,uBAAuB,QAAkC,0BAA0B;AAG3F,OAAO,eAAeC,WAAWA,CAC/BC,IAAW,EACXC,MAAe,EACfC,OAAoC,EACd;EACtB,IAAID,MAAM,CAACE,MAAM,EAAE;IACjB,OAAO,MAAMF,MAAM,CAACE,MAAM,CAACH,IAAI,EAAEE,OAAO,CAAC;EAC3C;EAEA,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,MAAMC,IAAI,GAAG,MAAMJ,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;IACnD,OAAO,IAAII,WAAW,CAAC,CAAC,CAACH,MAAM,CAACE,IAAI,CAAC;EACvC;EAEA,IAAIJ,MAAM,CAACM,eAAe,EAAE;IAG1B,MAAMC,OAAO,GAAGC,oBAAoB,CAACT,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAG3D,MAAMQ,MAAqB,GAAG,EAAE;IAChC,WAAW,MAAMC,KAAK,IAAIH,OAAO,EAAE;MACjCE,MAAM,CAACE,IAAI,CAACD,KAAK,CAAC;IACpB;IACA,OAAOb,uBAAuB,CAAC,GAAGY,MAAM,CAAC;EAC3C;EAEA,MAAM,IAAIG,KAAK,CAAC,8BAA8B,CAAC;AACjD;AAEA,OAAO,eAAeC,iBAAiBA,CACrCd,IAAW,EACXC,MAAe,EACfC,OAAoC,EACnB;EACjB,IAAID,MAAM,CAACI,IAAI,IAAIJ,MAAM,CAACG,UAAU,EAAE;IACpC,OAAO,MAAMH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EAC/C;EAEA,IAAID,MAAM,CAACI,IAAI,KAAKJ,MAAM,CAACE,MAAM,IAAIF,MAAM,CAACM,eAAe,CAAC,EAAE;IAC5D,MAAMQ,WAAW,GAAG,MAAMhB,WAAW,CAACC,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAC5D,OAAO,IAAIc,WAAW,CAAC,CAAC,CAACC,MAAM,CAACF,WAAW,CAAC;EAC9C;EACA,MAAM,IAAIF,KAAK,CAAE,UAASZ,MAAM,CAACiB,IAAK,gCAA+B,CAAC;AACxE;AAEA,OAAO,SAAST,oBAAoBA,CAClCT,IAAW,EACXC,MAAe,EACfC,OAAoC,EACR;EAC5B,IAAID,MAAM,CAACM,eAAe,EAAE;IAC1B,MAAMY,YAAY,GAAGC,WAAW,CAACpB,IAAI,CAAC;IAEtC,OAAOC,MAAM,CAACM,eAAe,CAACY,YAAY,EAAEjB,OAAO,CAAC;EACtD;EAEA,MAAM,IAAIW,KAAK,CAAC,yCAAyC,CAAC;AAC5D;AAEA,SAASO,WAAWA,CAACpB,IAAS,EAA0C;EACtE,MAAMmB,YAAY,GAAG,CAAC;IAAC,GAAGnB,IAAI;IAAEqB,KAAK,EAAE,CAAC;IAAEC,GAAG,EAAEtB,IAAI,CAACuB;EAAM,CAAC,CAAC;EAC5D,OAAOJ,YAAY;AACrB"}
+ {"version":3,"file":"encode-table.js","names":["concatenateArrayBuffers","encodeTable","data","writer","options","encode","encodeText","text","TextEncoder","encodeInBatches","batches","encodeTableInBatches","chunks","batch","push","Error","encodeTableAsText","arrayBuffer","TextDecoder","decode","name","dataIterator","getIterator","start","end","length"],"sources":["../../../src/lib/api/encode-table.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n// Copyright 2022 Foursquare Labs, Inc\n\n/* global TextEncoder, TextDecoder */\nimport {\n concatenateArrayBuffers,\n WriterOptionsType,\n WriterWithEncoder\n} from '@loaders.gl/loader-utils';\nimport {Table} from '@loaders.gl/schema';\n\nexport async function encodeTable<WriterT extends WriterWithEncoder = WriterWithEncoder>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): Promise<ArrayBuffer> {\n if (writer.encode) {\n return await writer.encode(data, options);\n }\n\n if (writer.encodeText) {\n const text = await writer.encodeText(data, options);\n return new TextEncoder().encode(text);\n }\n\n if (writer.encodeInBatches) {\n // Create an iterator representing the data\n // TODO - Assumes this is a table\n const batches = encodeTableInBatches(data, writer, options);\n\n // Concatenate the output\n const chunks: ArrayBuffer[] = [];\n for await (const batch of batches) {\n chunks.push(batch);\n }\n return concatenateArrayBuffers(...chunks);\n }\n\n throw new Error('Writer could not encode data');\n}\n\nexport async function encodeTableAsText<WriterT extends WriterWithEncoder = WriterWithEncoder>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): Promise<string> {\n if (writer.text && writer.encodeText) {\n return await writer.encodeText(data, options);\n }\n\n if (writer.text) {\n const arrayBuffer = await encodeTable(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\nexport function encodeTableInBatches<WriterT extends WriterWithEncoder = WriterWithEncoder>(\n data: Table,\n writer: WriterT,\n options?: WriterOptionsType<WriterT>\n): AsyncIterable<ArrayBuffer> {\n if (writer.encodeInBatches) {\n const dataIterator = getIterator(data);\n // @ts-expect-error\n return writer.encodeInBatches(dataIterator, options);\n }\n // TODO -fall back to atomic encode?\n throw new Error('Writer could not encode data in batches');\n}\n\nfunction getIterator(data: any): Iterable<{start: number; end: number}> {\n const dataIterator = [{...data, start: 0, end: data.length}];\n return 
dataIterator;\n}\n"],"mappings":"AAKA,SACEA,uBAAuB,QAGlB,0BAA0B;AAGjC,OAAO,eAAeC,WAAWA,CAC/BC,IAAW,EACXC,MAAe,EACfC,OAAoC,EACd;EACtB,IAAID,MAAM,CAACE,MAAM,EAAE;IACjB,OAAO,MAAMF,MAAM,CAACE,MAAM,CAACH,IAAI,EAAEE,OAAO,CAAC;EAC3C;EAEA,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,MAAMC,IAAI,GAAG,MAAMJ,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;IACnD,OAAO,IAAII,WAAW,CAAC,CAAC,CAACH,MAAM,CAACE,IAAI,CAAC;EACvC;EAEA,IAAIJ,MAAM,CAACM,eAAe,EAAE;IAG1B,MAAMC,OAAO,GAAGC,oBAAoB,CAACT,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAG3D,MAAMQ,MAAqB,GAAG,EAAE;IAChC,WAAW,MAAMC,KAAK,IAAIH,OAAO,EAAE;MACjCE,MAAM,CAACE,IAAI,CAACD,KAAK,CAAC;IACpB;IACA,OAAOb,uBAAuB,CAAC,GAAGY,MAAM,CAAC;EAC3C;EAEA,MAAM,IAAIG,KAAK,CAAC,8BAA8B,CAAC;AACjD;AAEA,OAAO,eAAeC,iBAAiBA,CACrCd,IAAW,EACXC,MAAe,EACfC,OAAoC,EACnB;EACjB,IAAID,MAAM,CAACI,IAAI,IAAIJ,MAAM,CAACG,UAAU,EAAE;IACpC,OAAO,MAAMH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EAC/C;EAEA,IAAID,MAAM,CAACI,IAAI,EAAE;IACf,MAAMU,WAAW,GAAG,MAAMhB,WAAW,CAACC,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAC5D,OAAO,IAAIc,WAAW,CAAC,CAAC,CAACC,MAAM,CAACF,WAAW,CAAC;EAC9C;EACA,MAAM,IAAIF,KAAK,CAAE,UAASZ,MAAM,CAACiB,IAAK,gCAA+B,CAAC;AACxE;AAEA,OAAO,SAAST,oBAAoBA,CAClCT,IAAW,EACXC,MAAe,EACfC,OAAoC,EACR;EAC5B,IAAID,MAAM,CAACM,eAAe,EAAE;IAC1B,MAAMY,YAAY,GAAGC,WAAW,CAACpB,IAAI,CAAC;IAEtC,OAAOC,MAAM,CAACM,eAAe,CAACY,YAAY,EAAEjB,OAAO,CAAC;EACtD;EAEA,MAAM,IAAIW,KAAK,CAAC,yCAAyC,CAAC;AAC5D;AAEA,SAASO,WAAWA,CAACpB,IAAS,EAA0C;EACtE,MAAMmB,YAAY,GAAG,CAAC;IAAC,GAAGnB,IAAI;IAAEqB,KAAK,EAAE,CAAC;IAAEC,GAAG,EAAEtB,IAAI,CAACuB;EAAM,CAAC,CAAC;EAC5D,OAAOJ,YAAY;AACrB"}
@@ -1,33 +1,33 @@
- import { Writer, WriterOptions } from '@loaders.gl/loader-utils';
+ import { WriterOptions, WriterWithEncoder } from '@loaders.gl/loader-utils';
  /**
  * Encode loaded data into a binary ArrayBuffer using the specified Writer.
  */
- export declare function encode(data: unknown, writer: Writer, options?: WriterOptions): Promise<ArrayBuffer>;
+ export declare function encode(data: unknown, writer: WriterWithEncoder, options?: WriterOptions): Promise<ArrayBuffer>;
  /**
  * Encode loaded data into a binary ArrayBuffer using the specified Writer.
  */
- export declare function encodeSync(data: unknown, writer: Writer, options?: WriterOptions): ArrayBuffer;
+ export declare function encodeSync(data: unknown, writer: WriterWithEncoder, options?: WriterOptions): ArrayBuffer;
  /**
  * Encode loaded data to text using the specified Writer
  * @note This is a convenience function not intended for production use on large input data.
  * It is not optimized for performance. Data maybe converted from text to binary and back.
  * @throws if the writer does not generate text output
  */
- export declare function encodeText(data: unknown, writer: Writer, options?: WriterOptions): Promise<string>;
+ export declare function encodeText(data: unknown, writer: WriterWithEncoder, options?: WriterOptions): Promise<string>;
  /**
  * Encode loaded data to text using the specified Writer
  * @note This is a convenience function not intended for production use on large input data.
  * It is not optimized for performance. Data maybe converted from text to binary and back.
  * @throws if the writer does not generate text output
  */
- export declare function encodeTextSync(data: unknown, writer: Writer, options?: WriterOptions): string;
+ export declare function encodeTextSync(data: unknown, writer: WriterWithEncoder, options?: WriterOptions): string;
  /**
  * Encode loaded data into a sequence (iterator) of binary ArrayBuffers using the specified Writer.
  */
- export declare function encodeInBatches(data: unknown, writer: Writer, options?: WriterOptions): AsyncIterable<ArrayBuffer>;
+ export declare function encodeInBatches(data: unknown, writer: WriterWithEncoder, options?: WriterOptions): AsyncIterable<ArrayBuffer>;
  /**
  * Encode data stored in a file (on disk) to another file.
  * @note Node.js only. This function enables using command-line converters as "writers".
  */
- export declare function encodeURLtoURL(inputUrl: string, outputUrl: string, writer: Writer, options?: WriterOptions): Promise<string>;
+ export declare function encodeURLtoURL(inputUrl: string, outputUrl: string, writer: WriterWithEncoder, options?: WriterOptions): Promise<string>;
  //# sourceMappingURL=encode.d.ts.map
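Illustrative sketch (not part of the package diff): together with the runtime hunks that relax `if (writer.text && (writer.encode || writer.encodeInBatches))` to `if (writer.text)`, these signatures now take a WriterWithEncoder, and a text-capable writer without encodeText is routed through the binary encode() path and decoded. A sketch of the implied flow with a hypothetical writer object, assuming encodeText is exported from @loaders.gl/core:

import {encodeText} from '@loaders.gl/core';
import type {WriterWithEncoder} from '@loaders.gl/loader-utils';

// Hypothetical text writer that only provides the binary `encode` hook.
const UpperCaseWriter = {
  name: 'uppercase', id: 'uppercase', module: 'uppercase', version: '0.0.0',
  extensions: ['txt'], mimeTypes: ['text/plain'], options: {},
  text: true,
  encode: async (data: unknown) =>
    new TextEncoder().encode(String(data).toUpperCase()).buffer
} as unknown as WriterWithEncoder;

async function run() {
  // No writer.encodeText, but writer.text is true, so encodeText falls through
  // to encode() and decodes the resulting ArrayBuffer back to a string.
  const text = await encodeText('hello', UpperCaseWriter); // 'HELLO'
  return text;
}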
@@ -1 +1 @@
- {"version":3,"file":"encode.d.ts","sourceRoot":"","sources":["../../../src/lib/api/encode.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,MAAM,EAAE,aAAa,EAAsB,MAAM,0BAA0B,CAAC;AAOpF;;GAEG;AACH,wBAAsB,MAAM,CAC1B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,WAAW,CAAC,CAuDtB;AAED;;GAEG;AACH,wBAAgB,UAAU,CAAC,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa,GAAG,WAAW,CAK9F;AAED;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,MAAM,CAAC,CAWjB;AAED;;;;;GAKG;AACH,wBAAgB,cAAc,CAAC,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,aAAa,GAAG,MAAM,CAW7F;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,WAAW,CAAC,CAQ5B;AAED;;;GAGG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,MAAM,EACd,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,MAAM,CAAC,CAQjB"}
+ {"version":3,"file":"encode.d.ts","sourceRoot":"","sources":["../../../src/lib/api/encode.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,aAAa,EAAE,iBAAiB,EAAsB,MAAM,0BAA0B,CAAC;AAO/F;;GAEG;AACH,wBAAsB,MAAM,CAC1B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,WAAW,CAAC,CAuDtB;AAED;;GAEG;AACH,wBAAgB,UAAU,CACxB,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,WAAW,CAKb;AAED;;;;;GAKG;AACH,wBAAsB,UAAU,CAC9B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,MAAM,CAAC,CAWjB;AAED;;;;;GAKG;AACH,wBAAgB,cAAc,CAC5B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,MAAM,CAWR;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC7B,IAAI,EAAE,OAAO,EACb,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,WAAW,CAAC,CAQ5B;AAED;;;GAGG;AACH,wBAAsB,cAAc,CAClC,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,iBAAiB,EACzB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,MAAM,CAAC,CAQjB"}
@@ -51,7 +51,7 @@ export async function encodeText(data, writer, options) {
  if (writer.text && writer.encodeText) {
  return await writer.encodeText(data, options);
  }
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
+ if (writer.text) {
  const arrayBuffer = await encode(data, writer, options);
  return new TextDecoder().decode(arrayBuffer);
  }
@@ -1 +1 @@
- {"version":3,"file":"encode.js","names":["canEncodeWithWorker","concatenateArrayBuffers","resolvePath","NodeFile","processOnWorker","isBrowser","fetchFile","getLoaderOptions","encode","data","writer","options","globalOptions","encodeSync","encodeText","TextEncoder","encodeInBatches","batches","chunks","batch","push","encodeURLtoURL","tmpInputFilename","getTemporaryFilename","file","write","tmpOutputFilename","outputFilename","response","arrayBuffer","Error","text","TextDecoder","decode","name","encodeTextSync","dataIterator","getIterator","inputUrl","outputUrl","start","end","length","filename"],"sources":["../../../src/lib/api/encode.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {Writer, WriterOptions, canEncodeWithWorker} from '@loaders.gl/loader-utils';\nimport {concatenateArrayBuffers, resolvePath, NodeFile} from '@loaders.gl/loader-utils';\nimport {processOnWorker} from '@loaders.gl/worker-utils';\nimport {isBrowser} from '@loaders.gl/loader-utils';\nimport {fetchFile} from '../fetch/fetch-file';\nimport {getLoaderOptions} from './loader-options';\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport async function encode(\n data: unknown,\n writer: Writer,\n options?: WriterOptions\n): Promise<ArrayBuffer> {\n const globalOptions = getLoaderOptions() as WriterOptions;\n // const globalOptions: WriterOptions = {}; // getWriterOptions();\n options = {...globalOptions, ...options};\n if (canEncodeWithWorker(writer, options)) {\n return await processOnWorker(writer, data, options);\n }\n\n // TODO Merge default writer options with options argument like it is done in load module.\n if (writer.encode) {\n return await writer.encode(data, options);\n }\n\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n\n if (writer.encodeText) {\n return new TextEncoder().encode(await writer.encodeText(data, options));\n }\n\n if (writer.encodeInBatches) {\n // Create an iterator representing the data\n // TODO - Assumes this is a table\n const batches = encodeInBatches(data, writer, options);\n\n // Concatenate the output\n const chunks: unknown[] = [];\n for await (const batch of batches) {\n chunks.push(batch);\n }\n // @ts-ignore\n return concatenateArrayBuffers(...chunks);\n }\n\n if (!isBrowser && writer.encodeURLtoURL) {\n // TODO - how to generate filenames with correct extensions?\n const tmpInputFilename = getTemporaryFilename('input');\n const file = new NodeFile(tmpInputFilename, 'w');\n await file.write(data as ArrayBuffer);\n\n const tmpOutputFilename = getTemporaryFilename('output');\n\n const outputFilename = await encodeURLtoURL(\n tmpInputFilename,\n tmpOutputFilename,\n writer,\n options\n );\n\n const response = await fetchFile(outputFilename);\n return response.arrayBuffer();\n }\n\n throw new Error('Writer could not encode data');\n}\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport function encodeSync(data: unknown, writer: Writer, options?: WriterOptions): ArrayBuffer {\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n throw new Error('Writer could not synchronously encode data');\n}\n\n/**\n * Encode loaded data to text using the specified Writer\n * @note This is a convenience function not intended for production use on large input data.\n * It is not optimized for performance. 
Data maybe converted from text to binary and back.\n * @throws if the writer does not generate text output\n */\nexport async function encodeText(\n data: unknown,\n writer: Writer,\n options?: WriterOptions\n): Promise<string> {\n if (writer.text && writer.encodeText) {\n return await writer.encodeText(data, options);\n }\n\n if (writer.text && (writer.encode || writer.encodeInBatches)) {\n const arrayBuffer = await encode(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\n/**\n * Encode loaded data to text using the specified Writer\n * @note This is a convenience function not intended for production use on large input data.\n * It is not optimized for performance. Data maybe converted from text to binary and back.\n * @throws if the writer does not generate text output\n */\nexport function encodeTextSync(data: unknown, writer: Writer, options?: WriterOptions): string {\n if (writer.text && writer.encodeTextSync) {\n return writer.encodeTextSync(data, options);\n }\n\n if (writer.text && writer.encodeSync) {\n const arrayBuffer = encodeSync(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\n/**\n * Encode loaded data into a sequence (iterator) of binary ArrayBuffers using the specified Writer.\n */\nexport function encodeInBatches(\n data: unknown,\n writer: Writer,\n options?: WriterOptions\n): AsyncIterable<ArrayBuffer> {\n if (writer.encodeInBatches) {\n const dataIterator = getIterator(data);\n // @ts-expect-error\n return writer.encodeInBatches(dataIterator, options);\n }\n // TODO -fall back to atomic encode?\n throw new Error('Writer could not encode data in batches');\n}\n\n/**\n * Encode data stored in a file (on disk) to another file.\n * @note Node.js only. 
This function enables using command-line converters as \"writers\".\n */\nexport async function encodeURLtoURL(\n inputUrl: string,\n outputUrl: string,\n writer: Writer,\n options?: WriterOptions\n): Promise<string> {\n inputUrl = resolvePath(inputUrl);\n outputUrl = resolvePath(outputUrl);\n if (isBrowser || !writer.encodeURLtoURL) {\n throw new Error();\n }\n const outputFilename = await writer.encodeURLtoURL(inputUrl, outputUrl, options);\n return outputFilename;\n}\n\n/**\n * @todo TODO - this is an unacceptable hack!!!\n */\nfunction getIterator(data: any): Iterable<{table: any; start: number; end: number}> {\n const dataIterator = [{...data, start: 0, end: data.length}];\n return dataIterator;\n}\n\n/**\n * @todo Move to utils\n */\nfunction getTemporaryFilename(filename: string): string {\n return `/tmp/${filename}`;\n}\n"],"mappings":"AAGA,SAA+BA,mBAAmB,QAAO,0BAA0B;AACnF,SAAQC,uBAAuB,EAAEC,WAAW,EAAEC,QAAQ,QAAO,0BAA0B;AACvF,SAAQC,eAAe,QAAO,0BAA0B;AACxD,SAAQC,SAAS,QAAO,0BAA0B;AAAC,SAC3CC,SAAS;AAAA,SACTC,gBAAgB;AAKxB,OAAO,eAAeC,MAAMA,CAC1BC,IAAa,EACbC,MAAc,EACdC,OAAuB,EACD;EACtB,MAAMC,aAAa,GAAGL,gBAAgB,CAAC,CAAkB;EAEzDI,OAAO,GAAG;IAAC,GAAGC,aAAa;IAAE,GAAGD;EAAO,CAAC;EACxC,IAAIX,mBAAmB,CAACU,MAAM,EAAEC,OAAO,CAAC,EAAE;IACxC,OAAO,MAAMP,eAAe,CAACM,MAAM,EAAED,IAAI,EAAEE,OAAO,CAAC;EACrD;EAGA,IAAID,MAAM,CAACF,MAAM,EAAE;IACjB,OAAO,MAAME,MAAM,CAACF,MAAM,CAACC,IAAI,EAAEE,OAAO,CAAC;EAC3C;EAEA,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,OAAOH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EACzC;EAEA,IAAID,MAAM,CAACI,UAAU,EAAE;IACrB,OAAO,IAAIC,WAAW,CAAC,CAAC,CAACP,MAAM,CAAC,MAAME,MAAM,CAACI,UAAU,CAACL,IAAI,EAAEE,OAAO,CAAC,CAAC;EACzE;EAEA,IAAID,MAAM,CAACM,eAAe,EAAE;IAG1B,MAAMC,OAAO,GAAGD,eAAe,CAACP,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAGtD,MAAMO,MAAiB,GAAG,EAAE;IAC5B,WAAW,MAAMC,KAAK,IAAIF,OAAO,EAAE;MACjCC,MAAM,CAACE,IAAI,CAACD,KAAK,CAAC;IACpB;IAEA,OAAOlB,uBAAuB,CAAC,GAAGiB,MAAM,CAAC;EAC3C;EAEA,IAAI,CAACb,SAAS,IAAIK,MAAM,CAACW,cAAc,EAAE;IAEvC,MAAMC,gBAAgB,GAAGC,oBAAoB,CAAC,OAAO,CAAC;IACtD,MAAMC,IAAI,GAAG,IAAIrB,QAAQ,CAACmB,gBAAgB,EAAE,GAAG,CAAC;IAChD,MAAME,IAAI,CAACC,KAAK,CAAChB,IAAmB,CAAC;IAErC,MAAMiB,iBAAiB,GAAGH,oBAAoB,CAAC,QAAQ,CAAC;IAExD,MAAMI,cAAc,GAAG,MAAMN,cAAc,CACzCC,gBAAgB,EAChBI,iBAAiB,EACjBhB,MAAM,EACNC,OACF,CAAC;IAED,MAAMiB,QAAQ,GAAG,MAAMtB,SAAS,CAACqB,cAAc,CAAC;IAChD,OAAOC,QAAQ,CAACC,WAAW,CAAC,CAAC;EAC/B;EAEA,MAAM,IAAIC,KAAK,CAAC,8BAA8B,CAAC;AACjD;AAKA,OAAO,SAASjB,UAAUA,CAACJ,IAAa,EAAEC,MAAc,EAAEC,OAAuB,EAAe;EAC9F,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,OAAOH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EACzC;EACA,MAAM,IAAImB,KAAK,CAAC,4CAA4C,CAAC;AAC/D;AAQA,OAAO,eAAehB,UAAUA,CAC9BL,IAAa,EACbC,MAAc,EACdC,OAAuB,EACN;EACjB,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACI,UAAU,EAAE;IACpC,OAAO,MAAMJ,MAAM,CAACI,UAAU,CAACL,IAAI,EAAEE,OAAO,CAAC;EAC/C;EAEA,IAAID,MAAM,CAACqB,IAAI,KAAKrB,MAAM,CAACF,MAAM,IAAIE,MAAM,CAACM,eAAe,CAAC,EAAE;IAC5D,MAAMa,WAAW,GAAG,MAAMrB,MAAM,CAACC,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IACvD,OAAO,IAAIqB,WAAW,CAAC,CAAC,CAACC,MAAM,CAACJ,WAAW,CAAC;EAC9C;EAEA,MAAM,IAAIC,KAAK,CAAE,UAASpB,MAAM,CAACwB,IAAK,gCAA+B,CAAC;AACxE;AAQA,OAAO,SAASC,cAAcA,CAAC1B,IAAa,EAAEC,MAAc,EAAEC,OAAuB,EAAU;EAC7F,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACyB,cAAc,EAAE;IACxC,OAAOzB,MAAM,CAACyB,cAAc,CAAC1B,IAAI,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACG,UAAU,EAAE;IACpC,MAAMgB,WAAW,GAAGhB,UAAU,CAACJ,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IACrD,OAAO,IAAIqB,WAAW,CAAC,CAAC,CAACC,MAAM,CAACJ,WAAW,CAAC;EAC9C;EAEA,MAAM,IAAIC,KAAK,CAAE,UAASpB,MAAM,CAACwB,IAAK,gCAA+B,CAAC;AACxE;AAKA,OAAO,SAASlB,eAAeA,CAC7BP,IAAa,EACbC,MAAc,EACdC,OAAuB,EAC
K;EAC5B,IAAID,MAAM,CAACM,eAAe,EAAE;IAC1B,MAAMoB,YAAY,GAAGC,WAAW,CAAC5B,IAAI,CAAC;IAEtC,OAAOC,MAAM,CAACM,eAAe,CAACoB,YAAY,EAAEzB,OAAO,CAAC;EACtD;EAEA,MAAM,IAAImB,KAAK,CAAC,yCAAyC,CAAC;AAC5D;AAMA,OAAO,eAAeT,cAAcA,CAClCiB,QAAgB,EAChBC,SAAiB,EACjB7B,MAAc,EACdC,OAAuB,EACN;EACjB2B,QAAQ,GAAGpC,WAAW,CAACoC,QAAQ,CAAC;EAChCC,SAAS,GAAGrC,WAAW,CAACqC,SAAS,CAAC;EAClC,IAAIlC,SAAS,IAAI,CAACK,MAAM,CAACW,cAAc,EAAE;IACvC,MAAM,IAAIS,KAAK,CAAC,CAAC;EACnB;EACA,MAAMH,cAAc,GAAG,MAAMjB,MAAM,CAACW,cAAc,CAACiB,QAAQ,EAAEC,SAAS,EAAE5B,OAAO,CAAC;EAChF,OAAOgB,cAAc;AACvB;AAKA,SAASU,WAAWA,CAAC5B,IAAS,EAAsD;EAClF,MAAM2B,YAAY,GAAG,CAAC;IAAC,GAAG3B,IAAI;IAAE+B,KAAK,EAAE,CAAC;IAAEC,GAAG,EAAEhC,IAAI,CAACiC;EAAM,CAAC,CAAC;EAC5D,OAAON,YAAY;AACrB;AAKA,SAASb,oBAAoBA,CAACoB,QAAgB,EAAU;EACtD,OAAQ,QAAOA,QAAS,EAAC;AAC3B"}
+ {"version":3,"file":"encode.js","names":["canEncodeWithWorker","concatenateArrayBuffers","resolvePath","NodeFile","processOnWorker","isBrowser","fetchFile","getLoaderOptions","encode","data","writer","options","globalOptions","encodeSync","encodeText","TextEncoder","encodeInBatches","batches","chunks","batch","push","encodeURLtoURL","tmpInputFilename","getTemporaryFilename","file","write","tmpOutputFilename","outputFilename","response","arrayBuffer","Error","text","TextDecoder","decode","name","encodeTextSync","dataIterator","getIterator","inputUrl","outputUrl","start","end","length","filename"],"sources":["../../../src/lib/api/encode.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {WriterOptions, WriterWithEncoder, canEncodeWithWorker} from '@loaders.gl/loader-utils';\nimport {concatenateArrayBuffers, resolvePath, NodeFile} from '@loaders.gl/loader-utils';\nimport {processOnWorker} from '@loaders.gl/worker-utils';\nimport {isBrowser} from '@loaders.gl/loader-utils';\nimport {fetchFile} from '../fetch/fetch-file';\nimport {getLoaderOptions} from './loader-options';\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport async function encode(\n data: unknown,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): Promise<ArrayBuffer> {\n const globalOptions = getLoaderOptions() as WriterOptions;\n // const globalOptions: WriterOptions = {}; // getWriterOptions();\n options = {...globalOptions, ...options};\n if (canEncodeWithWorker(writer, options)) {\n return await processOnWorker(writer, data, options);\n }\n\n // TODO Merge default writer options with options argument like it is done in load module.\n if (writer.encode) {\n return await writer.encode(data, options);\n }\n\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n\n if (writer.encodeText) {\n return new TextEncoder().encode(await writer.encodeText(data, options));\n }\n\n if (writer.encodeInBatches) {\n // Create an iterator representing the data\n // TODO - Assumes this is a table\n const batches = encodeInBatches(data, writer, options);\n\n // Concatenate the output\n const chunks: unknown[] = [];\n for await (const batch of batches) {\n chunks.push(batch);\n }\n // @ts-ignore\n return concatenateArrayBuffers(...chunks);\n }\n\n if (!isBrowser && writer.encodeURLtoURL) {\n // TODO - how to generate filenames with correct extensions?\n const tmpInputFilename = getTemporaryFilename('input');\n const file = new NodeFile(tmpInputFilename, 'w');\n await file.write(data as ArrayBuffer);\n\n const tmpOutputFilename = getTemporaryFilename('output');\n\n const outputFilename = await encodeURLtoURL(\n tmpInputFilename,\n tmpOutputFilename,\n writer,\n options\n );\n\n const response = await fetchFile(outputFilename);\n return response.arrayBuffer();\n }\n\n throw new Error('Writer could not encode data');\n}\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport function encodeSync(\n data: unknown,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): ArrayBuffer {\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n throw new Error('Writer could not synchronously encode data');\n}\n\n/**\n * Encode loaded data to text using the specified Writer\n * @note This is a convenience function not intended for production use on large input data.\n * It is not optimized for performance. 
Data maybe converted from text to binary and back.\n * @throws if the writer does not generate text output\n */\nexport async function encodeText(\n data: unknown,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): Promise<string> {\n if (writer.text && writer.encodeText) {\n return await writer.encodeText(data, options);\n }\n\n if (writer.text) {\n const arrayBuffer = await encode(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\n/**\n * Encode loaded data to text using the specified Writer\n * @note This is a convenience function not intended for production use on large input data.\n * It is not optimized for performance. Data maybe converted from text to binary and back.\n * @throws if the writer does not generate text output\n */\nexport function encodeTextSync(\n data: unknown,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): string {\n if (writer.text && writer.encodeTextSync) {\n return writer.encodeTextSync(data, options);\n }\n\n if (writer.text && writer.encodeSync) {\n const arrayBuffer = encodeSync(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n\n throw new Error(`Writer ${writer.name} could not encode data as text`);\n}\n\n/**\n * Encode loaded data into a sequence (iterator) of binary ArrayBuffers using the specified Writer.\n */\nexport function encodeInBatches(\n data: unknown,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): AsyncIterable<ArrayBuffer> {\n if (writer.encodeInBatches) {\n const dataIterator = getIterator(data);\n // @ts-expect-error\n return writer.encodeInBatches(dataIterator, options);\n }\n // TODO -fall back to atomic encode?\n throw new Error('Writer could not encode data in batches');\n}\n\n/**\n * Encode data stored in a file (on disk) to another file.\n * @note Node.js only. 
This function enables using command-line converters as \"writers\".\n */\nexport async function encodeURLtoURL(\n inputUrl: string,\n outputUrl: string,\n writer: WriterWithEncoder,\n options?: WriterOptions\n): Promise<string> {\n inputUrl = resolvePath(inputUrl);\n outputUrl = resolvePath(outputUrl);\n if (isBrowser || !writer.encodeURLtoURL) {\n throw new Error();\n }\n const outputFilename = await writer.encodeURLtoURL(inputUrl, outputUrl, options);\n return outputFilename;\n}\n\n/**\n * @todo TODO - this is an unacceptable hack!!!\n */\nfunction getIterator(data: any): Iterable<{table: any; start: number; end: number}> {\n const dataIterator = [{...data, start: 0, end: data.length}];\n return dataIterator;\n}\n\n/**\n * @todo Move to utils\n */\nfunction getTemporaryFilename(filename: string): string {\n return `/tmp/${filename}`;\n}\n"],"mappings":"AAGA,SAA0CA,mBAAmB,QAAO,0BAA0B;AAC9F,SAAQC,uBAAuB,EAAEC,WAAW,EAAEC,QAAQ,QAAO,0BAA0B;AACvF,SAAQC,eAAe,QAAO,0BAA0B;AACxD,SAAQC,SAAS,QAAO,0BAA0B;AAAC,SAC3CC,SAAS;AAAA,SACTC,gBAAgB;AAKxB,OAAO,eAAeC,MAAMA,CAC1BC,IAAa,EACbC,MAAyB,EACzBC,OAAuB,EACD;EACtB,MAAMC,aAAa,GAAGL,gBAAgB,CAAC,CAAkB;EAEzDI,OAAO,GAAG;IAAC,GAAGC,aAAa;IAAE,GAAGD;EAAO,CAAC;EACxC,IAAIX,mBAAmB,CAACU,MAAM,EAAEC,OAAO,CAAC,EAAE;IACxC,OAAO,MAAMP,eAAe,CAACM,MAAM,EAAED,IAAI,EAAEE,OAAO,CAAC;EACrD;EAGA,IAAID,MAAM,CAACF,MAAM,EAAE;IACjB,OAAO,MAAME,MAAM,CAACF,MAAM,CAACC,IAAI,EAAEE,OAAO,CAAC;EAC3C;EAEA,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,OAAOH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EACzC;EAEA,IAAID,MAAM,CAACI,UAAU,EAAE;IACrB,OAAO,IAAIC,WAAW,CAAC,CAAC,CAACP,MAAM,CAAC,MAAME,MAAM,CAACI,UAAU,CAACL,IAAI,EAAEE,OAAO,CAAC,CAAC;EACzE;EAEA,IAAID,MAAM,CAACM,eAAe,EAAE;IAG1B,MAAMC,OAAO,GAAGD,eAAe,CAACP,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IAGtD,MAAMO,MAAiB,GAAG,EAAE;IAC5B,WAAW,MAAMC,KAAK,IAAIF,OAAO,EAAE;MACjCC,MAAM,CAACE,IAAI,CAACD,KAAK,CAAC;IACpB;IAEA,OAAOlB,uBAAuB,CAAC,GAAGiB,MAAM,CAAC;EAC3C;EAEA,IAAI,CAACb,SAAS,IAAIK,MAAM,CAACW,cAAc,EAAE;IAEvC,MAAMC,gBAAgB,GAAGC,oBAAoB,CAAC,OAAO,CAAC;IACtD,MAAMC,IAAI,GAAG,IAAIrB,QAAQ,CAACmB,gBAAgB,EAAE,GAAG,CAAC;IAChD,MAAME,IAAI,CAACC,KAAK,CAAChB,IAAmB,CAAC;IAErC,MAAMiB,iBAAiB,GAAGH,oBAAoB,CAAC,QAAQ,CAAC;IAExD,MAAMI,cAAc,GAAG,MAAMN,cAAc,CACzCC,gBAAgB,EAChBI,iBAAiB,EACjBhB,MAAM,EACNC,OACF,CAAC;IAED,MAAMiB,QAAQ,GAAG,MAAMtB,SAAS,CAACqB,cAAc,CAAC;IAChD,OAAOC,QAAQ,CAACC,WAAW,CAAC,CAAC;EAC/B;EAEA,MAAM,IAAIC,KAAK,CAAC,8BAA8B,CAAC;AACjD;AAKA,OAAO,SAASjB,UAAUA,CACxBJ,IAAa,EACbC,MAAyB,EACzBC,OAAuB,EACV;EACb,IAAID,MAAM,CAACG,UAAU,EAAE;IACrB,OAAOH,MAAM,CAACG,UAAU,CAACJ,IAAI,EAAEE,OAAO,CAAC;EACzC;EACA,MAAM,IAAImB,KAAK,CAAC,4CAA4C,CAAC;AAC/D;AAQA,OAAO,eAAehB,UAAUA,CAC9BL,IAAa,EACbC,MAAyB,EACzBC,OAAuB,EACN;EACjB,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACI,UAAU,EAAE;IACpC,OAAO,MAAMJ,MAAM,CAACI,UAAU,CAACL,IAAI,EAAEE,OAAO,CAAC;EAC/C;EAEA,IAAID,MAAM,CAACqB,IAAI,EAAE;IACf,MAAMF,WAAW,GAAG,MAAMrB,MAAM,CAACC,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IACvD,OAAO,IAAIqB,WAAW,CAAC,CAAC,CAACC,MAAM,CAACJ,WAAW,CAAC;EAC9C;EAEA,MAAM,IAAIC,KAAK,CAAE,UAASpB,MAAM,CAACwB,IAAK,gCAA+B,CAAC;AACxE;AAQA,OAAO,SAASC,cAAcA,CAC5B1B,IAAa,EACbC,MAAyB,EACzBC,OAAuB,EACf;EACR,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACyB,cAAc,EAAE;IACxC,OAAOzB,MAAM,CAACyB,cAAc,CAAC1B,IAAI,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAID,MAAM,CAACqB,IAAI,IAAIrB,MAAM,CAACG,UAAU,EAAE;IACpC,MAAMgB,WAAW,GAAGhB,UAAU,CAACJ,IAAI,EAAEC,MAAM,EAAEC,OAAO,CAAC;IACrD,OAAO,IAAIqB,WAAW,CAAC,CAAC,CAACC,MAAM,CAACJ,WAAW,CAAC;EAC9C;EAEA,MAAM,IAAIC,KAAK,CAAE,UAASpB,MAAM,CAACwB,IAAK,gCAA+B,CAAC;AACxE;AAKA,OAAO,SAASlB,eAAeA,CAC7BP,IAAa,EACbC,MAAyB,EACzBC,OAAuB,EACK;EAC5B,IAAID,MAAM,CAACM,eAAe,
EAAE;IAC1B,MAAMoB,YAAY,GAAGC,WAAW,CAAC5B,IAAI,CAAC;IAEtC,OAAOC,MAAM,CAACM,eAAe,CAACoB,YAAY,EAAEzB,OAAO,CAAC;EACtD;EAEA,MAAM,IAAImB,KAAK,CAAC,yCAAyC,CAAC;AAC5D;AAMA,OAAO,eAAeT,cAAcA,CAClCiB,QAAgB,EAChBC,SAAiB,EACjB7B,MAAyB,EACzBC,OAAuB,EACN;EACjB2B,QAAQ,GAAGpC,WAAW,CAACoC,QAAQ,CAAC;EAChCC,SAAS,GAAGrC,WAAW,CAACqC,SAAS,CAAC;EAClC,IAAIlC,SAAS,IAAI,CAACK,MAAM,CAACW,cAAc,EAAE;IACvC,MAAM,IAAIS,KAAK,CAAC,CAAC;EACnB;EACA,MAAMH,cAAc,GAAG,MAAMjB,MAAM,CAACW,cAAc,CAACiB,QAAQ,EAAEC,SAAS,EAAE5B,OAAO,CAAC;EAChF,OAAOgB,cAAc;AACvB;AAKA,SAASU,WAAWA,CAAC5B,IAAS,EAAsD;EAClF,MAAM2B,YAAY,GAAG,CAAC;IAAC,GAAG3B,IAAI;IAAE+B,KAAK,EAAE,CAAC;IAAEC,GAAG,EAAEhC,IAAI,CAACiC;EAAM,CAAC,CAAC;EAC5D,OAAON,YAAY;AACrB;AAKA,SAASb,oBAAoBA,CAACoB,QAAgB,EAAU;EACtD,OAAQ,QAAOA,QAAS,EAAC;AAC3B"}
@@ -1,3 +1,4 @@
+ import { isTable, makeBatchFromTable } from '@loaders.gl/schema';
  import { concatenateArrayBuffersAsync } from '@loaders.gl/loader-utils';
  import { isLoaderObject } from "../loader-utils/normalize-loader.js";
  import { normalizeOptions } from "../loader-utils/option-utils.js";
@@ -55,22 +56,26 @@ async function parseToOutputIterator(loader, data, options, context) {
  if (loader.parseInBatches) {
  return loader.parseInBatches(transformedIterator, options, context);
  }
- async function* parseChunkInBatches() {
- const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
- const parsedData = await parse(arrayBuffer, loader, {
- ...options,
- mimeType: loader.mimeTypes[0]
- }, context);
- const batch = {
- mimeType: loader.mimeTypes[0],
- shape: Array.isArray(parsedData) ? 'row-table' : 'unknown',
- batchType: 'data',
- data: parsedData,
- length: Array.isArray(parsedData) ? parsedData.length : 1
- };
- yield batch;
- }
- return parseChunkInBatches();
+ return parseChunkInBatches(transformedIterator, loader, options, context);
+ }
+ async function* parseChunkInBatches(transformedIterator, loader, options, context) {
+ const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
+ const parsedData = await parse(arrayBuffer, loader, {
+ ...options,
+ mimeType: loader.mimeTypes[0]
+ }, context);
+ const batch = convertDataToBatch(parsedData, loader);
+ yield batch;
+ }
+ function convertDataToBatch(parsedData, loader) {
+ const batch = isTable(parsedData) ? makeBatchFromTable(parsedData) : {
+ shape: 'unknown',
+ batchType: 'data',
+ data: parsedData,
+ length: Array.isArray(parsedData) ? parsedData.length : 1
+ };
+ batch.mimeType = loader.mimeTypes[0];
+ return batch;
  }
  async function applyInputTransforms(inputIterator) {
  let transforms = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
@@ -1 +1 @@
- {"version":3,"file":"parse-in-batches.js","names":["concatenateArrayBuffersAsync","isLoaderObject","normalizeOptions","getLoaderContext","getAsyncIterableFromData","getResourceUrl","selectLoader","parse","parseInBatches","data","loaders","options","context","loaderArray","Array","isArray","undefined","url","loader","_parseInBatches","_parse","parseWithLoaderInBatches","outputIterator","parseToOutputIterator","metadata","metadataBatch","batchType","_loader","_context","bytesUsed","makeMetadataBatchIterator","iterator","inputIterator","transformedIterator","applyInputTransforms","transforms","parseChunkInBatches","arrayBuffer","parsedData","mimeType","mimeTypes","batch","shape","length","arguments","iteratorChain","transformBatches"],"sources":["../../../src/lib/api/parse-in-batches.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport type {Batch} from '@loaders.gl/schema';\nimport type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {LoaderContext, BatchableDataType} from '@loaders.gl/loader-utils';\nimport type {LoaderBatchType, LoaderOptionsType} from '@loaders.gl/loader-utils';\nimport {concatenateArrayBuffersAsync} from '@loaders.gl/loader-utils';\nimport {isLoaderObject} from '../loader-utils/normalize-loader';\nimport {normalizeOptions} from '../loader-utils/option-utils';\nimport {getLoaderContext} from '../loader-utils/loader-context';\nimport {getAsyncIterableFromData} from '../loader-utils/get-data';\nimport {getResourceUrl} from '../utils/resource-utils';\nimport {selectLoader} from './select-loader';\n\n// Ensure `parse` is available in context if loader falls back to `parse`\nimport {parse} from './parse';\n\n/**\n * Parses `data` synchronously using a specified loader\n */\nexport async function parseInBatches<\n LoaderT extends Loader,\n OptionsT extends LoaderOptions = LoaderOptionsType<LoaderT>\n>(\n data: BatchableDataType,\n loader: LoaderT,\n options?: OptionsT,\n context?: LoaderContext\n): Promise<AsyncIterable<LoaderBatchType<LoaderT>>>;\n\n/**\n * Parses `data` using one of the supplied loaders\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n loaders: Loader[],\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<unknown>>;\n\n/**\n * Parses `data` in batches by selecting a pre-registered loader\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n options?: LoaderOptions\n): Promise<AsyncIterable<unknown>>;\n\n/**\n * Parses `data` using a specified loader\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n loaders?: Loader | Loader[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<unknown> | Iterable<unknown>> {\n const loaderArray = Array.isArray(loaders) ? 
loaders : undefined;\n\n // Signature: parseInBatches(data, options, url) - Uses registered loaders\n if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders as LoaderOptions;\n loaders = undefined;\n }\n\n data = await data; // Resolve any promise\n options = options || {};\n\n // Extract a url for auto detection\n const url = getResourceUrl(data);\n\n // Chooses a loader and normalizes it\n // Note - only uses URL and contentType for streams and iterator inputs\n const loader = await selectLoader(data as ArrayBuffer, loaders as Loader | Loader[], options);\n // Note: if options.nothrow was set, it is possible that no loader was found, if so just return null\n if (!loader) {\n return [];\n }\n\n // Normalize options\n options = normalizeOptions(options, loader, loaderArray, url);\n context = getLoaderContext(\n {url, _parseInBatches: parseInBatches, _parse: parse, loaders: loaderArray},\n options,\n context || null\n );\n\n return await parseWithLoaderInBatches(loader as LoaderWithParser, data, options, context);\n}\n\n/**\n * Loader has been selected and context has been prepared, see if we need to emit a metadata batch\n */\nasync function parseWithLoaderInBatches(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<unknown>> {\n const outputIterator = await parseToOutputIterator(loader, data, options, context);\n\n // Generate metadata batch if requested\n if (!options.metadata) {\n return outputIterator;\n }\n\n const metadataBatch = {\n batchType: 'metadata',\n metadata: {\n _loader: loader,\n _context: context\n },\n // Populate with some default fields to avoid crashing\n data: [],\n bytesUsed: 0\n };\n\n async function* makeMetadataBatchIterator(\n iterator: Iterable<unknown> | AsyncIterable<unknown>\n ): AsyncIterable<unknown> {\n yield metadataBatch;\n yield* iterator;\n }\n\n return makeMetadataBatchIterator(outputIterator);\n}\n\n/**\n * Prep work is done, now it is time to start parsing into an output operator\n * The approach depends on which parse function the loader exposes\n * `parseInBatches` (preferred), `parse` (fallback)\n */\nasync function parseToOutputIterator(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<unknown>> {\n // Get an iterator from the input\n const inputIterator = await getAsyncIterableFromData(data, options);\n\n // Apply any iterator transforms (options.transforms)\n const transformedIterator = await applyInputTransforms(inputIterator, options?.transforms || []);\n\n // If loader supports parseInBatches, we are done\n if (loader.parseInBatches) {\n return loader.parseInBatches(transformedIterator, options, context);\n }\n\n // Fallback: load atomically using `parse` concatenating input iterator into single chunk\n async function* parseChunkInBatches() {\n const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);\n // Call `parse` instead of `loader.parse` to ensure we can call workers etc.\n const parsedData = await parse(\n arrayBuffer,\n loader,\n // TODO - Hack: supply loaders MIME type to ensure we match it\n {...options, mimeType: loader.mimeTypes[0]},\n context\n );\n // yield a single batch, the output from loader.parse()\n // TODO - run through batch builder to apply options etc...\n const batch: Batch = {\n mimeType: loader.mimeTypes[0],\n shape: Array.isArray(parsedData) 
? 'row-table' : 'unknown',\n batchType: 'data',\n data: parsedData,\n length: Array.isArray(parsedData) ? parsedData.length : 1\n };\n yield batch;\n }\n\n return parseChunkInBatches();\n}\n\ntype TransformBatches = (\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n) => AsyncIterable<ArrayBuffer>;\n\n/**\n * Create an iterator chain with any transform iterators (crypto, decompression)\n * @param inputIterator\n * @param options\n */\nasync function applyInputTransforms(\n inputIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n transforms: TransformBatches[] = []\n): Promise<AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>> {\n let iteratorChain = inputIterator;\n for await (const transformBatches of transforms) {\n iteratorChain = transformBatches(iteratorChain);\n }\n return iteratorChain;\n}\n"],"mappings":"AAOA,SAAQA,4BAA4B,QAAO,0BAA0B;AAAC,SAC9DC,cAAc;AAAA,SACdC,gBAAgB;AAAA,SAChBC,gBAAgB;AAAA,SAChBC,wBAAwB;AAAA,SACxBC,cAAc;AAAA,SACdC,YAAY;AAAA,SAGZC,KAAK;AAwCb,OAAO,eAAeC,cAAcA,CAClCC,IAAuB,EACvBC,OAA2C,EAC3CC,OAAuB,EACvBC,OAAuB,EAC8B;EACrD,MAAMC,WAAW,GAAGC,KAAK,CAACC,OAAO,CAACL,OAAO,CAAC,GAAGA,OAAO,GAAGM,SAAS;EAGhE,IAAI,CAACF,KAAK,CAACC,OAAO,CAACL,OAAO,CAAC,IAAI,CAACT,cAAc,CAACS,OAAO,CAAC,EAAE;IACvDE,OAAO,GAAGI,SAAS;IACnBL,OAAO,GAAGD,OAAwB;IAClCA,OAAO,GAAGM,SAAS;EACrB;EAEAP,IAAI,GAAG,MAAMA,IAAI;EACjBE,OAAO,GAAGA,OAAO,IAAI,CAAC,CAAC;EAGvB,MAAMM,GAAG,GAAGZ,cAAc,CAACI,IAAI,CAAC;EAIhC,MAAMS,MAAM,GAAG,MAAMZ,YAAY,CAACG,IAAI,EAAiBC,OAAO,EAAuBC,OAAO,CAAC;EAE7F,IAAI,CAACO,MAAM,EAAE;IACX,OAAO,EAAE;EACX;EAGAP,OAAO,GAAGT,gBAAgB,CAACS,OAAO,EAAEO,MAAM,EAAEL,WAAW,EAAEI,GAAG,CAAC;EAC7DL,OAAO,GAAGT,gBAAgB,CACxB;IAACc,GAAG;IAAEE,eAAe,EAAEX,cAAc;IAAEY,MAAM,EAAEb,KAAK;IAAEG,OAAO,EAAEG;EAAW,CAAC,EAC3EF,OAAO,EACPC,OAAO,IAAI,IACb,CAAC;EAED,OAAO,MAAMS,wBAAwB,CAACH,MAAM,EAAsBT,IAAI,EAAEE,OAAO,EAAEC,OAAO,CAAC;AAC3F;AAKA,eAAeS,wBAAwBA,CACrCH,MAAwB,EACxBT,IAAuB,EACvBE,OAAsB,EACtBC,OAAsB,EACW;EACjC,MAAMU,cAAc,GAAG,MAAMC,qBAAqB,CAACL,MAAM,EAAET,IAAI,EAAEE,OAAO,EAAEC,OAAO,CAAC;EAGlF,IAAI,CAACD,OAAO,CAACa,QAAQ,EAAE;IACrB,OAAOF,cAAc;EACvB;EAEA,MAAMG,aAAa,GAAG;IACpBC,SAAS,EAAE,UAAU;IACrBF,QAAQ,EAAE;MACRG,OAAO,EAAET,MAAM;MACfU,QAAQ,EAAEhB;IACZ,CAAC;IAEDH,IAAI,EAAE,EAAE;IACRoB,SAAS,EAAE;EACb,CAAC;EAED,gBAAgBC,yBAAyBA,CACvCC,QAAoD,EAC5B;IACxB,MAAMN,aAAa;IACnB,OAAOM,QAAQ;EACjB;EAEA,OAAOD,yBAAyB,CAACR,cAAc,CAAC;AAClD;AAOA,eAAeC,qBAAqBA,CAClCL,MAAwB,EACxBT,IAAuB,EACvBE,OAAsB,EACtBC,OAAsB,EACW;EAEjC,MAAMoB,aAAa,GAAG,MAAM5B,wBAAwB,CAACK,IAAI,EAAEE,OAAO,CAAC;EAGnE,MAAMsB,mBAAmB,GAAG,MAAMC,oBAAoB,CAACF,aAAa,EAAE,CAAArB,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEwB,UAAU,KAAI,EAAE,CAAC;EAGhG,IAAIjB,MAAM,CAACV,cAAc,EAAE;IACzB,OAAOU,MAAM,CAACV,cAAc,CAACyB,mBAAmB,EAAEtB,OAAO,EAAEC,OAAO,CAAC;EACrE;EAGA,gBAAgBwB,mBAAmBA,CAAA,EAAG;IACpC,MAAMC,WAAW,GAAG,MAAMrC,4BAA4B,CAACiC,mBAAmB,CAAC;IAE3E,MAAMK,UAAU,GAAG,MAAM/B,KAAK,CAC5B8B,WAAW,EACXnB,MAAM,EAEN;MAAC,GAAGP,OAAO;MAAE4B,QAAQ,EAAErB,MAAM,CAACsB,SAAS,CAAC,CAAC;IAAC,CAAC,EAC3C5B,OACF,CAAC;IAGD,MAAM6B,KAAY,GAAG;MACnBF,QAAQ,EAAErB,MAAM,CAACsB,SAAS,CAAC,CAAC,CAAC;MAC7BE,KAAK,EAAE5B,KAAK,CAACC,OAAO,CAACuB,UAAU,CAAC,GAAG,WAAW,GAAG,SAAS;MAC1DZ,SAAS,EAAE,MAAM;MACjBjB,IAAI,EAAE6B,UAAU;MAChBK,MAAM,EAAE7B,KAAK,CAACC,OAAO,CAACuB,UAAU,CAAC,GAAGA,UAAU,CAACK,MAAM,GAAG;IAC1D,CAAC;IACD,MAAMF,KAAK;EACb;EAEA,OAAOL,mBAAmB,CAAC,CAAC;AAC9B;AAWA,eAAeF,oBAAoBA,CACjCF,aAAiE,EAEJ;EAAA,IAD7DG,UAA8B,GAAAS,SAAA,CAAAD,MAAA,QAAAC,SAAA,QAAA5B,SAAA,GAAA4B,SAAA,MAAG,EAAE;EAEnC,IAAIC,aAAa,GAAGb,aAAa;EACjC,WAAW,MAAMc,gBAAgB,IAAIX,UAAU,EAAE;IAC/CU,aAAa,GAAGC,gBAAgB,CAACD,aAAa,CAAC;EACjD;EACA,OAAOA,aAA
a;AACtB"}
1
+ {"version":3,"file":"parse-in-batches.js","names":["isTable","makeBatchFromTable","concatenateArrayBuffersAsync","isLoaderObject","normalizeOptions","getLoaderContext","getAsyncIterableFromData","getResourceUrl","selectLoader","parse","parseInBatches","data","loaders","options","context","loaderArray","Array","isArray","undefined","url","loader","_parseInBatches","_parse","parseWithLoaderInBatches","outputIterator","parseToOutputIterator","metadata","metadataBatch","batchType","_loader","_context","bytesUsed","makeMetadataBatchIterator","iterator","inputIterator","transformedIterator","applyInputTransforms","transforms","parseChunkInBatches","arrayBuffer","parsedData","mimeType","mimeTypes","batch","convertDataToBatch","shape","length","arguments","iteratorChain","transformBatches"],"sources":["../../../src/lib/api/parse-in-batches.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {isTable, makeBatchFromTable, type Batch} from '@loaders.gl/schema';\nimport type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {LoaderContext, BatchableDataType} from '@loaders.gl/loader-utils';\nimport type {LoaderBatchType, LoaderOptionsType} from '@loaders.gl/loader-utils';\nimport {concatenateArrayBuffersAsync} from '@loaders.gl/loader-utils';\nimport {isLoaderObject} from '../loader-utils/normalize-loader';\nimport {normalizeOptions} from '../loader-utils/option-utils';\nimport {getLoaderContext} from '../loader-utils/loader-context';\nimport {getAsyncIterableFromData} from '../loader-utils/get-data';\nimport {getResourceUrl} from '../utils/resource-utils';\nimport {selectLoader} from './select-loader';\n\n// Ensure `parse` is available in context if loader falls back to `parse`\nimport {parse} from './parse';\n\n/**\n * Parses `data` synchronously using a specified loader\n */\nexport async function parseInBatches<\n LoaderT extends Loader,\n OptionsT extends LoaderOptions = LoaderOptionsType<LoaderT>\n>(\n data: BatchableDataType,\n loader: LoaderT,\n options?: OptionsT,\n context?: LoaderContext\n): Promise<AsyncIterable<LoaderBatchType<LoaderT>>>;\n\n/**\n * Parses `data` using one of the supplied loaders\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n loaders: Loader[],\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<unknown>>;\n\n/**\n * Parses `data` in batches by selecting a pre-registered loader\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n options?: LoaderOptions\n): Promise<AsyncIterable<unknown>>;\n\n/**\n * Parses `data` using a specified loader\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n loaders?: Loader | Loader[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<unknown> | Iterable<unknown>> {\n const loaderArray = Array.isArray(loaders) ? 
loaders : undefined;\n\n // Signature: parseInBatches(data, options, url) - Uses registered loaders\n if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders as LoaderOptions;\n loaders = undefined;\n }\n\n data = await data; // Resolve any promise\n options = options || {};\n\n // Extract a url for auto detection\n const url = getResourceUrl(data);\n\n // Chooses a loader and normalizes it\n // Note - only uses URL and contentType for streams and iterator inputs\n const loader = await selectLoader(data as ArrayBuffer, loaders as Loader | Loader[], options);\n // Note: if options.nothrow was set, it is possible that no loader was found, if so just return null\n if (!loader) {\n return [];\n }\n\n // Normalize options\n options = normalizeOptions(options, loader, loaderArray, url);\n context = getLoaderContext(\n {url, _parseInBatches: parseInBatches, _parse: parse, loaders: loaderArray},\n options,\n context || null\n );\n\n return await parseWithLoaderInBatches(loader as LoaderWithParser, data, options, context);\n}\n\n/**\n * Loader has been selected and context has been prepared, see if we need to emit a metadata batch\n */\nasync function parseWithLoaderInBatches(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<unknown>> {\n const outputIterator = await parseToOutputIterator(loader, data, options, context);\n\n // Generate metadata batch if requested\n if (!options.metadata) {\n return outputIterator;\n }\n\n const metadataBatch = {\n batchType: 'metadata',\n metadata: {\n _loader: loader,\n _context: context\n },\n // Populate with some default fields to avoid crashing\n data: [],\n bytesUsed: 0\n };\n\n async function* makeMetadataBatchIterator(\n iterator: Iterable<unknown> | AsyncIterable<unknown>\n ): AsyncIterable<unknown> {\n yield metadataBatch;\n yield* iterator;\n }\n\n return makeMetadataBatchIterator(outputIterator);\n}\n\n/**\n * Prep work is done, now it is time to start parsing into an output operator\n * The approach depends on which parse function the loader exposes\n * `parseInBatches` (preferred), `parse` (fallback)\n */\nasync function parseToOutputIterator(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<unknown>> {\n // Get an iterator from the input\n const inputIterator = await getAsyncIterableFromData(data, options);\n\n // Apply any iterator transforms (options.transforms)\n const transformedIterator = await applyInputTransforms(inputIterator, options?.transforms || []);\n\n // If loader supports parseInBatches, we are done\n if (loader.parseInBatches) {\n return loader.parseInBatches(transformedIterator, options, context);\n }\n\n return parseChunkInBatches(transformedIterator, loader, options, context);\n}\n\n// Fallback: load atomically using `parse` concatenating input iterator into single chunk\nasync function* parseChunkInBatches(\n transformedIterator: Iterable<ArrayBuffer> | AsyncIterable<ArrayBuffer>,\n loader: Loader,\n options: LoaderOptions,\n context: LoaderContext\n): AsyncIterable<Batch> {\n const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);\n // Call `parse` instead of `loader.parse` to ensure we can call workers etc.\n const parsedData = await parse(\n arrayBuffer,\n loader,\n // TODO - Hack: supply loaders MIME type to ensure we match it\n {...options, mimeType: 
loader.mimeTypes[0]},\n context\n );\n\n // yield a single batch, the output from loader.parse() repackaged as a batch\n const batch = convertDataToBatch(parsedData, loader);\n\n yield batch;\n}\n\n/**\n * Convert parsed data into a single batch\n * @todo run through batch builder to apply options etc...\n */\nfunction convertDataToBatch(parsedData: unknown, loader: Loader): Batch {\n const batch: Batch = isTable(parsedData)\n ? makeBatchFromTable(parsedData)\n : {\n shape: 'unknown',\n batchType: 'data',\n data: parsedData,\n length: Array.isArray(parsedData) ? parsedData.length : 1\n };\n\n batch.mimeType = loader.mimeTypes[0];\n\n return batch;\n}\n\ntype TransformBatches = (\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n) => AsyncIterable<ArrayBuffer>;\n\n/**\n * Create an iterator chain with any transform iterators (crypto, decompression)\n * @param inputIterator\n * @param options\n */\nasync function applyInputTransforms(\n inputIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n transforms: TransformBatches[] = []\n): Promise<AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>> {\n let iteratorChain = inputIterator;\n for await (const transformBatches of transforms) {\n iteratorChain = transformBatches(iteratorChain);\n }\n return iteratorChain;\n}\n"],"mappings":"AAGA,SAAQA,OAAO,EAAEC,kBAAkB,QAAmB,oBAAoB;AAI1E,SAAQC,4BAA4B,QAAO,0BAA0B;AAAC,SAC9DC,cAAc;AAAA,SACdC,gBAAgB;AAAA,SAChBC,gBAAgB;AAAA,SAChBC,wBAAwB;AAAA,SACxBC,cAAc;AAAA,SACdC,YAAY;AAAA,SAGZC,KAAK;AAwCb,OAAO,eAAeC,cAAcA,CAClCC,IAAuB,EACvBC,OAA2C,EAC3CC,OAAuB,EACvBC,OAAuB,EAC8B;EACrD,MAAMC,WAAW,GAAGC,KAAK,CAACC,OAAO,CAACL,OAAO,CAAC,GAAGA,OAAO,GAAGM,SAAS;EAGhE,IAAI,CAACF,KAAK,CAACC,OAAO,CAACL,OAAO,CAAC,IAAI,CAACT,cAAc,CAACS,OAAO,CAAC,EAAE;IACvDE,OAAO,GAAGI,SAAS;IACnBL,OAAO,GAAGD,OAAwB;IAClCA,OAAO,GAAGM,SAAS;EACrB;EAEAP,IAAI,GAAG,MAAMA,IAAI;EACjBE,OAAO,GAAGA,OAAO,IAAI,CAAC,CAAC;EAGvB,MAAMM,GAAG,GAAGZ,cAAc,CAACI,IAAI,CAAC;EAIhC,MAAMS,MAAM,GAAG,MAAMZ,YAAY,CAACG,IAAI,EAAiBC,OAAO,EAAuBC,OAAO,CAAC;EAE7F,IAAI,CAACO,MAAM,EAAE;IACX,OAAO,EAAE;EACX;EAGAP,OAAO,GAAGT,gBAAgB,CAACS,OAAO,EAAEO,MAAM,EAAEL,WAAW,EAAEI,GAAG,CAAC;EAC7DL,OAAO,GAAGT,gBAAgB,CACxB;IAACc,GAAG;IAAEE,eAAe,EAAEX,cAAc;IAAEY,MAAM,EAAEb,KAAK;IAAEG,OAAO,EAAEG;EAAW,CAAC,EAC3EF,OAAO,EACPC,OAAO,IAAI,IACb,CAAC;EAED,OAAO,MAAMS,wBAAwB,CAACH,MAAM,EAAsBT,IAAI,EAAEE,OAAO,EAAEC,OAAO,CAAC;AAC3F;AAKA,eAAeS,wBAAwBA,CACrCH,MAAwB,EACxBT,IAAuB,EACvBE,OAAsB,EACtBC,OAAsB,EACW;EACjC,MAAMU,cAAc,GAAG,MAAMC,qBAAqB,CAACL,MAAM,EAAET,IAAI,EAAEE,OAAO,EAAEC,OAAO,CAAC;EAGlF,IAAI,CAACD,OAAO,CAACa,QAAQ,EAAE;IACrB,OAAOF,cAAc;EACvB;EAEA,MAAMG,aAAa,GAAG;IACpBC,SAAS,EAAE,UAAU;IACrBF,QAAQ,EAAE;MACRG,OAAO,EAAET,MAAM;MACfU,QAAQ,EAAEhB;IACZ,CAAC;IAEDH,IAAI,EAAE,EAAE;IACRoB,SAAS,EAAE;EACb,CAAC;EAED,gBAAgBC,yBAAyBA,CACvCC,QAAoD,EAC5B;IACxB,MAAMN,aAAa;IACnB,OAAOM,QAAQ;EACjB;EAEA,OAAOD,yBAAyB,CAACR,cAAc,CAAC;AAClD;AAOA,eAAeC,qBAAqBA,CAClCL,MAAwB,EACxBT,IAAuB,EACvBE,OAAsB,EACtBC,OAAsB,EACW;EAEjC,MAAMoB,aAAa,GAAG,MAAM5B,wBAAwB,CAACK,IAAI,EAAEE,OAAO,CAAC;EAGnE,MAAMsB,mBAAmB,GAAG,MAAMC,oBAAoB,CAACF,aAAa,EAAE,CAAArB,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEwB,UAAU,KAAI,EAAE,CAAC;EAGhG,IAAIjB,MAAM,CAACV,cAAc,EAAE;IACzB,OAAOU,MAAM,CAACV,cAAc,CAACyB,mBAAmB,EAAEtB,OAAO,EAAEC,OAAO,CAAC;EACrE;EAEA,OAAOwB,mBAAmB,CAACH,mBAAmB,EAAEf,MAAM,EAAEP,OAAO,EAAEC,OAAO,CAAC;AAC3E;AAGA,gBAAgBwB,mBAAmBA,CACjCH,mBAAuE,EACvEf,MAAc,EACdP,OAAsB,EACtBC,OAAsB,EACA;EACtB,MAAMyB,WAAW,GAAG,MAAMrC,4BAA4B,CAACiC,mBAAmB,CAAC;EAE3E,MAAMK,UAAU,GAAG,MAAM/B,KAAK,CAC5B8B,WAAW,EACXnB,MAAM,EAEN;IAAC,GAAGP,OAAO;IAAE4B,QAAQ,EAAErB,MAAM,CA
ACsB,SAAS,CAAC,CAAC;EAAC,CAAC,EAC3C5B,OACF,CAAC;EAGD,MAAM6B,KAAK,GAAGC,kBAAkB,CAACJ,UAAU,EAAEpB,MAAM,CAAC;EAEpD,MAAMuB,KAAK;AACb;AAMA,SAASC,kBAAkBA,CAACJ,UAAmB,EAAEpB,MAAc,EAAS;EACtE,MAAMuB,KAAY,GAAG3C,OAAO,CAACwC,UAAU,CAAC,GACpCvC,kBAAkB,CAACuC,UAAU,CAAC,GAC9B;IACEK,KAAK,EAAE,SAAS;IAChBjB,SAAS,EAAE,MAAM;IACjBjB,IAAI,EAAE6B,UAAU;IAChBM,MAAM,EAAE9B,KAAK,CAACC,OAAO,CAACuB,UAAU,CAAC,GAAGA,UAAU,CAACM,MAAM,GAAG;EAC1D,CAAC;EAELH,KAAK,CAACF,QAAQ,GAAGrB,MAAM,CAACsB,SAAS,CAAC,CAAC,CAAC;EAEpC,OAAOC,KAAK;AACd;AAWA,eAAeP,oBAAoBA,CACjCF,aAAiE,EAEJ;EAAA,IAD7DG,UAA8B,GAAAU,SAAA,CAAAD,MAAA,QAAAC,SAAA,QAAA7B,SAAA,GAAA6B,SAAA,MAAG,EAAE;EAEnC,IAAIC,aAAa,GAAGd,aAAa;EACjC,WAAW,MAAMe,gBAAgB,IAAIZ,UAAU,EAAE;IAC/CW,aAAa,GAAGC,gBAAgB,CAACD,aAAa,CAAC;EACjD;EACA,OAAOA,aAAa;AACtB"}
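The rebuilt source map above embeds the updated `parse-in-batches.ts`, including the optional leading metadata batch emitted when `options.metadata` is set. A minimal consumption sketch follows; the choice of `JSONLoader` is an assumption, standing in for any loader with batch support:

```ts
import {parseInBatches} from '@loaders.gl/core';
// Assumption: JSONLoader is only a stand-in; use whichever loader applies.
import {JSONLoader} from '@loaders.gl/json';

async function logBatches(url: string): Promise<void> {
  // With `metadata: true`, the first batch is the metadata batch described in the
  // embedded source: {batchType: 'metadata', metadata: {_loader, _context}, data: [], bytesUsed: 0}.
  const batches = await parseInBatches(fetch(url), JSONLoader, {metadata: true});

  for await (const batch of batches as AsyncIterable<any>) {
    if (batch.batchType === 'metadata') {
      console.log('selected loader:', batch.metadata?._loader?.name);
      continue;
    }
    console.log('data batch of length', batch.length);
  }
}
```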
@@ -36,61 +36,69 @@ function isTransferable(object) {
36
36
  }
37
37
 
38
38
  // ../worker-utils/src/lib/worker-farm/worker-body.ts
39
- function getParentPort() {
39
+ async function getParentPort() {
40
40
  let parentPort;
41
41
  try {
42
42
  eval("globalThis.parentPort = require('worker_threads').parentPort");
43
43
  parentPort = globalThis.parentPort;
44
44
  } catch {
45
+ try {
46
+ eval("globalThis.workerThreadsPromise = import('worker_threads')");
47
+ const workerThreads = await globalThis.workerThreadsPromise;
48
+ parentPort = workerThreads.parentPort;
49
+ } catch (error) {
50
+ console.error(error.message);
51
+ }
45
52
  }
46
53
  return parentPort;
47
54
  }
48
55
  var onMessageWrapperMap = /* @__PURE__ */ new Map();
49
56
  var WorkerBody = class {
50
57
  /** Check that we are actually in a worker thread */
51
- static inWorkerThread() {
52
- return typeof self !== "undefined" || Boolean(getParentPort());
58
+ static async inWorkerThread() {
59
+ return typeof self !== "undefined" || Boolean(await getParentPort());
53
60
  }
54
61
  /*
55
62
  * (type: WorkerMessageType, payload: WorkerMessagePayload) => any
56
63
  */
57
64
  static set onmessage(onMessage) {
58
- function handleMessage(message) {
59
- const parentPort3 = getParentPort();
60
- const { type, payload } = parentPort3 ? message : message.data;
65
+ async function handleMessage(message) {
66
+ const parentPort2 = await getParentPort();
67
+ const { type, payload } = parentPort2 ? message : message.data;
61
68
  onMessage(type, payload);
62
69
  }
63
- const parentPort2 = getParentPort();
64
- if (parentPort2) {
65
- parentPort2.on("message", handleMessage);
66
- parentPort2.on("exit", () => console.debug("Node worker closing"));
67
- } else {
68
- globalThis.onmessage = handleMessage;
69
- }
70
+ getParentPort().then((parentPort2) => {
71
+ if (parentPort2) {
72
+ parentPort2.on("message", handleMessage);
73
+ parentPort2.on("exit", () => console.debug("Node worker closing"));
74
+ } else {
75
+ globalThis.onmessage = handleMessage;
76
+ }
77
+ });
70
78
  }
71
- static addEventListener(onMessage) {
79
+ static async addEventListener(onMessage) {
72
80
  let onMessageWrapper = onMessageWrapperMap.get(onMessage);
73
81
  if (!onMessageWrapper) {
74
- onMessageWrapper = (message) => {
82
+ onMessageWrapper = async (message) => {
75
83
  if (!isKnownMessage(message)) {
76
84
  return;
77
85
  }
78
- const parentPort3 = getParentPort();
86
+ const parentPort3 = await getParentPort();
79
87
  const { type, payload } = parentPort3 ? message : message.data;
80
88
  onMessage(type, payload);
81
89
  };
82
90
  }
83
- const parentPort2 = getParentPort();
91
+ const parentPort2 = await getParentPort();
84
92
  if (parentPort2) {
85
93
  console.error("not implemented");
86
94
  } else {
87
95
  globalThis.addEventListener("message", onMessageWrapper);
88
96
  }
89
97
  }
90
- static removeEventListener(onMessage) {
98
+ static async removeEventListener(onMessage) {
91
99
  const onMessageWrapper = onMessageWrapperMap.get(onMessage);
92
100
  onMessageWrapperMap.delete(onMessage);
93
- const parentPort2 = getParentPort();
101
+ const parentPort2 = await getParentPort();
94
102
  if (parentPort2) {
95
103
  console.error("not implemented");
96
104
  } else {
@@ -102,10 +110,10 @@ var WorkerBody = class {
102
110
  * @param type
103
111
  * @param payload
104
112
  */
105
- static postMessage(type, payload) {
113
+ static async postMessage(type, payload) {
106
114
  const data = { source: "loaders.gl", type, payload };
107
115
  const transferList = getTransferList(payload);
108
- const parentPort2 = getParentPort();
116
+ const parentPort2 = await getParentPort();
109
117
  if (parentPort2) {
110
118
  parentPort2.postMessage(data, transferList);
111
119
  } else {
@@ -120,8 +128,8 @@ function isKnownMessage(message) {
120
128
 
121
129
  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
122
130
  var requestId = 0;
123
- function createLoaderWorker(loader) {
124
- if (!WorkerBody.inWorkerThread()) {
131
+ async function createLoaderWorker(loader) {
132
+ if (!await WorkerBody.inWorkerThread()) {
125
133
  return;
126
134
  }
127
135
  WorkerBody.onmessage = async (type, payload) => {
@@ -200,7 +208,7 @@ async function parseData({
200
208
  }
201
209
 
202
210
  // src/null-loader.ts
203
- var VERSION = true ? "4.0.1" : "latest";
211
+ var VERSION = true ? "4.0.3" : "latest";
204
212
  var NullLoader = {
205
213
  name: "Null loader",
206
214
  id: "null",
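The worker-body hunks above make `getParentPort` asynchronous, and everything that calls it up through `createLoaderWorker` follows suit, so that a dynamic `import('worker_threads')` can be used when `require` is unavailable. A standalone sketch of that detection pattern, assuming Node.js (illustrative names, not the shipped implementation):

```ts
// Try CommonJS require first, then fall back to a dynamic ESM import.
async function resolveParentPort(): Promise<unknown> {
  try {
    // CJS builds: synchronous require works.
    return require('worker_threads').parentPort;
  } catch {
    try {
      // ESM builds: dynamic import is async, which is why the surrounding
      // WorkerBody methods became async in this release.
      const workerThreads = await import('worker_threads');
      return workerThreads.parentPort;
    } catch {
      return undefined; // Not running under Node worker_threads (e.g. a browser).
    }
  }
}
```

Because `WorkerBody.inWorkerThread()` now returns a promise, `createLoaderWorker` awaits it before wiring up `onmessage`, and the worker entry points can keep calling it without awaiting the result.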
@@ -36,61 +36,69 @@
36
36
  }
37
37
 
38
38
  // ../worker-utils/src/lib/worker-farm/worker-body.ts
39
- function getParentPort() {
39
+ async function getParentPort() {
40
40
  let parentPort;
41
41
  try {
42
42
  eval("globalThis.parentPort = require('worker_threads').parentPort");
43
43
  parentPort = globalThis.parentPort;
44
44
  } catch {
45
+ try {
46
+ eval("globalThis.workerThreadsPromise = import('worker_threads')");
47
+ const workerThreads = await globalThis.workerThreadsPromise;
48
+ parentPort = workerThreads.parentPort;
49
+ } catch (error) {
50
+ console.error(error.message);
51
+ }
45
52
  }
46
53
  return parentPort;
47
54
  }
48
55
  var onMessageWrapperMap = /* @__PURE__ */ new Map();
49
56
  var WorkerBody = class {
50
57
  /** Check that we are actually in a worker thread */
51
- static inWorkerThread() {
52
- return typeof self !== "undefined" || Boolean(getParentPort());
58
+ static async inWorkerThread() {
59
+ return typeof self !== "undefined" || Boolean(await getParentPort());
53
60
  }
54
61
  /*
55
62
  * (type: WorkerMessageType, payload: WorkerMessagePayload) => any
56
63
  */
57
64
  static set onmessage(onMessage) {
58
- function handleMessage(message) {
59
- const parentPort3 = getParentPort();
60
- const { type, payload } = parentPort3 ? message : message.data;
65
+ async function handleMessage(message) {
66
+ const parentPort2 = await getParentPort();
67
+ const { type, payload } = parentPort2 ? message : message.data;
61
68
  onMessage(type, payload);
62
69
  }
63
- const parentPort2 = getParentPort();
64
- if (parentPort2) {
65
- parentPort2.on("message", handleMessage);
66
- parentPort2.on("exit", () => console.debug("Node worker closing"));
67
- } else {
68
- globalThis.onmessage = handleMessage;
69
- }
70
+ getParentPort().then((parentPort2) => {
71
+ if (parentPort2) {
72
+ parentPort2.on("message", handleMessage);
73
+ parentPort2.on("exit", () => console.debug("Node worker closing"));
74
+ } else {
75
+ globalThis.onmessage = handleMessage;
76
+ }
77
+ });
70
78
  }
71
- static addEventListener(onMessage) {
79
+ static async addEventListener(onMessage) {
72
80
  let onMessageWrapper = onMessageWrapperMap.get(onMessage);
73
81
  if (!onMessageWrapper) {
74
- onMessageWrapper = (message) => {
82
+ onMessageWrapper = async (message) => {
75
83
  if (!isKnownMessage(message)) {
76
84
  return;
77
85
  }
78
- const parentPort3 = getParentPort();
86
+ const parentPort3 = await getParentPort();
79
87
  const { type, payload } = parentPort3 ? message : message.data;
80
88
  onMessage(type, payload);
81
89
  };
82
90
  }
83
- const parentPort2 = getParentPort();
91
+ const parentPort2 = await getParentPort();
84
92
  if (parentPort2) {
85
93
  console.error("not implemented");
86
94
  } else {
87
95
  globalThis.addEventListener("message", onMessageWrapper);
88
96
  }
89
97
  }
90
- static removeEventListener(onMessage) {
98
+ static async removeEventListener(onMessage) {
91
99
  const onMessageWrapper = onMessageWrapperMap.get(onMessage);
92
100
  onMessageWrapperMap.delete(onMessage);
93
- const parentPort2 = getParentPort();
101
+ const parentPort2 = await getParentPort();
94
102
  if (parentPort2) {
95
103
  console.error("not implemented");
96
104
  } else {
@@ -102,10 +110,10 @@
102
110
  * @param type
103
111
  * @param payload
104
112
  */
105
- static postMessage(type, payload) {
113
+ static async postMessage(type, payload) {
106
114
  const data = { source: "loaders.gl", type, payload };
107
115
  const transferList = getTransferList(payload);
108
- const parentPort2 = getParentPort();
116
+ const parentPort2 = await getParentPort();
109
117
  if (parentPort2) {
110
118
  parentPort2.postMessage(data, transferList);
111
119
  } else {
@@ -120,8 +128,8 @@
120
128
 
121
129
  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
122
130
  var requestId = 0;
123
- function createLoaderWorker(loader) {
124
- if (!WorkerBody.inWorkerThread()) {
131
+ async function createLoaderWorker(loader) {
132
+ if (!await WorkerBody.inWorkerThread()) {
125
133
  return;
126
134
  }
127
135
  WorkerBody.onmessage = async (type, payload) => {
@@ -200,7 +208,7 @@
200
208
  }
201
209
 
202
210
  // src/null-loader.ts
203
- var VERSION = true ? "4.0.1" : "latest";
211
+ var VERSION = true ? "4.0.3" : "latest";
204
212
  var NullLoader = {
205
213
  name: "Null loader",
206
214
  id: "null",
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=null-worker-node.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"null-worker-node.d.ts","sourceRoot":"","sources":["../../src/workers/null-worker-node.ts"],"names":[],"mappings":""}
@@ -0,0 +1,4 @@
1
+ import { createLoaderWorker } from '@loaders.gl/loader-utils';
2
+ import { NullLoader } from "../null-loader.js";
3
+ createLoaderWorker(NullLoader);
4
+ //# sourceMappingURL=null-worker-node.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"null-worker-node.js","names":["createLoaderWorker","NullLoader"],"sources":["../../src/workers/null-worker-node.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\nimport {NullLoader} from '../null-loader';\n\ncreateLoaderWorker(NullLoader);\n"],"mappings":"AAGA,SAAQA,kBAAkB,QAAO,0BAA0B;AAAC,SACpDC,UAAU;AAElBD,kBAAkB,CAACC,UAAU,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@loaders.gl/core",
3
- "version": "4.0.1",
3
+ "version": "4.0.3",
4
4
  "description": "The core API for working with loaders.gl loaders and writers",
5
5
  "license": "MIT",
6
6
  "type": "module",
@@ -49,9 +49,9 @@
49
49
  },
50
50
  "dependencies": {
51
51
  "@babel/runtime": "^7.3.1",
52
- "@loaders.gl/loader-utils": "4.0.1",
53
- "@loaders.gl/worker-utils": "4.0.1",
52
+ "@loaders.gl/loader-utils": "4.0.3",
53
+ "@loaders.gl/worker-utils": "4.0.3",
54
54
  "@probe.gl/log": "^4.0.2"
55
55
  },
56
- "gitHead": "765e5a26a6bf3f2cc02cabffc4a1e3665ec92a53"
56
+ "gitHead": "03c871839b36c997249dabae1844df53a35d3760"
57
57
  }
@@ -3,10 +3,14 @@
3
3
  // Copyright 2022 Foursquare Labs, Inc
4
4
 
5
5
  /* global TextEncoder, TextDecoder */
6
- import {concatenateArrayBuffers, Writer, WriterOptionsType} from '@loaders.gl/loader-utils';
6
+ import {
7
+ concatenateArrayBuffers,
8
+ WriterOptionsType,
9
+ WriterWithEncoder
10
+ } from '@loaders.gl/loader-utils';
7
11
  import {Table} from '@loaders.gl/schema';
8
12
 
9
- export async function encodeTable<WriterT extends Writer = Writer>(
13
+ export async function encodeTable<WriterT extends WriterWithEncoder = WriterWithEncoder>(
10
14
  data: Table,
11
15
  writer: WriterT,
12
16
  options?: WriterOptionsType<WriterT>
@@ -36,7 +40,7 @@ export async function encodeTable<WriterT extends Writer = Writer>(
36
40
  throw new Error('Writer could not encode data');
37
41
  }
38
42
 
39
- export async function encodeTableAsText<WriterT extends Writer = Writer>(
43
+ export async function encodeTableAsText<WriterT extends WriterWithEncoder = WriterWithEncoder>(
40
44
  data: Table,
41
45
  writer: WriterT,
42
46
  options?: WriterOptionsType<WriterT>
@@ -45,14 +49,14 @@ export async function encodeTableAsText<WriterT extends Writer = Writer>(
45
49
  return await writer.encodeText(data, options);
46
50
  }
47
51
 
48
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
52
+ if (writer.text) {
49
53
  const arrayBuffer = await encodeTable(data, writer, options);
50
54
  return new TextDecoder().decode(arrayBuffer);
51
55
  }
52
56
  throw new Error(`Writer ${writer.name} could not encode data as text`);
53
57
  }
54
58
 
55
- export function encodeTableInBatches<WriterT extends Writer = Writer>(
59
+ export function encodeTableInBatches<WriterT extends WriterWithEncoder = WriterWithEncoder>(
56
60
  data: Table,
57
61
  writer: WriterT,
58
62
  options?: WriterOptionsType<WriterT>
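The table encoding helpers above now constrain the writer to `WriterWithEncoder`, which is why `encodeTableAsText` can drop its `writer.encode || writer.encodeInBatches` runtime check. A usage sketch, under two assumptions: that these helpers are re-exported from `@loaders.gl/core` like the other encode functions, and that `CSVWriter` from `@loaders.gl/csv` satisfies `WriterWithEncoder`:

```ts
import type {Table} from '@loaders.gl/schema';
import {encodeTableAsText} from '@loaders.gl/core'; // assumption: re-exported here
import {CSVWriter} from '@loaders.gl/csv'; // assumption: implements WriterWithEncoder

const table: Table = {
  shape: 'object-row-table',
  data: [
    {id: 1, name: 'alpha'},
    {id: 2, name: 'beta'}
  ]
};

async function tableToCsv(): Promise<string> {
  // Writers exposing encodeText are used directly; otherwise the table is
  // encoded to an ArrayBuffer and decoded with TextDecoder, per the hunk above.
  return await encodeTableAsText(table, CSVWriter);
}
```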
@@ -1,7 +1,7 @@
1
1
  // loaders.gl, MIT license
2
2
  // Copyright (c) vis.gl contributors
3
3
 
4
- import {Writer, WriterOptions, canEncodeWithWorker} from '@loaders.gl/loader-utils';
4
+ import {WriterOptions, WriterWithEncoder, canEncodeWithWorker} from '@loaders.gl/loader-utils';
5
5
  import {concatenateArrayBuffers, resolvePath, NodeFile} from '@loaders.gl/loader-utils';
6
6
  import {processOnWorker} from '@loaders.gl/worker-utils';
7
7
  import {isBrowser} from '@loaders.gl/loader-utils';
@@ -13,7 +13,7 @@ import {getLoaderOptions} from './loader-options';
13
13
  */
14
14
  export async function encode(
15
15
  data: unknown,
16
- writer: Writer,
16
+ writer: WriterWithEncoder,
17
17
  options?: WriterOptions
18
18
  ): Promise<ArrayBuffer> {
19
19
  const globalOptions = getLoaderOptions() as WriterOptions;
@@ -75,7 +75,11 @@ export async function encode(
75
75
  /**
76
76
  * Encode loaded data into a binary ArrayBuffer using the specified Writer.
77
77
  */
78
- export function encodeSync(data: unknown, writer: Writer, options?: WriterOptions): ArrayBuffer {
78
+ export function encodeSync(
79
+ data: unknown,
80
+ writer: WriterWithEncoder,
81
+ options?: WriterOptions
82
+ ): ArrayBuffer {
79
83
  if (writer.encodeSync) {
80
84
  return writer.encodeSync(data, options);
81
85
  }
@@ -90,14 +94,14 @@ export function encodeSync(data: unknown, writer: Writer, options?: WriterOption
90
94
  */
91
95
  export async function encodeText(
92
96
  data: unknown,
93
- writer: Writer,
97
+ writer: WriterWithEncoder,
94
98
  options?: WriterOptions
95
99
  ): Promise<string> {
96
100
  if (writer.text && writer.encodeText) {
97
101
  return await writer.encodeText(data, options);
98
102
  }
99
103
 
100
- if (writer.text && (writer.encode || writer.encodeInBatches)) {
104
+ if (writer.text) {
101
105
  const arrayBuffer = await encode(data, writer, options);
102
106
  return new TextDecoder().decode(arrayBuffer);
103
107
  }
@@ -111,7 +115,11 @@ export async function encodeText(
111
115
  * It is not optimized for performance. Data maybe converted from text to binary and back.
112
116
  * @throws if the writer does not generate text output
113
117
  */
114
- export function encodeTextSync(data: unknown, writer: Writer, options?: WriterOptions): string {
118
+ export function encodeTextSync(
119
+ data: unknown,
120
+ writer: WriterWithEncoder,
121
+ options?: WriterOptions
122
+ ): string {
115
123
  if (writer.text && writer.encodeTextSync) {
116
124
  return writer.encodeTextSync(data, options);
117
125
  }
@@ -129,7 +137,7 @@ export function encodeTextSync(data: unknown, writer: Writer, options?: WriterOp
129
137
  */
130
138
  export function encodeInBatches(
131
139
  data: unknown,
132
- writer: Writer,
140
+ writer: WriterWithEncoder,
133
141
  options?: WriterOptions
134
142
  ): AsyncIterable<ArrayBuffer> {
135
143
  if (writer.encodeInBatches) {
@@ -148,7 +156,7 @@ export function encodeInBatches(
148
156
  export async function encodeURLtoURL(
149
157
  inputUrl: string,
150
158
  outputUrl: string,
151
- writer: Writer,
159
+ writer: WriterWithEncoder,
152
160
  options?: WriterOptions
153
161
  ): Promise<string> {
154
162
  inputUrl = resolvePath(inputUrl);
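Likewise, `encode`, `encodeSync`, `encodeText`, `encodeTextSync`, `encodeInBatches` and `encodeURLtoURL` above now take a `WriterWithEncoder` rather than the bare `Writer` type. A small sketch for the batched path; the writer argument is left hypothetical, since any writer implementing `encodeInBatches` would do:

```ts
import type {WriterOptions, WriterWithEncoder} from '@loaders.gl/loader-utils';
import {encodeInBatches} from '@loaders.gl/core';

// Stream-encode `data` and count the emitted bytes. Per the signature above,
// encodeInBatches returns an AsyncIterable<ArrayBuffer>.
async function countEncodedBytes(
  data: unknown,
  writer: WriterWithEncoder,
  options?: WriterOptions
): Promise<number> {
  let totalBytes = 0;
  for await (const chunk of encodeInBatches(data, writer, options)) {
    totalBytes += chunk.byteLength;
  }
  return totalBytes;
}
```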
@@ -1,7 +1,7 @@
1
1
  // loaders.gl, MIT license
2
2
  // Copyright (c) vis.gl contributors
3
3
 
4
- import type {Batch} from '@loaders.gl/schema';
4
+ import {isTable, makeBatchFromTable, type Batch} from '@loaders.gl/schema';
5
5
  import type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils';
6
6
  import type {LoaderContext, BatchableDataType} from '@loaders.gl/loader-utils';
7
7
  import type {LoaderBatchType, LoaderOptionsType} from '@loaders.gl/loader-utils';
@@ -153,30 +153,49 @@ async function parseToOutputIterator(
153
153
  return loader.parseInBatches(transformedIterator, options, context);
154
154
  }
155
155
 
156
- // Fallback: load atomically using `parse` concatenating input iterator into single chunk
157
- async function* parseChunkInBatches() {
158
- const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
159
- // Call `parse` instead of `loader.parse` to ensure we can call workers etc.
160
- const parsedData = await parse(
161
- arrayBuffer,
162
- loader,
163
- // TODO - Hack: supply loaders MIME type to ensure we match it
164
- {...options, mimeType: loader.mimeTypes[0]},
165
- context
166
- );
167
- // yield a single batch, the output from loader.parse()
168
- // TODO - run through batch builder to apply options etc...
169
- const batch: Batch = {
170
- mimeType: loader.mimeTypes[0],
171
- shape: Array.isArray(parsedData) ? 'row-table' : 'unknown',
172
- batchType: 'data',
173
- data: parsedData,
174
- length: Array.isArray(parsedData) ? parsedData.length : 1
175
- };
176
- yield batch;
177
- }
156
+ return parseChunkInBatches(transformedIterator, loader, options, context);
157
+ }
158
+
159
+ // Fallback: load atomically using `parse` concatenating input iterator into single chunk
160
+ async function* parseChunkInBatches(
161
+ transformedIterator: Iterable<ArrayBuffer> | AsyncIterable<ArrayBuffer>,
162
+ loader: Loader,
163
+ options: LoaderOptions,
164
+ context: LoaderContext
165
+ ): AsyncIterable<Batch> {
166
+ const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
167
+ // Call `parse` instead of `loader.parse` to ensure we can call workers etc.
168
+ const parsedData = await parse(
169
+ arrayBuffer,
170
+ loader,
171
+ // TODO - Hack: supply loaders MIME type to ensure we match it
172
+ {...options, mimeType: loader.mimeTypes[0]},
173
+ context
174
+ );
178
175
 
179
- return parseChunkInBatches();
176
+ // yield a single batch, the output from loader.parse() repackaged as a batch
177
+ const batch = convertDataToBatch(parsedData, loader);
178
+
179
+ yield batch;
180
+ }
181
+
182
+ /**
183
+ * Convert parsed data into a single batch
184
+ * @todo run through batch builder to apply options etc...
185
+ */
186
+ function convertDataToBatch(parsedData: unknown, loader: Loader): Batch {
187
+ const batch: Batch = isTable(parsedData)
188
+ ? makeBatchFromTable(parsedData)
189
+ : {
190
+ shape: 'unknown',
191
+ batchType: 'data',
192
+ data: parsedData,
193
+ length: Array.isArray(parsedData) ? parsedData.length : 1
194
+ };
195
+
196
+ batch.mimeType = loader.mimeTypes[0];
197
+
198
+ return batch;
180
199
  }
181
200
 
182
201
  type TransformBatches = (
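For loaders that only implement atomic `parse`, the refactored fallback above concatenates the input, parses it once, and yields a single batch; table-shaped results are now repackaged via `makeBatchFromTable` instead of being blanket-labelled `row-table`. A consumption sketch (the loader is hypothetical: any registered loader without `parseInBatches` takes this path):

```ts
import {parseInBatches} from '@loaders.gl/core';
import type {Loader} from '@loaders.gl/loader-utils';

// `atomicLoader` is hypothetical: a loader exposing only `parse`.
async function parseAtomically(data: ArrayBuffer, atomicLoader: Loader): Promise<void> {
  const batches = await parseInBatches(data, [atomicLoader]);
  for await (const batch of batches as AsyncIterable<any>) {
    // Exactly one batch arrives: either a table batch carrying its table shape
    // and row count, or {shape: 'unknown', batchType: 'data', data, length}.
    console.log(batch.shape, batch.batchType, batch.length, batch.mimeType);
  }
}
```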
@@ -0,0 +1,7 @@
1
+ // loaders.gl, MIT license
2
+ // Copyright (c) vis.gl contributors
3
+
4
+ import {createLoaderWorker} from '@loaders.gl/loader-utils';
5
+ import {NullLoader} from '../null-loader';
6
+
7
+ createLoaderWorker(NullLoader);
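The new `null-worker-node.ts` entry (and the matching `dist/null-worker-node.js` bundle added earlier in this diff) gives the null loader a Node-side worker entry point alongside the browser one. A usage sketch; that `NullLoader` is re-exported from `@loaders.gl/core` and that `worker: true` routes to the worker build are assumptions here:

```ts
import {parse, NullLoader} from '@loaders.gl/core'; // assumption: NullLoader re-exported

// Run the null loader, optionally on a worker thread; on Node the worker
// build would be the null-worker-node bundle added above (assumption).
async function parseOnWorker(buffer: ArrayBuffer): Promise<unknown> {
  return await parse(buffer, NullLoader, {worker: true});
}
```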