@loaders.gl/core 3.1.3 → 4.0.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.js +2 -2
- package/dist/bundle.js.map +1 -0
- package/dist/{es5/core-addons → core-addons}/README.md +0 -0
- package/dist/core-addons/write-file-browser.js +1 -59
- package/dist/{es5/core-addons → core-addons}/write-file-browser.js.map +0 -0
- package/dist/dist.min.js +18 -18
- package/dist/index.js +29 -102
- package/dist/index.js.map +1 -0
- package/dist/iterators/batch-iterators/timed-batch-iterator.js +17 -20
- package/dist/iterators/batch-iterators/timed-batch-iterator.js.map +1 -0
- package/dist/iterators/make-iterator/make-array-buffer-iterator.js +16 -25
- package/dist/iterators/make-iterator/make-array-buffer-iterator.js.map +1 -0
- package/dist/iterators/make-iterator/make-blob-iterator.js +12 -20
- package/dist/iterators/make-iterator/make-blob-iterator.js.map +1 -0
- package/dist/iterators/make-iterator/make-iterator.js +29 -36
- package/dist/iterators/make-iterator/make-iterator.js.map +1 -0
- package/dist/iterators/make-iterator/make-stream-iterator.js +32 -89
- package/dist/iterators/make-iterator/make-stream-iterator.js.map +1 -0
- package/dist/iterators/make-iterator/make-string-iterator.js +12 -22
- package/dist/iterators/make-iterator/make-string-iterator.js.map +1 -0
- package/dist/iterators/make-stream/make-dom-stream.js +32 -44
- package/dist/iterators/make-stream/make-dom-stream.js.map +1 -0
- package/dist/iterators/make-stream/make-node-stream.js +68 -50
- package/dist/iterators/make-stream/make-node-stream.js.map +1 -0
- package/dist/javascript-utils/is-type.js +21 -41
- package/dist/javascript-utils/is-type.js.map +1 -0
- package/dist/lib/api/encode.js +81 -101
- package/dist/lib/api/encode.js.map +1 -0
- package/dist/lib/api/load-in-batches.js +27 -30
- package/dist/lib/api/load-in-batches.js.map +1 -0
- package/dist/lib/api/load.js +24 -40
- package/dist/lib/api/load.js.map +1 -0
- package/dist/lib/api/parse-in-batches.js +92 -113
- package/dist/lib/api/parse-in-batches.js.map +1 -0
- package/dist/lib/api/parse-sync.js +54 -56
- package/dist/lib/api/parse-sync.js.map +1 -0
- package/dist/lib/api/parse.js +61 -71
- package/dist/lib/api/parse.js.map +1 -0
- package/dist/lib/api/register-loaders.js +23 -29
- package/dist/lib/api/register-loaders.js.map +1 -0
- package/dist/lib/api/save.js +9 -13
- package/dist/lib/api/save.js.map +1 -0
- package/dist/lib/api/select-loader.js +194 -205
- package/dist/lib/api/select-loader.js.map +1 -0
- package/dist/lib/api/set-loader-options.js +4 -11
- package/dist/lib/api/set-loader-options.js.map +1 -0
- package/dist/lib/common.js +2 -2
- package/dist/{es5/lib → lib}/common.js.map +0 -0
- package/dist/lib/fetch/fetch-error-message.js +17 -21
- package/dist/lib/fetch/fetch-error-message.js.map +1 -0
- package/dist/lib/fetch/fetch-file.js +15 -25
- package/dist/lib/fetch/fetch-file.js.map +1 -0
- package/dist/lib/fetch/read-array-buffer.js +27 -39
- package/dist/lib/fetch/read-array-buffer.js.map +1 -0
- package/dist/lib/fetch/read-file.js +16 -28
- package/dist/lib/fetch/read-file.js.map +1 -0
- package/dist/lib/fetch/write-file.js +23 -20
- package/dist/lib/fetch/write-file.js.map +1 -0
- package/dist/lib/filesystems/browser-filesystem.js +107 -118
- package/dist/lib/filesystems/browser-filesystem.js.map +1 -0
- package/dist/lib/filesystems/filesystem.js +2 -2
- package/dist/{es5/lib → lib}/filesystems/filesystem.js.map +0 -0
- package/dist/lib/filesystems/read-array-buffer.js +8 -28
- package/dist/lib/filesystems/read-array-buffer.js.map +1 -0
- package/dist/lib/init.js +4 -8
- package/dist/lib/init.js.map +1 -0
- package/dist/lib/loader-utils/check-errors.js +22 -26
- package/dist/lib/loader-utils/check-errors.js.map +1 -0
- package/dist/lib/loader-utils/get-data.js +122 -122
- package/dist/lib/loader-utils/get-data.js.map +1 -0
- package/dist/lib/loader-utils/loader-context.js +34 -47
- package/dist/lib/loader-utils/loader-context.js.map +1 -0
- package/dist/lib/loader-utils/loggers.js +46 -39
- package/dist/lib/loader-utils/loggers.js.map +1 -0
- package/dist/lib/loader-utils/normalize-loader.js +41 -50
- package/dist/lib/loader-utils/normalize-loader.js.map +1 -0
- package/dist/lib/loader-utils/option-defaults.js +35 -39
- package/dist/lib/loader-utils/option-defaults.js.map +1 -0
- package/dist/lib/loader-utils/option-utils.js +119 -156
- package/dist/lib/loader-utils/option-utils.js.map +1 -0
- package/dist/lib/progress/fetch-progress.js +56 -54
- package/dist/lib/progress/fetch-progress.js.map +1 -0
- package/dist/lib/utils/mime-type-utils.js +17 -38
- package/dist/lib/utils/mime-type-utils.js.map +1 -0
- package/dist/lib/utils/resource-utils.js +52 -70
- package/dist/lib/utils/resource-utils.js.map +1 -0
- package/dist/lib/utils/response-utils.js +105 -104
- package/dist/lib/utils/response-utils.js.map +1 -0
- package/dist/null-loader.js +31 -41
- package/dist/null-loader.js.map +1 -0
- package/dist/null-worker.js +1 -1
- package/dist/workers/null-worker.js +4 -5
- package/dist/workers/null-worker.js.map +1 -0
- package/package.json +9 -7
- package/dist/es5/bundle.js +0 -7
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/core-addons/write-file-browser.js +0 -2
- package/dist/es5/index.js +0 -374
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/iterators/batch-iterators/timed-batch-iterator.js +0 -131
- package/dist/es5/iterators/batch-iterators/timed-batch-iterator.js.map +0 -1
- package/dist/es5/iterators/make-iterator/make-array-buffer-iterator.js +0 -61
- package/dist/es5/iterators/make-iterator/make-array-buffer-iterator.js.map +0 -1
- package/dist/es5/iterators/make-iterator/make-blob-iterator.js +0 -61
- package/dist/es5/iterators/make-iterator/make-blob-iterator.js.map +0 -1
- package/dist/es5/iterators/make-iterator/make-iterator.js +0 -42
- package/dist/es5/iterators/make-iterator/make-iterator.js.map +0 -1
- package/dist/es5/iterators/make-iterator/make-stream-iterator.js +0 -184
- package/dist/es5/iterators/make-iterator/make-stream-iterator.js.map +0 -1
- package/dist/es5/iterators/make-iterator/make-string-iterator.js +0 -49
- package/dist/es5/iterators/make-iterator/make-string-iterator.js.map +0 -1
- package/dist/es5/iterators/make-stream/make-dom-stream.js +0 -86
- package/dist/es5/iterators/make-stream/make-dom-stream.js.map +0 -1
- package/dist/es5/iterators/make-stream/make-node-stream.js +0 -217
- package/dist/es5/iterators/make-stream/make-node-stream.js.map +0 -1
- package/dist/es5/javascript-utils/is-type.js +0 -115
- package/dist/es5/javascript-utils/is-type.js.map +0 -1
- package/dist/es5/lib/api/encode.js +0 -304
- package/dist/es5/lib/api/encode.js.map +0 -1
- package/dist/es5/lib/api/load-in-batches.js +0 -83
- package/dist/es5/lib/api/load-in-batches.js.map +0 -1
- package/dist/es5/lib/api/load.js +0 -81
- package/dist/es5/lib/api/load.js.map +0 -1
- package/dist/es5/lib/api/parse-in-batches.js +0 -366
- package/dist/es5/lib/api/parse-in-batches.js.map +0 -1
- package/dist/es5/lib/api/parse-sync.js +0 -75
- package/dist/es5/lib/api/parse-sync.js.map +0 -1
- package/dist/es5/lib/api/parse.js +0 -171
- package/dist/es5/lib/api/parse.js.map +0 -1
- package/dist/es5/lib/api/register-loaders.js +0 -63
- package/dist/es5/lib/api/register-loaders.js.map +0 -1
- package/dist/es5/lib/api/save.js +0 -55
- package/dist/es5/lib/api/save.js.map +0 -1
- package/dist/es5/lib/api/select-loader.js +0 -378
- package/dist/es5/lib/api/select-loader.js.map +0 -1
- package/dist/es5/lib/api/set-loader-options.js +0 -13
- package/dist/es5/lib/api/set-loader-options.js.map +0 -1
- package/dist/es5/lib/common.js +0 -2
- package/dist/es5/lib/fetch/fetch-error-message.js +0 -72
- package/dist/es5/lib/fetch/fetch-error-message.js.map +0 -1
- package/dist/es5/lib/fetch/fetch-file.js +0 -63
- package/dist/es5/lib/fetch/fetch-file.js.map +0 -1
- package/dist/es5/lib/fetch/read-array-buffer.js +0 -100
- package/dist/es5/lib/fetch/read-array-buffer.js.map +0 -1
- package/dist/es5/lib/fetch/read-file.js +0 -26
- package/dist/es5/lib/fetch/read-file.js.map +0 -1
- package/dist/es5/lib/fetch/write-file.js +0 -63
- package/dist/es5/lib/fetch/write-file.js.map +0 -1
- package/dist/es5/lib/filesystems/browser-filesystem.js +0 -312
- package/dist/es5/lib/filesystems/browser-filesystem.js.map +0 -1
- package/dist/es5/lib/filesystems/filesystem.js +0 -2
- package/dist/es5/lib/filesystems/read-array-buffer.js +0 -53
- package/dist/es5/lib/filesystems/read-array-buffer.js.map +0 -1
- package/dist/es5/lib/init.js +0 -13
- package/dist/es5/lib/init.js.map +0 -1
- package/dist/es5/lib/loader-utils/check-errors.js +0 -73
- package/dist/es5/lib/loader-utils/check-errors.js.map +0 -1
- package/dist/es5/lib/loader-utils/get-data.js +0 -314
- package/dist/es5/lib/loader-utils/get-data.js.map +0 -1
- package/dist/es5/lib/loader-utils/loader-context.js +0 -57
- package/dist/es5/lib/loader-utils/loader-context.js.map +0 -1
- package/dist/es5/lib/loader-utils/loggers.js +0 -110
- package/dist/es5/lib/loader-utils/loggers.js.map +0 -1
- package/dist/es5/lib/loader-utils/normalize-loader.js +0 -59
- package/dist/es5/lib/loader-utils/normalize-loader.js.map +0 -1
- package/dist/es5/lib/loader-utils/option-defaults.js +0 -47
- package/dist/es5/lib/loader-utils/option-defaults.js.map +0 -1
- package/dist/es5/lib/loader-utils/option-utils.js +0 -191
- package/dist/es5/lib/loader-utils/option-utils.js.map +0 -1
- package/dist/es5/lib/progress/fetch-progress.js +0 -169
- package/dist/es5/lib/progress/fetch-progress.js.map +0 -1
- package/dist/es5/lib/utils/mime-type-utils.js +0 -30
- package/dist/es5/lib/utils/mime-type-utils.js.map +0 -1
- package/dist/es5/lib/utils/resource-utils.js +0 -72
- package/dist/es5/lib/utils/resource-utils.js.map +0 -1
- package/dist/es5/lib/utils/response-utils.js +0 -255
- package/dist/es5/lib/utils/response-utils.js.map +0 -1
- package/dist/es5/null-loader.js +0 -169
- package/dist/es5/null-loader.js.map +0 -1
- package/dist/es5/workers/null-worker.js +0 -8
- package/dist/es5/workers/null-worker.js.map +0 -1
- package/dist/esm/bundle.js +0 -5
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/core-addons/README.md +0 -1
- package/dist/esm/core-addons/write-file-browser.js +0 -2
- package/dist/esm/core-addons/write-file-browser.js.map +0 -1
- package/dist/esm/index.js +0 -29
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/iterators/batch-iterators/timed-batch-iterator.js +0 -19
- package/dist/esm/iterators/batch-iterators/timed-batch-iterator.js.map +0 -1
- package/dist/esm/iterators/make-iterator/make-array-buffer-iterator.js +0 -18
- package/dist/esm/iterators/make-iterator/make-array-buffer-iterator.js.map +0 -1
- package/dist/esm/iterators/make-iterator/make-blob-iterator.js +0 -13
- package/dist/esm/iterators/make-iterator/make-blob-iterator.js.map +0 -1
- package/dist/esm/iterators/make-iterator/make-iterator.js +0 -30
- package/dist/esm/iterators/make-iterator/make-iterator.js.map +0 -1
- package/dist/esm/iterators/make-iterator/make-stream-iterator.js +0 -39
- package/dist/esm/iterators/make-iterator/make-stream-iterator.js.map +0 -1
- package/dist/esm/iterators/make-iterator/make-string-iterator.js +0 -14
- package/dist/esm/iterators/make-iterator/make-string-iterator.js.map +0 -1
- package/dist/esm/iterators/make-stream/make-dom-stream.js +0 -34
- package/dist/esm/iterators/make-stream/make-dom-stream.js.map +0 -1
- package/dist/esm/iterators/make-stream/make-node-stream.js +0 -74
- package/dist/esm/iterators/make-stream/make-node-stream.js.map +0 -1
- package/dist/esm/javascript-utils/is-type.js +0 -21
- package/dist/esm/javascript-utils/is-type.js.map +0 -1
- package/dist/esm/lib/api/encode.js +0 -91
- package/dist/esm/lib/api/encode.js.map +0 -1
- package/dist/esm/lib/api/load-in-batches.js +0 -30
- package/dist/esm/lib/api/load-in-batches.js.map +0 -1
- package/dist/esm/lib/api/load.js +0 -25
- package/dist/esm/lib/api/load.js.map +0 -1
- package/dist/esm/lib/api/parse-in-batches.js +0 -99
- package/dist/esm/lib/api/parse-in-batches.js.map +0 -1
- package/dist/esm/lib/api/parse-sync.js +0 -57
- package/dist/esm/lib/api/parse-sync.js.map +0 -1
- package/dist/esm/lib/api/parse.js +0 -64
- package/dist/esm/lib/api/parse.js.map +0 -1
- package/dist/esm/lib/api/register-loaders.js +0 -29
- package/dist/esm/lib/api/register-loaders.js.map +0 -1
- package/dist/esm/lib/api/save.js +0 -11
- package/dist/esm/lib/api/save.js.map +0 -1
- package/dist/esm/lib/api/select-loader.js +0 -228
- package/dist/esm/lib/api/select-loader.js.map +0 -1
- package/dist/esm/lib/api/set-loader-options.js +0 -5
- package/dist/esm/lib/api/set-loader-options.js.map +0 -1
- package/dist/esm/lib/common.js +0 -2
- package/dist/esm/lib/common.js.map +0 -1
- package/dist/esm/lib/fetch/fetch-error-message.js +0 -21
- package/dist/esm/lib/fetch/fetch-error-message.js.map +0 -1
- package/dist/esm/lib/fetch/fetch-file.js +0 -17
- package/dist/esm/lib/fetch/fetch-file.js.map +0 -1
- package/dist/esm/lib/fetch/read-array-buffer.js +0 -29
- package/dist/esm/lib/fetch/read-array-buffer.js.map +0 -1
- package/dist/esm/lib/fetch/read-file.js +0 -17
- package/dist/esm/lib/fetch/read-file.js.map +0 -1
- package/dist/esm/lib/fetch/write-file.js +0 -25
- package/dist/esm/lib/fetch/write-file.js.map +0 -1
- package/dist/esm/lib/filesystems/browser-filesystem.js +0 -115
- package/dist/esm/lib/filesystems/browser-filesystem.js.map +0 -1
- package/dist/esm/lib/filesystems/filesystem.js +0 -2
- package/dist/esm/lib/filesystems/filesystem.js.map +0 -1
- package/dist/esm/lib/filesystems/read-array-buffer.js +0 -9
- package/dist/esm/lib/filesystems/read-array-buffer.js.map +0 -1
- package/dist/esm/lib/init.js +0 -6
- package/dist/esm/lib/init.js.map +0 -1
- package/dist/esm/lib/loader-utils/check-errors.js +0 -26
- package/dist/esm/lib/loader-utils/check-errors.js.map +0 -1
- package/dist/esm/lib/loader-utils/get-data.js +0 -129
- package/dist/esm/lib/loader-utils/get-data.js.map +0 -1
- package/dist/esm/lib/loader-utils/loader-context.js +0 -36
- package/dist/esm/lib/loader-utils/loader-context.js.map +0 -1
- package/dist/esm/lib/loader-utils/loggers.js +0 -48
- package/dist/esm/lib/loader-utils/loggers.js.map +0 -1
- package/dist/esm/lib/loader-utils/normalize-loader.js +0 -43
- package/dist/esm/lib/loader-utils/normalize-loader.js.map +0 -1
- package/dist/esm/lib/loader-utils/option-defaults.js +0 -37
- package/dist/esm/lib/loader-utils/option-defaults.js.map +0 -1
- package/dist/esm/lib/loader-utils/option-utils.js +0 -140
- package/dist/esm/lib/loader-utils/option-utils.js.map +0 -1
- package/dist/esm/lib/progress/fetch-progress.js +0 -61
- package/dist/esm/lib/progress/fetch-progress.js.map +0 -1
- package/dist/esm/lib/utils/mime-type-utils.js +0 -21
- package/dist/esm/lib/utils/mime-type-utils.js.map +0 -1
- package/dist/esm/lib/utils/resource-utils.js +0 -60
- package/dist/esm/lib/utils/resource-utils.js.map +0 -1
- package/dist/esm/lib/utils/response-utils.js +0 -115
- package/dist/esm/lib/utils/response-utils.js.map +0 -1
- package/dist/esm/null-loader.js +0 -34
- package/dist/esm/null-loader.js.map +0 -1
- package/dist/esm/workers/null-worker.js +0 -4
- package/dist/esm/workers/null-worker.js.map +0 -1
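Judging from the file list, 4.0.0-alpha.5 stops shipping the separate `dist/es5` and `dist/esm` trees of 3.1.3 and publishes a single `dist` tree plus per-file source maps. A minimal sketch of the likely impact on consumers (assumption: top-level imports continue to resolve through `package.json`; the deep-import paths are shown purely for illustration):

```js
// Top-level imports should be unaffected by the dist/ reshuffle.
import {load, parse, fetchFile, registerLoaders} from '@loaders.gl/core';

// Only code that deep-imported into the removed build trees would need updating, e.g.
//   '@loaders.gl/core/dist/esm/lib/api/load'  ->  '@loaders.gl/core/dist/lib/api/load'
```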
package/dist/lib/api/encode.js
CHANGED
@@ -1,111 +1,91 @@
(removed lines whose content was not captured in the source diff view are omitted below)
+import { concatenateArrayBuffers, resolvePath } from '@loaders.gl/loader-utils';
+import { isBrowser } from '@loaders.gl/loader-utils';
+import { writeFile } from '../fetch/write-file';
+import { fetchFile } from '../fetch/fetch-file';
+export async function encode(data, writer, options) {
+  if (writer.encode) {
+    return await writer.encode(data, options);
+  }
+
+  if (writer.encodeSync) {
+    return writer.encodeSync(data, options);
+  }
+
+  if (writer.encodeText) {
+    return new TextEncoder().encode(await writer.encodeText(data, options));
+  }
+
+  if (writer.encodeInBatches) {
+    const batches = encodeInBatches(data, writer, options);
+    const chunks = [];
+
+    for await (const batch of batches) {
+      chunks.push(batch);
     }
-}
-// @ts-ignore
-return (0, loader_utils_1.concatenateArrayBuffers)(...chunks);
-}
-if (!loader_utils_2.isBrowser && writer.encodeURLtoURL) {
-// TODO - how to generate filenames with correct extensions?
-const tmpInputFilename = getTemporaryFilename('input');
-await (0, write_file_1.writeFile)(tmpInputFilename, data);
-const tmpOutputFilename = getTemporaryFilename('output');
-const outputFilename = await encodeURLtoURL(tmpInputFilename, tmpOutputFilename, writer, options);
-const response = await (0, fetch_file_1.fetchFile)(outputFilename);
-return response.arrayBuffer();
-}
-throw new Error('Writer could not encode data');
+
+    return concatenateArrayBuffers(...chunks);
+  }
+
+  if (!isBrowser && writer.encodeURLtoURL) {
+    const tmpInputFilename = getTemporaryFilename('input');
+    await writeFile(tmpInputFilename, data);
+    const tmpOutputFilename = getTemporaryFilename('output');
+    const outputFilename = await encodeURLtoURL(tmpInputFilename, tmpOutputFilename, writer, options);
+    const response = await fetchFile(outputFilename);
+    return response.arrayBuffer();
+  }
+
+  throw new Error('Writer could not encode data');
 }
-return writer.encodeSync(data, options);
-}
-throw new Error('Writer could not synchronously encode data');
+export function encodeSync(data, writer, options) {
+  if (writer.encodeSync) {
+    return writer.encodeSync(data, options);
+  }
+
+  throw new Error('Writer could not synchronously encode data');
 }
-if (writer.text && (writer.encode || writer.encodeInBatches)) {
-const arrayBuffer = await encode(data, writer, options);
-return new TextDecoder().decode(arrayBuffer);
-}
-throw new Error('Writer could not encode data as text');
+export async function encodeText(data, writer, options) {
+  if (writer.text && writer.encodeText) {
+    return await writer.encodeText(data, options);
+  }
+
+  if (writer.text && (writer.encode || writer.encodeInBatches)) {
+    const arrayBuffer = await encode(data, writer, options);
+    return new TextDecoder().decode(arrayBuffer);
+  }
+
+  throw new Error('Writer could not encode data as text');
 }
-return writer.encodeInBatches(dataIterator, options);
-}
-// TODO -fall back to atomic encode?
-throw new Error('Writer could not encode data in batches');
+export function encodeInBatches(data, writer, options) {
+  if (writer.encodeInBatches) {
+    const dataIterator = getIterator(data);
+    return writer.encodeInBatches(dataIterator, options);
+  }
+
+  throw new Error('Writer could not encode data in batches');
 }
-}
-const outputFilename = await writer.encodeURLtoURL(inputUrl, outputUrl, options);
-return outputFilename;
+export async function encodeURLtoURL(inputUrl, outputUrl, writer, options) {
+  inputUrl = resolvePath(inputUrl);
+  outputUrl = resolvePath(outputUrl);
+
+  if (isBrowser || !writer.encodeURLtoURL) {
+    throw new Error();
+  }
+
+  const outputFilename = await writer.encodeURLtoURL(inputUrl, outputUrl, options);
+  return outputFilename;
 }
-/**
- * @todo TODO - this is an unacceptable hack!!!
- */
+
 function getIterator(data) {
+  const dataIterator = [{
+    table: data,
+    start: 0,
+    end: data.length
+  }];
+  return dataIterator;
 }
- * @todo Move to utils
- */
+
 function getTemporaryFilename(filename) {
+  return "/tmp/".concat(filename);
 }
+//# sourceMappingURL=encode.js.map
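For orientation, a minimal usage sketch of the `encode` API whose new implementation appears above; `writer`, `data` and `options` are placeholders for any @loaders.gl Writer object and its writer-specific options:

```js
import {encode, encodeText} from '@loaders.gl/core';

// Dispatches to writer.encode / encodeSync / encodeText / encodeInBatches / encodeURLtoURL
// (in the order shown in the diff) and resolves to an ArrayBuffer.
async function saveBinary(data, writer, options) {
  return await encode(data, writer, options);
}

// Text output; falls back to encode() + TextDecoder when the writer is text-capable
// but exposes no encodeText() method.
async function saveText(data, writer, options) {
  return await encodeText(data, writer, options);
}
```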
package/dist/lib/api/encode.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/lib/api/encode.ts"],"names":["concatenateArrayBuffers","resolvePath","isBrowser","writeFile","fetchFile","encode","data","writer","options","encodeSync","encodeText","TextEncoder","encodeInBatches","batches","chunks","batch","push","encodeURLtoURL","tmpInputFilename","getTemporaryFilename","tmpOutputFilename","outputFilename","response","arrayBuffer","Error","text","TextDecoder","decode","dataIterator","getIterator","inputUrl","outputUrl","table","start","end","length","filename"],"mappings":"AACA,SAAQA,uBAAR,EAAiCC,WAAjC,QAAmD,0BAAnD;AACA,SAAQC,SAAR,QAAwB,0BAAxB;AACA,SAAQC,SAAR,QAAwB,qBAAxB;AACA,SAAQC,SAAR,QAAwB,qBAAxB;AAKA,OAAO,eAAeC,MAAf,CACLC,IADK,EAELC,MAFK,EAGLC,OAHK,EAIiB;AAEtB,MAAID,MAAM,CAACF,MAAX,EAAmB;AACjB,WAAO,MAAME,MAAM,CAACF,MAAP,CAAcC,IAAd,EAAoBE,OAApB,CAAb;AACD;;AAED,MAAID,MAAM,CAACE,UAAX,EAAuB;AACrB,WAAOF,MAAM,CAACE,UAAP,CAAkBH,IAAlB,EAAwBE,OAAxB,CAAP;AACD;;AAED,MAAID,MAAM,CAACG,UAAX,EAAuB;AACrB,WAAO,IAAIC,WAAJ,GAAkBN,MAAlB,CAAyB,MAAME,MAAM,CAACG,UAAP,CAAkBJ,IAAlB,EAAwBE,OAAxB,CAA/B,CAAP;AACD;;AAED,MAAID,MAAM,CAACK,eAAX,EAA4B;AAG1B,UAAMC,OAAO,GAAGD,eAAe,CAACN,IAAD,EAAOC,MAAP,EAAeC,OAAf,CAA/B;AAGA,UAAMM,MAAa,GAAG,EAAtB;;AACA,eAAW,MAAMC,KAAjB,IAA0BF,OAA1B,EAAmC;AACjCC,MAAAA,MAAM,CAACE,IAAP,CAAYD,KAAZ;AACD;;AAED,WAAOf,uBAAuB,CAAC,GAAGc,MAAJ,CAA9B;AACD;;AAED,MAAI,CAACZ,SAAD,IAAcK,MAAM,CAACU,cAAzB,EAAyC;AAEvC,UAAMC,gBAAgB,GAAGC,oBAAoB,CAAC,OAAD,CAA7C;AACA,UAAMhB,SAAS,CAACe,gBAAD,EAAmBZ,IAAnB,CAAf;AAEA,UAAMc,iBAAiB,GAAGD,oBAAoB,CAAC,QAAD,CAA9C;AAEA,UAAME,cAAc,GAAG,MAAMJ,cAAc,CACzCC,gBADyC,EAEzCE,iBAFyC,EAGzCb,MAHyC,EAIzCC,OAJyC,CAA3C;AAOA,UAAMc,QAAQ,GAAG,MAAMlB,SAAS,CAACiB,cAAD,CAAhC;AACA,WAAOC,QAAQ,CAACC,WAAT,EAAP;AACD;;AAED,QAAM,IAAIC,KAAJ,CAAU,8BAAV,CAAN;AACD;AAKD,OAAO,SAASf,UAAT,CAAoBH,IAApB,EAA+BC,MAA/B,EAA+CC,OAA/C,EAAqF;AAC1F,MAAID,MAAM,CAACE,UAAX,EAAuB;AACrB,WAAOF,MAAM,CAACE,UAAP,CAAkBH,IAAlB,EAAwBE,OAAxB,CAAP;AACD;;AACD,QAAM,IAAIgB,KAAJ,CAAU,4CAAV,CAAN;AACD;AAQD,OAAO,eAAed,UAAf,CACLJ,IADK,EAELC,MAFK,EAGLC,OAHK,EAIY;AACjB,MAAID,MAAM,CAACkB,IAAP,IAAelB,MAAM,CAACG,UAA1B,EAAsC;AACpC,WAAO,MAAMH,MAAM,CAACG,UAAP,CAAkBJ,IAAlB,EAAwBE,OAAxB,CAAb;AACD;;AAED,MAAID,MAAM,CAACkB,IAAP,KAAgBlB,MAAM,CAACF,MAAP,IAAiBE,MAAM,CAACK,eAAxC,CAAJ,EAA8D;AAC5D,UAAMW,WAAW,GAAG,MAAMlB,MAAM,CAACC,IAAD,EAAOC,MAAP,EAAeC,OAAf,CAAhC;AACA,WAAO,IAAIkB,WAAJ,GAAkBC,MAAlB,CAAyBJ,WAAzB,CAAP;AACD;;AAED,QAAM,IAAIC,KAAJ,CAAU,sCAAV,CAAN;AACD;AAKD,OAAO,SAASZ,eAAT,CACLN,IADK,EAELC,MAFK,EAGLC,OAHK,EAIuB;AAC5B,MAAID,MAAM,CAACK,eAAX,EAA4B;AAC1B,UAAMgB,YAAY,GAAGC,WAAW,CAACvB,IAAD,CAAhC;AACA,WAAOC,MAAM,CAACK,eAAP,CAAuBgB,YAAvB,EAAqCpB,OAArC,CAAP;AACD;;AAED,QAAM,IAAIgB,KAAJ,CAAU,yCAAV,CAAN;AACD;AAMD,OAAO,eAAeP,cAAf,CACLa,QADK,EAELC,SAFK,EAGLxB,MAHK,EAILC,OAJK,EAKY;AACjBsB,EAAAA,QAAQ,GAAG7B,WAAW,CAAC6B,QAAD,CAAtB;AACAC,EAAAA,SAAS,GAAG9B,WAAW,CAAC8B,SAAD,CAAvB;;AACA,MAAI7B,SAAS,IAAI,CAACK,MAAM,CAACU,cAAzB,EAAyC;AACvC,UAAM,IAAIO,KAAJ,EAAN;AACD;;AACD,QAAMH,cAAc,GAAG,MAAMd,MAAM,CAACU,cAAP,CAAsBa,QAAtB,EAAgCC,SAAhC,EAA2CvB,OAA3C,CAA7B;AACA,SAAOa,cAAP;AACD;;AAKD,SAASQ,WAAT,CAAqBvB,IAArB,EAA2B;AACzB,QAAMsB,YAAY,GAAG,CAAC;AAACI,IAAAA,KAAK,EAAE1B,IAAR;AAAc2B,IAAAA,KAAK,EAAE,CAArB;AAAwBC,IAAAA,GAAG,EAAE5B,IAAI,CAAC6B;AAAlC,GAAD,CAArB;AACA,SAAOP,YAAP;AACD;;AAKD,SAAST,oBAAT,CAA8BiB,QAA9B,EAAwD;AACtD,wBAAeA,QAAf;AACD","sourcesContent":["import type {Writer, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {concatenateArrayBuffers, resolvePath} from '@loaders.gl/loader-utils';\nimport {isBrowser} from '@loaders.gl/loader-utils';\nimport {writeFile} from '../fetch/write-file';\nimport 
{fetchFile} from '../fetch/fetch-file';\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport async function encode(\n data: any,\n writer: Writer,\n options?: LoaderOptions\n): Promise<ArrayBuffer> {\n // TODO Merge default writer options with options argument like it is done in load module.\n if (writer.encode) {\n return await writer.encode(data, options);\n }\n\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n\n if (writer.encodeText) {\n return new TextEncoder().encode(await writer.encodeText(data, options));\n }\n\n if (writer.encodeInBatches) {\n // Create an iterator representing the data\n // TODO - Assumes this is a table\n const batches = encodeInBatches(data, writer, options);\n\n // Concatenate the output\n const chunks: any[] = [];\n for await (const batch of batches) {\n chunks.push(batch);\n }\n // @ts-ignore\n return concatenateArrayBuffers(...chunks);\n }\n\n if (!isBrowser && writer.encodeURLtoURL) {\n // TODO - how to generate filenames with correct extensions?\n const tmpInputFilename = getTemporaryFilename('input');\n await writeFile(tmpInputFilename, data);\n\n const tmpOutputFilename = getTemporaryFilename('output');\n\n const outputFilename = await encodeURLtoURL(\n tmpInputFilename,\n tmpOutputFilename,\n writer,\n options\n );\n\n const response = await fetchFile(outputFilename);\n return response.arrayBuffer();\n }\n\n throw new Error('Writer could not encode data');\n}\n\n/**\n * Encode loaded data into a binary ArrayBuffer using the specified Writer.\n */\nexport function encodeSync(data: any, writer: Writer, options?: LoaderOptions): ArrayBuffer {\n if (writer.encodeSync) {\n return writer.encodeSync(data, options);\n }\n throw new Error('Writer could not synchronously encode data');\n}\n\n/**\n * Encode loaded data to text using the specified Writer\n * @note This is a convenience function not intended for production use on large input data.\n * It is not optimized for performance. Data maybe converted from text to binary and back.\n * @throws if the writer does not generate text output\n */\nexport async function encodeText(\n data: any,\n writer: Writer,\n options?: LoaderOptions\n): Promise<string> {\n if (writer.text && writer.encodeText) {\n return await writer.encodeText(data, options);\n }\n\n if (writer.text && (writer.encode || writer.encodeInBatches)) {\n const arrayBuffer = await encode(data, writer, options);\n return new TextDecoder().decode(arrayBuffer);\n }\n\n throw new Error('Writer could not encode data as text');\n}\n\n/**\n * Encode loaded data into a sequence (iterator) of binary ArrayBuffers using the specified Writer.\n */\nexport function encodeInBatches(\n data: any,\n writer: Writer,\n options?: LoaderOptions\n): AsyncIterable<ArrayBuffer> {\n if (writer.encodeInBatches) {\n const dataIterator = getIterator(data);\n return writer.encodeInBatches(dataIterator, options);\n }\n // TODO -fall back to atomic encode?\n throw new Error('Writer could not encode data in batches');\n}\n\n/**\n * Encode data stored in a file (on disk) to another file.\n * @note Node.js only. 
This function enables using command-line converters as \"writers\".\n */\nexport async function encodeURLtoURL(\n inputUrl,\n outputUrl,\n writer: Writer,\n options\n): Promise<string> {\n inputUrl = resolvePath(inputUrl);\n outputUrl = resolvePath(outputUrl);\n if (isBrowser || !writer.encodeURLtoURL) {\n throw new Error();\n }\n const outputFilename = await writer.encodeURLtoURL(inputUrl, outputUrl, options);\n return outputFilename;\n}\n\n/**\n * @todo TODO - this is an unacceptable hack!!!\n */\nfunction getIterator(data) {\n const dataIterator = [{table: data, start: 0, end: data.length}];\n return dataIterator;\n}\n\n/**\n * @todo Move to utils\n */\nfunction getTemporaryFilename(filename: string): string {\n return `/tmp/${filename}`;\n}\n"],"file":"encode.js"}
package/dist/lib/api/load-in-batches.js
CHANGED
@@ -1,33 +1,30 @@
(removed lines whose content was not captured in the source diff view are omitted below)
-}
-// Multiple URLs / files
-const promises = files.map((file) => loadOneFileInBatches(file, loaders, options, fetch));
-// No point in waiting here for all responses before starting to stream individual streams?
-return promises;
+import { isLoaderObject } from '../loader-utils/normalize-loader';
+import { getFetchFunction } from '../loader-utils/option-utils';
+import { parseInBatches } from './parse-in-batches';
+export function loadInBatches(files, loaders, options, context) {
+  if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {
+    context = undefined;
+    options = loaders;
+    loaders = null;
+  }
+
+  const fetch = getFetchFunction(options || {});
+
+  if (!Array.isArray(files)) {
+    return loadOneFileInBatches(files, loaders, options, fetch);
+  }
+
+  const promises = files.map(file => loadOneFileInBatches(file, loaders, options, fetch));
+  return promises;
 }
+
 async function loadOneFileInBatches(file, loaders, options, fetch) {
+  if (typeof file === 'string') {
+    const url = file;
+    const response = await fetch(url);
+    return await parseInBatches(response, loaders, options);
+  }
+
+  return await parseInBatches(file, loaders, options);
 }
+//# sourceMappingURL=load-in-batches.js.map
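A short usage sketch of the rewritten `loadInBatches`, which fetches string URLs and forwards everything else straight to `parseInBatches`; `CSVLoader` and the URL are illustrative choices, not part of this diff:

```js
import {loadInBatches} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

// Stream a single URL in batches; for an array of URLs, loadInBatches returns
// one promise per input instead of a single iterator.
const batches = await loadInBatches('data.csv', CSVLoader);
for await (const batch of batches) {
  console.log(batch.length); // rows in this batch
}
```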
package/dist/lib/api/load-in-batches.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/lib/api/load-in-batches.ts"],"names":["isLoaderObject","getFetchFunction","parseInBatches","loadInBatches","files","loaders","options","context","Array","isArray","undefined","fetch","loadOneFileInBatches","promises","map","file","url","response"],"mappings":"AACA,SAAQA,cAAR,QAA6B,kCAA7B;AACA,SAAQC,gBAAR,QAA+B,8BAA/B;AAEA,SAAQC,cAAR,QAA6B,oBAA7B;AAyBA,OAAO,SAASC,aAAT,CAAuBC,KAAvB,EAA8BC,OAA9B,EAAuCC,OAAvC,EAAgDC,OAAhD,EAAyD;AAE9D,MAAI,CAACC,KAAK,CAACC,OAAN,CAAcJ,OAAd,CAAD,IAA2B,CAACL,cAAc,CAACK,OAAD,CAA9C,EAAyD;AACvDE,IAAAA,OAAO,GAAGG,SAAV;AACAJ,IAAAA,OAAO,GAAGD,OAAV;AACAA,IAAAA,OAAO,GAAG,IAAV;AACD;;AAGD,QAAMM,KAAK,GAAGV,gBAAgB,CAACK,OAAO,IAAI,EAAZ,CAA9B;;AAGA,MAAI,CAACE,KAAK,CAACC,OAAN,CAAcL,KAAd,CAAL,EAA2B;AACzB,WAAOQ,oBAAoB,CAACR,KAAD,EAAQC,OAAR,EAAiBC,OAAjB,EAA0BK,KAA1B,CAA3B;AACD;;AAGD,QAAME,QAAQ,GAAGT,KAAK,CAACU,GAAN,CAAWC,IAAD,IAAUH,oBAAoB,CAACG,IAAD,EAAOV,OAAP,EAAgBC,OAAhB,EAAyBK,KAAzB,CAAxC,CAAjB;AAGA,SAAOE,QAAP;AACD;;AAED,eAAeD,oBAAf,CAAoCG,IAApC,EAA0CV,OAA1C,EAAmDC,OAAnD,EAA4DK,KAA5D,EAAmE;AACjE,MAAI,OAAOI,IAAP,KAAgB,QAApB,EAA8B;AAC5B,UAAMC,GAAG,GAAGD,IAAZ;AACA,UAAME,QAAQ,GAAG,MAAMN,KAAK,CAACK,GAAD,CAA5B;AACA,WAAO,MAAMd,cAAc,CAACe,QAAD,EAAWZ,OAAX,EAAoBC,OAApB,CAA3B;AACD;;AACD,SAAO,MAAMJ,cAAc,CAACa,IAAD,EAAOV,OAAP,EAAgBC,OAAhB,CAA3B;AACD","sourcesContent":["import type {LoaderWithParser, LoaderOptions, LoaderContext} from '@loaders.gl/loader-utils';\nimport {isLoaderObject} from '../loader-utils/normalize-loader';\nimport {getFetchFunction} from '../loader-utils/option-utils';\n\nimport {parseInBatches} from './parse-in-batches';\n\ntype FileType = string | File | Blob | Response | (string | File | Blob | Response)[] | FileList;\n\n/**\n * Parses `data` using a specified loader\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\nexport function loadInBatches(\n files: FileType,\n loaders?: LoaderWithParser | LoaderWithParser[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<any>>;\n\nexport function loadInBatches(\n files: FileType[] | FileList,\n loaders?: LoaderWithParser | LoaderWithParser[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<any>>;\n\nexport function loadInBatches(files, loaders, options, context) {\n // Signature: load(url, options)\n if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders;\n loaders = null;\n }\n\n // Select fetch function\n const fetch = getFetchFunction(options || {});\n\n // Single url/file\n if (!Array.isArray(files)) {\n return loadOneFileInBatches(files, loaders, options, fetch);\n }\n\n // Multiple URLs / files\n const promises = files.map((file) => loadOneFileInBatches(file, loaders, options, fetch));\n\n // No point in waiting here for all responses before starting to stream individual streams?\n return promises;\n}\n\nasync function loadOneFileInBatches(file, loaders, options, fetch) {\n if (typeof file === 'string') {\n const url = file;\n const response = await fetch(url);\n return await parseInBatches(response, loaders, options);\n }\n return await parseInBatches(file, loaders, options);\n}\n"],"file":"load-in-batches.js"}
package/dist/lib/api/load.js
CHANGED
@@ -1,41 +1,25 @@
(removed lines whose content was not captured in the source diff view are omitted below)
-}
-// Select fetch function
-const fetch = (0, option_utils_1.getFetchFunction)(options);
-// at this point, `url` could be already loaded binary data
-let data = url;
-// url is a string, fetch the url
-if (typeof url === 'string') {
-data = await fetch(url);
-// URL is Blob or File, fetchFile handles it (alt: we could generate ObjectURL here)
-}
-if ((0, is_type_1.isBlob)(url)) {
-// The fetch response object will contain blob.name
-data = await fetch(url);
-}
-// Data is loaded (at least we have a `Response` object) so time to hand over to `parse`
-return await (0, parse_1.parse)(data, loaders, options);
+import { isBlob } from '../../javascript-utils/is-type';
+import { isLoaderObject } from '../loader-utils/normalize-loader';
+import { getFetchFunction } from '../loader-utils/option-utils';
+import { parse } from './parse';
+export async function load(url, loaders, options, context) {
+  if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {
+    context = undefined;
+    options = loaders;
+    loaders = undefined;
+  }
+
+  const fetch = getFetchFunction(options);
+  let data = url;
+
+  if (typeof url === 'string') {
+    data = await fetch(url);
+  }
+
+  if (isBlob(url)) {
+    data = await fetch(url);
+  }
+
+  return await parse(data, loaders, options);
 }
+//# sourceMappingURL=load.js.map
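A usage sketch covering both `load` signatures handled by the argument shuffle at the top of the function; `CSVLoader`, the URL and the `worker` option are illustrative:

```js
import {load, registerLoaders} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

// Explicit loader argument.
const data = await load('data.csv', CSVLoader, {worker: false});

// Short signature load(url, options): loaders fall back to the registered list.
registerLoaders([CSVLoader]);
const sameData = await load('data.csv', {worker: false});
```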
package/dist/lib/api/load.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/lib/api/load.ts"],"names":["isBlob","isLoaderObject","getFetchFunction","parse","load","url","loaders","options","context","Array","isArray","undefined","fetch","data"],"mappings":"AACA,SAAQA,MAAR,QAAqB,gCAArB;AACA,SAAQC,cAAR,QAA6B,kCAA7B;AACA,SAAQC,gBAAR,QAA+B,8BAA/B;AAEA,SAAQC,KAAR,QAAoB,SAApB;AAYA,OAAO,eAAeC,IAAf,CACLC,GADK,EAELC,OAFK,EAGLC,OAHK,EAILC,OAJK,EAKS;AAEd,MAAI,CAACC,KAAK,CAACC,OAAN,CAAcJ,OAAd,CAAD,IAA2B,CAACL,cAAc,CAACK,OAAD,CAA9C,EAAyD;AACvDE,IAAAA,OAAO,GAAGG,SAAV;AACAJ,IAAAA,OAAO,GAAGD,OAAV;AACAA,IAAAA,OAAO,GAAGK,SAAV;AACD;;AAGD,QAAMC,KAAK,GAAGV,gBAAgB,CAACK,OAAD,CAA9B;AAGA,MAAIM,IAAI,GAAGR,GAAX;;AAEA,MAAI,OAAOA,GAAP,KAAe,QAAnB,EAA6B;AAC3BQ,IAAAA,IAAI,GAAG,MAAMD,KAAK,CAACP,GAAD,CAAlB;AAED;;AAED,MAAIL,MAAM,CAACK,GAAD,CAAV,EAAiB;AAEfQ,IAAAA,IAAI,GAAG,MAAMD,KAAK,CAACP,GAAD,CAAlB;AACD;;AAGD,SAAO,MAAMF,KAAK,CAACU,IAAD,EAAOP,OAAP,EAAgBC,OAAhB,CAAlB;AACD","sourcesContent":["import type {DataType, Loader, LoaderContext, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {isBlob} from '../../javascript-utils/is-type';\nimport {isLoaderObject} from '../loader-utils/normalize-loader';\nimport {getFetchFunction} from '../loader-utils/option-utils';\n\nimport {parse} from './parse';\n\n/**\n * Parses `data` using a specified loader\n * Note: Load does duplicate a lot of parse.\n * it can also call fetchFile on string urls, which `parse` won't do.\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\n// implementation signature\nexport async function load(\n url: string | DataType,\n loaders?: Loader | Loader[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<any> {\n // Signature: load(url, options)\n if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders as LoaderOptions;\n loaders = undefined;\n }\n\n // Select fetch function\n const fetch = getFetchFunction(options);\n\n // at this point, `url` could be already loaded binary data\n let data = url;\n // url is a string, fetch the url\n if (typeof url === 'string') {\n data = await fetch(url);\n // URL is Blob or File, fetchFile handles it (alt: we could generate ObjectURL here)\n }\n\n if (isBlob(url)) {\n // The fetch response object will contain blob.name\n data = await fetch(url);\n }\n\n // Data is loaded (at least we have a `Response` object) so time to hand over to `parse`\n return await parse(data, loaders, options);\n}\n"],"file":"load.js"}
package/dist/lib/api/parse-in-batches.js
CHANGED
@@ -1,120 +1,99 @@
(removed lines whose content was not captured in the source diff view are omitted below)
-// @ts-ignore
-return null;
-}
-// Normalize options
-// @ts-ignore
-options = (0, option_utils_1.normalizeOptions)(options, loader, loaders, url);
-// @ts-ignore
-context = (0, loader_context_1.getLoaderContext)(
-// @ts-ignore
-{ url, parseInBatches, parse: parse_1.parse, loaders: loaders }, options, context);
-return await parseWithLoaderInBatches(loader, data, options, context);
+import { assert, concatenateArrayBuffersAsync } from '@loaders.gl/loader-utils';
+import { isLoaderObject } from '../loader-utils/normalize-loader';
+import { normalizeOptions } from '../loader-utils/option-utils';
+import { getLoaderContext } from '../loader-utils/loader-context';
+import { getAsyncIterableFromData } from '../loader-utils/get-data';
+import { getResourceUrlAndType } from '../utils/resource-utils';
+import { selectLoader } from './select-loader';
+import { parse } from './parse';
+export async function parseInBatches(data, loaders, options, context) {
+  assert(!context || typeof context === 'object');
+
+  if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {
+    context = undefined;
+    options = loaders;
+    loaders = undefined;
+  }
+
+  data = await data;
+  options = options || {};
+  const {
+    url
+  } = getResourceUrlAndType(data);
+  const loader = await selectLoader(data, loaders, options);
+
+  if (!loader) {
+    return null;
+  }
+
+  options = normalizeOptions(options, loader, loaders, url);
+  context = getLoaderContext({
+    url,
+    parseInBatches,
+    parse,
+    loaders: loaders
+  }, options, context);
+  return await parseWithLoaderInBatches(loader, data, options, context);
 }
-/**
- * Loader has been selected and context has been prepared, see if we need to emit a metadata batch
- */
+
 async function parseWithLoaderInBatches(loader, data, options, context) {
+  const outputIterator = await parseToOutputIterator(loader, data, options, context);
+
+  if (!options.metadata) {
+    return outputIterator;
+  }
+
+  const metadataBatch = {
+    batchType: 'metadata',
+    metadata: {
+      _loader: loader,
+      _context: context
+    },
+    data: [],
+    bytesUsed: 0
+  };
+
+  async function* makeMetadataBatchIterator(iterator) {
+    yield metadataBatch;
+    yield* iterator;
+  }
+
+  return makeMetadataBatchIterator(outputIterator);
 }
- * Prep work is done, now it is time to start parsing into an output operator
- * The approach depends on which parse function the loader exposes
- * `parseInBatches` (preferred), `parse` (fallback)
- */
+
 async function parseToOutputIterator(loader, data, options, context) {
- };
- yield batch;
-}
-return parseChunkInBatches();
+  const inputIterator = await getAsyncIterableFromData(data, options);
+  const transformedIterator = await applyInputTransforms(inputIterator, (options === null || options === void 0 ? void 0 : options.transforms) || []);
+
+  if (loader.parseInBatches) {
+    return loader.parseInBatches(transformedIterator, options, context);
+  }
+
+  async function* parseChunkInBatches() {
+    const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);
+    const parsedData = await parse(arrayBuffer, loader, { ...options,
+      mimeType: loader.mimeTypes[0]
+    }, context);
+    const batch = {
+      mimeType: loader.mimeTypes[0],
+      shape: Array.isArray(parsedData) ? 'row-table' : 'unknown',
+      batchType: 'data',
+      data: parsedData,
+      length: Array.isArray(parsedData) ? parsedData.length : 1
+    };
+    yield batch;
+  }
+
+  return parseChunkInBatches();
 }
- * Create an iterator chain with any transform iterators (crypto, decompression)
- * @param inputIterator
- * @param options
- */
+
 async function applyInputTransforms(inputIterator, transforms = []) {
+  let iteratorChain = inputIterator;
+
+  for await (const transformBatches of transforms) {
+    iteratorChain = transformBatches(iteratorChain);
+  }
+
+  return iteratorChain;
 }
+//# sourceMappingURL=parse-in-batches.js.map
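A sketch of consuming `parseInBatches` with `options.metadata`, which (per `parseWithLoaderInBatches` above) prepends a synthetic metadata batch ahead of the loader's own batches; `CSVLoader` and the URL are illustrative:

```js
import {fetchFile, parseInBatches} from '@loaders.gl/core';
import {CSVLoader} from '@loaders.gl/csv';

const response = await fetchFile('data.csv');
const batches = await parseInBatches(response, CSVLoader, {metadata: true});
for await (const batch of batches) {
  if (batch.batchType === 'metadata') {
    // First batch: {batchType: 'metadata', metadata: {_loader, _context}, data: [], bytesUsed: 0}
    continue;
  }
  // Remaining batches come from CSVLoader's parseInBatches (or the atomic-parse fallback).
  console.log(batch.length);
}
```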
package/dist/lib/api/parse-in-batches.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/lib/api/parse-in-batches.ts"],"names":["assert","concatenateArrayBuffersAsync","isLoaderObject","normalizeOptions","getLoaderContext","getAsyncIterableFromData","getResourceUrlAndType","selectLoader","parse","parseInBatches","data","loaders","options","context","Array","isArray","undefined","url","loader","parseWithLoaderInBatches","outputIterator","parseToOutputIterator","metadata","metadataBatch","batchType","_loader","_context","bytesUsed","makeMetadataBatchIterator","iterator","inputIterator","transformedIterator","applyInputTransforms","transforms","parseChunkInBatches","arrayBuffer","parsedData","mimeType","mimeTypes","batch","shape","length","iteratorChain","transformBatches"],"mappings":"AAQA,SAAQA,MAAR,EAAgBC,4BAAhB,QAAmD,0BAAnD;AACA,SAAQC,cAAR,QAA6B,kCAA7B;AACA,SAAQC,gBAAR,QAA+B,8BAA/B;AACA,SAAQC,gBAAR,QAA+B,gCAA/B;AACA,SAAQC,wBAAR,QAAuC,0BAAvC;AACA,SAAQC,qBAAR,QAAoC,yBAApC;AACA,SAAQC,YAAR,QAA2B,iBAA3B;AAGA,SAAQC,KAAR,QAAoB,SAApB;AASA,OAAO,eAAeC,cAAf,CACLC,IADK,EAELC,OAFK,EAGLC,OAHK,EAILC,OAJK,EAKwB;AAC7Bb,EAAAA,MAAM,CAAC,CAACa,OAAD,IAAY,OAAOA,OAAP,KAAmB,QAAhC,CAAN;;AAGA,MAAI,CAACC,KAAK,CAACC,OAAN,CAAcJ,OAAd,CAAD,IAA2B,CAACT,cAAc,CAACS,OAAD,CAA9C,EAAyD;AACvDE,IAAAA,OAAO,GAAGG,SAAV;AACAJ,IAAAA,OAAO,GAAGD,OAAV;AACAA,IAAAA,OAAO,GAAGK,SAAV;AACD;;AAEDN,EAAAA,IAAI,GAAG,MAAMA,IAAb;AACAE,EAAAA,OAAO,GAAGA,OAAO,IAAI,EAArB;AAGA,QAAM;AAACK,IAAAA;AAAD,MAAQX,qBAAqB,CAACI,IAAD,CAAnC;AAIA,QAAMQ,MAAM,GAAG,MAAMX,YAAY,CAACG,IAAD,EAAsBC,OAAtB,EAA2CC,OAA3C,CAAjC;;AAEA,MAAI,CAACM,MAAL,EAAa;AAEX,WAAO,IAAP;AACD;;AAIDN,EAAAA,OAAO,GAAGT,gBAAgB,CAACS,OAAD,EAAUM,MAAV,EAAkBP,OAAlB,EAA2BM,GAA3B,CAA1B;AAEAJ,EAAAA,OAAO,GAAGT,gBAAgB,CAExB;AAACa,IAAAA,GAAD;AAAMR,IAAAA,cAAN;AAAsBD,IAAAA,KAAtB;AAA6BG,IAAAA,OAAO,EAAEA;AAAtC,GAFwB,EAGxBC,OAHwB,EAIxBC,OAJwB,CAA1B;AAOA,SAAO,MAAMM,wBAAwB,CAACD,MAAD,EAA6BR,IAA7B,EAAmCE,OAAnC,EAA4CC,OAA5C,CAArC;AACD;;AAKD,eAAeM,wBAAf,CACED,MADF,EAEER,IAFF,EAGEE,OAHF,EAIEC,OAJF,EAK+B;AAC7B,QAAMO,cAAc,GAAG,MAAMC,qBAAqB,CAACH,MAAD,EAASR,IAAT,EAAeE,OAAf,EAAwBC,OAAxB,CAAlD;;AAGA,MAAI,CAACD,OAAO,CAACU,QAAb,EAAuB;AACrB,WAAOF,cAAP;AACD;;AAED,QAAMG,aAAa,GAAG;AACpBC,IAAAA,SAAS,EAAE,UADS;AAEpBF,IAAAA,QAAQ,EAAE;AACRG,MAAAA,OAAO,EAAEP,MADD;AAERQ,MAAAA,QAAQ,EAAEb;AAFF,KAFU;AAOpBH,IAAAA,IAAI,EAAE,EAPc;AAQpBiB,IAAAA,SAAS,EAAE;AARS,GAAtB;;AAWA,kBAAgBC,yBAAhB,CAA0CC,QAA1C,EAAoD;AAClD,UAAMN,aAAN;AACA,WAAOM,QAAP;AACD;;AAED,SAAOD,yBAAyB,CAACR,cAAD,CAAhC;AACD;;AAOD,eAAeC,qBAAf,CACEH,MADF,EAEER,IAFF,EAGEE,OAHF,EAIEC,OAJF,EAK+B;AAE7B,QAAMiB,aAAa,GAAG,MAAMzB,wBAAwB,CAACK,IAAD,EAAOE,OAAP,CAApD;AAGA,QAAMmB,mBAAmB,GAAG,MAAMC,oBAAoB,CAACF,aAAD,EAAgB,CAAAlB,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEqB,UAAT,KAAuB,EAAvC,CAAtD;;AAGA,MAAIf,MAAM,CAACT,cAAX,EAA2B;AACzB,WAAOS,MAAM,CAACT,cAAP,CAAsBsB,mBAAtB,EAA2CnB,OAA3C,EAAoDC,OAApD,CAAP;AACD;;AAGD,kBAAgBqB,mBAAhB,GAAsC;AACpC,UAAMC,WAAW,GAAG,MAAMlC,4BAA4B,CAAC8B,mBAAD,CAAtD;AAEA,UAAMK,UAAU,GAAG,MAAM5B,KAAK,CAC5B2B,WAD4B,EAE5BjB,MAF4B,EAI5B,EAAC,GAAGN,OAAJ;AAAayB,MAAAA,QAAQ,EAAEnB,MAAM,CAACoB,SAAP,CAAiB,CAAjB;AAAvB,KAJ4B,EAK5BzB,OAL4B,CAA9B;AASA,UAAM0B,KAAY,GAAG;AACnBF,MAAAA,QAAQ,EAAEnB,MAAM,CAACoB,SAAP,CAAiB,CAAjB,CADS;AAEnBE,MAAAA,KAAK,EAAE1B,KAAK,CAACC,OAAN,CAAcqB,UAAd,IAA4B,WAA5B,GAA0C,SAF9B;AAGnBZ,MAAAA,SAAS,EAAE,MAHQ;AAInBd,MAAAA,IAAI,EAAE0B,UAJa;AAKnBK,MAAAA,MAAM,EAAE3B,KAAK,CAACC,OAAN,CAAcqB,UAAd,IAA4BA,UAAU,CAACK,MAAvC,GAAgD;AALrC,KAArB;AAOA,UAAMF,KAAN;AACD;;AAED,SAAOL,mBAAmB,EAA1B;AACD;;AAWD,eAAeF,oBAAf,CACEF,aADF,EAEEG,UAA8B,GAAG,EAFnC,EAG+D;AAC7D,MAAIS,aAAa,GAAGZ,aAApB;;AACA,aAAW,MAAMa,gBAAjB,IAAqCV,UAArC,EAAiD;AAC/CS,IAAAA,aAAa
,GAAGC,gBAAgB,CAACD,aAAD,CAAhC;AACD;;AACD,SAAOA,aAAP;AACD","sourcesContent":["import type {Batch} from '@loaders.gl/schema';\nimport type {\n BatchableDataType,\n Loader,\n LoaderWithParser,\n LoaderContext,\n LoaderOptions\n} from '@loaders.gl/loader-utils';\nimport {assert, concatenateArrayBuffersAsync} from '@loaders.gl/loader-utils';\nimport {isLoaderObject} from '../loader-utils/normalize-loader';\nimport {normalizeOptions} from '../loader-utils/option-utils';\nimport {getLoaderContext} from '../loader-utils/loader-context';\nimport {getAsyncIterableFromData} from '../loader-utils/get-data';\nimport {getResourceUrlAndType} from '../utils/resource-utils';\nimport {selectLoader} from './select-loader';\n\n// Ensure `parse` is available in context if loader falls back to `parse`\nimport {parse} from './parse';\n\n/**\n * Parses `data` using a specified loader\n * @param data\n * @param loaders\n * @param options\n * @param context\n */\nexport async function parseInBatches(\n data: BatchableDataType,\n loaders?: Loader | Loader[] | LoaderOptions,\n options?: LoaderOptions,\n context?: LoaderContext\n): Promise<AsyncIterable<any>> {\n assert(!context || typeof context === 'object'); // parseInBatches no longer accepts final url\n\n // Signature: parseInBatches(data, options, url) - Uses registered loaders\n if (!Array.isArray(loaders) && !isLoaderObject(loaders)) {\n context = undefined; // context not supported in short signature\n options = loaders as LoaderOptions;\n loaders = undefined;\n }\n\n data = await data; // Resolve any promise\n options = options || {};\n\n // Extract a url for auto detection\n const {url} = getResourceUrlAndType(data);\n\n // Chooses a loader and normalizes it\n // Note - only uses URL and contentType for streams and iterator inputs\n const loader = await selectLoader(data as ArrayBuffer, loaders as Loader[], options);\n // Note: if options.nothrow was set, it is possible that no loader was found, if so just return null\n if (!loader) {\n // @ts-ignore\n return null;\n }\n\n // Normalize options\n // @ts-ignore\n options = normalizeOptions(options, loader, loaders, url);\n // @ts-ignore\n context = getLoaderContext(\n // @ts-ignore\n {url, parseInBatches, parse, loaders: loaders as Loader[]},\n options,\n context\n );\n\n return await parseWithLoaderInBatches(loader as LoaderWithParser, data, options, context);\n}\n\n/**\n * Loader has been selected and context has been prepared, see if we need to emit a metadata batch\n */\nasync function parseWithLoaderInBatches(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<any>> {\n const outputIterator = await parseToOutputIterator(loader, data, options, context);\n\n // Generate metadata batch if requested\n if (!options.metadata) {\n return outputIterator;\n }\n\n const metadataBatch = {\n batchType: 'metadata',\n metadata: {\n _loader: loader,\n _context: context\n },\n // Populate with some default fields to avoid crashing\n data: [],\n bytesUsed: 0\n };\n\n async function* makeMetadataBatchIterator(iterator) {\n yield metadataBatch;\n yield* iterator;\n }\n\n return makeMetadataBatchIterator(outputIterator);\n}\n\n/**\n * Prep work is done, now it is time to start parsing into an output operator\n * The approach depends on which parse function the loader exposes\n * `parseInBatches` (preferred), `parse` (fallback)\n */\nasync function parseToOutputIterator(\n loader: LoaderWithParser,\n data: BatchableDataType,\n options: 
LoaderOptions,\n context: LoaderContext\n): Promise<AsyncIterable<any>> {\n // Get an iterator from the input\n const inputIterator = await getAsyncIterableFromData(data, options);\n\n // Apply any iterator transforms (options.transforms)\n const transformedIterator = await applyInputTransforms(inputIterator, options?.transforms || []);\n\n // If loader supports parseInBatches, we are done\n if (loader.parseInBatches) {\n return loader.parseInBatches(transformedIterator, options, context);\n }\n\n // Fallback: load atomically using `parse` concatenating input iterator into single chunk\n async function* parseChunkInBatches() {\n const arrayBuffer = await concatenateArrayBuffersAsync(transformedIterator);\n // Call `parse` instead of `loader.parse` to ensure we can call workers etc.\n const parsedData = await parse(\n arrayBuffer,\n loader,\n // TODO - Hack: supply loaders MIME type to ensure we match it\n {...options, mimeType: loader.mimeTypes[0]},\n context\n );\n // yield a single batch, the output from loader.parse()\n // TODO - run through batch builder to apply options etc...\n const batch: Batch = {\n mimeType: loader.mimeTypes[0],\n shape: Array.isArray(parsedData) ? 'row-table' : 'unknown',\n batchType: 'data',\n data: parsedData,\n length: Array.isArray(parsedData) ? parsedData.length : 1\n };\n yield batch;\n }\n\n return parseChunkInBatches();\n}\n\ntype TransformBatches = (\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n) => AsyncIterable<ArrayBuffer>;\n\n/**\n * Create an iterator chain with any transform iterators (crypto, decompression)\n * @param inputIterator\n * @param options\n */\nasync function applyInputTransforms(\n inputIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n transforms: TransformBatches[] = []\n): Promise<AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>> {\n let iteratorChain = inputIterator;\n for await (const transformBatches of transforms) {\n iteratorChain = transformBatches(iteratorChain);\n }\n return iteratorChain;\n}\n"],"file":"parse-in-batches.js"}