@loaders.gl/core 3.4.11 → 3.4.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/dist.min.js +40 -112
  2. package/dist/es5/lib/init.js +1 -1
  3. package/dist/es5/null-loader.js +1 -1
  4. package/dist/esm/lib/init.js +1 -1
  5. package/dist/esm/null-loader.js +1 -1
  6. package/dist/lib/fetch/read-file.d.ts +1 -1
  7. package/dist/lib/fetch/read-file.d.ts.map +1 -1
  8. package/dist/null-worker.js +1 -1
  9. package/package.json +4 -6
  10. package/dist/bundle.js +0 -5
  11. package/dist/core-addons/write-file-browser.js +0 -60
  12. package/dist/index.js +0 -104
  13. package/dist/iterators/batch-iterators/timed-batch-iterator.js +0 -22
  14. package/dist/iterators/make-iterator/make-array-buffer-iterator.js +0 -27
  15. package/dist/iterators/make-iterator/make-blob-iterator.js +0 -21
  16. package/dist/iterators/make-iterator/make-iterator.js +0 -37
  17. package/dist/iterators/make-iterator/make-stream-iterator.js +0 -96
  18. package/dist/iterators/make-iterator/make-string-iterator.js +0 -24
  19. package/dist/iterators/make-stream/make-dom-stream.js +0 -46
  20. package/dist/iterators/make-stream/make-node-stream.js +0 -82
  21. package/dist/javascript-utils/is-type.js +0 -41
  22. package/dist/lib/api/encode.js +0 -120
  23. package/dist/lib/api/load-in-batches.js +0 -33
  24. package/dist/lib/api/load.js +0 -42
  25. package/dist/lib/api/loader-options.js +0 -7
  26. package/dist/lib/api/parse-in-batches.js +0 -117
  27. package/dist/lib/api/parse-sync.js +0 -59
  28. package/dist/lib/api/parse.js +0 -82
  29. package/dist/lib/api/register-loaders.js +0 -35
  30. package/dist/lib/api/save.js +0 -15
  31. package/dist/lib/api/select-loader.js +0 -253
  32. package/dist/lib/common.js +0 -2
  33. package/dist/lib/fetch/fetch-error-message.js +0 -25
  34. package/dist/lib/fetch/fetch-file.js +0 -27
  35. package/dist/lib/fetch/read-array-buffer.js +0 -41
  36. package/dist/lib/fetch/read-file.js +0 -29
  37. package/dist/lib/fetch/write-file.js +0 -22
  38. package/dist/lib/filesystems/browser-filesystem.js +0 -126
  39. package/dist/lib/filesystems/filesystem.js +0 -2
  40. package/dist/lib/filesystems/read-array-buffer.js +0 -29
  41. package/dist/lib/init.js +0 -16
  42. package/dist/lib/loader-utils/check-errors.js +0 -30
  43. package/dist/lib/loader-utils/get-data.js +0 -129
  44. package/dist/lib/loader-utils/get-fetch-function.js +0 -31
  45. package/dist/lib/loader-utils/loader-context.js +0 -59
  46. package/dist/lib/loader-utils/loggers.js +0 -41
  47. package/dist/lib/loader-utils/normalize-loader.js +0 -52
  48. package/dist/lib/loader-utils/option-defaults.js +0 -43
  49. package/dist/lib/loader-utils/option-utils.js +0 -160
  50. package/dist/lib/progress/fetch-progress.js +0 -59
  51. package/dist/lib/utils/log.js +0 -6
  52. package/dist/lib/utils/mime-type-utils.js +0 -42
  53. package/dist/lib/utils/resource-utils.js +0 -90
  54. package/dist/lib/utils/response-utils.js +0 -115
  55. package/dist/lib/utils/url-utils.js +0 -14
  56. package/dist/null-loader.js +0 -56
  57. package/dist/workers/null-worker.js +0 -5
@@ -1,37 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.makeIterator = void 0;
4
- const make_string_iterator_1 = require("./make-string-iterator");
5
- const make_array_buffer_iterator_1 = require("./make-array-buffer-iterator");
6
- const make_blob_iterator_1 = require("./make-blob-iterator");
7
- const make_stream_iterator_1 = require("./make-stream-iterator");
8
- const is_type_1 = require("../../javascript-utils/is-type");
9
- /**
10
- * Returns an iterator that breaks its input into chunks and yields them one-by-one.
11
- * @param data
12
- * @param options
13
- * @returns
14
- * This function can e.g. be used to enable data sources that can only be read atomically
15
- * (such as `Blob` and `File` via `FileReader`) to still be parsed in batches.
16
- */
17
- function makeIterator(data, options) {
18
- if (typeof data === 'string') {
19
- // Note: Converts string chunks to binary
20
- return (0, make_string_iterator_1.makeStringIterator)(data, options);
21
- }
22
- if (data instanceof ArrayBuffer) {
23
- return (0, make_array_buffer_iterator_1.makeArrayBufferIterator)(data, options);
24
- }
25
- if ((0, is_type_1.isBlob)(data)) {
26
- return (0, make_blob_iterator_1.makeBlobIterator)(data, options);
27
- }
28
- if ((0, is_type_1.isReadableStream)(data)) {
29
- return (0, make_stream_iterator_1.makeStreamIterator)(data, options);
30
- }
31
- if ((0, is_type_1.isResponse)(data)) {
32
- const response = data;
33
- return (0, make_stream_iterator_1.makeStreamIterator)(response.body, options);
34
- }
35
- throw new Error('makeIterator');
36
- }
37
- exports.makeIterator = makeIterator;
@@ -1,96 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.makeStreamIterator = void 0;
4
- const loader_utils_1 = require("@loaders.gl/loader-utils");
5
/**
 * Returns an async iterable that reads from a stream (works in both Node.js and browsers)
 * @param stream stream to iterate over
 * @param options passed through to the platform-specific iterator
 */
function makeStreamIterator(stream, options) {
  // Browser (WhatWG) streams and Node streams need different iteration strategies
  if (loader_utils_1.isBrowser) {
    return makeBrowserStreamIterator(stream, options);
  }
  return makeNodeStreamIterator(stream, options);
}
14
- exports.makeStreamIterator = makeStreamIterator;
15
/**
 * Returns an async iterable that reads from a DOM (browser) stream
 * @param stream stream to iterate from
 * @param options options._streamReadAhead enables issuing the next read while the current one is processed
 * @see https://jakearchibald.com/2017/async-iterators-and-generators/#making-streams-iterate
 */
async function* makeBrowserStreamIterator(stream, options) {
    // WhatWG: stream is supposed to have a `getIterator` method
    // if (typeof stream.getIterator === 'function') {
    //   return stream.getIterator();
    // }
    // if (typeof stream[Symbol.asyncIterator] === 'function') {
    //   return makeToArrayBufferIterator(stream);
    // }
    // In the browser, we first need to get a lock on the stream
    const reader = stream.getReader();
    // Holds the read issued one iteration ahead when read-ahead is enabled
    let nextBatchPromise;
    try {
        // eslint-disable-next-line no-constant-condition
        while (true) {
            // Use last iteration's read-ahead promise if one is pending, else read now
            const currentBatchPromise = nextBatchPromise || reader.read();
            // Issue a read for an additional batch, while we await the next batch
            // Idea is to make fetching happen in parallel with processing / parsing
            if (options?._streamReadAhead) {
                nextBatchPromise = reader.read();
            }
            // Read from the stream
            // value is a Uint8Array
            const { done, value } = await currentBatchPromise;
            // Exit if we're done
            if (done) {
                return;
            }
            // Else yield the chunk
            yield (0, loader_utils_1.toArrayBuffer)(value);
        }
    }
    catch (error) {
        // TODO - examples makes it look like this should always be called,
        // but that generates exceptions so only call it if we do not reach the end
        // NOTE(review): read errors are swallowed here — the iterator simply ends
        // instead of rethrowing; confirm callers rely on this best-effort behavior
        reader.releaseLock();
    }
}
57
/**
 * Returns an async iterable that reads from a Node.js stream
 * @param stream stream to iterate from
 * @note Requires Node.js >= 10 (streams are async-iterable from Node 10 onward)
 */
async function* makeNodeStreamIterator(stream, options) {
  // Node 10+ streams implement Symbol.asyncIterator, so we can delegate directly,
  // coercing each chunk (Buffer/Uint8Array) to a plain ArrayBuffer on the way out
  for await (const chunk of stream) {
    const arrayBuffer = (0, loader_utils_1.toArrayBuffer)(chunk);
    yield arrayBuffer;
  }
}
69
- /* TODO - remove NODE < 10
70
- * @see https://github.com/bustle/streaming-iterables, MIT license
71
- *
72
- if (typeof stream[Symbol.asyncIterator] === 'function') {
73
- return;
74
- }
75
-
76
- // TODO - check if is this ever used in Node 10+?
77
- // eslint-disable-next-line no-constant-condition
78
- while (true) {
79
- const data = stream.read();
80
- if (data !== null) {
81
- yield toArrayBuffer(data);
82
- // eslint-disable-next-line no-continue
83
- continue;
84
- }
85
- if (stream._readableState?.ended) {
86
- return;
87
- }
88
- await onceReadable(stream);
89
- }
90
-
91
- async function onceReadable(stream: Readable): Promise<any> {
92
- return new Promise((resolve) => {
93
- stream.once('readable', resolve);
94
- });
95
- }
96
- */
@@ -1,24 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.makeStringIterator = void 0;
4
- const DEFAULT_CHUNK_SIZE = 256 * 1024;
5
/**
 * Returns an iterator that breaks a big string into chunks and yields them
 * one-by-one as UTF-8 encoded binary (Uint8Array) chunks
 * @param string string to iterate over
 * @param options
 * @param options.chunkSize number of characters per chunk (defaults to 256KB)
 */
function* makeStringIterator(string, options) {
  const chunkSize = options?.chunkSize || DEFAULT_CHUNK_SIZE;
  const textEncoder = new TextEncoder();
  // Walk the string in fixed-size windows; slice() clamps the end index,
  // so the final chunk may be shorter than chunkSize
  for (let offset = 0; offset < string.length; offset += chunkSize) {
    yield textEncoder.encode(string.slice(offset, offset + chunkSize));
  }
}
24
- exports.makeStringIterator = makeStringIterator;
@@ -1,46 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.makeStream = void 0;
4
/**
 * Builds a DOM stream from an iterator
 * This stream is currently used in browsers only,
 * but note that Web stream support is present in Node from Node 16
 * https://nodejs.org/api/webstreams.html#webstreams_web_streams_api
 */
function makeStream(source, options) {
  // Prefer the async iterator when the source provides one, else use the sync iterator
  const iterator = source[Symbol.asyncIterator]
    ? source[Symbol.asyncIterator]()
    : source[Symbol.iterator]();

  const underlyingSource = {
    // Create a byte stream (enables `Response(stream).arrayBuffer()`)
    // Only supported on Chrome
    // See: https://developer.mozilla.org/en-US/docs/Web/API/ReadableByteStreamController
    type: 'bytes',
    async pull(controller) {
      try {
        const result = await iterator.next();
        if (result.done) {
          controller.close();
          return;
        }
        // TODO - ignores controller.desiredSize
        controller.enqueue(new Uint8Array(result.value));
      } catch (error) {
        controller.error(error);
      }
    },
    async cancel() {
      // Let the source iterator release its resources
      await iterator?.return?.();
    }
  };

  // options: QueingStrategy<Uint8Array>
  const queuingStrategy = {
    // This is bytes, not chunks
    highWaterMark: 2 ** 24,
    ...options
  };

  return new ReadableStream(underlyingSource, queuingStrategy);
}
46
- exports.makeStream = makeStream;
@@ -1,82 +0,0 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- var desc = Object.getOwnPropertyDescriptor(m, k);
5
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
- desc = { enumerable: true, get: function() { return m[k]; } };
7
- }
8
- Object.defineProperty(o, k2, desc);
9
- }) : (function(o, m, k, k2) {
10
- if (k2 === undefined) k2 = k;
11
- o[k2] = m[k];
12
- }));
13
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
- Object.defineProperty(o, "default", { enumerable: true, value: v });
15
- }) : function(o, v) {
16
- o["default"] = v;
17
- });
18
- var __importStar = (this && this.__importStar) || function (mod) {
19
- if (mod && mod.__esModule) return mod;
20
- var result = {};
21
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
- __setModuleDefault(result, mod);
23
- return result;
24
- };
25
- Object.defineProperty(exports, "__esModule", { value: true });
26
- exports.makeStream = void 0;
27
- const Stream = __importStar(require("stream"));
28
- class _Readable {
29
- }
30
- const Readable = Stream.Readable || _Readable;
31
- /** Builds a node stream from an iterator */
32
- function makeStream(source, options) {
33
- const iterator = source[Symbol.asyncIterator]
34
- ? source[Symbol.asyncIterator]()
35
- : source[Symbol.iterator]();
36
- return new AsyncIterableReadable(iterator, options);
37
- }
38
- exports.makeStream = makeStream;
39
- class AsyncIterableReadable extends Readable {
40
- constructor(it, options) {
41
- super(options);
42
- this._iterator = it;
43
- this._pulling = false;
44
- this._bytesMode = !options || !options.objectMode;
45
- }
46
- async _read(size) {
47
- if (!this._pulling) {
48
- this._pulling = true;
49
- this._pulling = await this._pull(size, this._iterator);
50
- }
51
- }
52
- async _destroy(error, cb) {
53
- if (!this._iterator) {
54
- return;
55
- }
56
- if (error) {
57
- await this._iterator?.throw?.(error);
58
- }
59
- else {
60
- await this._iterator?.return?.(error);
61
- }
62
- cb?.(null);
63
- }
64
- // eslint-disable-next-line complexity
65
- async _pull(size, it) {
66
- const bm = this._bytesMode;
67
- let r = null;
68
- // while (this.readable && !(r = await it.next(bm ? size : null)).done) {
69
- while (this.readable && !(r = await it.next()).done) {
70
- if (size !== null) {
71
- size -= bm && ArrayBuffer.isView(r.value) ? r.value.byteLength : 1;
72
- }
73
- if (!this.push(new Uint8Array(r.value)) || size <= 0) {
74
- break;
75
- }
76
- }
77
- if ((r?.done || !this.readable) && (this.push(null) || true)) {
78
- it?.return?.();
79
- }
80
- return !this.readable;
81
- }
82
- }
@@ -1,41 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.isWritableStream = exports.isReadableStream = exports.isReadableNodeStream = exports.isWritableNodeStream = exports.isReadableDOMStream = exports.isWritableDOMStream = exports.isBuffer = exports.isBlob = exports.isFile = exports.isResponse = exports.isIterator = exports.isAsyncIterable = exports.isIterable = exports.isPromise = exports.isPureObject = exports.isObject = void 0;
4
- const isBoolean = (x) => typeof x === 'boolean';
5
- const isFunction = (x) => typeof x === 'function';
6
- const isObject = (x) => x !== null && typeof x === 'object';
7
- exports.isObject = isObject;
8
- const isPureObject = (x) => (0, exports.isObject)(x) && x.constructor === {}.constructor;
9
- exports.isPureObject = isPureObject;
10
- const isPromise = (x) => (0, exports.isObject)(x) && isFunction(x.then);
11
- exports.isPromise = isPromise;
12
- const isIterable = (x) => x && typeof x[Symbol.iterator] === 'function';
13
- exports.isIterable = isIterable;
14
- const isAsyncIterable = (x) => x && typeof x[Symbol.asyncIterator] === 'function';
15
- exports.isAsyncIterable = isAsyncIterable;
16
- const isIterator = (x) => x && isFunction(x.next);
17
- exports.isIterator = isIterator;
18
- const isResponse = (x) => (typeof Response !== 'undefined' && x instanceof Response) ||
19
- (x && x.arrayBuffer && x.text && x.json);
20
- exports.isResponse = isResponse;
21
- const isFile = (x) => typeof File !== 'undefined' && x instanceof File;
22
- exports.isFile = isFile;
23
- const isBlob = (x) => typeof Blob !== 'undefined' && x instanceof Blob;
24
- exports.isBlob = isBlob;
25
- /** Check for Node.js `Buffer` without triggering bundler to include buffer polyfill */
26
- const isBuffer = (x) => x && typeof x === 'object' && x.isBuffer;
27
- exports.isBuffer = isBuffer;
28
- const isWritableDOMStream = (x) => (0, exports.isObject)(x) && isFunction(x.abort) && isFunction(x.getWriter);
29
- exports.isWritableDOMStream = isWritableDOMStream;
30
- const isReadableDOMStream = (x) => (typeof ReadableStream !== 'undefined' && x instanceof ReadableStream) ||
31
- ((0, exports.isObject)(x) && isFunction(x.tee) && isFunction(x.cancel) && isFunction(x.getReader));
32
- exports.isReadableDOMStream = isReadableDOMStream;
33
- // Not implemented in Firefox: && isFunction(x.pipeTo)
34
- const isWritableNodeStream = (x) => (0, exports.isObject)(x) && isFunction(x.end) && isFunction(x.write) && isBoolean(x.writable);
35
- exports.isWritableNodeStream = isWritableNodeStream;
36
- const isReadableNodeStream = (x) => (0, exports.isObject)(x) && isFunction(x.read) && isFunction(x.pipe) && isBoolean(x.readable);
37
- exports.isReadableNodeStream = isReadableNodeStream;
38
- const isReadableStream = (x) => (0, exports.isReadableDOMStream)(x) || (0, exports.isReadableNodeStream)(x);
39
- exports.isReadableStream = isReadableStream;
40
- const isWritableStream = (x) => (0, exports.isWritableDOMStream)(x) || (0, exports.isWritableNodeStream)(x);
41
- exports.isWritableStream = isWritableStream;
@@ -1,120 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.encodeURLtoURL = exports.encodeInBatches = exports.encodeText = exports.encodeSync = exports.encode = void 0;
4
- const loader_utils_1 = require("@loaders.gl/loader-utils");
5
- const worker_utils_1 = require("@loaders.gl/worker-utils");
6
- const loader_utils_2 = require("@loaders.gl/loader-utils");
7
- const loader_utils_3 = require("@loaders.gl/loader-utils");
8
- const write_file_1 = require("../fetch/write-file");
9
- const fetch_file_1 = require("../fetch/fetch-file");
10
- const loader_options_1 = require("./loader-options");
11
/**
 * Encode loaded data into a binary ArrayBuffer using the specified Writer.
 * Tries, in order: worker encoding, encode, encodeSync, encodeText,
 * encodeInBatches, and finally (Node.js only) encodeURLtoURL via temp files.
 * @throws if the writer supports none of these encoding methods
 */
async function encode(data, writer, options) {
    const globalOptions = (0, loader_options_1.getLoaderOptions)();
    // const globalOptions: WriterOptions = {}; // getWriterOptions();
    options = { ...globalOptions, ...options };
    // Offload to a worker thread when the writer and options allow it
    if ((0, loader_utils_1.canEncodeWithWorker)(writer, options)) {
        return await (0, worker_utils_1.processOnWorker)(writer, data, options);
    }
    // TODO Merge default writer options with options argument like it is done in load module.
    if (writer.encode) {
        return await writer.encode(data, options);
    }
    if (writer.encodeSync) {
        return writer.encodeSync(data, options);
    }
    if (writer.encodeText) {
        // Text output is re-encoded to binary to keep the return type uniform
        return new TextEncoder().encode(await writer.encodeText(data, options));
    }
    if (writer.encodeInBatches) {
        // Create an iterator representing the data
        // TODO - Assumes this is a table
        const batches = encodeInBatches(data, writer, options);
        // Concatenate the output
        const chunks = [];
        for await (const batch of batches) {
            chunks.push(batch);
        }
        // @ts-ignore
        return (0, loader_utils_2.concatenateArrayBuffers)(...chunks);
    }
    if (!loader_utils_3.isBrowser && writer.encodeURLtoURL) {
        // TODO - how to generate filenames with correct extensions?
        // Round-trip through temp files so command-line converters can act as writers
        const tmpInputFilename = getTemporaryFilename('input');
        await (0, write_file_1.writeFile)(tmpInputFilename, data);
        const tmpOutputFilename = getTemporaryFilename('output');
        const outputFilename = await encodeURLtoURL(tmpInputFilename, tmpOutputFilename, writer, options);
        const response = await (0, fetch_file_1.fetchFile)(outputFilename);
        return response.arrayBuffer();
    }
    throw new Error('Writer could not encode data');
}
54
- exports.encode = encode;
55
/**
 * Encode loaded data into a binary ArrayBuffer using the specified Writer.
 * @throws if the writer does not support synchronous encoding
 */
function encodeSync(data, writer, options) {
  // Guard first: only writers exposing encodeSync can be used here
  if (!writer.encodeSync) {
    throw new Error('Writer could not synchronously encode data');
  }
  return writer.encodeSync(data, options);
}
64
- exports.encodeSync = encodeSync;
65
/**
 * Encode loaded data to text using the specified Writer
 * @note This is a convenience function not intended for production use on large input data.
 * It is not optimized for performance. Data maybe converted from text to binary and back.
 * @throws if the writer does not generate text output
 */
async function encodeText(data, writer, options) {
  if (writer.text) {
    // Preferred path: the writer can emit text directly
    if (writer.encodeText) {
      return await writer.encodeText(data, options);
    }
    // Fallback: binary-encode, then decode the bytes back into a string
    if (writer.encode || writer.encodeInBatches) {
      const binary = await encode(data, writer, options);
      return new TextDecoder().decode(binary);
    }
  }
  throw new Error('Writer could not encode data as text');
}
81
- exports.encodeText = encodeText;
82
/**
 * Encode loaded data into a sequence (iterator) of binary ArrayBuffers using the specified Writer.
 * @throws if the writer does not support batched encoding
 */
function encodeInBatches(data, writer, options) {
  if (!writer.encodeInBatches) {
    // TODO -fall back to atomic encode?
    throw new Error('Writer could not encode data in batches');
  }
  // Wrap the data in an iterator of table batches before handing it to the writer
  const dataIterator = getIterator(data);
  return writer.encodeInBatches(dataIterator, options);
}
93
- exports.encodeInBatches = encodeInBatches;
94
/**
 * Encode data stored in a file (on disk) to another file.
 * @note Node.js only. This function enables using command-line converters as "writers".
 */
async function encodeURLtoURL(inputUrl, outputUrl, writer, options) {
  // Expand any path aliases before handing the URLs to the writer
  inputUrl = (0, loader_utils_2.resolvePath)(inputUrl);
  outputUrl = (0, loader_utils_2.resolvePath)(outputUrl);
  // URL-to-URL encoding shells out to external converters, so it is Node.js-only
  if (loader_utils_3.isBrowser || !writer.encodeURLtoURL) {
    throw new Error();
  }
  return await writer.encodeURLtoURL(inputUrl, outputUrl, options);
}
107
- exports.encodeURLtoURL = encodeURLtoURL;
108
/**
 * Wraps data in a single-batch iterable of table batches
 * @todo TODO - this is an unacceptable hack!!!
 */
function getIterator(data) {
  // A one-element array is a valid (sync) iterable covering the whole table
  return [{ table: data, start: 0, end: data.length }];
}
115
/**
 * Generates a temporary file path for the given base name
 * @param filename base name for the temporary file (e.g. 'input', 'output')
 * @returns a path under /tmp incorporating the base name
 * @todo Move to utils
 */
function getTemporaryFilename(filename) {
  // Fix: previously returned the literal string "/tmp/$(unknown)" — `$()` has no
  // meaning in a JS template literal and the `filename` parameter was ignored
  return `/tmp/${filename}`;
}
@@ -1,33 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.loadInBatches = void 0;
4
- const normalize_loader_1 = require("../loader-utils/normalize-loader");
5
- const get_fetch_function_1 = require("../loader-utils/get-fetch-function");
6
- const parse_in_batches_1 = require("./parse-in-batches");
7
/**
 * Loads and parses one or more files/URLs in batches.
 * Also accepts the short signature loadInBatches(files, options) when no loaders are given.
 */
function loadInBatches(files, loaders, options, context) {
  // Signature: load(url, options) — the second argument is really the options object
  const loadersProvided = Array.isArray(loaders) || (0, normalize_loader_1.isLoaderObject)(loaders);
  if (!loadersProvided) {
    context = undefined; // context not supported in short signature
    options = loaders;
    loaders = null;
  }
  // Select fetch function
  const fetch = (0, get_fetch_function_1.getFetchFunction)(options || {});
  // Multiple URLs / files: kick off all loads without waiting on each other
  // No point in waiting here for all responses before starting to stream individual streams?
  if (Array.isArray(files)) {
    return files.map((file) => loadOneFileInBatches(file, loaders, options, fetch));
  }
  // Single url/file
  return loadOneFileInBatches(files, loaders, options, fetch);
}
25
- exports.loadInBatches = loadInBatches;
26
/** Fetches `file` first when it is a URL string, then parses it in batches */
async function loadOneFileInBatches(file, loaders, options, fetch) {
  // Strings are treated as URLs and fetched; any other input is parsed as-is
  const input = typeof file === 'string' ? await fetch(file) : file;
  return await (0, parse_in_batches_1.parseInBatches)(input, loaders, options);
}
@@ -1,42 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.load = void 0;
4
- const is_type_1 = require("../../javascript-utils/is-type");
5
- const normalize_loader_1 = require("../loader-utils/normalize-loader");
6
- const get_fetch_function_1 = require("../loader-utils/get-fetch-function");
7
- const parse_1 = require("./parse");
8
/**
 * Parses `data` using a specified loader
 * Note: Load does duplicate a lot of parse.
 * it can also call fetchFile on string urls, which `parse` won't do.
 * @param url URL string, Blob/File, or already-loaded binary data
 * @param loaders one loader, an array of loaders, or (short signature) the options object
 * @param options
 * @param context
 */
// implementation signature
async function load(url, loaders, options, context) {
  // Signature: load(url, options) — shift arguments when no loaders were supplied
  if (!Array.isArray(loaders) && !(0, normalize_loader_1.isLoaderObject)(loaders)) {
    context = undefined; // context not supported in short signature
    options = loaders;
    loaders = undefined;
  }
  // Select fetch function
  const fetch = (0, get_fetch_function_1.getFetchFunction)(options);
  // Strings are URLs to fetch; Blobs/Files are also routed through fetch (fetchFile
  // handles them — alt: we could generate ObjectURL here). Anything else is assumed
  // to be already-loaded binary data and passed through untouched.
  const shouldFetch = typeof url === 'string' || (0, is_type_1.isBlob)(url);
  // @ts-expect-error TODO - This may not work for overridden fetch functions
  const data = shouldFetch ? await fetch(url) : url;
  // Data is loaded (at least we have a `Response` object) so time to hand over to `parse`
  return await (0, parse_1.parse)(data, loaders, options);
}
42
- exports.load = load;
@@ -1,7 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getLoaderOptions = exports.setLoaderOptions = void 0;
4
- var option_utils_1 = require("../loader-utils/option-utils");
5
- Object.defineProperty(exports, "setLoaderOptions", { enumerable: true, get: function () { return option_utils_1.setGlobalOptions; } });
6
- var option_utils_2 = require("../loader-utils/option-utils");
7
- Object.defineProperty(exports, "getLoaderOptions", { enumerable: true, get: function () { return option_utils_2.getGlobalLoaderOptions; } });
@@ -1,117 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.parseInBatches = void 0;
4
- const loader_utils_1 = require("@loaders.gl/loader-utils");
5
- const normalize_loader_1 = require("../loader-utils/normalize-loader");
6
- const option_utils_1 = require("../loader-utils/option-utils");
7
- const loader_context_1 = require("../loader-utils/loader-context");
8
- const get_data_1 = require("../loader-utils/get-data");
9
- const resource_utils_1 = require("../utils/resource-utils");
10
- const select_loader_1 = require("./select-loader");
11
- // Ensure `parse` is available in context if loader falls back to `parse`
12
- const parse_1 = require("./parse");
13
/**
 * Parses `data` in batches using a specified loader
 * @param data input (Response, stream, iterator, string, ArrayBuffer, ...); promises are resolved first
 * @param loaders one loader, an array of candidate loaders, or (short signature) the options object
 * @param options
 * @param context optional existing loader context (must be an object, never a url string)
 * @returns an async iterator of batches; null when options.nothrow was set and no loader matched
 */
async function parseInBatches(data, loaders, options, context) {
    (0, loader_utils_1.assert)(!context || typeof context === 'object'); // parseInBatches no longer accepts final url
    // Capture the loader list BEFORE the short-signature shuffle below;
    // in the short signature there is no list, so this stays undefined
    const loaderArray = Array.isArray(loaders) ? loaders : undefined;
    // Signature: parseInBatches(data, options, url) - Uses registered loaders
    if (!Array.isArray(loaders) && !(0, normalize_loader_1.isLoaderObject)(loaders)) {
        context = undefined; // context not supported in short signature
        options = loaders;
        loaders = undefined;
    }
    data = await data; // Resolve any promise
    options = options || {};
    // Extract a url for auto detection
    const url = (0, resource_utils_1.getResourceUrl)(data);
    // Chooses a loader and normalizes it
    // Note - only uses URL and contentType for streams and iterator inputs
    const loader = await (0, select_loader_1.selectLoader)(data, loaders, options);
    // Note: if options.nothrow was set, it is possible that no loader was found, if so just return null
    if (!loader) {
        // @ts-ignore
        return null;
    }
    // Normalize options
    options = (0, option_utils_1.normalizeOptions)(options, loader, loaderArray, url);
    // Build (or extend) the context so nested loaders can recurse via parse/parseInBatches
    context = (0, loader_context_1.getLoaderContext)({ url, parseInBatches, parse: parse_1.parse, loaders: loaderArray }, options, context || null);
    return await parseWithLoaderInBatches(loader, data, options, context);
}
46
- exports.parseInBatches = parseInBatches;
47
/**
 * Loader has been selected and context has been prepared, see if we need to emit a metadata batch
 */
async function parseWithLoaderInBatches(loader, data, options, context) {
  const outputIterator = await parseToOutputIterator(loader, data, options, context);
  // Without options.metadata, hand back the loader's batches untouched
  if (!options.metadata) {
    return outputIterator;
  }
  // Otherwise prepend a synthetic 'metadata' batch describing the loader and context
  return makeMetadataBatchIterator(outputIterator, loader, context);
}

/** Yields one metadata batch, then delegates to the loader's own batches */
async function* makeMetadataBatchIterator(iterator, loader, context) {
  yield {
    batchType: 'metadata',
    metadata: {
      _loader: loader,
      _context: context
    },
    // Populate with some default fields to avoid crashing
    data: [],
    bytesUsed: 0
  };
  yield* iterator;
}
72
/**
 * Prep work is done, now it is time to start parsing into an output operator
 * The approach depends on which parse function the loader exposes
 * `parseInBatches` (preferred), `parse` (fallback)
 */
async function parseToOutputIterator(loader, data, options, context) {
    // Get an iterator from the input
    const inputIterator = await (0, get_data_1.getAsyncIterableFromData)(data, options);
    // Apply any iterator transforms (options.transforms)
    const transformedIterator = await applyInputTransforms(inputIterator, options?.transforms || []);
    // If loader supports parseInBatches, we are done
    if (loader.parseInBatches) {
        return loader.parseInBatches(transformedIterator, options, context);
    }
    // Fallback: load atomically using `parse` concatenating input iterator into single chunk
    async function* parseChunkInBatches() {
        const arrayBuffer = await (0, loader_utils_1.concatenateArrayBuffersAsync)(transformedIterator);
        // Call `parse` instead of `loader.parse` to ensure we can call workers etc.
        const parsedData = await (0, parse_1.parse)(arrayBuffer, loader,
        // TODO - Hack: supply loaders MIME type to ensure we match it
        { ...options, mimeType: loader.mimeTypes[0] }, context);
        // yield a single batch, the output from loader.parse()
        // TODO - run through batch builder to apply options etc...
        const batch = {
            mimeType: loader.mimeTypes[0],
            // Arrays are assumed to be row tables; anything else is opaque
            shape: Array.isArray(parsedData) ? 'row-table' : 'unknown',
            batchType: 'data',
            data: parsedData,
            length: Array.isArray(parsedData) ? parsedData.length : 1
        };
        yield batch;
    }
    return parseChunkInBatches();
}
106
/**
 * Create an iterator chain with any transform iterators (crypto, decompression)
 * @param inputIterator the raw input iterator
 * @param transforms functions that each wrap an iterator in a transforming iterator
 */
async function applyInputTransforms(inputIterator, transforms = []) {
  // Each transform wraps the previous iterator, first transform innermost
  let chain = inputIterator;
  // `for await` also resolves transforms that are supplied as promises
  for await (const transform of transforms) {
    chain = transform(chain);
  }
  return chain;
}