@loaders.gl/loader-utils 4.0.0-alpha.4 → 4.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/index.js +328 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/json-loader.js +44 -0
- package/dist/es5/json-loader.js.map +1 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js +80 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/dataview-copy-utils.js +58 -0
- package/dist/es5/lib/binary-utils/dataview-copy-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js +31 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/es5/lib/binary-utils/memory-conversion-utils.js +44 -0
- package/dist/es5/lib/binary-utils/memory-conversion-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js +34 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/es5/lib/env-utils/assert.js +12 -0
- package/dist/es5/lib/env-utils/assert.js.map +1 -0
- package/dist/es5/lib/env-utils/globals.js +30 -0
- package/dist/es5/lib/env-utils/globals.js.map +1 -0
- package/dist/es5/lib/filesystems/node-filesystem.js +210 -0
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/es5/lib/filesystems/readable-file.js +91 -0
- package/dist/es5/lib/filesystems/readable-file.js.map +1 -0
- package/dist/es5/lib/filesystems/writable-file.js +82 -0
- package/dist/es5/lib/filesystems/writable-file.js.map +1 -0
- package/dist/es5/lib/iterators/async-iteration.js +190 -0
- package/dist/es5/lib/iterators/async-iteration.js.map +1 -0
- package/dist/es5/lib/iterators/text-iterators.js +305 -0
- package/dist/es5/lib/iterators/text-iterators.js.map +1 -0
- package/dist/es5/lib/node/buffer.browser.js +14 -0
- package/dist/es5/lib/node/buffer.browser.js.map +1 -0
- package/dist/es5/lib/node/buffer.js +27 -0
- package/dist/es5/lib/node/buffer.js.map +1 -0
- package/dist/es5/lib/node/fs.js +88 -0
- package/dist/es5/lib/node/fs.js.map +1 -0
- package/dist/es5/lib/node/promisify.js +36 -0
- package/dist/es5/lib/node/promisify.js.map +1 -0
- package/dist/es5/lib/node/stream.js +16 -0
- package/dist/es5/lib/node/stream.js.map +1 -0
- package/dist/es5/lib/option-utils/merge-loader-options.js +30 -0
- package/dist/es5/lib/option-utils/merge-loader-options.js.map +1 -0
- package/dist/es5/lib/parser-utils/parse-json.js +15 -0
- package/dist/es5/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/es5/lib/path-utils/file-aliases.js +33 -0
- package/dist/es5/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/es5/lib/path-utils/get-cwd.js +15 -0
- package/dist/es5/lib/path-utils/get-cwd.js.map +1 -0
- package/dist/es5/lib/path-utils/path.js +136 -0
- package/dist/es5/lib/path-utils/path.js.map +1 -0
- package/dist/es5/lib/request-utils/request-scheduler.js +150 -0
- package/dist/es5/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js +142 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js +18 -0
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js +115 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/es5/types.js +2 -0
- package/dist/es5/types.js.map +1 -0
- package/dist/es5/workers/json-worker.js +6 -0
- package/dist/es5/workers/json-worker.js.map +1 -0
- package/dist/esm/index.js +29 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/json-loader.js +18 -0
- package/dist/esm/json-loader.js.map +1 -0
- package/dist/esm/lib/binary-utils/array-buffer-utils.js +52 -0
- package/dist/esm/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/{lib/binary-utils/binary-copy-utils.js → esm/lib/binary-utils/dataview-copy-utils.js} +27 -4
- package/dist/esm/lib/binary-utils/dataview-copy-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/get-first-characters.js +24 -0
- package/dist/esm/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/esm/lib/binary-utils/memory-conversion-utils.js +31 -0
- package/dist/esm/lib/binary-utils/memory-conversion-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/memory-copy-utils.js +26 -0
- package/dist/esm/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/esm/lib/env-utils/assert.js +6 -0
- package/dist/esm/lib/env-utils/assert.js.map +1 -0
- package/dist/esm/lib/env-utils/globals.js +16 -0
- package/dist/esm/lib/env-utils/globals.js.map +1 -0
- package/dist/esm/lib/filesystems/node-filesystem.js +53 -0
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/esm/lib/filesystems/readable-file.js +20 -0
- package/dist/esm/lib/filesystems/readable-file.js.map +1 -0
- package/dist/esm/lib/filesystems/writable-file.js +20 -0
- package/dist/esm/lib/filesystems/writable-file.js.map +1 -0
- package/dist/esm/lib/iterators/async-iteration.js +32 -0
- package/dist/esm/lib/iterators/async-iteration.js.map +1 -0
- package/dist/esm/lib/iterators/text-iterators.js +47 -0
- package/dist/esm/lib/iterators/text-iterators.js.map +1 -0
- package/dist/esm/lib/node/buffer.browser.js +7 -0
- package/dist/esm/lib/node/buffer.browser.js.map +1 -0
- package/dist/esm/lib/node/buffer.js +20 -0
- package/dist/esm/lib/node/buffer.js.map +1 -0
- package/dist/esm/lib/node/fs.js +40 -0
- package/dist/esm/lib/node/fs.js.map +1 -0
- package/dist/esm/lib/node/promisify.js +10 -0
- package/dist/esm/lib/node/promisify.js.map +1 -0
- package/dist/esm/lib/node/stream.js +7 -0
- package/dist/esm/lib/node/stream.js.map +1 -0
- package/dist/esm/lib/option-utils/merge-loader-options.js +17 -0
- package/dist/esm/lib/option-utils/merge-loader-options.js.map +1 -0
- package/dist/esm/lib/parser-utils/parse-json.js +9 -0
- package/dist/esm/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/esm/lib/path-utils/file-aliases.js +24 -0
- package/dist/esm/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/esm/lib/path-utils/get-cwd.js +9 -0
- package/dist/esm/lib/path-utils/get-cwd.js.map +1 -0
- package/dist/esm/lib/path-utils/path.js +127 -0
- package/dist/esm/lib/path-utils/path.js.map +1 -0
- package/dist/esm/lib/request-utils/request-scheduler.js +120 -0
- package/dist/esm/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js +95 -0
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js +12 -0
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js.map +1 -0
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js +63 -0
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/esm/types.js +2 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/esm/workers/json-worker.js +4 -0
- package/dist/esm/workers/json-worker.js.map +1 -0
- package/dist/index.d.ts +32 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +122 -26
- package/dist/json-loader.d.ts +9 -0
- package/dist/json-loader.d.ts.map +1 -0
- package/dist/json-loader.js +24 -18
- package/dist/lib/binary-utils/array-buffer-utils.d.ts +27 -0
- package/dist/lib/binary-utils/array-buffer-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/array-buffer-utils.js +78 -80
- package/dist/lib/binary-utils/dataview-copy-utils.d.ts +43 -0
- package/dist/lib/binary-utils/dataview-copy-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/dataview-copy-utils.js +97 -0
- package/dist/lib/binary-utils/get-first-characters.d.ts +17 -0
- package/dist/lib/binary-utils/get-first-characters.d.ts.map +1 -0
- package/dist/lib/binary-utils/get-first-characters.js +43 -25
- package/dist/lib/binary-utils/memory-conversion-utils.d.ts +15 -0
- package/dist/lib/binary-utils/memory-conversion-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/memory-conversion-utils.js +73 -0
- package/dist/lib/binary-utils/memory-copy-utils.d.ts +25 -0
- package/dist/lib/binary-utils/memory-copy-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/memory-copy-utils.js +58 -24
- package/dist/lib/env-utils/assert.d.ts +6 -0
- package/dist/lib/env-utils/assert.d.ts.map +1 -0
- package/dist/lib/env-utils/assert.js +12 -5
- package/dist/lib/env-utils/globals.d.ts +15 -0
- package/dist/lib/env-utils/globals.d.ts.map +1 -0
- package/dist/lib/env-utils/globals.js +23 -9
- package/dist/lib/filesystems/node-filesystem.d.ts +39 -0
- package/dist/lib/filesystems/node-filesystem.d.ts.map +1 -0
- package/dist/lib/filesystems/node-filesystem.js +72 -59
- package/dist/lib/filesystems/readable-file.d.ts +10 -0
- package/dist/lib/filesystems/readable-file.d.ts.map +1 -0
- package/dist/lib/filesystems/readable-file.js +25 -0
- package/dist/lib/filesystems/writable-file.d.ts +18 -0
- package/dist/lib/filesystems/writable-file.d.ts.map +1 -0
- package/dist/lib/filesystems/writable-file.js +48 -0
- package/dist/lib/iterators/async-iteration.d.ts +20 -0
- package/dist/lib/iterators/async-iteration.d.ts.map +1 -0
- package/dist/lib/iterators/async-iteration.js +49 -35
- package/dist/lib/iterators/text-iterators.d.ts +19 -0
- package/dist/lib/iterators/text-iterators.d.ts.map +1 -0
- package/dist/lib/iterators/text-iterators.js +56 -41
- package/dist/lib/node/buffer.browser.d.ts +12 -0
- package/dist/lib/node/buffer.browser.d.ts.map +1 -0
- package/dist/lib/node/buffer.browser.js +22 -0
- package/dist/lib/node/buffer.d.ts +12 -0
- package/dist/lib/node/buffer.d.ts.map +1 -0
- package/dist/lib/node/buffer.js +33 -25
- package/dist/lib/node/fs.d.ts +25 -0
- package/dist/lib/node/fs.d.ts.map +1 -0
- package/dist/lib/node/fs.js +48 -26
- package/dist/lib/node/promisify.d.ts +13 -0
- package/dist/lib/node/promisify.d.ts.map +1 -0
- package/dist/lib/node/promisify.js +22 -0
- package/dist/lib/node/stream.d.ts +4 -0
- package/dist/lib/node/stream.d.ts.map +1 -0
- package/dist/lib/node/stream.js +17 -0
- package/dist/lib/option-utils/merge-loader-options.d.ts +9 -0
- package/dist/lib/option-utils/merge-loader-options.d.ts.map +1 -0
- package/dist/lib/option-utils/merge-loader-options.js +27 -0
- package/dist/lib/parser-utils/parse-json.d.ts +5 -0
- package/dist/lib/parser-utils/parse-json.d.ts.map +1 -0
- package/dist/lib/parser-utils/parse-json.js +15 -8
- package/dist/lib/path-utils/file-aliases.d.ts +17 -0
- package/dist/lib/path-utils/file-aliases.d.ts.map +1 -0
- package/dist/lib/path-utils/file-aliases.js +40 -19
- package/dist/lib/path-utils/get-cwd.d.ts +2 -0
- package/dist/lib/path-utils/get-cwd.d.ts.map +1 -0
- package/dist/lib/path-utils/get-cwd.js +12 -0
- package/dist/lib/path-utils/path.d.ts +24 -0
- package/dist/lib/path-utils/path.d.ts.map +1 -0
- package/dist/lib/path-utils/path.js +173 -19
- package/dist/lib/request-utils/request-scheduler.d.ts +62 -0
- package/dist/lib/request-utils/request-scheduler.d.ts.map +1 -0
- package/dist/lib/request-utils/request-scheduler.js +127 -131
- package/dist/lib/worker-loader-utils/create-loader-worker.d.ts +7 -0
- package/dist/lib/worker-loader-utils/create-loader-worker.d.ts.map +1 -0
- package/dist/lib/worker-loader-utils/create-loader-worker.js +92 -96
- package/dist/lib/worker-loader-utils/encode-with-worker.d.ts +8 -0
- package/dist/lib/worker-loader-utils/encode-with-worker.d.ts.map +1 -0
- package/dist/lib/worker-loader-utils/encode-with-worker.js +21 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.d.ts +15 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.d.ts.map +1 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.js +77 -60
- package/dist/types.d.ts +220 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +3 -2
- package/dist/workers/json-worker.d.ts +2 -0
- package/dist/workers/json-worker.d.ts.map +1 -0
- package/dist/workers/json-worker.js +5 -4
- package/package.json +14 -24
- package/src/index.ts +32 -18
- package/src/json-loader.ts +5 -4
- package/src/lib/binary-utils/array-buffer-utils.ts +4 -40
- package/src/lib/binary-utils/{binary-copy-utils.ts → dataview-copy-utils.ts} +57 -5
- package/src/lib/binary-utils/get-first-characters.ts +17 -1
- package/src/lib/binary-utils/memory-conversion-utils.ts +53 -0
- package/src/lib/filesystems/readable-file.ts +30 -0
- package/src/lib/filesystems/writable-file.ts +44 -0
- package/src/lib/node/buffer.browser.ts +20 -0
- package/src/lib/node/buffer.ts +12 -11
- package/src/lib/node/fs.ts +51 -16
- package/src/lib/node/promisify.ts +70 -0
- package/src/lib/node/stream.ts +18 -0
- package/src/lib/option-utils/merge-loader-options.ts +35 -0
- package/src/lib/path-utils/get-cwd.ts +9 -0
- package/src/lib/path-utils/path.ts +138 -4
- package/src/lib/worker-loader-utils/create-loader-worker.ts +3 -2
- package/src/lib/worker-loader-utils/encode-with-worker.ts +21 -0
- package/src/lib/worker-loader-utils/parse-with-worker.ts +16 -3
- package/src/types.ts +156 -93
- package/dist/index.js.map +0 -1
- package/dist/json-loader.js.map +0 -1
- package/dist/lib/binary-utils/array-buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/binary-copy-utils.js.map +0 -1
- package/dist/lib/binary-utils/buffer-utils.js +0 -16
- package/dist/lib/binary-utils/buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/encode-utils.js +0 -31
- package/dist/lib/binary-utils/encode-utils.js.map +0 -1
- package/dist/lib/binary-utils/get-first-characters.js.map +0 -1
- package/dist/lib/binary-utils/memory-copy-utils.js.map +0 -1
- package/dist/lib/env-utils/assert.js.map +0 -1
- package/dist/lib/env-utils/globals.js.map +0 -1
- package/dist/lib/filesystems/node-filesystem.js.map +0 -1
- package/dist/lib/iterators/async-iteration.js.map +0 -1
- package/dist/lib/iterators/text-iterators.js.map +0 -1
- package/dist/lib/node/buffer.js.map +0 -1
- package/dist/lib/node/fs.js.map +0 -1
- package/dist/lib/node/util.js +0 -3
- package/dist/lib/node/util.js.map +0 -1
- package/dist/lib/parser-utils/parse-json.js.map +0 -1
- package/dist/lib/path-utils/file-aliases.js.map +0 -1
- package/dist/lib/path-utils/path.js.map +0 -1
- package/dist/lib/request-utils/request-scheduler.js.map +0 -1
- package/dist/lib/worker-loader-utils/create-loader-worker.js.map +0 -1
- package/dist/lib/worker-loader-utils/parse-with-worker.js.map +0 -1
- package/dist/types.js.map +0 -1
- package/dist/workers/json-worker.js.map +0 -1
- package/src/lib/binary-utils/buffer-utils.ts +0 -28
- package/src/lib/binary-utils/encode-utils.ts +0 -32
- package/src/lib/node/util.ts +0 -4
package/src/lib/worker-loader-utils/create-loader-worker.ts
CHANGED
@@ -11,7 +11,7 @@ let requestId = 0;
  */
 export function createLoaderWorker(loader: LoaderWithParser) {
   // Check that we are actually in a worker thread
-  if (
+  if (!WorkerBody.inWorkerThread()) {
     return;
   }
 
@@ -21,13 +21,14 @@ export function createLoaderWorker(loader: LoaderWithParser) {
     try {
       // validateLoaderVersion(loader, data.source.split('@')[1]);
 
-      const {input, options = {}} = payload;
+      const {input, options = {}, context = {}} = payload;
 
       const result = await parseData({
         loader,
         arrayBuffer: input,
         options,
         context: {
+          ...context,
           parse: parseOnMainThread
         }
       });
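Taken together, the two hunks above let the main thread send a serialized `context` along with the worker message; the worker then spreads it under its own `parse` binding. The sketch below is illustrative only (the payload shape and helper names are assumptions, not code from the package):

// Payload shape assumed from the destructuring in createLoaderWorker above (hypothetical)
type ProcessPayload = {
  input: ArrayBuffer;
  options?: Record<string, unknown>;
  context?: Record<string, unknown>;
};

type ParseOnMainThread = (
  arrayBuffer: ArrayBuffer,
  options?: Record<string, unknown>
) => Promise<unknown>;

// Serialized fields sent from the main thread (e.g. a url) survive the transfer;
// function-valued members such as `parse` must be re-attached on the worker side.
function buildWorkerContext(payload: ProcessPayload, parseOnMainThread: ParseOnMainThread) {
  const {context = {}} = payload;
  return {...context, parse: parseOnMainThread};
}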
package/src/lib/worker-loader-utils/encode-with-worker.ts
ADDED
@@ -0,0 +1,21 @@
+import {WorkerFarm} from '@loaders.gl/worker-utils';
+import {Writer, WriterOptions} from '../../types';
+import {isBrowser} from '../env-utils/globals';
+
+/**
+ * Determines if a loader can parse with worker
+ * @param loader
+ * @param options
+ */
+export function canEncodeWithWorker(writer: Writer, options?: WriterOptions) {
+  if (!WorkerFarm.isSupported()) {
+    return false;
+  }
+
+  // Node workers are still experimental
+  if (!isBrowser && !options?._nodeWorkers) {
+    return false;
+  }
+
+  return writer.worker && options?.worker;
+}
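The new `canEncodeWithWorker()` mirrors `canParseWithWorker()` on the writer side. A minimal usage sketch follows; the writer object is hypothetical and the import assumes the function is re-exported from the package root, which this diff does not show:

import {canEncodeWithWorker} from '@loaders.gl/loader-utils'; // assumed re-export, not visible in this diff
import type {Writer, WriterOptions} from '@loaders.gl/loader-utils';

// Hypothetical writer definition, used only for illustration
const MyWriter: Writer = {
  name: 'My Format',
  id: 'my-format',
  module: 'my-format',
  version: '4.0.0-alpha.6',
  worker: true,
  options: {}
};

function pickEncodePath(writer: Writer, options?: WriterOptions): 'worker' | 'main-thread' {
  // Falls back to the main thread when workers are unsupported, when running under
  // Node.js without options._nodeWorkers, or when writer/options do not opt in.
  return canEncodeWithWorker(writer, options) ? 'worker' : 'main-thread';
}

pickEncodePath(MyWriter, {worker: true});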
package/src/lib/worker-loader-utils/parse-with-worker.ts
CHANGED
@@ -1,4 +1,9 @@
-import
+import {
+  WorkerJob,
+  WorkerMessageType,
+  WorkerMessagePayload,
+  isBrowser
+} from '@loaders.gl/worker-utils';
 import type {Loader, LoaderOptions, LoaderContext} from '../../types';
 import {WorkerFarm, getWorkerURL} from '@loaders.gl/worker-utils';
 
@@ -12,6 +17,11 @@ export function canParseWithWorker(loader: Loader, options?: LoaderOptions) {
     return false;
   }
 
+  // Node workers are still experimental
+  if (!isBrowser && !options?._nodeWorkers) {
+    return false;
+  }
+
   return loader.worker && options?.worker;
 }
 
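With this gate in place, worker parsing under Node.js stays off unless the experimental flag is set. A sketch of opting in from consumer code (the loader and the `parse` entry point from `@loaders.gl/core` are outside this package and are used here only as assumptions):

import {parse} from '@loaders.gl/core'; // consumer-side entry point, not part of this package
import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';

// Placeholder for any loader that ships a worker build
declare const someWorkerLoader: Loader;

async function parseOnNodeWorker(data: ArrayBuffer) {
  const options: LoaderOptions = {
    worker: true,
    // Without this experimental flag, canParseWithWorker() returns false under Node.js
    // and parsing falls back to the main thread.
    _nodeWorkers: true
  };
  return await parse(data, someWorkerLoader, options);
}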
@@ -33,19 +43,22 @@ export async function parseWithWorker(
   const workerPool = workerFarm.getWorkerPool({name, url});
 
   // options.log object contains functions which cannot be transferred
+  // context.fetch & context.parse functions cannot be transferred
   // TODO - decide how to handle logging on workers
   options = JSON.parse(JSON.stringify(options));
+  context = JSON.parse(JSON.stringify(context || {}));
 
   const job = await workerPool.startJob(
     'process-on-worker',
     // @ts-expect-error
-    onMessage.bind(null, parseOnMainThread) // eslint-disable-
+    onMessage.bind(null, parseOnMainThread) // eslint-disable-line @typescript-eslint/no-misused-promises
   );
 
   job.postMessage('process', {
     // @ts-ignore
     input: data,
-    options
+    options,
+    context
   });
 
   const result = await job.result;
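Both `options` and `context` are now run through a JSON round-trip before being posted to the worker pool. Functions cannot be transferred to a worker, so fields like `context.fetch` and `context.parse` are stripped here and re-attached on the worker side. A small sketch of what the round-trip does (illustrative helper, not package code):

// JSON.stringify silently omits function-valued fields; only plain data survives.
type SerializableContext = Record<string, unknown>;

function toTransferableContext(context: {fetch?: unknown; url?: string} = {}): SerializableContext {
  return JSON.parse(JSON.stringify(context));
}

const stripped = toTransferableContext({url: 'https://example.com/data.bin', fetch: () => {}});
// stripped is {url: 'https://example.com/data.bin'}; the fetch function is gone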
package/src/types.ts
CHANGED
@@ -19,7 +19,11 @@ export type NumericArray = Array<number> | TypedIntArray | TypedFloatArray;
 
 type FetchLike = (url: string, options?: RequestInit) => Promise<Response>;
 
-//
+// LOADERS
+
+/**
+ * Core Loader Options
+ */
 export type LoaderOptions = {
   /** fetch options or a custom fetch function */
   fetch?: typeof fetch | FetchLike | RequestInit | null;
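For reference, the unchanged `fetch` option already accepts either `RequestInit` overrides or a fetch-like function. A short sketch with illustrative values:

import type {LoaderOptions} from '@loaders.gl/loader-utils';

// Plain RequestInit overrides are forwarded to the built-in fetch
const withHeaders: LoaderOptions = {
  fetch: {headers: {Authorization: 'Bearer <token>'}} // placeholder token
};

// Or supply a custom fetch-like function matching FetchLike
const withCustomFetch: LoaderOptions = {
  fetch: (url, init) => fetch(url, {...init, cache: 'no-store'})
};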
@@ -55,7 +59,7 @@
   // workers
 
   /** CDN load workers from */
-  CDN?: string;
+  CDN?: string | null;
   /** Set to `false` to disable workers */
   worker?: boolean;
   /** Number of concurrent workers (per loader) on desktop browser */
@@ -64,7 +68,9 @@
   maxMobileConcurrency?: number;
   /** Set to `false` to prevent reuse workers */
   reuseWorkers?: boolean;
-  /**
+  /** Whether to use workers under Node.js (experimental) */
+  _nodeWorkers?: boolean;
+  /** set to 'test' to run local worker */
   _workerType?: string;
 
   /** @deprecated `options.batchType` removed, Use `options.<loader>.type` instead */
@@ -72,54 +78,59 @@
   /** @deprecated `options.throw removed`, Use `options.nothrow` instead */
   throws?: boolean;
   /** @deprecated `options.dataType` no longer used */
-  dataType?:
+  dataType?: never;
   /** @deprecated `options.uri` no longer used */
-  uri?:
+  uri?: never;
   /** @deprecated `options.method` removed. Use `options.fetch.method` */
-  method?:
+  method?: never;
   /** @deprecated `options.headers` removed. Use `options.fetch.headers` */
-  headers?:
+  headers?: never;
   /** @deprecated `options.body` removed. Use `options.fetch.body` */
-  body?:
+  body?: never;
   /** @deprecated `options.mode` removed. Use `options.fetch.mode` */
-  mode?:
+  mode?: never;
   /** @deprecated `options.credentials` removed. Use `options.fetch.credentials` */
-  credentials?:
+  credentials?: never;
   /** @deprecated `options.cache` removed. Use `options.fetch.cache` */
-  cache?:
+  cache?: never;
   /** @deprecated `options.redirect` removed. Use `options.fetch.redirect` */
-  redirect?:
+  redirect?: never;
   /** @deprecated `options.referrer` removed. Use `options.fetch.referrer` */
-  referrer?:
+  referrer?: never;
   /** @deprecated `options.referrerPolicy` removed. Use `options.fetch.referrerPolicy` */
-  referrerPolicy?:
+  referrerPolicy?: never;
   /** @deprecated `options.integrity` removed. Use `options.fetch.integrity` */
-  integrity?:
+  integrity?: never;
   /** @deprecated `options.keepalive` removed. Use `options.fetch.keepalive` */
-  keepalive?:
+  keepalive?: never;
   /** @deprecated `options.signal` removed. Use `options.fetch.signal` */
-  signal?:
+  signal?: never;
 
   // Accept other keys (loader options objects, e.g. `options.csv`, `options.json` ...)
-  [loaderId: string]:
+  [loaderId: string]: unknown;
 };
 
 type PreloadOptions = {
-  [key: string]:
+  [key: string]: unknown;
 };
 
 /**
  * A worker loader definition that can be used with `@loaders.gl/core` functions
  */
-export type Loader = {
+export type Loader<DataT = any, BatchT = any, LoaderOptionsT = LoaderOptions> = {
+  // Types
+  dataType?: DataT;
+  batchType?: BatchT;
+
+  options: LoaderOptionsT;
+  deprecatedOptions?: Record<string, string | Record<string, string>>;
+
   // Worker
   name: string;
   id: string;
   module: string;
   version: string;
   worker?: string | boolean;
-  options: object;
-  deprecatedOptions?: object;
   // end Worker
 
   category?: string;
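Typing the deprecated options as `never` turns accidental use into a compile-time error, while the trailing index signature keeps per-loader option bags open. A sketch of the effect (hypothetical option values):

import type {LoaderOptions} from '@loaders.gl/loader-utils';

const ok: LoaderOptions = {
  worker: true,
  _nodeWorkers: true,
  json: {table: true} // accepted through the `[loaderId: string]: unknown` index signature
};

// @ts-expect-error `method` was removed; use options.fetch.method instead
const bad: LoaderOptions = {method: 'POST'};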
@@ -129,118 +140,170 @@ export type Loader = {
   binary?: boolean;
   text?: boolean;
 
-  tests?: (((ArrayBuffer) => boolean) | ArrayBuffer | string)[];
+  tests?: (((ArrayBuffer: ArrayBuffer) => boolean) | ArrayBuffer | string)[];
 
   // TODO - deprecated
   supported?: boolean;
-  testText?: (string) => boolean;
+  testText?: (string: string) => boolean;
 };
 
 /**
  * A "bundled" loader definition that can be used with `@loaders.gl/core` functions
  * If a worker loader is supported it will also be supported.
  */
-export type LoaderWithParser =
+export type LoaderWithParser<DataT = any, BatchT = any, LoaderOptionsT = LoaderOptions> = Loader<
+  DataT,
+  BatchT,
+  LoaderOptionsT
+> & {
   // TODO - deprecated
-  testText?: (string) => boolean;
+  testText?: (string: string) => boolean;
 
-  parse:
+  parse: (
+    arrayBuffer: ArrayBuffer,
+    options?: LoaderOptionsT,
+    context?: LoaderContext
+  ) => Promise<DataT>;
   preload?: Preload;
-  parseSync?:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  module: string;
-  version: string;
-
-  options: object;
-  deprecatedOptions?: object;
-
-  // TODO - are these are needed?
-  binary?: boolean;
-  extensions?: string[];
-  mimeTypes?: string[];
-  text?: boolean;
-
-  encode?: Encode;
-  encodeSync?: EncodeSync;
-  encodeInBatches?: EncodeInBatches;
-  encodeURLtoURL?: EncodeURLtoURL;
-  encodeText?: EncodeText;
+  parseSync?: (
+    arrayBuffer: ArrayBuffer,
+    options?: LoaderOptionsT,
+    context?: LoaderContext
+  ) => DataT;
+  parseText?: (text: string, options?: LoaderOptionsT) => Promise<DataT>;
+  parseTextSync?: (text: string, options?: LoaderOptionsT) => DataT;
+  parseInBatches?: (
+    iterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
+    options?: LoaderOptionsT,
+    context?: LoaderContext
+  ) => AsyncIterable<BatchT>;
+  parseFileInBatches?: (
+    file: Blob,
+    options?: LoaderOptionsT,
+    context?: LoaderContext
+  ) => AsyncIterable<BatchT>;
 };
 
 export type LoaderContext = {
   loaders?: Loader[] | null;
   url?: string;
 
-  fetch: typeof fetch;
+  fetch: typeof fetch | FetchLike;
+  response?: Response;
   parse: (
     arrayBuffer: ArrayBuffer,
-    loaders
+    loaders?: Loader | Loader[] | LoaderOptions,
     options?: LoaderOptions,
     context?: LoaderContext
   ) => Promise<any>;
   parseSync?: (
     arrayBuffer: ArrayBuffer,
-    loaders
+    loaders?: Loader | Loader[] | LoaderOptions,
     options?: LoaderOptions,
     context?: LoaderContext
   ) => any;
   parseInBatches?: (
     iterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
-    loaders
+    loaders?: Loader | Loader[] | LoaderOptions,
     options?: LoaderOptions,
     context?: LoaderContext
   ) => AsyncIterable<any> | Promise<AsyncIterable<any>>;
 };
 
-type Parse = (
-
-
-
-) => Promise<any>;
-type ParseSync = (
-
-
-
-) => any;
-type ParseText = (text: string, options?: LoaderOptions) => Promise<any>;
-type ParseTextSync = (text: string, options?: LoaderOptions) => any;
-type ParseInBatches = (
-
-
-
-) => AsyncIterable<any>;
-type ParseFileInBatches = (
-
-
-
-) => AsyncIterable<any>;
-
-type
+// type Parse = (
+//   arrayBuffer: ArrayBuffer,
+//   options?: LoaderOptions,
+//   context?: LoaderContext
+// ) => Promise<any>;
+// type ParseSync = (
+//   arrayBuffer: ArrayBuffer,
+//   options?: LoaderOptions,
+//   context?: LoaderContext
+// ) => any;
+// type ParseText = (text: string, options?: LoaderOptions) => Promise<any>;
+// type ParseTextSync = (text: string, options?: LoaderOptions) => any;
+// type ParseInBatches = (
+//   iterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,
+//   options?: LoaderOptions,
+//   context?: LoaderContext
+// ) => AsyncIterable<any>;
+// type ParseFileInBatches = (
+//   file: Blob,
+//   options?: LoaderOptions,
+//   context?: LoaderContext
+// ) => AsyncIterable<any>;
+
+type Preload = (url: string, options?: PreloadOptions) => any;
+
+/** Typescript helper to extract options type from a generic loader type */
+export type LoaderOptionsType<T = Loader> = T extends Loader<any, any, infer Options>
+  ? Options
+  : never;
+/** Typescript helper to extract data type from a generic loader type */
+export type LoaderReturnType<T = Loader> = T extends Loader<infer Return, any, any>
+  ? Return
+  : never;
+/** Typescript helper to extract batch type from a generic loader type */
+export type LoaderBatchType<T = Loader> = T extends Loader<any, infer Batch, any> ? Batch : never;
+
+// WRITERS
+
+/** Options for writers */
+export type WriterOptions = {
+  /** worker source. If is set will be used instead of loading worker from the Internet */
+  souce?: string | null;
+  /** writer-specific options */
+  [writerId: string]: any;
+};
+
+/**
+ * A writer definition that can be used with `@loaders.gl/core` functions
+ */
+export type Writer<DataT = unknown, BatchT = unknown, WriterOptionsT = WriterOptions> = {
+  name: string;
+
+  id: string;
+  module: string;
+  version: string;
+  worker?: string | boolean;
+
+  // TODO - are these are needed?
+  extensions?: string[];
+  mimeTypes?: string[];
+  binary?: boolean;
+  text?: boolean;
+
+  options: WriterOptionsT;
+  deprecatedOptions?: Record<string, string>;
+
+  // encodeText?: EncodeText;
+  // encode?: Encode;
+  encodeSync?: EncodeSync;
+  // encodeInBatches?: EncodeInBatches;
+  encodeURLtoURL?: EncodeURLtoURL;
+
+  encode?(data: DataT, options?: WriterOptionsT): Promise<ArrayBuffer>;
+  encodeText?(table: DataT, options?: WriterOptionsT): Promise<string> | string;
+  encodeInBatches?(data: AsyncIterable<any>, options?: WriterOptionsT): AsyncIterable<ArrayBuffer>;
+};
+
+// type Encode = (data: any, options?: WriterOptions) => Promise<ArrayBuffer>;
 type EncodeSync = (data: any, options?: WriterOptions) => ArrayBuffer;
 // TODO
-type EncodeText = Function;
-type EncodeInBatches = Function;
+// type EncodeText = Function;
+// type EncodeInBatches = Function;
 type EncodeURLtoURL = (
   inputUrl: string,
   outputUrl: string,
   options?: WriterOptions
 ) => Promise<string>;
-
+
+/** Typescript helper to extract the writer options type from a generic writer type */
+export type WriterOptionsType<T = Writer> = T extends Writer<unknown, unknown, infer Options>
+  ? Options
+  : never;
+
+// MISC TYPES
 
 export type TransformBatches = (
   asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>
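The new generic parameters and the `LoaderOptionsType` / `LoaderReturnType` / `LoaderBatchType` helpers make it possible to recover a loader's option and data types. A sketch with hypothetical CSV-flavoured types (and assuming the helpers are re-exported from the package root, which this diff does not show):

import type {
  Loader,
  LoaderOptions,
  LoaderOptionsType,
  LoaderReturnType
} from '@loaders.gl/loader-utils'; // helper re-exports assumed, not visible in this diff

// Hypothetical option and data shapes, defined only for this illustration
type CSVLoaderOptions = LoaderOptions & {csv?: {header?: boolean}};
type CSVTable = {columns: string[]; rows: string[][]};

// A loader typed with the new generic parameters <DataT, BatchT, LoaderOptionsT>
type CSVLoaderType = Loader<CSVTable, CSVTable, CSVLoaderOptions>;

// The helpers recover the pieces from the loader type
type RecoveredOptions = LoaderOptionsType<CSVLoaderType>; // CSVLoaderOptions
type RecoveredData = LoaderReturnType<CSVLoaderType>; // CSVTable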
@@ -295,7 +358,7 @@ export interface IFileSystem {
 
 type ReadOptions = {buffer?: ArrayBuffer; offset?: number; length?: number; position?: number};
 export interface IRandomAccessReadFileSystem extends IFileSystem {
-  open(path: string, flags, mode
+  open(path: string, flags: string | number, mode?: any): Promise<any>;
   close(fd: any): Promise<void>;
   fstat(fd: any): Promise<object>;
   read(fd: any, options?: ReadOptions): Promise<{bytesRead: number; buffer: Buffer}>;
package/dist/index.js.map
DELETED
@@ -1 +0,0 @@
-(source map contents omitted)
package/dist/json-loader.js.map
DELETED
@@ -1 +0,0 @@
-(source map contents omitted)
package/dist/lib/binary-utils/array-buffer-utils.js.map
DELETED
@@ -1 +0,0 @@
-(source map contents omitted)
package/dist/lib/binary-utils/binary-copy-utils.js.map
DELETED
@@ -1 +0,0 @@
-(source map contents omitted)
package/dist/lib/binary-utils/buffer-utils.js
DELETED
@@ -1,16 +0,0 @@
-import * as node from '../node/buffer';
-export function isBuffer(value) {
-  return value && typeof value === 'object' && value.isBuffer;
-}
-export function toBuffer(data) {
-  return node.toBuffer ? node.toBuffer(data) : data;
-}
-export function bufferToArrayBuffer(buffer) {
-  if (isBuffer(buffer)) {
-    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
-    return typedArray.slice().buffer;
-  }
-
-  return buffer;
-}
-//# sourceMappingURL=buffer-utils.js.map
package/dist/lib/binary-utils/buffer-utils.js.map
DELETED
@@ -1 +0,0 @@
-(source map contents omitted)
package/dist/lib/binary-utils/encode-utils.js
DELETED
@@ -1,31 +0,0 @@
-export function padStringToByteAlignment(string, byteAlignment) {
-  const length = string.length;
-  const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment;
-  const padding = paddedLength - length;
-  let whitespace = '';
-
-  for (let i = 0; i < padding; ++i) {
-    whitespace += ' ';
-  }
-
-  return string + whitespace;
-}
-export function copyStringToDataView(dataView, byteOffset, string, byteLength) {
-  if (dataView) {
-    for (let i = 0; i < byteLength; i++) {
-      dataView.setUint8(byteOffset + i, string.charCodeAt(i));
-    }
-  }
-
-  return byteOffset + byteLength;
-}
-export function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {
-  if (dataView) {
-    for (let i = 0; i < byteLength; i++) {
-      dataView.setUint8(byteOffset + i, binary[i]);
-    }
-  }
-
-  return byteOffset + byteLength;
-}
-//# sourceMappingURL=encode-utils.js.map
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/lib/binary-utils/encode-utils.ts"],"names":["padStringToByteAlignment","string","byteAlignment","length","paddedLength","Math","ceil","padding","whitespace","i","copyStringToDataView","dataView","byteOffset","byteLength","setUint8","charCodeAt","copyBinaryToDataView","binary"],"mappings":"AAIA,OAAO,SAASA,wBAAT,CAAkCC,MAAlC,EAA0CC,aAA1C,EAAyD;AAC9D,QAAMC,MAAM,GAAGF,MAAM,CAACE,MAAtB;AACA,QAAMC,YAAY,GAAGC,IAAI,CAACC,IAAL,CAAUH,MAAM,GAAGD,aAAnB,IAAoCA,aAAzD;AACA,QAAMK,OAAO,GAAGH,YAAY,GAAGD,MAA/B;AACA,MAAIK,UAAU,GAAG,EAAjB;;AACA,OAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGF,OAApB,EAA6B,EAAEE,CAA/B,EAAkC;AAChCD,IAAAA,UAAU,IAAI,GAAd;AACD;;AACD,SAAOP,MAAM,GAAGO,UAAhB;AACD;AAED,OAAO,SAASE,oBAAT,CAA8BC,QAA9B,EAAwCC,UAAxC,EAAoDX,MAApD,EAA4DY,UAA5D,EAAwE;AAC7E,MAAIF,QAAJ,EAAc;AACZ,SAAK,IAAIF,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGI,UAApB,EAAgCJ,CAAC,EAAjC,EAAqC;AACnCE,MAAAA,QAAQ,CAACG,QAAT,CAAkBF,UAAU,GAAGH,CAA/B,EAAkCR,MAAM,CAACc,UAAP,CAAkBN,CAAlB,CAAlC;AACD;AACF;;AACD,SAAOG,UAAU,GAAGC,UAApB;AACD;AAED,OAAO,SAASG,oBAAT,CAA8BL,QAA9B,EAAwCC,UAAxC,EAAoDK,MAApD,EAA4DJ,UAA5D,EAAwE;AAC7E,MAAIF,QAAJ,EAAc;AACZ,SAAK,IAAIF,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGI,UAApB,EAAgCJ,CAAC,EAAjC,EAAqC;AACnCE,MAAAA,QAAQ,CAACG,QAAT,CAAkBF,UAAU,GAAGH,CAA/B,EAAkCQ,MAAM,CAACR,CAAD,CAAxC;AACD;AACF;;AACD,SAAOG,UAAU,GAAGC,UAApB;AACD","sourcesContent":["// Note: These were broken out from gltf loader...\n// eslint-disable-next-line complexity\n\n// PERFORMANCE IDEA: No need to copy string twice...\nexport function padStringToByteAlignment(string, byteAlignment) {\n const length = string.length;\n const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment; // Round up to the required alignment\n const padding = paddedLength - length;\n let whitespace = '';\n for (let i = 0; i < padding; ++i) {\n whitespace += ' ';\n }\n return string + whitespace;\n}\n\nexport function copyStringToDataView(dataView, byteOffset, string, byteLength) {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, string.charCodeAt(i));\n }\n }\n return byteOffset + byteLength;\n}\n\nexport function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, binary[i]);\n }\n }\n return byteOffset + byteLength;\n}\n"],"file":"encode-utils.js"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/lib/binary-utils/get-first-characters.ts"],"names":["getFirstCharacters","data","length","slice","ArrayBuffer","isView","getMagicString","buffer","byteOffset","arrayBuffer","byteLength","dataView","DataView","magic","i","String","fromCharCode","getUint8"],"mappings":"AAAA,OAAO,SAASA,kBAAT,CAA4BC,IAA5B,EAAkCC,MAAM,GAAG,CAA3C,EAAsD;AAC3D,MAAI,OAAOD,IAAP,KAAgB,QAApB,EAA8B;AAC5B,WAAOA,IAAI,CAACE,KAAL,CAAW,CAAX,EAAcD,MAAd,CAAP;AACD,GAFD,MAEO,IAAIE,WAAW,CAACC,MAAZ,CAAmBJ,IAAnB,CAAJ,EAA8B;AAEnC,WAAOK,cAAc,CAACL,IAAI,CAACM,MAAN,EAAcN,IAAI,CAACO,UAAnB,EAA+BN,MAA/B,CAArB;AACD,GAHM,MAGA,IAAID,IAAI,YAAYG,WAApB,EAAiC;AACtC,UAAMI,UAAU,GAAG,CAAnB;AACA,WAAOF,cAAc,CAACL,IAAD,EAAOO,UAAP,EAAmBN,MAAnB,CAArB;AACD;;AACD,SAAO,EAAP;AACD;AAED,OAAO,SAASI,cAAT,CACLG,WADK,EAELD,UAFK,EAGLN,MAHK,EAIG;AACR,MAAIO,WAAW,CAACC,UAAZ,IAA0BF,UAAU,GAAGN,MAA3C,EAAmD;AACjD,WAAO,EAAP;AACD;;AACD,QAAMS,QAAQ,GAAG,IAAIC,QAAJ,CAAaH,WAAb,CAAjB;AACA,MAAII,KAAK,GAAG,EAAZ;;AACA,OAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGZ,MAApB,EAA4BY,CAAC,EAA7B,EAAiC;AAC/BD,IAAAA,KAAK,IAAIE,MAAM,CAACC,YAAP,CAAoBL,QAAQ,CAACM,QAAT,CAAkBT,UAAU,GAAGM,CAA/B,CAApB,CAAT;AACD;;AACD,SAAOD,KAAP;AACD","sourcesContent":["export function getFirstCharacters(data, length = 5): string {\n if (typeof data === 'string') {\n return data.slice(0, length);\n } else if (ArrayBuffer.isView(data)) {\n // Typed Arrays can have offsets into underlying buffer\n return getMagicString(data.buffer, data.byteOffset, length);\n } else if (data instanceof ArrayBuffer) {\n const byteOffset = 0;\n return getMagicString(data, byteOffset, length);\n }\n return '';\n}\n\nexport function getMagicString(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n length: number\n): string {\n if (arrayBuffer.byteLength <= byteOffset + length) {\n return '';\n }\n const dataView = new DataView(arrayBuffer);\n let magic = '';\n for (let i = 0; i < length; i++) {\n magic += String.fromCharCode(dataView.getUint8(byteOffset + i));\n }\n return magic;\n}\n"],"file":"get-first-characters.js"}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../src/lib/binary-utils/memory-copy-utils.ts"],"names":["assert","padToNBytes","byteLength","padding","copyArrayBuffer","targetBuffer","sourceBuffer","byteOffset","targetArray","Uint8Array","sourceArray","set","copyToArray","source","target","targetOffset","ArrayBuffer","srcByteOffset","srcByteLength","buffer","arrayBuffer"],"mappings":"AAAA,SAAQA,MAAR,QAAqB,qBAArB;AASA,OAAO,SAASC,WAAT,CAAqBC,UAArB,EAAyCC,OAAzC,EAAkE;AACvEH,EAAAA,MAAM,CAACE,UAAU,IAAI,CAAf,CAAN;AACAF,EAAAA,MAAM,CAACG,OAAO,GAAG,CAAX,CAAN;AACA,SAAQD,UAAU,IAAIC,OAAO,GAAG,CAAd,CAAX,GAA+B,EAAEA,OAAO,GAAG,CAAZ,CAAtC;AACD;AAQD,OAAO,SAASC,eAAT,CACLC,YADK,EAELC,YAFK,EAGLC,UAHK,EAILL,UAAkB,GAAGI,YAAY,CAACJ,UAJ7B,EAKQ;AACb,QAAMM,WAAW,GAAG,IAAIC,UAAJ,CAAeJ,YAAf,EAA6BE,UAA7B,EAAyCL,UAAzC,CAApB;AACA,QAAMQ,WAAW,GAAG,IAAID,UAAJ,CAAeH,YAAf,CAApB;AACAE,EAAAA,WAAW,CAACG,GAAZ,CAAgBD,WAAhB;AACA,SAAOL,YAAP;AACD;AAUD,OAAO,SAASO,WAAT,CAAqBC,MAArB,EAAgDC,MAAhD,EAA6DC,YAA7D,EAA2F;AAChG,MAAIL,WAAJ;;AAEA,MAAIG,MAAM,YAAYG,WAAtB,EAAmC;AACjCN,IAAAA,WAAW,GAAG,IAAID,UAAJ,CAAeI,MAAf,CAAd;AACD,GAFD,MAEO;AAOL,UAAMI,aAAa,GAAGJ,MAAM,CAACN,UAA7B;AACA,UAAMW,aAAa,GAAGL,MAAM,CAACX,UAA7B;AAGAQ,IAAAA,WAAW,GAAG,IAAID,UAAJ,CAAeI,MAAM,CAACM,MAAP,IAAiBN,MAAM,CAACO,WAAvC,EAAoDH,aAApD,EAAmEC,aAAnE,CAAd;AACD;;AAGDJ,EAAAA,MAAM,CAACH,GAAP,CAAWD,WAAX,EAAwBK,YAAxB;AAEA,SAAOA,YAAY,GAAGd,WAAW,CAACS,WAAW,CAACR,UAAb,EAAyB,CAAzB,CAAjC;AACD","sourcesContent":["import {assert} from '../env-utils/assert';\n\n/**\n * Calculate new size of an arrayBuffer to be aligned to an n-byte boundary\n * This function increases `byteLength` by the minimum delta,\n * allowing the total length to be divided by `padding`\n * @param byteLength\n * @param padding\n */\nexport function padToNBytes(byteLength: number, padding: number): number {\n assert(byteLength >= 0); // `Incorrect 'byteLength' value: ${byteLength}`\n assert(padding > 0); // `Incorrect 'padding' value: ${padding}`\n return (byteLength + (padding - 1)) & ~(padding - 1);\n}\n\n/**\n * Creates a new Uint8Array based on two different ArrayBuffers\n * @param targetBuffer The first buffer.\n * @param sourceBuffer The second buffer.\n * @return The new ArrayBuffer created out of the two.\n */\nexport function copyArrayBuffer(\n targetBuffer: ArrayBuffer,\n sourceBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength: number = sourceBuffer.byteLength\n): ArrayBuffer {\n const targetArray = new Uint8Array(targetBuffer, byteOffset, byteLength);\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n return targetBuffer;\n}\n\n/**\n * Copy from source to target at the targetOffset\n *\n * @param source - The data to copy\n * @param target - The destination to copy data into\n * @param targetOffset - The start offset into target to place the copied data\n * @returns the new offset taking into account proper padding\n */\nexport function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number {\n let sourceArray;\n\n if (source instanceof ArrayBuffer) {\n sourceArray = new Uint8Array(source);\n } else {\n // Pack buffer onto the big target array\n //\n // 'source.data.buffer' could be a view onto a larger buffer.\n // We MUST use this constructor to ensure the byteOffset and byteLength is\n // set to correct values from 'source.data' and not the underlying\n // buffer for target.set() to work properly.\n const srcByteOffset = source.byteOffset;\n const srcByteLength = source.byteLength;\n // In gltf parser it is set as \"arrayBuffer\" instead of \"buffer\"\n // https://github.com/visgl/loaders.gl/blob/1e3a82a0a65d7b6a67b1e60633453e5edda2960a/modules/gltf/src/lib/parse-gltf.js#L85\n sourceArray = new Uint8Array(source.buffer || source.arrayBuffer, srcByteOffset, srcByteLength);\n }\n\n // Pack buffer onto the big target array\n target.set(sourceArray, targetOffset);\n\n return targetOffset + padToNBytes(sourceArray.byteLength, 4);\n}\n"],"file":"memory-copy-utils.js"}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../src/lib/env-utils/assert.ts"],"names":["assert","condition","message","Error"],"mappings":"AAIA,OAAO,SAASA,MAAT,CAAgBC,SAAhB,EAAgCC,OAAhC,EAAwD;AAC7D,MAAI,CAACD,SAAL,EAAgB;AACd,UAAM,IAAIE,KAAJ,CAAUD,OAAO,IAAI,0BAArB,CAAN;AACD;AACF","sourcesContent":["/**\n * Throws an `Error` with the optional `message` if `condition` is falsy\n * @note Replacement for the external assert method to reduce bundle size\n */\nexport function assert(condition: any, message?: string): void {\n if (!condition) {\n throw new Error(message || 'loader assertion failed.');\n }\n}\n"],"file":"assert.js"}
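assert.ts is a tiny stand-in for the external assert package (kept local to reduce bundle size); it throws a plain Error when the condition is falsy. For example (export path assumed):

```typescript
// Hypothetical usage; importing from the package root is an assumption.
import {assert} from '@loaders.gl/loader-utils';

function readHeader(data: ArrayBuffer): DataView {
  // Throws Error('buffer too small for header') when the buffer is shorter than 8 bytes
  assert(data.byteLength >= 8, 'buffer too small for header');
  return new DataView(data, 0, 8);
}
```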
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../src/lib/env-utils/globals.ts"],"names":["globals","self","window","global","document","self_","window_","global_","document_","isBrowser","Boolean","process","String","browser","isWorker","importScripts","matches","version","exec","nodeVersion","parseFloat"],"mappings":"AAIA,MAAMA,OAAO,GAAG;AACdC,EAAAA,IAAI,EAAE,OAAOA,IAAP,KAAgB,WAAhB,IAA+BA,IADvB;AAEdC,EAAAA,MAAM,EAAE,OAAOA,MAAP,KAAkB,WAAlB,IAAiCA,MAF3B;AAGdC,EAAAA,MAAM,EAAE,OAAOA,MAAP,KAAkB,WAAlB,IAAiCA,MAH3B;AAIdC,EAAAA,QAAQ,EAAE,OAAOA,QAAP,KAAoB,WAApB,IAAmCA;AAJ/B,CAAhB;AAQA,MAAMC,KAAU,GAAGL,OAAO,CAACC,IAAR,IAAgBD,OAAO,CAACE,MAAxB,IAAkCF,OAAO,CAACG,MAA1C,IAAoD,EAAvE;AACA,MAAMG,OAAY,GAAGN,OAAO,CAACE,MAAR,IAAkBF,OAAO,CAACC,IAA1B,IAAkCD,OAAO,CAACG,MAA1C,IAAoD,EAAzE;AACA,MAAMI,OAAY,GAAGP,OAAO,CAACG,MAAR,IAAkBH,OAAO,CAACC,IAA1B,IAAkCD,OAAO,CAACE,MAA1C,IAAoD,EAAzE;AACA,MAAMM,SAAc,GAAGR,OAAO,CAACI,QAAR,IAAoB,EAA3C;AAEA,SAAQC,KAAK,IAAIJ,IAAjB,EAAuBK,OAAO,IAAIJ,MAAlC,EAA0CK,OAAO,IAAIJ,MAArD,EAA6DK,SAAS,IAAIJ,QAA1E;AAGA,OAAO,MAAMK,SAAkB,GAE7BC,OAAO,CAAC,OAAOC,OAAP,KAAmB,QAAnB,IAA+BC,MAAM,CAACD,OAAD,CAAN,KAAoB,kBAAnD,IAAyEA,OAAO,CAACE,OAAlF,CAFF;AAKP,OAAO,MAAMC,QAAiB,GAAG,OAAOC,aAAP,KAAyB,UAAnD;AAGP,MAAMC,OAAO,GACX,OAAOL,OAAP,KAAmB,WAAnB,IAAkCA,OAAO,CAACM,OAA1C,IAAqD,YAAYC,IAAZ,CAAiBP,OAAO,CAACM,OAAzB,CADvD;AAGA,OAAO,MAAME,WAAmB,GAAIH,OAAO,IAAII,UAAU,CAACJ,OAAO,CAAC,CAAD,CAAR,CAAtB,IAAuC,CAAnE","sourcesContent":["// Purpose: include this in your module to avoid\n// dependencies on micro modules like 'global' and 'is-browser';\n\n/* eslint-disable no-restricted-globals */\nconst globals = {\n self: typeof self !== 'undefined' && self,\n window: typeof window !== 'undefined' && window,\n global: typeof global !== 'undefined' && global,\n document: typeof document !== 'undefined' && document\n};\n\ntype obj = {[key: string]: any};\nconst self_: obj = globals.self || globals.window || globals.global || {};\nconst window_: obj = globals.window || globals.self || globals.global || {};\nconst global_: obj = globals.global || globals.self || globals.window || {};\nconst document_: obj = globals.document || {};\n\nexport {self_ as self, window_ as window, global_ as global, document_ as document};\n\n/** true if running in a browser */\nexport const isBrowser: boolean =\n // @ts-ignore process does not exist on browser\n Boolean(typeof process !== 'object' || String(process) !== '[object process]' || process.browser);\n\n/** true if running in a worker thread */\nexport const isWorker: boolean = typeof importScripts === 'function';\n\n// Extract node major version\nconst matches =\n typeof process !== 'undefined' && process.version && /v([0-9]*)/.exec(process.version);\n/** Major Node version (as a number) */\nexport const nodeVersion: number = (matches && parseFloat(matches[1])) || 0;\n"],"file":"globals.js"}
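globals.ts centralizes environment detection so other modules can avoid micro-dependencies such as is-browser: it exports safe self/window/global/document references plus isBrowser, isWorker and the major Node version. A sketch of branching on those flags (treating the root re-export as an assumption):

```typescript
// Hypothetical usage; root exports are an assumption.
import {isBrowser, isWorker, nodeVersion} from '@loaders.gl/loader-utils';

if (isWorker) {
  // importScripts exists, so this code is running inside a worker thread
} else if (isBrowser) {
  // main browser thread
} else {
  // nodeVersion is the major Node.js version, or 0 if it could not be detected
  console.log(`running under Node.js ${nodeVersion}`);
}
```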
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../src/lib/filesystems/node-filesystem.ts"],"names":["fs","NodeFileSystem","constructor","options","fetch","_fetch","readdir","dirname","stat","path","info","size","Number","isDirectory","fallbackFetch","open","flags","mode","close","fd","fstat","read","buffer","offset","length","byteLength","position","totalBytesRead","bytesRead"],"mappings":"AAAA,OAAO,KAAKA,EAAZ,MAAoB,YAApB;AAuBA,eAAe,MAAMC,cAAN,CAAyE;AAEtFC,EAAAA,WAAW,CAACC,OAAD,EAAgC;AACzC,SAAKC,KAAL,GAAaD,OAAO,CAACE,MAArB;AACD;;AAEY,QAAPC,OAAO,CAACC,OAAO,GAAG,GAAX,EAAgBJ,OAAhB,EAA8C;AACzD,WAAO,MAAMH,EAAE,CAACM,OAAH,CAAWC,OAAX,EAAoBJ,OAApB,CAAb;AACD;;AAES,QAAJK,IAAI,CAACC,IAAD,EAAeN,OAAf,EAA4C;AACpD,UAAMO,IAAI,GAAG,MAAMV,EAAE,CAACQ,IAAH,CAAQC,IAAR,EAAcN,OAAd,CAAnB;AACA,WAAO;AAACQ,MAAAA,IAAI,EAAEC,MAAM,CAACF,IAAI,CAACC,IAAN,CAAb;AAA0BE,MAAAA,WAAW,EAAE,MAAM,KAA7C;AAAoDH,MAAAA;AAApD,KAAP;AACD;;AAEU,QAALN,KAAK,CAACK,IAAD,EAAeN,OAAf,EAA8C;AAGvD,UAAMW,aAAa,GAAGX,OAAO,CAACC,KAAR,IAAiB,KAAKA,KAA5C;AACA,WAAOU,aAAa,CAACL,IAAD,EAAON,OAAP,CAApB;AACD;;AAGS,QAAJY,IAAI,CAACN,IAAD,EAAeO,KAAf,EAAuCC,IAAvC,EAAoE;AAC5E,WAAO,MAAMjB,EAAE,CAACe,IAAH,CAAQN,IAAR,EAAcO,KAAd,CAAb;AACD;;AAEU,QAALE,KAAK,CAACC,EAAD,EAA4B;AACrC,WAAO,MAAMnB,EAAE,CAACkB,KAAH,CAASC,EAAT,CAAb;AACD;;AAEU,QAALC,KAAK,CAACD,EAAD,EAA4B;AACrC,UAAMT,IAAI,GAAG,MAAMV,EAAE,CAACoB,KAAH,CAASD,EAAT,CAAnB;AACA,WAAOT,IAAP;AACD;;AAES,QAAJW,IAAI,CACRF,EADQ,EAGR;AAACG,IAAAA,MAAM,GAAG,IAAV;AAAgBC,IAAAA,MAAM,GAAG,CAAzB;AAA4BC,IAAAA,MAAM,GAAGF,MAAM,CAACG,UAA5C;AAAwDC,IAAAA,QAAQ,GAAG;AAAnE,GAHQ,EAIsC;AAC9C,QAAIC,cAAc,GAAG,CAArB;;AAEA,WAAOA,cAAc,GAAGH,MAAxB,EAAgC;AAC9B,YAAM;AAACI,QAAAA;AAAD,UAAc,MAAM5B,EAAE,CAACqB,IAAH,CACxBF,EADwB,EAExBG,MAFwB,EAGxBC,MAAM,GAAGI,cAHe,EAIxBH,MAAM,GAAGG,cAJe,EAKxBD,QAAQ,GAAGC,cALa,CAA1B;AAOAA,MAAAA,cAAc,IAAIC,SAAlB;AACD;;AACD,WAAO;AAACA,MAAAA,SAAS,EAAED,cAAZ;AAA4BL,MAAAA;AAA5B,KAAP;AACD;;AAtDqF","sourcesContent":["import * as fs from '../node/fs';\nimport {IFileSystem, IRandomAccessReadFileSystem} from '../../types';\n// import {fetchFile} from \"../fetch/fetch-file\"\n// import {selectLoader} from \"../api/select-loader\";\n\ntype Stat = {\n size: number;\n isDirectory: () => boolean;\n info?: fs.Stats;\n};\n\ntype ReadOptions = {\n buffer?: Buffer;\n offset?: number;\n length?: number;\n position?: number;\n};\n\n/**\n * FileSystem pass-through for Node.js\n * Compatible with BrowserFileSystem.\n * @param options\n */\nexport default class NodeFileSystem implements IFileSystem, IRandomAccessReadFileSystem {\n // implements IFileSystem\n constructor(options: {[key: string]: any}) {\n this.fetch = options._fetch;\n }\n\n async readdir(dirname = '.', options?: {}): Promise<any[]> {\n return await fs.readdir(dirname, options);\n }\n\n async stat(path: string, options?: {}): Promise<Stat> {\n const info = await fs.stat(path, options);\n return {size: Number(info.size), isDirectory: () => false, info};\n }\n\n async fetch(path: string, options: {[key: string]: any}) {\n // Falls back to handle https:/http:/data: etc fetches\n // eslint-disable-next-line\n const fallbackFetch = options.fetch || this.fetch;\n return fallbackFetch(path, options);\n }\n\n // implements IRandomAccessFileSystem\n async open(path: string, flags: string | number, mode?: any): Promise<number> {\n return await fs.open(path, flags);\n }\n\n async close(fd: number): Promise<void> {\n return await fs.close(fd);\n }\n\n async fstat(fd: number): Promise<Stat> {\n const info = await fs.fstat(fd);\n return info;\n }\n\n async read(\n fd: number,\n // @ts-ignore Possibly null\n {buffer = null, offset = 0, length = buffer.byteLength, position = null}: ReadOptions\n ): Promise<{bytesRead: number; buffer: Buffer}> {\n let totalBytesRead = 0;\n // Read in loop until we get required number of bytes\n while (totalBytesRead < length) {\n const {bytesRead} = await fs.read(\n fd,\n buffer,\n offset + totalBytesRead,\n length - totalBytesRead,\n position + totalBytesRead\n );\n totalBytesRead += bytesRead;\n }\n return {bytesRead: totalBytesRead, buffer};\n }\n}\n"],"file":"node-filesystem.js"}
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../src/lib/iterators/async-iteration.ts"],"names":["concatenateArrayBuffers","forEach","iterator","visitor","done","value","next","return","cancel","concatenateArrayBuffersAsync","asyncIterator","arrayBuffers","chunk","push","concatenateStringsAsync","strings","join"],"mappings":"AAAA,SAAQA,uBAAR,QAAsC,oCAAtC;AAgBA,OAAO,eAAeC,OAAf,CAAuBC,QAAvB,EAAiCC,OAAjC,EAA0C;AAE/C,SAAO,IAAP,EAAa;AACX,UAAM;AAACC,MAAAA,IAAD;AAAOC,MAAAA;AAAP,QAAgB,MAAMH,QAAQ,CAACI,IAAT,EAA5B;;AACA,QAAIF,IAAJ,EAAU;AACRF,MAAAA,QAAQ,CAACK,MAAT;AACA;AACD;;AACD,UAAMC,MAAM,GAAGL,OAAO,CAACE,KAAD,CAAtB;;AACA,QAAIG,MAAJ,EAAY;AACV;AACD;AACF;AACF;AASD,OAAO,eAAeC,4BAAf,CACLC,aADK,EAEiB;AACtB,QAAMC,YAA2B,GAAG,EAApC;;AACA,aAAW,MAAMC,KAAjB,IAA0BF,aAA1B,EAAyC;AACvCC,IAAAA,YAAY,CAACE,IAAb,CAAkBD,KAAlB;AACD;;AACD,SAAOZ,uBAAuB,CAAC,GAAGW,YAAJ,CAA9B;AACD;AAED,OAAO,eAAeG,uBAAf,CACLJ,aADK,EAEY;AACjB,QAAMK,OAAiB,GAAG,EAA1B;;AACA,aAAW,MAAMH,KAAjB,IAA0BF,aAA1B,EAAyC;AACvCK,IAAAA,OAAO,CAACF,IAAR,CAAaD,KAAb;AACD;;AACD,SAAOG,OAAO,CAACC,IAAR,CAAa,EAAb,CAAP;AACD","sourcesContent":["import {concatenateArrayBuffers} from '../binary-utils/array-buffer-utils';\n\n// GENERAL UTILITIES\n\n/**\n * Iterate over async iterator, without resetting iterator if end is not reached\n * - forEach intentionally does not reset iterator if exiting loop prematurely\n * so that iteration can continue in a second loop\n * - It is recommended to use a standard for-await as last loop to ensure\n * iterator gets properly reset\n *\n * TODO - optimize using sync iteration if argument is an Iterable?\n *\n * @param iterator\n * @param visitor\n */\nexport async function forEach(iterator, visitor) {\n // eslint-disable-next-line\n while (true) {\n const {done, value} = await iterator.next();\n if (done) {\n iterator.return();\n return;\n }\n const cancel = visitor(value);\n if (cancel) {\n return;\n }\n }\n}\n\n// Breaking big data into iterable chunks, concatenating iterable chunks into big data objects\n\n/**\n * Concatenates all data chunks yielded by an (async) iterator\n * This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs\n */\n\nexport async function concatenateArrayBuffersAsync(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n): Promise<ArrayBuffer> {\n const arrayBuffers: ArrayBuffer[] = [];\n for await (const chunk of asyncIterator) {\n arrayBuffers.push(chunk);\n }\n return concatenateArrayBuffers(...arrayBuffers);\n}\n\nexport async function concatenateStringsAsync(\n asyncIterator: AsyncIterable<string> | Iterable<string>\n): Promise<string> {\n const strings: string[] = [];\n for await (const chunk of asyncIterator) {\n strings.push(chunk);\n }\n return strings.join('');\n}\n"],"file":"async-iteration.js"}
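async-iteration.ts contains helpers for consuming (async) iterators of chunks: forEach visits chunks until the visitor returns a truthy value, and the concatenate*Async helpers drain an iterator into a single ArrayBuffer or string. A sketch under the same export assumption:

```typescript
// Hypothetical usage; root exports are an assumption.
import {concatenateArrayBuffersAsync, forEach} from '@loaders.gl/loader-utils';

async function* chunks(): AsyncGenerator<ArrayBuffer> {
  yield new Uint8Array([1, 2]).buffer;
  yield new Uint8Array([3, 4]).buffer;
}

async function main() {
  // Collect every chunk into one contiguous ArrayBuffer
  const combined = await concatenateArrayBuffersAsync(chunks());
  console.log(combined.byteLength); // 4

  // Returning a truthy value from the visitor stops iteration without resetting the iterator
  await forEach(chunks(), (chunk: ArrayBuffer) => chunk.byteLength >= 2);
}

main();
```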