@loaders.gl/loader-utils 3.4.0-alpha.2 → 3.4.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/json-loader.js +7 -11
- package/dist/es5/json-loader.js.map +1 -1
- package/dist/es5/lib/binary-utils/array-buffer-utils.js +1 -8
- package/dist/es5/lib/binary-utils/array-buffer-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/dataview-copy-utils.js +0 -5
- package/dist/es5/lib/binary-utils/dataview-copy-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/get-first-characters.js +0 -2
- package/dist/es5/lib/binary-utils/get-first-characters.js.map +1 -1
- package/dist/es5/lib/binary-utils/memory-conversion-utils.js +0 -5
- package/dist/es5/lib/binary-utils/memory-conversion-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/memory-copy-utils.js +0 -3
- package/dist/es5/lib/binary-utils/memory-copy-utils.js.map +1 -1
- package/dist/es5/lib/env-utils/assert.js.map +1 -1
- package/dist/es5/lib/env-utils/globals.js +1 -5
- package/dist/es5/lib/env-utils/globals.js.map +1 -1
- package/dist/es5/lib/filesystems/node-filesystem.js +86 -100
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/es5/lib/filesystems/readable-file.js +24 -33
- package/dist/es5/lib/filesystems/readable-file.js.map +1 -1
- package/dist/es5/lib/filesystems/writable-file.js +18 -25
- package/dist/es5/lib/filesystems/writable-file.js.map +1 -1
- package/dist/es5/lib/iterators/async-iteration.js +134 -140
- package/dist/es5/lib/iterators/async-iteration.js.map +1 -1
- package/dist/es5/lib/iterators/text-iterators.js +207 -221
- package/dist/es5/lib/iterators/text-iterators.js.map +1 -1
- package/dist/es5/lib/node/buffer.browser.js +0 -2
- package/dist/es5/lib/node/buffer.browser.js.map +1 -1
- package/dist/es5/lib/node/buffer.js +0 -2
- package/dist/es5/lib/node/buffer.js.map +1 -1
- package/dist/es5/lib/node/fs.js +19 -30
- package/dist/es5/lib/node/fs.js.map +1 -1
- package/dist/es5/lib/node/promisify.js +0 -1
- package/dist/es5/lib/node/promisify.js.map +1 -1
- package/dist/es5/lib/node/stream.js +1 -5
- package/dist/es5/lib/node/stream.js.map +1 -1
- package/dist/es5/lib/option-utils/merge-loader-options.js +0 -1
- package/dist/es5/lib/option-utils/merge-loader-options.js.map +1 -1
- package/dist/es5/lib/parser-utils/parse-json.js.map +1 -1
- package/dist/es5/lib/path-utils/file-aliases.js +0 -5
- package/dist/es5/lib/path-utils/file-aliases.js.map +1 -1
- package/dist/es5/lib/path-utils/get-cwd.js +15 -0
- package/dist/es5/lib/path-utils/get-cwd.js.map +1 -0
- package/dist/es5/lib/path-utils/path.js +105 -5
- package/dist/es5/lib/path-utils/path.js.map +1 -1
- package/dist/es5/lib/request-utils/request-scheduler.js +6 -29
- package/dist/es5/lib/request-utils/request-scheduler.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js +69 -79
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js +0 -1
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js +70 -77
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map +1 -1
- package/dist/es5/types.js.map +1 -1
- package/dist/es5/workers/json-worker.js.map +1 -1
- package/dist/esm/index.js +0 -15
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/json-loader.js +1 -4
- package/dist/esm/json-loader.js.map +1 -1
- package/dist/esm/lib/binary-utils/array-buffer-utils.js +0 -8
- package/dist/esm/lib/binary-utils/array-buffer-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/dataview-copy-utils.js +0 -7
- package/dist/esm/lib/binary-utils/dataview-copy-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/get-first-characters.js +0 -3
- package/dist/esm/lib/binary-utils/get-first-characters.js.map +1 -1
- package/dist/esm/lib/binary-utils/memory-conversion-utils.js +0 -7
- package/dist/esm/lib/binary-utils/memory-conversion-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/memory-copy-utils.js +0 -4
- package/dist/esm/lib/binary-utils/memory-copy-utils.js.map +1 -1
- package/dist/esm/lib/env-utils/assert.js +0 -1
- package/dist/esm/lib/env-utils/assert.js.map +1 -1
- package/dist/esm/lib/env-utils/globals.js +1 -7
- package/dist/esm/lib/env-utils/globals.js.map +1 -1
- package/dist/esm/lib/filesystems/node-filesystem.js +0 -1
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/esm/lib/filesystems/readable-file.js +0 -2
- package/dist/esm/lib/filesystems/readable-file.js.map +1 -1
- package/dist/esm/lib/filesystems/writable-file.js +0 -1
- package/dist/esm/lib/filesystems/writable-file.js.map +1 -1
- package/dist/esm/lib/iterators/async-iteration.js +0 -2
- package/dist/esm/lib/iterators/async-iteration.js.map +1 -1
- package/dist/esm/lib/iterators/text-iterators.js +13 -12
- package/dist/esm/lib/iterators/text-iterators.js.map +1 -1
- package/dist/esm/lib/node/buffer.browser.js +0 -3
- package/dist/esm/lib/node/buffer.browser.js.map +1 -1
- package/dist/esm/lib/node/buffer.js +0 -3
- package/dist/esm/lib/node/buffer.js.map +1 -1
- package/dist/esm/lib/node/fs.js +1 -8
- package/dist/esm/lib/node/fs.js.map +1 -1
- package/dist/esm/lib/node/promisify.js +0 -2
- package/dist/esm/lib/node/promisify.js.map +1 -1
- package/dist/esm/lib/node/stream.js +1 -5
- package/dist/esm/lib/node/stream.js.map +1 -1
- package/dist/esm/lib/option-utils/merge-loader-options.js +0 -2
- package/dist/esm/lib/option-utils/merge-loader-options.js.map +1 -1
- package/dist/esm/lib/parser-utils/parse-json.js +0 -1
- package/dist/esm/lib/parser-utils/parse-json.js.map +1 -1
- package/dist/esm/lib/path-utils/file-aliases.js +0 -6
- package/dist/esm/lib/path-utils/file-aliases.js.map +1 -1
- package/dist/esm/lib/path-utils/get-cwd.js +9 -0
- package/dist/esm/lib/path-utils/get-cwd.js.map +1 -0
- package/dist/esm/lib/path-utils/path.js +104 -6
- package/dist/esm/lib/path-utils/path.js.map +1 -1
- package/dist/esm/lib/request-utils/request-scheduler.js +0 -16
- package/dist/esm/lib/request-utils/request-scheduler.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js +0 -10
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js +0 -2
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js +1 -8
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js.map +1 -1
- package/dist/esm/types.js.map +1 -1
- package/dist/lib/path-utils/get-cwd.d.ts +2 -0
- package/dist/lib/path-utils/get-cwd.d.ts.map +1 -0
- package/dist/lib/path-utils/get-cwd.js +12 -0
- package/dist/lib/path-utils/path.d.ts +8 -0
- package/dist/lib/path-utils/path.d.ts.map +1 -1
- package/dist/lib/path-utils/path.js +141 -3
- package/dist/lib/worker-loader-utils/create-loader-worker.js +1 -1
- package/dist/types.d.ts +7 -7
- package/dist/types.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/lib/path-utils/get-cwd.ts +9 -0
- package/src/lib/path-utils/path.ts +138 -4
- package/src/types.ts +7 -7
package/dist/esm/json-loader.js
CHANGED
@@ -1,6 +1,4 @@
-
-const VERSION = typeof "3.4.0-alpha.2" !== 'undefined' ? "3.4.0-alpha.2" : 'latest';
-
+const VERSION = typeof "3.4.0-alpha.4" !== 'undefined' ? "3.4.0-alpha.4" : 'latest';
 export const JSONLoader = {
   name: 'JSON',
   id: 'json',
@@ -14,7 +12,6 @@ export const JSONLoader = {
   parse: async arrayBuffer => parseTextSync(new TextDecoder().decode(arrayBuffer)),
   options: {}
 };
-
 function parseTextSync(text) {
   return JSON.parse(text);
 }

package/dist/esm/json-loader.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"json-loader.js","names":["VERSION","JSONLoader","name","id","module","version","extensions","mimeTypes","category","text","parseTextSync","parse","arrayBuffer","TextDecoder","decode","options","JSON","_typecheckJSONLoader"],"sources":["../../src/json-loader.ts"],"sourcesContent":["import type {LoaderWithParser} from './types';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * A JSON Micro loader (minimal bundle size)\n * Alternative to `@loaders.gl/json`\n */\nexport const JSONLoader = {\n name: 'JSON',\n id: 'json',\n module: 'json',\n version: VERSION,\n extensions: ['json', 'geojson'],\n mimeTypes: ['application/json'],\n category: 'json',\n text: true,\n parseTextSync,\n parse: async (arrayBuffer) => parseTextSync(new TextDecoder().decode(arrayBuffer)),\n options: {}\n};\n\n// TODO - deprecated\nfunction parseTextSync(text) {\n return JSON.parse(text);\n}\n\nexport const _typecheckJSONLoader: LoaderWithParser = JSONLoader;\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"json-loader.js","names":["VERSION","JSONLoader","name","id","module","version","extensions","mimeTypes","category","text","parseTextSync","parse","arrayBuffer","TextDecoder","decode","options","JSON","_typecheckJSONLoader"],"sources":["../../src/json-loader.ts"],"sourcesContent":["import type {LoaderWithParser} from './types';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * A JSON Micro loader (minimal bundle size)\n * Alternative to `@loaders.gl/json`\n */\nexport const JSONLoader = {\n name: 'JSON',\n id: 'json',\n module: 'json',\n version: VERSION,\n extensions: ['json', 'geojson'],\n mimeTypes: ['application/json'],\n category: 'json',\n text: true,\n parseTextSync,\n parse: async (arrayBuffer) => parseTextSync(new TextDecoder().decode(arrayBuffer)),\n options: {}\n};\n\n// TODO - deprecated\nfunction parseTextSync(text) {\n return JSON.parse(text);\n}\n\nexport const _typecheckJSONLoader: LoaderWithParser = JSONLoader;\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;AAM3E,OAAO,MAAMC,UAAU,GAAG;EACxBC,IAAI,EAAE,MAAM;EACZC,EAAE,EAAE,MAAM;EACVC,MAAM,EAAE,MAAM;EACdC,OAAO,EAAEL,OAAO;EAChBM,UAAU,EAAE,CAAC,MAAM,EAAE,SAAS,CAAC;EAC/BC,SAAS,EAAE,CAAC,kBAAkB,CAAC;EAC/BC,QAAQ,EAAE,MAAM;EAChBC,IAAI,EAAE,IAAI;EACVC,aAAa;EACbC,KAAK,EAAE,MAAOC,WAAW,IAAKF,aAAa,CAAC,IAAIG,WAAW,CAAC,CAAC,CAACC,MAAM,CAACF,WAAW,CAAC,CAAC;EAClFG,OAAO,EAAE,CAAC;AACZ,CAAC;AAGD,SAASL,aAAaA,CAACD,IAAI,EAAE;EAC3B,OAAOO,IAAI,CAACL,KAAK,CAACF,IAAI,CAAC;AACzB;AAEA,OAAO,MAAMQ,oBAAsC,GAAGhB,UAAU"}

package/dist/esm/lib/binary-utils/array-buffer-utils.js
CHANGED
@@ -1,4 +1,3 @@
-
 export function compareArrayBuffers(arrayBuffer1, arrayBuffer2, byteLength) {
   byteLength = byteLength || arrayBuffer1.byteLength;
   if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {
@@ -13,26 +12,20 @@ export function compareArrayBuffers(arrayBuffer1, arrayBuffer2, byteLength) {
   }
   return true;
 }
-
 export function concatenateArrayBuffers() {
   for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {
     sources[_key] = arguments[_key];
   }
   const sourceArrays = sources.map(source2 => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2);
-
   const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
-
   const result = new Uint8Array(byteLength);
-
   let offset = 0;
   for (const sourceArray of sourceArrays) {
     result.set(sourceArray, offset);
     offset += sourceArray.byteLength;
   }
-
   return result.buffer;
 }
-
 export function concatenateTypedArrays() {
   for (var _len2 = arguments.length, typedArrays = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
     typedArrays[_key2] = arguments[_key2];
@@ -51,7 +44,6 @@ export function concatenateTypedArrays() {
   }
   return result;
 }
-
 export function sliceArrayBuffer(arrayBuffer, byteOffset, byteLength) {
   const subArray = byteLength !== undefined ? new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength) : new Uint8Array(arrayBuffer).subarray(byteOffset);
   const arrayCopy = new Uint8Array(subArray);

package/dist/esm/lib/binary-utils/array-buffer-utils.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"array-buffer-utils.js","names":["compareArrayBuffers","arrayBuffer1","arrayBuffer2","byteLength","array1","Uint8Array","array2","i","length","concatenateArrayBuffers","sources","sourceArrays","map","source2","ArrayBuffer","reduce","typedArray","result","offset","sourceArray","set","buffer","concatenateTypedArrays","typedArrays","arrays","TypedArrayConstructor","constructor","Error","sumLength","acc","value","array","sliceArrayBuffer","arrayBuffer","byteOffset","subArray","undefined","subarray","arrayCopy"],"sources":["../../../../src/lib/binary-utils/array-buffer-utils.ts"],"sourcesContent":["import {TypedArray} from '../../types';\n\n/**\n * compare two binary arrays for equality\n * @param a\n * @param b\n * @param byteLength\n */\nexport function compareArrayBuffers(\n arrayBuffer1: ArrayBuffer,\n arrayBuffer2: ArrayBuffer,\n byteLength?: number\n): boolean {\n byteLength = byteLength || arrayBuffer1.byteLength;\n if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {\n return false;\n }\n const array1 = new Uint8Array(arrayBuffer1);\n const array2 = new Uint8Array(arrayBuffer2);\n for (let i = 0; i < array1.length; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n */\nexport function concatenateArrayBuffers(...sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * Concatenate arbitrary count of typed arrays\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays\n * @param - list of arrays. All arrays should be the same type\n * @return A concatenated TypedArray\n */\nexport function concatenateTypedArrays<T>(...typedArrays: T[]): T {\n // @ts-ignore\n const arrays = typedArrays as TypedArray[];\n // @ts-ignore\n const TypedArrayConstructor = (arrays && arrays.length > 1 && arrays[0].constructor) || null;\n if (!TypedArrayConstructor) {\n throw new Error(\n '\"concatenateTypedArrays\" - incorrect quantity of arguments or arguments have incompatible data types'\n );\n }\n\n const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);\n // @ts-ignore typescript does not like dynamic constructors\n const result = new TypedArrayConstructor(sumLength);\n let offset = 0;\n for (const array of arrays) {\n result.set(array, offset);\n offset += array.length;\n }\n return result;\n}\n\n/**\n * Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0\n * @param arrayBuffer\n * @param byteOffset\n * @param byteLength\n */\nexport function sliceArrayBuffer(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength?: number\n): ArrayBuffer {\n const subArray =\n byteLength !== undefined\n ? 
new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength)\n : new Uint8Array(arrayBuffer).subarray(byteOffset);\n const arrayCopy = new Uint8Array(subArray);\n return arrayCopy.buffer;\n}\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"array-buffer-utils.js","names":["compareArrayBuffers","arrayBuffer1","arrayBuffer2","byteLength","array1","Uint8Array","array2","i","length","concatenateArrayBuffers","_len","arguments","sources","Array","_key","sourceArrays","map","source2","ArrayBuffer","reduce","typedArray","result","offset","sourceArray","set","buffer","concatenateTypedArrays","_len2","typedArrays","_key2","arrays","TypedArrayConstructor","constructor","Error","sumLength","acc","value","array","sliceArrayBuffer","arrayBuffer","byteOffset","subArray","undefined","subarray","arrayCopy"],"sources":["../../../../src/lib/binary-utils/array-buffer-utils.ts"],"sourcesContent":["import {TypedArray} from '../../types';\n\n/**\n * compare two binary arrays for equality\n * @param a\n * @param b\n * @param byteLength\n */\nexport function compareArrayBuffers(\n arrayBuffer1: ArrayBuffer,\n arrayBuffer2: ArrayBuffer,\n byteLength?: number\n): boolean {\n byteLength = byteLength || arrayBuffer1.byteLength;\n if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {\n return false;\n }\n const array1 = new Uint8Array(arrayBuffer1);\n const array2 = new Uint8Array(arrayBuffer2);\n for (let i = 0; i < array1.length; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n */\nexport function concatenateArrayBuffers(...sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * Concatenate arbitrary count of typed arrays\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays\n * @param - list of arrays. All arrays should be the same type\n * @return A concatenated TypedArray\n */\nexport function concatenateTypedArrays<T>(...typedArrays: T[]): T {\n // @ts-ignore\n const arrays = typedArrays as TypedArray[];\n // @ts-ignore\n const TypedArrayConstructor = (arrays && arrays.length > 1 && arrays[0].constructor) || null;\n if (!TypedArrayConstructor) {\n throw new Error(\n '\"concatenateTypedArrays\" - incorrect quantity of arguments or arguments have incompatible data types'\n );\n }\n\n const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);\n // @ts-ignore typescript does not like dynamic constructors\n const result = new TypedArrayConstructor(sumLength);\n let offset = 0;\n for (const array of arrays) {\n result.set(array, offset);\n offset += array.length;\n }\n return result;\n}\n\n/**\n * Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0\n * @param arrayBuffer\n * @param byteOffset\n * @param byteLength\n */\nexport function sliceArrayBuffer(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength?: number\n): ArrayBuffer {\n const subArray =\n byteLength !== undefined\n ? 
new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength)\n : new Uint8Array(arrayBuffer).subarray(byteOffset);\n const arrayCopy = new Uint8Array(subArray);\n return arrayCopy.buffer;\n}\n"],"mappings":"AAQA,OAAO,SAASA,mBAAmBA,CACjCC,YAAyB,EACzBC,YAAyB,EACzBC,UAAmB,EACV;EACTA,UAAU,GAAGA,UAAU,IAAIF,YAAY,CAACE,UAAU;EAClD,IAAIF,YAAY,CAACE,UAAU,GAAGA,UAAU,IAAID,YAAY,CAACC,UAAU,GAAGA,UAAU,EAAE;IAChF,OAAO,KAAK;EACd;EACA,MAAMC,MAAM,GAAG,IAAIC,UAAU,CAACJ,YAAY,CAAC;EAC3C,MAAMK,MAAM,GAAG,IAAID,UAAU,CAACH,YAAY,CAAC;EAC3C,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,MAAM,CAACI,MAAM,EAAE,EAAED,CAAC,EAAE;IACtC,IAAIH,MAAM,CAACG,CAAC,CAAC,KAAKD,MAAM,CAACC,CAAC,CAAC,EAAE;MAC3B,OAAO,KAAK;IACd;EACF;EACA,OAAO,IAAI;AACb;AAMA,OAAO,SAASE,uBAAuBA,CAAA,EAAwD;EAAA,SAAAC,IAAA,GAAAC,SAAA,CAAAH,MAAA,EAApDI,OAAO,OAAAC,KAAA,CAAAH,IAAA,GAAAI,IAAA,MAAAA,IAAA,GAAAJ,IAAA,EAAAI,IAAA;IAAPF,OAAO,CAAAE,IAAA,IAAAH,SAAA,CAAAG,IAAA;EAAA;EAEhD,MAAMC,YAAY,GAAGH,OAAO,CAACI,GAAG,CAAEC,OAAO,IACvCA,OAAO,YAAYC,WAAW,GAAG,IAAIb,UAAU,CAACY,OAAO,CAAC,GAAGA,OAC7D,CAAC;EAGD,MAAMd,UAAU,GAAGY,YAAY,CAACI,MAAM,CAAC,CAACX,MAAM,EAAEY,UAAU,KAAKZ,MAAM,GAAGY,UAAU,CAACjB,UAAU,EAAE,CAAC,CAAC;EAGjG,MAAMkB,MAAM,GAAG,IAAIhB,UAAU,CAACF,UAAU,CAAC;EAGzC,IAAImB,MAAM,GAAG,CAAC;EACd,KAAK,MAAMC,WAAW,IAAIR,YAAY,EAAE;IACtCM,MAAM,CAACG,GAAG,CAACD,WAAW,EAAED,MAAM,CAAC;IAC/BA,MAAM,IAAIC,WAAW,CAACpB,UAAU;EAClC;EAGA,OAAOkB,MAAM,CAACI,MAAM;AACtB;AAQA,OAAO,SAASC,sBAAsBA,CAAA,EAA4B;EAAA,SAAAC,KAAA,GAAAhB,SAAA,CAAAH,MAAA,EAArBoB,WAAW,OAAAf,KAAA,CAAAc,KAAA,GAAAE,KAAA,MAAAA,KAAA,GAAAF,KAAA,EAAAE,KAAA;IAAXD,WAAW,CAAAC,KAAA,IAAAlB,SAAA,CAAAkB,KAAA;EAAA;EAEtD,MAAMC,MAAM,GAAGF,WAA2B;EAE1C,MAAMG,qBAAqB,GAAID,MAAM,IAAIA,MAAM,CAACtB,MAAM,GAAG,CAAC,IAAIsB,MAAM,CAAC,CAAC,CAAC,CAACE,WAAW,IAAK,IAAI;EAC5F,IAAI,CAACD,qBAAqB,EAAE;IAC1B,MAAM,IAAIE,KAAK,CACb,sGACF,CAAC;EACH;EAEA,MAAMC,SAAS,GAAGJ,MAAM,CAACX,MAAM,CAAC,CAACgB,GAAG,EAAEC,KAAK,KAAKD,GAAG,GAAGC,KAAK,CAAC5B,MAAM,EAAE,CAAC,CAAC;EAEtE,MAAMa,MAAM,GAAG,IAAIU,qBAAqB,CAACG,SAAS,CAAC;EACnD,IAAIZ,MAAM,GAAG,CAAC;EACd,KAAK,MAAMe,KAAK,IAAIP,MAAM,EAAE;IAC1BT,MAAM,CAACG,GAAG,CAACa,KAAK,EAAEf,MAAM,CAAC;IACzBA,MAAM,IAAIe,KAAK,CAAC7B,MAAM;EACxB;EACA,OAAOa,MAAM;AACf;AAQA,OAAO,SAASiB,gBAAgBA,CAC9BC,WAAwB,EACxBC,UAAkB,EAClBrC,UAAmB,EACN;EACb,MAAMsC,QAAQ,GACZtC,UAAU,KAAKuC,SAAS,GACpB,IAAIrC,UAAU,CAACkC,WAAW,CAAC,CAACI,QAAQ,CAACH,UAAU,EAAEA,UAAU,GAAGrC,UAAU,CAAC,GACzE,IAAIE,UAAU,CAACkC,WAAW,CAAC,CAACI,QAAQ,CAACH,UAAU,CAAC;EACtD,MAAMI,SAAS,GAAG,IAAIvC,UAAU,CAACoC,QAAQ,CAAC;EAC1C,OAAOG,SAAS,CAACnB,MAAM;AACzB"}
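For reference, a short usage sketch of the buffer helpers in this file. Signatures are taken from the sources embedded in the map above; importing them from the package entry point is an assumption.

// Hypothetical usage sketch (not part of the diff)
import {concatenateArrayBuffers, sliceArrayBuffer, compareArrayBuffers} from '@loaders.gl/loader-utils';

const head = new Uint8Array([1, 2, 3]);
const tail = new Uint8Array([4, 5]);

// Concatenate ArrayBuffers and/or Uint8Arrays into one 5-byte ArrayBuffer
const joined = concatenateArrayBuffers(head, tail);

// Copy bytes 3..4 into a fresh ArrayBuffer (byteOffset reset to 0)
const lastTwo = sliceArrayBuffer(joined, 3, 2);

// Compare the first 2 bytes of both buffers (cast: Uint8Array.buffer is typed ArrayBufferLike)
compareArrayBuffers(lastTwo, tail.buffer as ArrayBuffer, 2); // true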

package/dist/esm/lib/binary-utils/dataview-copy-utils.js
CHANGED
@@ -1,7 +1,4 @@
-
-
 import { padToNBytes } from './memory-copy-utils';
-
 export function padStringToByteAlignment(string, byteAlignment) {
   const length = string.length;
   const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment;
@@ -12,7 +9,6 @@ export function padStringToByteAlignment(string, byteAlignment) {
   }
   return string + whitespace;
 }
-
 export function copyStringToDataView(dataView, byteOffset, string, byteLength) {
   if (dataView) {
     for (let i = 0; i < byteLength; i++) {
@@ -29,7 +25,6 @@ export function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {
   }
   return byteOffset + byteLength;
 }
-
 export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuffer, padding) {
   const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);
   const padLength = paddedLength - sourceBuffer.byteLength;
@@ -37,7 +32,6 @@ export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuff
     const targetArray = new Uint8Array(dataView.buffer, dataView.byteOffset + byteOffset, sourceBuffer.byteLength);
     const sourceArray = new Uint8Array(sourceBuffer);
     targetArray.set(sourceArray);
-
     for (let i = 0; i < padLength; ++i) {
       dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);
     }
@@ -45,7 +39,6 @@ export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuff
   byteOffset += paddedLength;
   return byteOffset;
 }
-
 export function copyPaddedStringToDataView(dataView, byteOffset, string, padding) {
   const textEncoder = new TextEncoder();
   const stringBuffer = textEncoder.encode(string);

package/dist/esm/lib/binary-utils/dataview-copy-utils.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"dataview-copy-utils.js","names":["padToNBytes","padStringToByteAlignment","string","byteAlignment","length","paddedLength","Math","ceil","padding","whitespace","i","copyStringToDataView","dataView","byteOffset","byteLength","setUint8","charCodeAt","copyBinaryToDataView","binary","copyPaddedArrayBufferToDataView","sourceBuffer","padLength","targetArray","Uint8Array","buffer","sourceArray","set","copyPaddedStringToDataView","textEncoder","TextEncoder","stringBuffer","encode"],"sources":["../../../../src/lib/binary-utils/dataview-copy-utils.ts"],"sourcesContent":["// loaders./gl, MIT license\n\nimport {TypedArray} from '../../types';\nimport {padToNBytes} from './memory-copy-utils';\n\n/**\n * Helper function that pads a string with spaces to fit a certain byte alignment\n * @param string\n * @param byteAlignment\n * @returns\n *\n * @todo PERFORMANCE IDEA: No need to copy string twice...\n */\nexport function padStringToByteAlignment(string: string, byteAlignment: number): string {\n const length = string.length;\n const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment; // Round up to the required alignment\n const padding = paddedLength - length;\n let whitespace = '';\n for (let i = 0; i < padding; ++i) {\n whitespace += ' ';\n }\n return string + whitespace;\n}\n\n/**\n *\n * @param dataView\n * @param byteOffset\n * @param string\n * @param byteLength\n * @returns\n */\nexport function copyStringToDataView(\n dataView: DataView,\n byteOffset: number,\n string: string,\n byteLength: number\n): number {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, string.charCodeAt(i));\n }\n }\n return byteOffset + byteLength;\n}\n\nexport function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, binary[i]);\n }\n }\n return byteOffset + byteLength;\n}\n\n/**\n * Copy sourceBuffer to dataView with some padding\n *\n * @param dataView - destination data container. If null - only new offset is calculated\n * @param byteOffset - destination byte offset to copy to\n * @param sourceBuffer - source data buffer\n * @param padding - pad the resulting array to multiple of \"padding\" bytes. Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedArrayBufferToDataView(\n dataView: DataView | null,\n byteOffset: number,\n sourceBuffer: TypedArray,\n padding: number\n): number {\n const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);\n const padLength = paddedLength - sourceBuffer.byteLength;\n\n if (dataView) {\n // Copy array\n const targetArray = new Uint8Array(\n dataView.buffer,\n dataView.byteOffset + byteOffset,\n sourceBuffer.byteLength\n );\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n\n // Add PADDING\n for (let i = 0; i < padLength; ++i) {\n // json chunk is padded with spaces (ASCII 0x20)\n dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);\n }\n }\n byteOffset += paddedLength;\n return byteOffset;\n}\n\n/**\n * Copy string to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {string} string - source string\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. 
Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedStringToDataView(\n dataView: DataView | null,\n byteOffset: number,\n string: string,\n padding: number\n): number {\n const textEncoder = new TextEncoder();\n // PERFORMANCE IDEA: We encode twice, once to get size and once to store\n // PERFORMANCE IDEA: Use TextEncoder.encodeInto() to avoid temporary copy\n const stringBuffer = textEncoder.encode(string);\n\n byteOffset = copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);\n\n return byteOffset;\n}\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"dataview-copy-utils.js","names":["padToNBytes","padStringToByteAlignment","string","byteAlignment","length","paddedLength","Math","ceil","padding","whitespace","i","copyStringToDataView","dataView","byteOffset","byteLength","setUint8","charCodeAt","copyBinaryToDataView","binary","copyPaddedArrayBufferToDataView","sourceBuffer","padLength","targetArray","Uint8Array","buffer","sourceArray","set","copyPaddedStringToDataView","textEncoder","TextEncoder","stringBuffer","encode"],"sources":["../../../../src/lib/binary-utils/dataview-copy-utils.ts"],"sourcesContent":["// loaders./gl, MIT license\n\nimport {TypedArray} from '../../types';\nimport {padToNBytes} from './memory-copy-utils';\n\n/**\n * Helper function that pads a string with spaces to fit a certain byte alignment\n * @param string\n * @param byteAlignment\n * @returns\n *\n * @todo PERFORMANCE IDEA: No need to copy string twice...\n */\nexport function padStringToByteAlignment(string: string, byteAlignment: number): string {\n const length = string.length;\n const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment; // Round up to the required alignment\n const padding = paddedLength - length;\n let whitespace = '';\n for (let i = 0; i < padding; ++i) {\n whitespace += ' ';\n }\n return string + whitespace;\n}\n\n/**\n *\n * @param dataView\n * @param byteOffset\n * @param string\n * @param byteLength\n * @returns\n */\nexport function copyStringToDataView(\n dataView: DataView,\n byteOffset: number,\n string: string,\n byteLength: number\n): number {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, string.charCodeAt(i));\n }\n }\n return byteOffset + byteLength;\n}\n\nexport function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {\n if (dataView) {\n for (let i = 0; i < byteLength; i++) {\n dataView.setUint8(byteOffset + i, binary[i]);\n }\n }\n return byteOffset + byteLength;\n}\n\n/**\n * Copy sourceBuffer to dataView with some padding\n *\n * @param dataView - destination data container. If null - only new offset is calculated\n * @param byteOffset - destination byte offset to copy to\n * @param sourceBuffer - source data buffer\n * @param padding - pad the resulting array to multiple of \"padding\" bytes. Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedArrayBufferToDataView(\n dataView: DataView | null,\n byteOffset: number,\n sourceBuffer: TypedArray,\n padding: number\n): number {\n const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);\n const padLength = paddedLength - sourceBuffer.byteLength;\n\n if (dataView) {\n // Copy array\n const targetArray = new Uint8Array(\n dataView.buffer,\n dataView.byteOffset + byteOffset,\n sourceBuffer.byteLength\n );\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n\n // Add PADDING\n for (let i = 0; i < padLength; ++i) {\n // json chunk is padded with spaces (ASCII 0x20)\n dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);\n }\n }\n byteOffset += paddedLength;\n return byteOffset;\n}\n\n/**\n * Copy string to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {string} string - source string\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. 
Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedStringToDataView(\n dataView: DataView | null,\n byteOffset: number,\n string: string,\n padding: number\n): number {\n const textEncoder = new TextEncoder();\n // PERFORMANCE IDEA: We encode twice, once to get size and once to store\n // PERFORMANCE IDEA: Use TextEncoder.encodeInto() to avoid temporary copy\n const stringBuffer = textEncoder.encode(string);\n\n byteOffset = copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);\n\n return byteOffset;\n}\n"],"mappings":"AAGA,SAAQA,WAAW,QAAO,qBAAqB;AAU/C,OAAO,SAASC,wBAAwBA,CAACC,MAAc,EAAEC,aAAqB,EAAU;EACtF,MAAMC,MAAM,GAAGF,MAAM,CAACE,MAAM;EAC5B,MAAMC,YAAY,GAAGC,IAAI,CAACC,IAAI,CAACH,MAAM,GAAGD,aAAa,CAAC,GAAGA,aAAa;EACtE,MAAMK,OAAO,GAAGH,YAAY,GAAGD,MAAM;EACrC,IAAIK,UAAU,GAAG,EAAE;EACnB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,OAAO,EAAE,EAAEE,CAAC,EAAE;IAChCD,UAAU,IAAI,GAAG;EACnB;EACA,OAAOP,MAAM,GAAGO,UAAU;AAC5B;AAUA,OAAO,SAASE,oBAAoBA,CAClCC,QAAkB,EAClBC,UAAkB,EAClBX,MAAc,EACdY,UAAkB,EACV;EACR,IAAIF,QAAQ,EAAE;IACZ,KAAK,IAAIF,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGI,UAAU,EAAEJ,CAAC,EAAE,EAAE;MACnCE,QAAQ,CAACG,QAAQ,CAACF,UAAU,GAAGH,CAAC,EAAER,MAAM,CAACc,UAAU,CAACN,CAAC,CAAC,CAAC;IACzD;EACF;EACA,OAAOG,UAAU,GAAGC,UAAU;AAChC;AAEA,OAAO,SAASG,oBAAoBA,CAACL,QAAQ,EAAEC,UAAU,EAAEK,MAAM,EAAEJ,UAAU,EAAE;EAC7E,IAAIF,QAAQ,EAAE;IACZ,KAAK,IAAIF,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGI,UAAU,EAAEJ,CAAC,EAAE,EAAE;MACnCE,QAAQ,CAACG,QAAQ,CAACF,UAAU,GAAGH,CAAC,EAAEQ,MAAM,CAACR,CAAC,CAAC,CAAC;IAC9C;EACF;EACA,OAAOG,UAAU,GAAGC,UAAU;AAChC;AAYA,OAAO,SAASK,+BAA+BA,CAC7CP,QAAyB,EACzBC,UAAkB,EAClBO,YAAwB,EACxBZ,OAAe,EACP;EACR,MAAMH,YAAY,GAAGL,WAAW,CAACoB,YAAY,CAACN,UAAU,EAAEN,OAAO,CAAC;EAClE,MAAMa,SAAS,GAAGhB,YAAY,GAAGe,YAAY,CAACN,UAAU;EAExD,IAAIF,QAAQ,EAAE;IAEZ,MAAMU,WAAW,GAAG,IAAIC,UAAU,CAChCX,QAAQ,CAACY,MAAM,EACfZ,QAAQ,CAACC,UAAU,GAAGA,UAAU,EAChCO,YAAY,CAACN,UACf,CAAC;IACD,MAAMW,WAAW,GAAG,IAAIF,UAAU,CAACH,YAAY,CAAC;IAChDE,WAAW,CAACI,GAAG,CAACD,WAAW,CAAC;IAG5B,KAAK,IAAIf,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGW,SAAS,EAAE,EAAEX,CAAC,EAAE;MAElCE,QAAQ,CAACG,QAAQ,CAACF,UAAU,GAAGO,YAAY,CAACN,UAAU,GAAGJ,CAAC,EAAE,IAAI,CAAC;IACnE;EACF;EACAG,UAAU,IAAIR,YAAY;EAC1B,OAAOQ,UAAU;AACnB;AAYA,OAAO,SAASc,0BAA0BA,CACxCf,QAAyB,EACzBC,UAAkB,EAClBX,MAAc,EACdM,OAAe,EACP;EACR,MAAMoB,WAAW,GAAG,IAAIC,WAAW,CAAC,CAAC;EAGrC,MAAMC,YAAY,GAAGF,WAAW,CAACG,MAAM,CAAC7B,MAAM,CAAC;EAE/CW,UAAU,GAAGM,+BAA+B,CAACP,QAAQ,EAAEC,UAAU,EAAEiB,YAAY,EAAEtB,OAAO,CAAC;EAEzF,OAAOK,UAAU;AACnB"}
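The copy helpers in this file support a two-pass pattern: pass null as the dataView to compute the required size, then write for real. A hedged sketch, assuming copyPaddedStringToDataView is re-exported from the package entry point:

// Hypothetical usage sketch (not part of the diff)
import {copyPaddedStringToDataView} from '@loaders.gl/loader-utils';

const json = JSON.stringify({mesh: 'triangle'});

// Pass 1: a null dataView only advances the offset, yielding the padded byte length
const byteLength = copyPaddedStringToDataView(null, 0, json, 4);

// Pass 2: write the UTF-8 string, space-padded (0x20) to a multiple of 4 bytes
const arrayBuffer = new ArrayBuffer(byteLength);
copyPaddedStringToDataView(new DataView(arrayBuffer), 0, json, 4);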

package/dist/esm/lib/binary-utils/get-first-characters.js
CHANGED
@@ -1,5 +1,3 @@
-
-
 export function getFirstCharacters(data) {
   let length = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 5;
   if (typeof data === 'string') {
@@ -12,7 +10,6 @@ export function getFirstCharacters(data) {
   }
   return '';
 }
-
 export function getMagicString(arrayBuffer, byteOffset, length) {
   if (arrayBuffer.byteLength <= byteOffset + length) {
     return '';

package/dist/esm/lib/binary-utils/get-first-characters.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"get-first-characters.js","names":["getFirstCharacters","data","length","slice","ArrayBuffer","isView","getMagicString","buffer","byteOffset","arrayBuffer","byteLength","dataView","DataView","magic","i","String","fromCharCode","getUint8"],"sources":["../../../../src/lib/binary-utils/get-first-characters.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n/**\n * Get the first characters from a binary file (interpret the first bytes as an ASCII string)\n * @param data\n * @param length\n * @returns\n */\nexport function getFirstCharacters(data: string | ArrayBuffer, length: number = 5): string {\n if (typeof data === 'string') {\n return data.slice(0, length);\n } else if (ArrayBuffer.isView(data)) {\n // Typed Arrays can have offsets into underlying buffer\n return getMagicString(data.buffer, data.byteOffset, length);\n } else if (data instanceof ArrayBuffer) {\n const byteOffset = 0;\n return getMagicString(data, byteOffset, length);\n }\n return '';\n}\n\n/**\n * Gets a magic string from a \"file\"\n * Typically used to check or detect file format\n * @param arrayBuffer\n * @param byteOffset\n * @param length\n * @returns\n */\nexport function getMagicString(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n length: number\n): string {\n if (arrayBuffer.byteLength <= byteOffset + length) {\n return '';\n }\n const dataView = new DataView(arrayBuffer);\n let magic = '';\n for (let i = 0; i < length; i++) {\n magic += String.fromCharCode(dataView.getUint8(byteOffset + i));\n }\n return magic;\n}\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"get-first-characters.js","names":["getFirstCharacters","data","length","arguments","undefined","slice","ArrayBuffer","isView","getMagicString","buffer","byteOffset","arrayBuffer","byteLength","dataView","DataView","magic","i","String","fromCharCode","getUint8"],"sources":["../../../../src/lib/binary-utils/get-first-characters.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n/**\n * Get the first characters from a binary file (interpret the first bytes as an ASCII string)\n * @param data\n * @param length\n * @returns\n */\nexport function getFirstCharacters(data: string | ArrayBuffer, length: number = 5): string {\n if (typeof data === 'string') {\n return data.slice(0, length);\n } else if (ArrayBuffer.isView(data)) {\n // Typed Arrays can have offsets into underlying buffer\n return getMagicString(data.buffer, data.byteOffset, length);\n } else if (data instanceof ArrayBuffer) {\n const byteOffset = 0;\n return getMagicString(data, byteOffset, length);\n }\n return '';\n}\n\n/**\n * Gets a magic string from a \"file\"\n * Typically used to check or detect file format\n * @param arrayBuffer\n * @param byteOffset\n * @param length\n * @returns\n */\nexport function getMagicString(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n length: number\n): string {\n if (arrayBuffer.byteLength <= byteOffset + length) {\n return '';\n }\n const dataView = new DataView(arrayBuffer);\n let magic = '';\n for (let i = 0; i < length; i++) {\n magic += String.fromCharCode(dataView.getUint8(byteOffset + i));\n }\n return magic;\n}\n"],"mappings":"AAQA,OAAO,SAASA,kBAAkBA,CAACC,IAA0B,EAA8B;EAAA,IAA5BC,MAAc,GAAAC,SAAA,CAAAD,MAAA,QAAAC,SAAA,QAAAC,SAAA,GAAAD,SAAA,MAAG,CAAC;EAC/E,IAAI,OAAOF,IAAI,KAAK,QAAQ,EAAE;IAC5B,OAAOA,IAAI,CAACI,KAAK,CAAC,CAAC,EAAEH,MAAM,CAAC;EAC9B,CAAC,MAAM,IAAII,WAAW,CAACC,MAAM,CAACN,IAAI,CAAC,EAAE;IAEnC,OAAOO,cAAc,CAACP,IAAI,CAACQ,MAAM,EAAER,IAAI,CAACS,UAAU,EAAER,MAAM,CAAC;EAC7D,CAAC,MAAM,IAAID,IAAI,YAAYK,WAAW,EAAE;IACtC,MAAMI,UAAU,GAAG,CAAC;IACpB,OAAOF,cAAc,CAACP,IAAI,EAAES,UAAU,EAAER,MAAM,CAAC;EACjD;EACA,OAAO,EAAE;AACX;AAUA,OAAO,SAASM,cAAcA,CAC5BG,WAAwB,EACxBD,UAAkB,EAClBR,MAAc,EACN;EACR,IAAIS,WAAW,CAACC,UAAU,IAAIF,UAAU,GAAGR,MAAM,EAAE;IACjD,OAAO,EAAE;EACX;EACA,MAAMW,QAAQ,GAAG,IAAIC,QAAQ,CAACH,WAAW,CAAC;EAC1C,IAAII,KAAK,GAAG,EAAE;EACd,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGd,MAAM,EAAEc,CAAC,EAAE,EAAE;IAC/BD,KAAK,IAAIE,MAAM,CAACC,YAAY,CAACL,QAAQ,CAACM,QAAQ,CAACT,UAAU,GAAGM,CAAC,CAAC,CAAC;EACjE;EACA,OAAOD,KAAK;AACd"}

package/dist/esm/lib/binary-utils/memory-conversion-utils.js
CHANGED
@@ -1,15 +1,10 @@
-
-
 import * as node from '../node/buffer';
-
 export function isBuffer(value) {
   return value && typeof value === 'object' && value.isBuffer;
 }
-
 export function toBuffer(data) {
   return node.toBuffer ? node.toBuffer(data) : data;
 }
-
 export function toArrayBuffer(data) {
   if (isBuffer(data)) {
     return node.toArrayBuffer(data);
@@ -17,7 +12,6 @@ export function toArrayBuffer(data) {
   if (data instanceof ArrayBuffer) {
     return data;
   }
-
   if (ArrayBuffer.isView(data)) {
     if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {
       return data.buffer;
@@ -29,7 +23,6 @@ export function toArrayBuffer(data) {
     const uint8Array = new TextEncoder().encode(text);
     return uint8Array.buffer;
   }
-
   if (data && typeof data === 'object' && data._toArrayBuffer) {
     return data._toArrayBuffer();
   }

package/dist/esm/lib/binary-utils/memory-conversion-utils.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"memory-conversion-utils.js","names":["node","isBuffer","value","toBuffer","data","toArrayBuffer","ArrayBuffer","isView","byteOffset","byteLength","buffer","slice","text","uint8Array","TextEncoder","encode","_toArrayBuffer","Error"],"sources":["../../../../src/lib/binary-utils/memory-conversion-utils.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport * as node from '../node/buffer';\n\n/**\n * Check for Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n */\nexport function isBuffer(value: any): boolean {\n return value && typeof value === 'object' && value.isBuffer;\n}\n\n/**\n * Converts to Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toBuffer(data: any): Buffer {\n return node.toBuffer ? node.toBuffer(data) : data;\n}\n\n/**\n * Convert an object to an array buffer\n */\nexport function toArrayBuffer(data: unknown): ArrayBuffer {\n // Note: Should be called first, Buffers can trigger other detections below\n if (isBuffer(data)) {\n return node.toArrayBuffer(data);\n }\n\n if (data instanceof ArrayBuffer) {\n return data;\n }\n\n // Careful - Node Buffers look like Uint8Arrays (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {\n return data.buffer;\n }\n return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n if (data && typeof data === 'object' && (data as any)._toArrayBuffer) {\n return (data as any)._toArrayBuffer();\n }\n\n throw new Error('toArrayBuffer');\n}\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"memory-conversion-utils.js","names":["node","isBuffer","value","toBuffer","data","toArrayBuffer","ArrayBuffer","isView","byteOffset","byteLength","buffer","slice","text","uint8Array","TextEncoder","encode","_toArrayBuffer","Error"],"sources":["../../../../src/lib/binary-utils/memory-conversion-utils.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport * as node from '../node/buffer';\n\n/**\n * Check for Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n */\nexport function isBuffer(value: any): boolean {\n return value && typeof value === 'object' && value.isBuffer;\n}\n\n/**\n * Converts to Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toBuffer(data: any): Buffer {\n return node.toBuffer ? node.toBuffer(data) : data;\n}\n\n/**\n * Convert an object to an array buffer\n */\nexport function toArrayBuffer(data: unknown): ArrayBuffer {\n // Note: Should be called first, Buffers can trigger other detections below\n if (isBuffer(data)) {\n return node.toArrayBuffer(data);\n }\n\n if (data instanceof ArrayBuffer) {\n return data;\n }\n\n // Careful - Node Buffers look like Uint8Arrays (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {\n return data.buffer;\n }\n return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n if (data && typeof data === 'object' && (data as any)._toArrayBuffer) {\n return (data as any)._toArrayBuffer();\n }\n\n throw new Error('toArrayBuffer');\n}\n"],"mappings":"AAEA,OAAO,KAAKA,IAAI,MAAM,gBAAgB;AAKtC,OAAO,SAASC,QAAQA,CAACC,KAAU,EAAW;EAC5C,OAAOA,KAAK,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAIA,KAAK,CAACD,QAAQ;AAC7D;AAMA,OAAO,SAASE,QAAQA,CAACC,IAAS,EAAU;EAC1C,OAAOJ,IAAI,CAACG,QAAQ,GAAGH,IAAI,CAACG,QAAQ,CAACC,IAAI,CAAC,GAAGA,IAAI;AACnD;AAKA,OAAO,SAASC,aAAaA,CAACD,IAAa,EAAe;EAExD,IAAIH,QAAQ,CAACG,IAAI,CAAC,EAAE;IAClB,OAAOJ,IAAI,CAACK,aAAa,CAACD,IAAI,CAAC;EACjC;EAEA,IAAIA,IAAI,YAAYE,WAAW,EAAE;IAC/B,OAAOF,IAAI;EACb;EAGA,IAAIE,WAAW,CAACC,MAAM,CAACH,IAAI,CAAC,EAAE;IAC5B,IAAIA,IAAI,CAACI,UAAU,KAAK,CAAC,IAAIJ,IAAI,CAACK,UAAU,KAAKL,IAAI,CAACM,MAAM,CAACD,UAAU,EAAE;MACvE,OAAOL,IAAI,CAACM,MAAM;IACpB;IACA,OAAON,IAAI,CAACM,MAAM,CAACC,KAAK,CAACP,IAAI,CAACI,UAAU,EAAEJ,IAAI,CAACI,UAAU,GAAGJ,IAAI,CAACK,UAAU,CAAC;EAC9E;EAEA,IAAI,OAAOL,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAMQ,IAAI,GAAGR,IAAI;IACjB,MAAMS,UAAU,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACH,IAAI,CAAC;IACjD,OAAOC,UAAU,CAACH,MAAM;EAC1B;EAGA,IAAIN,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAKA,IAAI,CAASY,cAAc,EAAE;IACpE,OAAQZ,IAAI,CAASY,cAAc,CAAC,CAAC;EACvC;EAEA,MAAM,IAAIC,KAAK,CAAC,eAAe,CAAC;AAClC"}

package/dist/esm/lib/binary-utils/memory-copy-utils.js
CHANGED
@@ -1,11 +1,9 @@
 import { assert } from '../env-utils/assert';
-
 export function padToNBytes(byteLength, padding) {
   assert(byteLength >= 0);
   assert(padding > 0);
   return byteLength + (padding - 1) & ~(padding - 1);
 }
-
 export function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset) {
   let byteLength = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : sourceBuffer.byteLength;
   const targetArray = new Uint8Array(targetBuffer, byteOffset, byteLength);
@@ -13,7 +11,6 @@ export function copyArrayBuffer(targetBuffer, sourceBuffer, byteOffset) {
   targetArray.set(sourceArray);
   return targetBuffer;
 }
-
 export function copyToArray(source, target, targetOffset) {
   let sourceArray;
   if (source instanceof ArrayBuffer) {
@@ -23,7 +20,6 @@ export function copyToArray(source, target, targetOffset) {
     const srcByteLength = source.byteLength;
     sourceArray = new Uint8Array(source.buffer || source.arrayBuffer, srcByteOffset, srcByteLength);
   }
-
   target.set(sourceArray, targetOffset);
   return targetOffset + padToNBytes(sourceArray.byteLength, 4);
 }

package/dist/esm/lib/binary-utils/memory-copy-utils.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"memory-copy-utils.js","names":["assert","padToNBytes","byteLength","padding","copyArrayBuffer","targetBuffer","sourceBuffer","byteOffset","targetArray","Uint8Array","sourceArray","set","copyToArray","source","target","targetOffset","ArrayBuffer","srcByteOffset","srcByteLength","buffer","arrayBuffer"],"sources":["../../../../src/lib/binary-utils/memory-copy-utils.ts"],"sourcesContent":["import {assert} from '../env-utils/assert';\n\n/**\n * Calculate new size of an arrayBuffer to be aligned to an n-byte boundary\n * This function increases `byteLength` by the minimum delta,\n * allowing the total length to be divided by `padding`\n * @param byteLength\n * @param padding\n */\nexport function padToNBytes(byteLength: number, padding: number): number {\n assert(byteLength >= 0); // `Incorrect 'byteLength' value: ${byteLength}`\n assert(padding > 0); // `Incorrect 'padding' value: ${padding}`\n return (byteLength + (padding - 1)) & ~(padding - 1);\n}\n\n/**\n * Creates a new Uint8Array based on two different ArrayBuffers\n * @param targetBuffer The first buffer.\n * @param sourceBuffer The second buffer.\n * @return The new ArrayBuffer created out of the two.\n */\nexport function copyArrayBuffer(\n targetBuffer: ArrayBuffer,\n sourceBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength: number = sourceBuffer.byteLength\n): ArrayBuffer {\n const targetArray = new Uint8Array(targetBuffer, byteOffset, byteLength);\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n return targetBuffer;\n}\n\n/**\n * Copy from source to target at the targetOffset\n *\n * @param source - The data to copy\n * @param target - The destination to copy data into\n * @param targetOffset - The start offset into target to place the copied data\n * @returns the new offset taking into account proper padding\n */\nexport function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number {\n let sourceArray;\n\n if (source instanceof ArrayBuffer) {\n sourceArray = new Uint8Array(source);\n } else {\n // Pack buffer onto the big target array\n //\n // 'source.data.buffer' could be a view onto a larger buffer.\n // We MUST use this constructor to ensure the byteOffset and byteLength is\n // set to correct values from 'source.data' and not the underlying\n // buffer for target.set() to work properly.\n const srcByteOffset = source.byteOffset;\n const srcByteLength = source.byteLength;\n // In gltf parser it is set as \"arrayBuffer\" instead of \"buffer\"\n // https://github.com/visgl/loaders.gl/blob/1e3a82a0a65d7b6a67b1e60633453e5edda2960a/modules/gltf/src/lib/parse-gltf.js#L85\n sourceArray = new Uint8Array(source.buffer || source.arrayBuffer, srcByteOffset, srcByteLength);\n }\n\n // Pack buffer onto the big target array\n target.set(sourceArray, targetOffset);\n\n return targetOffset + padToNBytes(sourceArray.byteLength, 4);\n}\n"],"mappings":"AAAA,SAAQA,MAAM,QAAO,qBAAqB
+ added (line 1):
{"version":3,"file":"memory-copy-utils.js","names":["assert","padToNBytes","byteLength","padding","copyArrayBuffer","targetBuffer","sourceBuffer","byteOffset","arguments","length","undefined","targetArray","Uint8Array","sourceArray","set","copyToArray","source","target","targetOffset","ArrayBuffer","srcByteOffset","srcByteLength","buffer","arrayBuffer"],"sources":["../../../../src/lib/binary-utils/memory-copy-utils.ts"],"sourcesContent":["import {assert} from '../env-utils/assert';\n\n/**\n * Calculate new size of an arrayBuffer to be aligned to an n-byte boundary\n * This function increases `byteLength` by the minimum delta,\n * allowing the total length to be divided by `padding`\n * @param byteLength\n * @param padding\n */\nexport function padToNBytes(byteLength: number, padding: number): number {\n assert(byteLength >= 0); // `Incorrect 'byteLength' value: ${byteLength}`\n assert(padding > 0); // `Incorrect 'padding' value: ${padding}`\n return (byteLength + (padding - 1)) & ~(padding - 1);\n}\n\n/**\n * Creates a new Uint8Array based on two different ArrayBuffers\n * @param targetBuffer The first buffer.\n * @param sourceBuffer The second buffer.\n * @return The new ArrayBuffer created out of the two.\n */\nexport function copyArrayBuffer(\n targetBuffer: ArrayBuffer,\n sourceBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength: number = sourceBuffer.byteLength\n): ArrayBuffer {\n const targetArray = new Uint8Array(targetBuffer, byteOffset, byteLength);\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n return targetBuffer;\n}\n\n/**\n * Copy from source to target at the targetOffset\n *\n * @param source - The data to copy\n * @param target - The destination to copy data into\n * @param targetOffset - The start offset into target to place the copied data\n * @returns the new offset taking into account proper padding\n */\nexport function copyToArray(source: ArrayBuffer | any, target: any, targetOffset: number): number {\n let sourceArray;\n\n if (source instanceof ArrayBuffer) {\n sourceArray = new Uint8Array(source);\n } else {\n // Pack buffer onto the big target array\n //\n // 'source.data.buffer' could be a view onto a larger buffer.\n // We MUST use this constructor to ensure the byteOffset and byteLength is\n // set to correct values from 'source.data' and not the underlying\n // buffer for target.set() to work properly.\n const srcByteOffset = source.byteOffset;\n const srcByteLength = source.byteLength;\n // In gltf parser it is set as \"arrayBuffer\" instead of \"buffer\"\n // https://github.com/visgl/loaders.gl/blob/1e3a82a0a65d7b6a67b1e60633453e5edda2960a/modules/gltf/src/lib/parse-gltf.js#L85\n sourceArray = new Uint8Array(source.buffer || source.arrayBuffer, srcByteOffset, srcByteLength);\n }\n\n // Pack buffer onto the big target array\n target.set(sourceArray, targetOffset);\n\n return targetOffset + padToNBytes(sourceArray.byteLength, 
4);\n}\n"],"mappings":"AAAA,SAAQA,MAAM,QAAO,qBAAqB;AAS1C,OAAO,SAASC,WAAWA,CAACC,UAAkB,EAAEC,OAAe,EAAU;EACvEH,MAAM,CAACE,UAAU,IAAI,CAAC,CAAC;EACvBF,MAAM,CAACG,OAAO,GAAG,CAAC,CAAC;EACnB,OAAQD,UAAU,IAAIC,OAAO,GAAG,CAAC,CAAC,GAAI,EAAEA,OAAO,GAAG,CAAC,CAAC;AACtD;AAQA,OAAO,SAASC,eAAeA,CAC7BC,YAAyB,EACzBC,YAAyB,EACzBC,UAAkB,EAEL;EAAA,IADbL,UAAkB,GAAAM,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAGF,YAAY,CAACJ,UAAU;EAE5C,MAAMS,WAAW,GAAG,IAAIC,UAAU,CAACP,YAAY,EAAEE,UAAU,EAAEL,UAAU,CAAC;EACxE,MAAMW,WAAW,GAAG,IAAID,UAAU,CAACN,YAAY,CAAC;EAChDK,WAAW,CAACG,GAAG,CAACD,WAAW,CAAC;EAC5B,OAAOR,YAAY;AACrB;AAUA,OAAO,SAASU,WAAWA,CAACC,MAAyB,EAAEC,MAAW,EAAEC,YAAoB,EAAU;EAChG,IAAIL,WAAW;EAEf,IAAIG,MAAM,YAAYG,WAAW,EAAE;IACjCN,WAAW,GAAG,IAAID,UAAU,CAACI,MAAM,CAAC;EACtC,CAAC,MAAM;IAOL,MAAMI,aAAa,GAAGJ,MAAM,CAACT,UAAU;IACvC,MAAMc,aAAa,GAAGL,MAAM,CAACd,UAAU;IAGvCW,WAAW,GAAG,IAAID,UAAU,CAACI,MAAM,CAACM,MAAM,IAAIN,MAAM,CAACO,WAAW,EAAEH,aAAa,EAAEC,aAAa,CAAC;EACjG;EAGAJ,MAAM,CAACH,GAAG,CAACD,WAAW,EAAEK,YAAY,CAAC;EAErC,OAAOA,YAAY,GAAGjB,WAAW,CAACY,WAAW,CAACX,UAAU,EAAE,CAAC,CAAC;AAC9D"}

package/dist/esm/lib/env-utils/assert.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"assert.js","names":["assert","condition","message","Error"],"sources":["../../../../src/lib/env-utils/assert.ts"],"sourcesContent":["/**\n * Throws an `Error` with the optional `message` if `condition` is falsy\n * @note Replacement for the external assert method to reduce bundle size\n */\nexport function assert(condition: any, message?: string): void {\n if (!condition) {\n throw new Error(message || 'loader assertion failed.');\n }\n}\n"],"mappings":"
+ added (line 1):
{"version":3,"file":"assert.js","names":["assert","condition","message","Error"],"sources":["../../../../src/lib/env-utils/assert.ts"],"sourcesContent":["/**\n * Throws an `Error` with the optional `message` if `condition` is falsy\n * @note Replacement for the external assert method to reduce bundle size\n */\nexport function assert(condition: any, message?: string): void {\n if (!condition) {\n throw new Error(message || 'loader assertion failed.');\n }\n}\n"],"mappings":"AAIA,OAAO,SAASA,MAAMA,CAACC,SAAc,EAAEC,OAAgB,EAAQ;EAC7D,IAAI,CAACD,SAAS,EAAE;IACd,MAAM,IAAIE,KAAK,CAACD,OAAO,IAAI,0BAA0B,CAAC;EACxD;AACF"}

package/dist/esm/lib/env-utils/globals.js
CHANGED
@@ -1,5 +1,3 @@
-
-
 const globals = {
   self: typeof self !== 'undefined' && self,
   window: typeof window !== 'undefined' && window,
@@ -11,12 +9,8 @@ const window_ = globals.window || globals.self || globals.global || {};
 const global_ = globals.global || globals.self || globals.window || {};
 const document_ = globals.document || {};
 export { self_ as self, window_ as window, global_ as global, document_ as document };
-
-export const isBrowser =
-Boolean(typeof process !== 'object' || String(process) !== '[object process]' || process.browser);
-
+export const isBrowser = Boolean(typeof process !== 'object' || String(process) !== '[object process]' || process.browser);
 export const isWorker = typeof importScripts === 'function';
-
 const matches = typeof process !== 'undefined' && process.version && /v([0-9]*)/.exec(process.version);
 export const nodeVersion = matches && parseFloat(matches[1]) || 0;
 //# sourceMappingURL=globals.js.map

package/dist/esm/lib/env-utils/globals.js.map
CHANGED
@@ -1 +1 @@
- removed (line 1):
{"version":3,"file":"globals.js","names":["globals","self","window","global","document","self_","window_","global_","document_","isBrowser","Boolean","process","String","browser","isWorker","importScripts","matches","version","exec","nodeVersion","parseFloat"],"sources":["../../../../src/lib/env-utils/globals.ts"],"sourcesContent":["// Purpose: include this in your module to avoid\n// dependencies on micro modules like 'global' and 'is-browser';\n\n/* eslint-disable no-restricted-globals */\nconst globals = {\n self: typeof self !== 'undefined' && self,\n window: typeof window !== 'undefined' && window,\n global: typeof global !== 'undefined' && global,\n document: typeof document !== 'undefined' && document\n};\n\ntype obj = {[key: string]: any};\nconst self_: obj = globals.self || globals.window || globals.global || {};\nconst window_: obj = globals.window || globals.self || globals.global || {};\nconst global_: obj = globals.global || globals.self || globals.window || {};\nconst document_: obj = globals.document || {};\n\nexport {self_ as self, window_ as window, global_ as global, document_ as document};\n\n/** true if running in a browser */\nexport const isBrowser: boolean =\n // @ts-ignore process does not exist on browser\n Boolean(typeof process !== 'object' || String(process) !== '[object process]' || process.browser);\n\n/** true if running in a worker thread */\nexport const isWorker: boolean = typeof importScripts === 'function';\n\n// Extract node major version\nconst matches =\n typeof process !== 'undefined' && process.version && /v([0-9]*)/.exec(process.version);\n/** Major Node version (as a number) */\nexport const nodeVersion: number = (matches && parseFloat(matches[1])) || 0;\n"],"mappings":"
+
{"version":3,"file":"globals.js","names":["globals","self","window","global","document","self_","window_","global_","document_","isBrowser","Boolean","process","String","browser","isWorker","importScripts","matches","version","exec","nodeVersion","parseFloat"],"sources":["../../../../src/lib/env-utils/globals.ts"],"sourcesContent":["// Purpose: include this in your module to avoid\n// dependencies on micro modules like 'global' and 'is-browser';\n\n/* eslint-disable no-restricted-globals */\nconst globals = {\n self: typeof self !== 'undefined' && self,\n window: typeof window !== 'undefined' && window,\n global: typeof global !== 'undefined' && global,\n document: typeof document !== 'undefined' && document\n};\n\ntype obj = {[key: string]: any};\nconst self_: obj = globals.self || globals.window || globals.global || {};\nconst window_: obj = globals.window || globals.self || globals.global || {};\nconst global_: obj = globals.global || globals.self || globals.window || {};\nconst document_: obj = globals.document || {};\n\nexport {self_ as self, window_ as window, global_ as global, document_ as document};\n\n/** true if running in a browser */\nexport const isBrowser: boolean =\n // @ts-ignore process does not exist on browser\n Boolean(typeof process !== 'object' || String(process) !== '[object process]' || process.browser);\n\n/** true if running in a worker thread */\nexport const isWorker: boolean = typeof importScripts === 'function';\n\n// Extract node major version\nconst matches =\n typeof process !== 'undefined' && process.version && /v([0-9]*)/.exec(process.version);\n/** Major Node version (as a number) */\nexport const nodeVersion: number = (matches && parseFloat(matches[1])) || 0;\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG;EACdC,IAAI,EAAE,OAAOA,IAAI,KAAK,WAAW,IAAIA,IAAI;EACzCC,MAAM,EAAE,OAAOA,MAAM,KAAK,WAAW,IAAIA,MAAM;EAC/CC,MAAM,EAAE,OAAOA,MAAM,KAAK,WAAW,IAAIA,MAAM;EAC/CC,QAAQ,EAAE,OAAOA,QAAQ,KAAK,WAAW,IAAIA;AAC/C,CAAC;AAGD,MAAMC,KAAU,GAAGL,OAAO,CAACC,IAAI,IAAID,OAAO,CAACE,MAAM,IAAIF,OAAO,CAACG,MAAM,IAAI,CAAC,CAAC;AACzE,MAAMG,OAAY,GAAGN,OAAO,CAACE,MAAM,IAAIF,OAAO,CAACC,IAAI,IAAID,OAAO,CAACG,MAAM,IAAI,CAAC,CAAC;AAC3E,MAAMI,OAAY,GAAGP,OAAO,CAACG,MAAM,IAAIH,OAAO,CAACC,IAAI,IAAID,OAAO,CAACE,MAAM,IAAI,CAAC,CAAC;AAC3E,MAAMM,SAAc,GAAGR,OAAO,CAACI,QAAQ,IAAI,CAAC,CAAC;AAE7C,SAAQC,KAAK,IAAIJ,IAAI,EAAEK,OAAO,IAAIJ,MAAM,EAAEK,OAAO,IAAIJ,MAAM,EAAEK,SAAS,IAAIJ,QAAQ;AAGlF,OAAO,MAAMK,SAAkB,GAE7BC,OAAO,CAAC,OAAOC,OAAO,KAAK,QAAQ,IAAIC,MAAM,CAACD,OAAO,CAAC,KAAK,kBAAkB,IAAIA,OAAO,CAACE,OAAO,CAAC;AAGnG,OAAO,MAAMC,QAAiB,GAAG,OAAOC,aAAa,KAAK,UAAU;AAGpE,MAAMC,OAAO,GACX,OAAOL,OAAO,KAAK,WAAW,IAAIA,OAAO,CAACM,OAAO,IAAI,WAAW,CAACC,IAAI,CAACP,OAAO,CAACM,OAAO,CAAC;AAExF,OAAO,MAAME,WAAmB,GAAIH,OAAO,IAAII,UAAU,CAACJ,OAAO,CAAC,CAAC,CAAC,CAAC,IAAK,CAAC"}
@@ -1 +1 @@
-
{"version":3,"file":"node-filesystem.js","names":["fs","NodeFileSystem","constructor","options","fetch","_fetch","readdir","dirname","stat","path","info","size","Number","isDirectory","fallbackFetch","open","flags","mode","close","fd","fstat","read","
+
{"version":3,"file":"node-filesystem.js","names":["fs","NodeFileSystem","constructor","options","fetch","_fetch","readdir","dirname","arguments","length","undefined","stat","path","info","size","Number","isDirectory","fallbackFetch","open","flags","mode","close","fd","fstat","read","_ref","buffer","offset","byteLength","position","totalBytesRead","bytesRead"],"sources":["../../../../src/lib/filesystems/node-filesystem.ts"],"sourcesContent":["import * as fs from '../node/fs';\nimport {IFileSystem, IRandomAccessReadFileSystem} from '../../types';\n// import {fetchFile} from \"../fetch/fetch-file\"\n// import {selectLoader} from \"../api/select-loader\";\n\ntype Stat = {\n size: number;\n isDirectory: () => boolean;\n info?: fs.Stats;\n};\n\ntype ReadOptions = {\n buffer?: Buffer;\n offset?: number;\n length?: number;\n position?: number;\n};\n\n/**\n * FileSystem pass-through for Node.js\n * Compatible with BrowserFileSystem.\n * @param options\n */\nexport default class NodeFileSystem implements IFileSystem, IRandomAccessReadFileSystem {\n // implements IFileSystem\n constructor(options: {[key: string]: any}) {\n this.fetch = options._fetch;\n }\n\n async readdir(dirname = '.', options?: {}): Promise<any[]> {\n return await fs.readdir(dirname, options);\n }\n\n async stat(path: string, options?: {}): Promise<Stat> {\n const info = await fs.stat(path, options);\n return {size: Number(info.size), isDirectory: () => false, info};\n }\n\n async fetch(path: string, options: {[key: string]: any}) {\n // Falls back to handle https:/http:/data: etc fetches\n // eslint-disable-next-line\n const fallbackFetch = options.fetch || this.fetch;\n return fallbackFetch(path, options);\n }\n\n // implements IRandomAccessFileSystem\n async open(path: string, flags: string | number, mode?: any): Promise<number> {\n return await fs.open(path, flags);\n }\n\n async close(fd: number): Promise<void> {\n return await fs.close(fd);\n }\n\n async fstat(fd: number): Promise<Stat> {\n const info = await fs.fstat(fd);\n return info;\n }\n\n async read(\n fd: number,\n // @ts-ignore Possibly null\n {buffer = null, offset = 0, length = buffer.byteLength, position = null}: ReadOptions\n ): Promise<{bytesRead: number; buffer: Buffer}> {\n let totalBytesRead = 0;\n // Read in loop until we get required number of bytes\n while (totalBytesRead < length) {\n const {bytesRead} = await fs.read(\n fd,\n buffer,\n offset + totalBytesRead,\n length - totalBytesRead,\n position + totalBytesRead\n );\n totalBytesRead += bytesRead;\n }\n return {bytesRead: totalBytesRead, buffer};\n 
}\n}\n"],"mappings":"AAAA,OAAO,KAAKA,EAAE,MAAM,YAAY;AAuBhC,eAAe,MAAMC,cAAc,CAAqD;EAEtFC,WAAWA,CAACC,OAA6B,EAAE;IACzC,IAAI,CAACC,KAAK,GAAGD,OAAO,CAACE,MAAM;EAC7B;EAEA,MAAMC,OAAOA,CAAA,EAA8C;IAAA,IAA7CC,OAAO,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,GAAG;IAAA,IAAEL,OAAY,GAAAK,SAAA,CAAAC,MAAA,OAAAD,SAAA,MAAAE,SAAA;IACvC,OAAO,MAAMV,EAAE,CAACM,OAAO,CAACC,OAAO,EAAEJ,OAAO,CAAC;EAC3C;EAEA,MAAMQ,IAAIA,CAACC,IAAY,EAAET,OAAY,EAAiB;IACpD,MAAMU,IAAI,GAAG,MAAMb,EAAE,CAACW,IAAI,CAACC,IAAI,EAAET,OAAO,CAAC;IACzC,OAAO;MAACW,IAAI,EAAEC,MAAM,CAACF,IAAI,CAACC,IAAI,CAAC;MAAEE,WAAW,EAAEA,CAAA,KAAM,KAAK;MAAEH;IAAI,CAAC;EAClE;EAEA,MAAMT,KAAKA,CAACQ,IAAY,EAAET,OAA6B,EAAE;IAGvD,MAAMc,aAAa,GAAGd,OAAO,CAACC,KAAK,IAAI,IAAI,CAACA,KAAK;IACjD,OAAOa,aAAa,CAACL,IAAI,EAAET,OAAO,CAAC;EACrC;EAGA,MAAMe,IAAIA,CAACN,IAAY,EAAEO,KAAsB,EAAEC,IAAU,EAAmB;IAC5E,OAAO,MAAMpB,EAAE,CAACkB,IAAI,CAACN,IAAI,EAAEO,KAAK,CAAC;EACnC;EAEA,MAAME,KAAKA,CAACC,EAAU,EAAiB;IACrC,OAAO,MAAMtB,EAAE,CAACqB,KAAK,CAACC,EAAE,CAAC;EAC3B;EAEA,MAAMC,KAAKA,CAACD,EAAU,EAAiB;IACrC,MAAMT,IAAI,GAAG,MAAMb,EAAE,CAACuB,KAAK,CAACD,EAAE,CAAC;IAC/B,OAAOT,IAAI;EACb;EAEA,MAAMW,IAAIA,CACRF,EAAU,EAAAG,IAAA,EAGoC;IAAA,IAD9C;MAACC,MAAM,GAAG,IAAI;MAAEC,MAAM,GAAG,CAAC;MAAElB,MAAM,GAAGiB,MAAM,CAACE,UAAU;MAAEC,QAAQ,GAAG;IAAiB,CAAC,GAAAJ,IAAA;IAErF,IAAIK,cAAc,GAAG,CAAC;IAEtB,OAAOA,cAAc,GAAGrB,MAAM,EAAE;MAC9B,MAAM;QAACsB;MAAS,CAAC,GAAG,MAAM/B,EAAE,CAACwB,IAAI,CAC/BF,EAAE,EACFI,MAAM,EACNC,MAAM,GAAGG,cAAc,EACvBrB,MAAM,GAAGqB,cAAc,EACvBD,QAAQ,GAAGC,cACb,CAAC;MACDA,cAAc,IAAIC,SAAS;IAC7B;IACA,OAAO;MAACA,SAAS,EAAED,cAAc;MAAEJ;IAAM,CAAC;EAC5C;AACF"}
@@ -1 +1 @@
-
{"version":3,"file":"readable-file.js","names":["makeReadableFile","data","ArrayBuffer","arrayBuffer","read","start","length","Buffer","from","close","size","byteLength","blob","slice"],"sources":["../../../../src/lib/filesystems/readable-file.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nexport type ReadableFile = {\n read: (position: number, length: number) => Promise<Buffer>;\n close: () => Promise<void>;\n /** Length of file in bytes */\n size: number;\n};\n\n/** Helper function to create an envelope reader for a binary memory input */\nexport function makeReadableFile(data: Blob | ArrayBuffer): ReadableFile {\n if (data instanceof ArrayBuffer) {\n const arrayBuffer: ArrayBuffer = data;\n return {\n read: async (start: number, length: number) => Buffer.from(data, start, length),\n close: async () => {},\n size: arrayBuffer.byteLength\n };\n }\n\n const blob: Blob = data;\n return {\n read: async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n },\n close: async () => {},\n size: blob.size\n };\n}\n"],"mappings":"
+
{"version":3,"file":"readable-file.js","names":["makeReadableFile","data","ArrayBuffer","arrayBuffer","read","start","length","Buffer","from","close","size","byteLength","blob","slice"],"sources":["../../../../src/lib/filesystems/readable-file.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nexport type ReadableFile = {\n read: (position: number, length: number) => Promise<Buffer>;\n close: () => Promise<void>;\n /** Length of file in bytes */\n size: number;\n};\n\n/** Helper function to create an envelope reader for a binary memory input */\nexport function makeReadableFile(data: Blob | ArrayBuffer): ReadableFile {\n if (data instanceof ArrayBuffer) {\n const arrayBuffer: ArrayBuffer = data;\n return {\n read: async (start: number, length: number) => Buffer.from(data, start, length),\n close: async () => {},\n size: arrayBuffer.byteLength\n };\n }\n\n const blob: Blob = data;\n return {\n read: async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n },\n close: async () => {},\n size: blob.size\n };\n}\n"],"mappings":"AAUA,OAAO,SAASA,gBAAgBA,CAACC,IAAwB,EAAgB;EACvE,IAAIA,IAAI,YAAYC,WAAW,EAAE;IAC/B,MAAMC,WAAwB,GAAGF,IAAI;IACrC,OAAO;MACLG,IAAI,EAAE,MAAAA,CAAOC,KAAa,EAAEC,MAAc,KAAKC,MAAM,CAACC,IAAI,CAACP,IAAI,EAAEI,KAAK,EAAEC,MAAM,CAAC;MAC/EG,KAAK,EAAE,MAAAA,CAAA,KAAY,CAAC,CAAC;MACrBC,IAAI,EAAEP,WAAW,CAACQ;IACpB,CAAC;EACH;EAEA,MAAMC,IAAU,GAAGX,IAAI;EACvB,OAAO;IACLG,IAAI,EAAE,MAAAA,CAAOC,KAAa,EAAEC,MAAc,KAAK;MAC7C,MAAMH,WAAW,GAAG,MAAMS,IAAI,CAACC,KAAK,CAACR,KAAK,EAAEA,KAAK,GAAGC,MAAM,CAAC,CAACH,WAAW,CAAC,CAAC;MACzE,OAAOI,MAAM,CAACC,IAAI,CAACL,WAAW,CAAC;IACjC,CAAC;IACDM,KAAK,EAAE,MAAAA,CAAA,KAAY,CAAC,CAAC;IACrBC,IAAI,EAAEE,IAAI,CAACF;EACb,CAAC;AACH"}
@@ -1 +1 @@
-
{"version":3,"file":"writable-file.js","names":["isBrowser","fs","makeWritableFile","pathOrStream","options","write","close","outputStream","createWriteStream","buffer","Promise","resolve","reject","err"],"sources":["../../../../src/lib/filesystems/writable-file.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {isBrowser} from '../env-utils/globals';\nimport * as fs from '../node/fs';\nimport type {Writable} from 'stream';\n\nexport type WritableFile = {\n write: (buf: Buffer) => Promise<void>;\n close: () => Promise<void>;\n};\n\nexport interface WriteStreamOptions {\n flags?: string;\n encoding?: 'utf8';\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/** Helper function to create an envelope reader for a binary memory input */\nexport function makeWritableFile(\n pathOrStream: string | Writable,\n options?: WriteStreamOptions\n): WritableFile {\n if (isBrowser) {\n return {\n write: async () => {},\n close: async () => {}\n };\n }\n\n const outputStream: Writable =\n typeof pathOrStream === 'string' ? fs.createWriteStream(pathOrStream, options) : pathOrStream;\n return {\n write: async (buffer: Buffer) =>\n new Promise((resolve, reject) => {\n outputStream.write(buffer, (err) => (err ? reject(err) : resolve()));\n }),\n close: () =>\n new Promise((resolve, reject) => {\n (outputStream as any).close((err) => (err ? reject(err) : resolve()));\n })\n };\n}\n"],"mappings":"
+
{"version":3,"file":"writable-file.js","names":["isBrowser","fs","makeWritableFile","pathOrStream","options","write","close","outputStream","createWriteStream","buffer","Promise","resolve","reject","err"],"sources":["../../../../src/lib/filesystems/writable-file.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {isBrowser} from '../env-utils/globals';\nimport * as fs from '../node/fs';\nimport type {Writable} from 'stream';\n\nexport type WritableFile = {\n write: (buf: Buffer) => Promise<void>;\n close: () => Promise<void>;\n};\n\nexport interface WriteStreamOptions {\n flags?: string;\n encoding?: 'utf8';\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/** Helper function to create an envelope reader for a binary memory input */\nexport function makeWritableFile(\n pathOrStream: string | Writable,\n options?: WriteStreamOptions\n): WritableFile {\n if (isBrowser) {\n return {\n write: async () => {},\n close: async () => {}\n };\n }\n\n const outputStream: Writable =\n typeof pathOrStream === 'string' ? fs.createWriteStream(pathOrStream, options) : pathOrStream;\n return {\n write: async (buffer: Buffer) =>\n new Promise((resolve, reject) => {\n outputStream.write(buffer, (err) => (err ? reject(err) : resolve()));\n }),\n close: () =>\n new Promise((resolve, reject) => {\n (outputStream as any).close((err) => (err ? reject(err) : resolve()));\n })\n };\n}\n"],"mappings":"AACA,SAAQA,SAAS,QAAO,sBAAsB;AAC9C,OAAO,KAAKC,EAAE,MAAM,YAAY;AAkBhC,OAAO,SAASC,gBAAgBA,CAC9BC,YAA+B,EAC/BC,OAA4B,EACd;EACd,IAAIJ,SAAS,EAAE;IACb,OAAO;MACLK,KAAK,EAAE,MAAAA,CAAA,KAAY,CAAC,CAAC;MACrBC,KAAK,EAAE,MAAAA,CAAA,KAAY,CAAC;IACtB,CAAC;EACH;EAEA,MAAMC,YAAsB,GAC1B,OAAOJ,YAAY,KAAK,QAAQ,GAAGF,EAAE,CAACO,iBAAiB,CAACL,YAAY,EAAEC,OAAO,CAAC,GAAGD,YAAY;EAC/F,OAAO;IACLE,KAAK,EAAE,MAAOI,MAAc,IAC1B,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;MAC/BL,YAAY,CAACF,KAAK,CAACI,MAAM,EAAGI,GAAG,IAAMA,GAAG,GAAGD,MAAM,CAACC,GAAG,CAAC,GAAGF,OAAO,CAAC,CAAE,CAAC;IACtE,CAAC,CAAC;IACJL,KAAK,EAAEA,CAAA,KACL,IAAII,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;MAC9BL,YAAY,CAASD,KAAK,CAAEO,GAAG,IAAMA,GAAG,GAAGD,MAAM,CAACC,GAAG,CAAC,GAAGF,OAAO,CAAC,CAAE,CAAC;IACvE,CAAC;EACL,CAAC;AACH"}
@@ -1,5 +1,4 @@
 import { concatenateArrayBuffers } from '../binary-utils/array-buffer-utils';
-
 export async function forEach(iterator, visitor) {
   while (true) {
     const {
@@ -16,7 +15,6 @@ export async function forEach(iterator, visitor) {
     }
   }
 }
-
 export async function concatenateArrayBuffersAsync(asyncIterator) {
   const arrayBuffers = [];
   for await (const chunk of asyncIterator) {
@@ -1 +1 @@
-
{"version":3,"file":"async-iteration.js","names":["concatenateArrayBuffers","forEach","iterator","visitor","done","value","next","return","cancel","concatenateArrayBuffersAsync","asyncIterator","arrayBuffers","chunk","push","concatenateStringsAsync","strings","join"],"sources":["../../../../src/lib/iterators/async-iteration.ts"],"sourcesContent":["import {concatenateArrayBuffers} from '../binary-utils/array-buffer-utils';\n\n// GENERAL UTILITIES\n\n/**\n * Iterate over async iterator, without resetting iterator if end is not reached\n * - forEach intentionally does not reset iterator if exiting loop prematurely\n * so that iteration can continue in a second loop\n * - It is recommended to use a standard for-await as last loop to ensure\n * iterator gets properly reset\n *\n * TODO - optimize using sync iteration if argument is an Iterable?\n *\n * @param iterator\n * @param visitor\n */\nexport async function forEach(iterator, visitor) {\n // eslint-disable-next-line\n while (true) {\n const {done, value} = await iterator.next();\n if (done) {\n iterator.return();\n return;\n }\n const cancel = visitor(value);\n if (cancel) {\n return;\n }\n }\n}\n\n// Breaking big data into iterable chunks, concatenating iterable chunks into big data objects\n\n/**\n * Concatenates all data chunks yielded by an (async) iterator\n * This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs\n */\n\nexport async function concatenateArrayBuffersAsync(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n): Promise<ArrayBuffer> {\n const arrayBuffers: ArrayBuffer[] = [];\n for await (const chunk of asyncIterator) {\n arrayBuffers.push(chunk);\n }\n return concatenateArrayBuffers(...arrayBuffers);\n}\n\nexport async function concatenateStringsAsync(\n asyncIterator: AsyncIterable<string> | Iterable<string>\n): Promise<string> {\n const strings: string[] = [];\n for await (const chunk of asyncIterator) {\n strings.push(chunk);\n }\n return strings.join('');\n}\n"],"mappings":"AAAA,SAAQA,uBAAuB,QAAO,oCAAoC
+
{"version":3,"file":"async-iteration.js","names":["concatenateArrayBuffers","forEach","iterator","visitor","done","value","next","return","cancel","concatenateArrayBuffersAsync","asyncIterator","arrayBuffers","chunk","push","concatenateStringsAsync","strings","join"],"sources":["../../../../src/lib/iterators/async-iteration.ts"],"sourcesContent":["import {concatenateArrayBuffers} from '../binary-utils/array-buffer-utils';\n\n// GENERAL UTILITIES\n\n/**\n * Iterate over async iterator, without resetting iterator if end is not reached\n * - forEach intentionally does not reset iterator if exiting loop prematurely\n * so that iteration can continue in a second loop\n * - It is recommended to use a standard for-await as last loop to ensure\n * iterator gets properly reset\n *\n * TODO - optimize using sync iteration if argument is an Iterable?\n *\n * @param iterator\n * @param visitor\n */\nexport async function forEach(iterator, visitor) {\n // eslint-disable-next-line\n while (true) {\n const {done, value} = await iterator.next();\n if (done) {\n iterator.return();\n return;\n }\n const cancel = visitor(value);\n if (cancel) {\n return;\n }\n }\n}\n\n// Breaking big data into iterable chunks, concatenating iterable chunks into big data objects\n\n/**\n * Concatenates all data chunks yielded by an (async) iterator\n * This function can e.g. be used to enable atomic parsers to work on (async) iterator inputs\n */\n\nexport async function concatenateArrayBuffersAsync(\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n): Promise<ArrayBuffer> {\n const arrayBuffers: ArrayBuffer[] = [];\n for await (const chunk of asyncIterator) {\n arrayBuffers.push(chunk);\n }\n return concatenateArrayBuffers(...arrayBuffers);\n}\n\nexport async function concatenateStringsAsync(\n asyncIterator: AsyncIterable<string> | Iterable<string>\n): Promise<string> {\n const strings: string[] = [];\n for await (const chunk of asyncIterator) {\n strings.push(chunk);\n }\n return strings.join('');\n}\n"],"mappings":"AAAA,SAAQA,uBAAuB,QAAO,oCAAoC;AAgB1E,OAAO,eAAeC,OAAOA,CAACC,QAAQ,EAAEC,OAAO,EAAE;EAE/C,OAAO,IAAI,EAAE;IACX,MAAM;MAACC,IAAI;MAAEC;IAAK,CAAC,GAAG,MAAMH,QAAQ,CAACI,IAAI,CAAC,CAAC;IAC3C,IAAIF,IAAI,EAAE;MACRF,QAAQ,CAACK,MAAM,CAAC,CAAC;MACjB;IACF;IACA,MAAMC,MAAM,GAAGL,OAAO,CAACE,KAAK,CAAC;IAC7B,IAAIG,MAAM,EAAE;MACV;IACF;EACF;AACF;AASA,OAAO,eAAeC,4BAA4BA,CAChDC,aAAiE,EAC3C;EACtB,MAAMC,YAA2B,GAAG,EAAE;EACtC,WAAW,MAAMC,KAAK,IAAIF,aAAa,EAAE;IACvCC,YAAY,CAACE,IAAI,CAACD,KAAK,CAAC;EAC1B;EACA,OAAOZ,uBAAuB,CAAC,GAAGW,YAAY,CAAC;AACjD;AAEA,OAAO,eAAeG,uBAAuBA,CAC3CJ,aAAuD,EACtC;EACjB,MAAMK,OAAiB,GAAG,EAAE;EAC5B,WAAW,MAAMH,KAAK,IAAIF,aAAa,EAAE;IACvCK,OAAO,CAACF,IAAI,CAACD,KAAK,CAAC;EACrB;EACA,OAAOG,OAAO,CAACC,IAAI,CAAC,EAAE,CAAC;AACzB"}
@@ -1,22 +1,24 @@
-
-
-
-
-
-
-
-
-
+export function makeTextDecoderIterator(arrayBufferIterator) {
+  try {
+    let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+    return async function* () {
+      const textDecoder = new TextDecoder(undefined, options);
+      for await (const arrayBuffer of arrayBufferIterator) {
+        yield typeof arrayBuffer === 'string' ? arrayBuffer : textDecoder.decode(arrayBuffer, {
+          stream: true
+        });
+      }
+    }();
+  } catch (e) {
+    return Promise.reject(e);
   }
 }
-
 export async function* makeTextEncoderIterator(textIterator) {
   const textEncoder = new TextEncoder();
   for await (const text of textIterator) {
     yield typeof text === 'string' ? textEncoder.encode(text) : text;
   }
 }
-
 export async function* makeLineIterator(textIterator) {
   let previous = '';
   for await (const textChunk of textIterator) {
@@ -32,7 +34,6 @@ export async function* makeLineIterator(textIterator) {
     yield previous;
   }
 }
-
 export async function* makeNumberedLineIterator(lineIterator) {
   let counter = 1;
   for await (const line of lineIterator) {
@@ -1 +1 @@
-
{"version":3,"file":"text-iterators.js","names":["makeTextDecoderIterator","arrayBufferIterator","options","
+
{"version":3,"file":"text-iterators.js","names":["makeTextDecoderIterator","arrayBufferIterator","options","arguments","length","undefined","textDecoder","TextDecoder","arrayBuffer","decode","stream","e","Promise","reject","makeTextEncoderIterator","textIterator","textEncoder","TextEncoder","text","encode","makeLineIterator","previous","textChunk","eolIndex","indexOf","line","slice","makeNumberedLineIterator","lineIterator","counter"],"sources":["../../../../src/lib/iterators/text-iterators.ts"],"sourcesContent":["// TextDecoder iterators\n// TextDecoder will keep any partial undecoded bytes between calls to `decode`\n\nexport async function* makeTextDecoderIterator(\n arrayBufferIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options: TextDecoderOptions = {}\n): AsyncIterable<string> {\n const textDecoder = new TextDecoder(undefined, options);\n for await (const arrayBuffer of arrayBufferIterator) {\n yield typeof arrayBuffer === 'string'\n ? arrayBuffer\n : textDecoder.decode(arrayBuffer, {stream: true});\n }\n}\n\n// TextEncoder iterator\n// TODO - this is not useful unless min chunk size is given\n// TextEncoder will keep any partial undecoded bytes between calls to `encode`\n// If iterator does not yield strings, assume arrayBuffer and return unencoded\n\nexport async function* makeTextEncoderIterator(\n textIterator: AsyncIterable<string> | Iterable<ArrayBuffer>\n): AsyncIterable<ArrayBuffer> {\n const textEncoder = new TextEncoder();\n for await (const text of textIterator) {\n yield typeof text === 'string' ? textEncoder.encode(text) : text;\n }\n}\n\n/**\n * @param textIterator async iterable yielding strings\n * @returns an async iterable over lines\n * See http://2ality.com/2018/04/async-iter-nodejs.html\n */\n\nexport async function* makeLineIterator(\n textIterator: AsyncIterable<string>\n): AsyncIterable<string> {\n let previous = '';\n for await (const textChunk of textIterator) {\n previous += textChunk;\n let eolIndex;\n while ((eolIndex = previous.indexOf('\\n')) >= 0) {\n // line includes the EOL\n const line = previous.slice(0, eolIndex + 1);\n previous = previous.slice(eolIndex + 1);\n yield line;\n }\n }\n\n if (previous.length > 0) {\n yield previous;\n }\n}\n\n/**\n * @param lineIterator async iterable yielding lines\n * @returns async iterable yielding numbered lines\n *\n * See http://2ality.com/2018/04/async-iter-nodejs.html\n */\nexport async function* makeNumberedLineIterator(\n lineIterator: AsyncIterable<string>\n): AsyncIterable<{counter: number; line: string}> {\n let counter = 1;\n for await (const line of lineIterator) {\n yield {counter, line};\n counter++;\n 
}\n}\n"],"mappings":"AAGA,OAAO,SAAgBA,uBAAuBA,CAC5CC,mBAAuE;EAAA;IAAA,IACvEC,OAA2B,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;IAAA,0BACT;MACvB,MAAMG,WAAW,GAAG,IAAIC,WAAW,CAACF,SAAS,EAAEH,OAAO,CAAC;MACvD,WAAW,MAAMM,WAAW,IAAIP,mBAAmB,EAAE;QACnD,MAAM,OAAOO,WAAW,KAAK,QAAQ,GACjCA,WAAW,GACXF,WAAW,CAACG,MAAM,CAACD,WAAW,EAAE;UAACE,MAAM,EAAE;QAAI,CAAC,CAAC;MACrD;IACF,CAAC;EAAA,SAAAC,CAAA;IAAA,OAAAC,OAAA,CAAAC,MAAA,CAAAF,CAAA;EAAA;AAAA;AAOD,OAAO,gBAAgBG,uBAAuBA,CAC5CC,YAA2D,EAC/B;EAC5B,MAAMC,WAAW,GAAG,IAAIC,WAAW,CAAC,CAAC;EACrC,WAAW,MAAMC,IAAI,IAAIH,YAAY,EAAE;IACrC,MAAM,OAAOG,IAAI,KAAK,QAAQ,GAAGF,WAAW,CAACG,MAAM,CAACD,IAAI,CAAC,GAAGA,IAAI;EAClE;AACF;AAQA,OAAO,gBAAgBE,gBAAgBA,CACrCL,YAAmC,EACZ;EACvB,IAAIM,QAAQ,GAAG,EAAE;EACjB,WAAW,MAAMC,SAAS,IAAIP,YAAY,EAAE;IAC1CM,QAAQ,IAAIC,SAAS;IACrB,IAAIC,QAAQ;IACZ,OAAO,CAACA,QAAQ,GAAGF,QAAQ,CAACG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;MAE/C,MAAMC,IAAI,GAAGJ,QAAQ,CAACK,KAAK,CAAC,CAAC,EAAEH,QAAQ,GAAG,CAAC,CAAC;MAC5CF,QAAQ,GAAGA,QAAQ,CAACK,KAAK,CAACH,QAAQ,GAAG,CAAC,CAAC;MACvC,MAAME,IAAI;IACZ;EACF;EAEA,IAAIJ,QAAQ,CAACjB,MAAM,GAAG,CAAC,EAAE;IACvB,MAAMiB,QAAQ;EAChB;AACF;AAQA,OAAO,gBAAgBM,wBAAwBA,CAC7CC,YAAmC,EACa;EAChD,IAAIC,OAAO,GAAG,CAAC;EACf,WAAW,MAAMJ,IAAI,IAAIG,YAAY,EAAE;IACrC,MAAM;MAACC,OAAO;MAAEJ;IAAI,CAAC;IACrBI,OAAO,EAAE;EACX;AACF"}
@@ -1 +1 @@
-
{"version":3,"file":"buffer.browser.js","names":["toArrayBuffer","buffer","toBuffer","binaryData","Error"],"sources":["../../../../src/lib/node/buffer.browser.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)\n// this file is selected by the package.json \"browser\" field).\n\n/**\n * Convert Buffer to ArrayBuffer\n * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toArrayBuffer(buffer) {\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(binaryData: ArrayBuffer | ArrayBuffer | Buffer): Buffer {\n throw new Error('Buffer not supported in browser');\n}\n"],"mappings":"
+
{"version":3,"file":"buffer.browser.js","names":["toArrayBuffer","buffer","toBuffer","binaryData","Error"],"sources":["../../../../src/lib/node/buffer.browser.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)\n// this file is selected by the package.json \"browser\" field).\n\n/**\n * Convert Buffer to ArrayBuffer\n * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toArrayBuffer(buffer) {\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(binaryData: ArrayBuffer | ArrayBuffer | Buffer): Buffer {\n throw new Error('Buffer not supported in browser');\n}\n"],"mappings":"AAUA,OAAO,SAASA,aAAaA,CAACC,MAAM,EAAE;EACpC,OAAOA,MAAM;AACf;AAKA,OAAO,SAASC,QAAQA,CAACC,UAA8C,EAAU;EAC/E,MAAM,IAAIC,KAAK,CAAC,iCAAiC,CAAC;AACpD"}
@@ -1,5 +1,3 @@
-
-
 export function toArrayBuffer(buffer) {
   if (Buffer.isBuffer(buffer)) {
     const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
@@ -7,7 +5,6 @@ export function toArrayBuffer(buffer) {
   }
   return buffer;
 }
-
 export function toBuffer(binaryData) {
   if (Buffer.isBuffer(binaryData)) {
     return binaryData;
@@ -1 +1 @@
-
{"version":3,"file":"buffer.js","names":["toArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","toBuffer","binaryData","ArrayBuffer","isView","from","Error"],"sources":["../../../../src/lib/node/buffer.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)\n// this file is selected by the package.json \"browser\" field).\n\n/**\n * Convert Buffer to ArrayBuffer\n * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toArrayBuffer(buffer) {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(binaryData: ArrayBuffer | ArrayBuffer | Buffer): Buffer {\n if (Buffer.isBuffer(binaryData)) {\n return binaryData;\n }\n\n if (ArrayBuffer.isView(binaryData)) {\n binaryData = binaryData.buffer;\n }\n\n if (typeof Buffer !== 'undefined' && binaryData instanceof ArrayBuffer) {\n return Buffer.from(binaryData);\n }\n\n throw new Error('toBuffer');\n}\n"],"mappings":"
+
{"version":3,"file":"buffer.js","names":["toArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","toBuffer","binaryData","ArrayBuffer","isView","from","Error"],"sources":["../../../../src/lib/node/buffer.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// Isolates Buffer references to ensure they are only bundled under Node.js (avoids big webpack polyfill)\n// this file is selected by the package.json \"browser\" field).\n\n/**\n * Convert Buffer to ArrayBuffer\n * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toArrayBuffer(buffer) {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(binaryData: ArrayBuffer | ArrayBuffer | Buffer): Buffer {\n if (Buffer.isBuffer(binaryData)) {\n return binaryData;\n }\n\n if (ArrayBuffer.isView(binaryData)) {\n binaryData = binaryData.buffer;\n }\n\n if (typeof Buffer !== 'undefined' && binaryData instanceof ArrayBuffer) {\n return Buffer.from(binaryData);\n }\n\n throw new Error('toBuffer');\n}\n"],"mappings":"AAUA,OAAO,SAASA,aAAaA,CAACC,MAAM,EAAE;EAEpC,IAAIC,MAAM,CAACC,QAAQ,CAACF,MAAM,CAAC,EAAE;IAC3B,MAAMG,UAAU,GAAG,IAAIC,UAAU,CAACJ,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACK,UAAU,EAAEL,MAAM,CAACM,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,CAAC,CAAC,CAACP,MAAM;EAClC;EACA,OAAOA,MAAM;AACf;AAKA,OAAO,SAASQ,QAAQA,CAACC,UAA8C,EAAU;EAC/E,IAAIR,MAAM,CAACC,QAAQ,CAACO,UAAU,CAAC,EAAE;IAC/B,OAAOA,UAAU;EACnB;EAEA,IAAIC,WAAW,CAACC,MAAM,CAACF,UAAU,CAAC,EAAE;IAClCA,UAAU,GAAGA,UAAU,CAACT,MAAM;EAChC;EAEA,IAAI,OAAOC,MAAM,KAAK,WAAW,IAAIQ,UAAU,YAAYC,WAAW,EAAE;IACtE,OAAOT,MAAM,CAACW,IAAI,CAACH,UAAU,CAAC;EAChC;EAEA,MAAM,IAAII,KAAK,CAAC,UAAU,CAAC;AAC7B"}