@loaders.gl/loader-utils 3.3.0-alpha.5 → 3.3.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/index.js +93 -119
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/json-loader.js +2 -9
- package/dist/es5/json-loader.js.map +1 -1
- package/dist/es5/lib/binary-utils/array-buffer-utils.js +6 -26
- package/dist/es5/lib/binary-utils/array-buffer-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/binary-copy-utils.js +0 -4
- package/dist/es5/lib/binary-utils/binary-copy-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/buffer-utils.js +1 -9
- package/dist/es5/lib/binary-utils/buffer-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/encode-utils.js +2 -8
- package/dist/es5/lib/binary-utils/encode-utils.js.map +1 -1
- package/dist/es5/lib/binary-utils/get-first-characters.js +0 -7
- package/dist/es5/lib/binary-utils/get-first-characters.js.map +1 -1
- package/dist/es5/lib/binary-utils/memory-copy-utils.js +1 -4
- package/dist/es5/lib/binary-utils/memory-copy-utils.js.map +1 -1
- package/dist/es5/lib/env-utils/assert.js +0 -1
- package/dist/es5/lib/env-utils/assert.js.map +1 -1
- package/dist/es5/lib/env-utils/globals.js +5 -4
- package/dist/es5/lib/env-utils/globals.js.map +1 -1
- package/dist/es5/lib/filesystems/node-filesystem.js +2 -45
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/es5/lib/iterators/async-iteration.js +57 -127
- package/dist/es5/lib/iterators/async-iteration.js.map +1 -1
- package/dist/es5/lib/iterators/text-iterators.js +127 -242
- package/dist/es5/lib/iterators/text-iterators.js.map +1 -1
- package/dist/es5/lib/node/buffer.js +0 -6
- package/dist/es5/lib/node/buffer.js.map +1 -1
- package/dist/es5/lib/node/fs.js +9 -16
- package/dist/es5/lib/node/fs.js.map +1 -1
- package/dist/es5/lib/node/util.js +0 -5
- package/dist/es5/lib/node/util.js.map +1 -1
- package/dist/es5/lib/parser-utils/parse-json.js +0 -2
- package/dist/es5/lib/parser-utils/parse-json.js.map +1 -1
- package/dist/es5/lib/path-utils/file-aliases.js +3 -4
- package/dist/es5/lib/path-utils/file-aliases.js.map +1 -1
- package/dist/es5/lib/path-utils/path.js +1 -4
- package/dist/es5/lib/path-utils/path.js.map +1 -1
- package/dist/es5/lib/request-utils/request-scheduler.js +24 -33
- package/dist/es5/lib/request-utils/request-scheduler.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js +2 -36
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js +0 -4
- package/dist/es5/lib/worker-loader-utils/encode-with-worker.js.map +1 -1
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js +2 -23
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map +1 -1
- package/dist/es5/types.js.map +1 -1
- package/dist/es5/workers/json-worker.js +0 -2
- package/dist/es5/workers/json-worker.js.map +1 -1
- package/dist/esm/index.js +14 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/json-loader.js +3 -2
- package/dist/esm/json-loader.js.map +1 -1
- package/dist/esm/lib/binary-utils/array-buffer-utils.js +16 -15
- package/dist/esm/lib/binary-utils/array-buffer-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/binary-copy-utils.js +2 -2
- package/dist/esm/lib/binary-utils/binary-copy-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/buffer-utils.js +3 -1
- package/dist/esm/lib/binary-utils/buffer-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/encode-utils.js +2 -4
- package/dist/esm/lib/binary-utils/encode-utils.js.map +1 -1
- package/dist/esm/lib/binary-utils/get-first-characters.js +2 -5
- package/dist/esm/lib/binary-utils/get-first-characters.js.map +1 -1
- package/dist/esm/lib/binary-utils/memory-copy-utils.js +5 -2
- package/dist/esm/lib/binary-utils/memory-copy-utils.js.map +1 -1
- package/dist/esm/lib/env-utils/assert.js +1 -0
- package/dist/esm/lib/env-utils/assert.js.map +1 -1
- package/dist/esm/lib/env-utils/globals.js +7 -1
- package/dist/esm/lib/env-utils/globals.js.map +1 -1
- package/dist/esm/lib/filesystems/node-filesystem.js +10 -16
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -1
- package/dist/esm/lib/iterators/async-iteration.js +2 -7
- package/dist/esm/lib/iterators/async-iteration.js.map +1 -1
- package/dist/esm/lib/iterators/text-iterators.js +7 -7
- package/dist/esm/lib/iterators/text-iterators.js.map +1 -1
- package/dist/esm/lib/node/buffer.js +4 -5
- package/dist/esm/lib/node/buffer.js.map +1 -1
- package/dist/esm/lib/node/fs.js +7 -4
- package/dist/esm/lib/node/fs.js.map +1 -1
- package/dist/esm/lib/node/util.js +1 -0
- package/dist/esm/lib/node/util.js.map +1 -1
- package/dist/esm/lib/parser-utils/parse-json.js +1 -0
- package/dist/esm/lib/parser-utils/parse-json.js.map +1 -1
- package/dist/esm/lib/path-utils/file-aliases.js +6 -2
- package/dist/esm/lib/path-utils/file-aliases.js.map +1 -1
- package/dist/esm/lib/path-utils/path.js +8 -3
- package/dist/esm/lib/path-utils/path.js.map +1 -1
- package/dist/esm/lib/request-utils/request-scheduler.js +11 -21
- package/dist/esm/lib/request-utils/request-scheduler.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js +19 -17
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js +1 -1
- package/dist/esm/lib/worker-loader-utils/encode-with-worker.js.map +1 -1
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js +6 -7
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js.map +1 -1
- package/dist/esm/types.js.map +1 -1
- package/dist/esm/workers/json-worker.js.map +1 -1
- package/dist/index.js +5 -1
- package/dist/lib/binary-utils/buffer-utils.js +5 -1
- package/dist/lib/filesystems/node-filesystem.d.ts +1 -0
- package/dist/lib/filesystems/node-filesystem.d.ts.map +1 -1
- package/dist/lib/filesystems/node-filesystem.js +5 -1
- package/dist/lib/node/util.js +5 -1
- package/package.json +3 -3
package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"create-loader-worker.js","names":["requestId","createLoaderWorker","loader","WorkerBody","inWorkerThread","onmessage","type","payload","input","options","context","parseData","arrayBuffer","parse","parseOnMainThread","result","postMessage","message","Error","error","Promise","resolve","reject","id","onMessage","removeEventListener","addEventListener","parseSync","data","parser","parseTextSync","textDecoder","TextDecoder","decode","name","modules","worker"],"sources":["../../../../src/lib/worker-loader-utils/create-loader-worker.ts"],"sourcesContent":["/* eslint-disable no-restricted-globals */\nimport type {LoaderWithParser} from '../../types';\nimport {WorkerBody} from '@loaders.gl/worker-utils';\n// import {validateLoaderVersion} from './validate-loader-version';\n\nlet requestId = 0;\n\n/**\n * Set up a WebWorkerGlobalScope to talk with the main thread\n * @param loader\n */\nexport function createLoaderWorker(loader: LoaderWithParser) {\n // Check that we are actually in a worker thread\n if (!WorkerBody.inWorkerThread()) {\n return;\n }\n\n WorkerBody.onmessage = async (type, payload) => {\n switch (type) {\n case 'process':\n try {\n // validateLoaderVersion(loader, data.source.split('@')[1]);\n\n const {input, options = {}, context = {}} = payload;\n\n const result = await parseData({\n loader,\n arrayBuffer: input,\n options,\n context: {\n ...context,\n parse: parseOnMainThread\n }\n });\n WorkerBody.postMessage('done', {result});\n } catch (error) {\n const message = error instanceof Error ? error.message : '';\n WorkerBody.postMessage('error', {error: message});\n }\n break;\n default:\n }\n };\n}\n\nfunction parseOnMainThread(arrayBuffer: ArrayBuffer, options: {[key: string]: any}): Promise<void> {\n return new Promise((resolve, reject) => {\n const id = requestId++;\n\n /**\n */\n const onMessage = (type, payload) => {\n if (payload.id !== id) {\n // not ours\n return;\n }\n\n switch (type) {\n case 'done':\n WorkerBody.removeEventListener(onMessage);\n resolve(payload.result);\n break;\n\n case 'error':\n WorkerBody.removeEventListener(onMessage);\n reject(payload.error);\n break;\n\n default:\n // ignore\n }\n };\n\n WorkerBody.addEventListener(onMessage);\n\n // Ask the main thread to decode data\n const payload = {id, input: arrayBuffer, options};\n WorkerBody.postMessage('process', payload);\n });\n}\n\n// TODO - Support byteOffset and byteLength (enabling parsing of embedded binaries without copies)\n// TODO - Why not support async loader.parse* funcs here?\n// TODO - Why not reuse a common function instead of reimplementing loader.parse* selection logic? 
Keeping loader small?\n// TODO - Lack of appropriate parser functions can be detected when we create worker, no need to wait until parse\nasync function parseData({loader, arrayBuffer, options, context}) {\n let data;\n let parser;\n if (loader.parseSync || loader.parse) {\n data = arrayBuffer;\n parser = loader.parseSync || loader.parse;\n } else if (loader.parseTextSync) {\n const textDecoder = new TextDecoder();\n data = textDecoder.decode(arrayBuffer);\n parser = loader.parseTextSync;\n } else {\n throw new Error(`Could not load data with ${loader.name} loader`);\n }\n\n // TODO - proper merge in of loader options...\n options = {\n ...options,\n modules: (loader && loader.options && loader.options.modules) || {},\n worker: false\n };\n\n return await parser(data, {...options}, context, loader);\n}\n"],"mappings":";;;;;;;;;;AAEA;AAAoD;AAAA;;AAGpD,IAAIA,SAAS,GAAG,CAAC;;AAMV,SAASC,kBAAkB,CAACC,MAAwB,EAAE;EAE3D,IAAI,CAACC,uBAAU,CAACC,cAAc,EAAE,EAAE;IAChC;EACF;EAEAD,uBAAU,CAACE,SAAS;IAAA,qEAAG,iBAAOC,IAAI,EAAEC,OAAO;MAAA;MAAA;QAAA;UAAA;YAAA;cAAA,cACjCD,IAAI;cAAA,gCACL,SAAS;cAAA;YAAA;cAAA;cAIHE,KAAK,GAAgCD,OAAO,CAA5CC,KAAK,qBAAgCD,OAAO,CAArCE,OAAO,EAAPA,OAAO,iCAAG,CAAC,CAAC,wCAAkBF,OAAO,CAAvBG,OAAO,EAAPA,OAAO,iCAAG,CAAC,CAAC;cAAA;cAAA,OAEnBC,SAAS,CAAC;gBAC7BT,MAAM,EAANA,MAAM;gBACNU,WAAW,EAAEJ,KAAK;gBAClBC,OAAO,EAAPA,OAAO;gBACPC,OAAO,kCACFA,OAAO;kBACVG,KAAK,EAAEC;gBAAiB;cAE5B,CAAC,CAAC;YAAA;cARIC,MAAM;cASZZ,uBAAU,CAACa,WAAW,CAAC,MAAM,EAAE;gBAACD,MAAM,EAANA;cAAM,CAAC,CAAC;cAAC;cAAA;YAAA;cAAA;cAAA;cAEnCE,OAAO,GAAG,uBAAiBC,KAAK,GAAG,YAAMD,OAAO,GAAG,EAAE;cAC3Dd,uBAAU,CAACa,WAAW,CAAC,OAAO,EAAE;gBAACG,KAAK,EAAEF;cAAO,CAAC,CAAC;YAAC;cAAA;YAAA;YAAA;cAAA;UAAA;QAAA;MAAA;IAAA,CAKzD;IAAA;MAAA;IAAA;EAAA;AACH;AAEA,SAASH,iBAAiB,CAACF,WAAwB,EAAEH,OAA6B,EAAiB;EACjG,OAAO,IAAIW,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtC,IAAMC,EAAE,GAAGvB,SAAS,EAAE;;IAItB,IAAMwB,SAAS,GAAG,SAAZA,SAAS,CAAIlB,IAAI,EAAEC,OAAO,EAAK;MACnC,IAAIA,OAAO,CAACgB,EAAE,KAAKA,EAAE,EAAE;QAErB;MACF;MAEA,QAAQjB,IAAI;QACV,KAAK,MAAM;UACTH,uBAAU,CAACsB,mBAAmB,CAACD,SAAS,CAAC;UACzCH,OAAO,CAACd,OAAO,CAACQ,MAAM,CAAC;UACvB;QAEF,KAAK,OAAO;UACVZ,uBAAU,CAACsB,mBAAmB,CAACD,SAAS,CAAC;UACzCF,MAAM,CAACf,OAAO,CAACY,KAAK,CAAC;UACrB;QAEF;MAAQ;IAGZ,CAAC;;IAEDhB,uBAAU,CAACuB,gBAAgB,CAACF,SAAS,CAAC;;IAGtC,IAAMjB,OAAO,GAAG;MAACgB,EAAE,EAAFA,EAAE;MAAEf,KAAK,EAAEI,WAAW;MAAEH,OAAO,EAAPA;IAAO,CAAC;IACjDN,uBAAU,CAACa,WAAW,CAAC,SAAS,EAAET,OAAO,CAAC;EAC5C,CAAC,CAAC;AACJ;;AAAC,SAMcI,SAAS;EAAA;AAAA;AAAA;EAAA,uEAAxB;IAAA;IAAA;MAAA;QAAA;UAAA;YAA0BT,MAAM,SAANA,MAAM,EAAEU,WAAW,SAAXA,WAAW,EAAEH,OAAO,SAAPA,OAAO,EAAEC,OAAO,SAAPA,OAAO;YAAA,MAGzDR,MAAM,CAACyB,SAAS,IAAIzB,MAAM,CAACW,KAAK;cAAA;cAAA;YAAA;YAClCe,IAAI,GAAGhB,WAAW;YAClBiB,MAAM,GAAG3B,MAAM,CAACyB,SAAS,IAAIzB,MAAM,CAACW,KAAK;YAAC;YAAA;UAAA;YAAA,KACjCX,MAAM,CAAC4B,aAAa;cAAA;cAAA;YAAA;YACvBC,WAAW,GAAG,IAAIC,WAAW,EAAE;YACrCJ,IAAI,GAAGG,WAAW,CAACE,MAAM,CAACrB,WAAW,CAAC;YACtCiB,MAAM,GAAG3B,MAAM,CAAC4B,aAAa;YAAC;YAAA;UAAA;YAAA,MAExB,IAAIZ,KAAK,oCAA6BhB,MAAM,CAACgC,IAAI,aAAU;UAAA;YAInEzB,OAAO,mCACFA,OAAO;cACV0B,OAAO,EAAGjC,MAAM,IAAIA,MAAM,CAACO,OAAO,IAAIP,MAAM,CAACO,OAAO,CAAC0B,OAAO,IAAK,CAAC,CAAC;cACnEC,MAAM,EAAE;YAAK,EACd;YAAC;YAAA,OAEWP,MAAM,CAACD,IAAI,oBAAMnB,OAAO,GAAGC,OAAO,EAAER,MAAM,CAAC;UAAA;YAAA;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACzD;EAAA;AAAA"}
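
The source embedded in this map shows the worker-side half of the loader protocol: createLoaderWorker registers a WorkerBody.onmessage handler and answers 'process' requests with 'done' or 'error' messages. A minimal worker entry point, mirroring the json-worker bundle elsewhere in this package, might look like this sketch (bundling the file as a dedicated worker script is assumed to happen elsewhere):

  // Worker entry: wrap a LoaderWithParser so it can parse off the main thread.
  import {createLoaderWorker, JSONLoader} from '@loaders.gl/loader-utils';

  // Registers the onmessage handler shown in the embedded source above.
  createLoaderWorker(JSONLoader);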
package/dist/es5/lib/worker-loader-utils/encode-with-worker.js
CHANGED
@@ -4,11 +4,8 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.canEncodeWithWorker = canEncodeWithWorker;
-
 var _workerUtils = require("@loaders.gl/worker-utils");
-
 var _globals = require("../env-utils/globals");
-
 function canEncodeWithWorker(writer, options) {
 if (!_workerUtils.WorkerFarm.isSupported()) {
 return false;
@@ -17,7 +14,6 @@ function canEncodeWithWorker(writer, options) {
 if (!_globals.isBrowser && !(options !== null && options !== void 0 && options._nodeWorkers)) {
 return false;
 }
-
 return writer.worker && (options === null || options === void 0 ? void 0 : options.worker);
 }
 //# sourceMappingURL=encode-with-worker.js.map
package/dist/es5/lib/worker-loader-utils/encode-with-worker.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"encode-with-worker.js","names":["canEncodeWithWorker","writer","options","WorkerFarm","isSupported","isBrowser","_nodeWorkers","worker"],"sources":["../../../../src/lib/worker-loader-utils/encode-with-worker.ts"],"sourcesContent":["import {WorkerFarm} from '@loaders.gl/worker-utils';\nimport {Writer, WriterOptions} from '../../types';\nimport {isBrowser} from '../env-utils/globals';\n\n/**\n * Determines if a loader can parse with worker\n * @param loader\n * @param options\n */\nexport function canEncodeWithWorker(writer: Writer, options?: WriterOptions) {\n if (!WorkerFarm.isSupported()) {\n return false;\n }\n\n // Node workers are still experimental\n if (!isBrowser && !options?._nodeWorkers) {\n return false;\n }\n\n return writer.worker && options?.worker;\n}\n"],"mappings":";;;;;;AAAA;AAEA;AAOO,SAASA,mBAAmB,CAACC,MAAc,EAAEC,OAAuB,EAAE;EAC3E,IAAI,CAACC,uBAAU,CAACC,WAAW,EAAE,EAAE;IAC7B,OAAO,KAAK;EACd;;EAGA,IAAI,CAACC,kBAAS,IAAI,EAACH,OAAO,aAAPA,OAAO,eAAPA,OAAO,CAAEI,YAAY,GAAE;IACxC,OAAO,KAAK;EACd;EAEA,OAAOL,MAAM,CAACM,MAAM,KAAIL,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEK,MAAM;AACzC"}
package/dist/es5/lib/worker-loader-utils/parse-with-worker.js
CHANGED
@@ -1,19 +1,14 @@
 "use strict";
 
 var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.canParseWithWorker = canParseWithWorker;
 exports.parseWithWorker = parseWithWorker;
-
 var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
 var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
 var _workerUtils = require("@loaders.gl/worker-utils");
-
 function canParseWithWorker(loader, options) {
 if (!_workerUtils.WorkerFarm.isSupported()) {
 return false;
@@ -22,14 +17,12 @@ function canParseWithWorker(loader, options) {
 if (!_workerUtils.isBrowser && !(options !== null && options !== void 0 && options._nodeWorkers)) {
 return false;
 }
-
 return loader.worker && (options === null || options === void 0 ? void 0 : options.worker);
 }
 
 function parseWithWorker(_x, _x2, _x3, _x4, _x5) {
 return _parseWithWorker.apply(this, arguments);
 }
-
 function _parseWithWorker() {
 _parseWithWorker = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(loader, data, options, context, parseOnMainThread) {
 var name, url, workerFarm, workerPool, job, result;
@@ -47,8 +40,8 @@
 options = JSON.parse(JSON.stringify(options));
 context = JSON.parse(JSON.stringify(context || {}));
 _context.next = 8;
-return workerPool.startJob('process-on-worker', 
-
+return workerPool.startJob('process-on-worker', 
+onMessage.bind(null, parseOnMainThread));
 case 8:
 job = _context.sent;
 job.postMessage('process', {
@@ -58,15 +51,12 @@
 });
 _context.next = 12;
 return job.result;
-
 case 12:
 result = _context.sent;
 _context.next = 15;
 return result.result;
-
 case 15:
 return _context.abrupt("return", _context.sent);
-
 case 16:
 case "end":
 return _context.stop();
@@ -76,15 +66,12 @@
 }));
 return _parseWithWorker.apply(this, arguments);
 }
-
 function onMessage(_x6, _x7, _x8, _x9) {
 return _onMessage.apply(this, arguments);
 }
-
 function _onMessage() {
 _onMessage = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(parseOnMainThread, job, type, payload) {
 var id, input, _options, result, message;
-
 return _regenerator.default.wrap(function _callee2$(_context2) {
 while (1) {
 switch (_context2.prev = _context2.next) {
@@ -92,21 +79,17 @@ function _onMessage() {
 _context2.t0 = type;
 _context2.next = _context2.t0 === 'done' ? 3 : _context2.t0 === 'error' ? 5 : _context2.t0 === 'process' ? 7 : 20;
 break;
-
 case 3:
 job.done(payload);
 return _context2.abrupt("break", 21);
-
 case 5:
 job.error(new Error(payload.error));
 return _context2.abrupt("break", 21);
-
 case 7:
 id = payload.id, input = payload.input, _options = payload.options;
 _context2.prev = 8;
 _context2.next = 11;
 return parseOnMainThread(input, _options);
-
 case 11:
 result = _context2.sent;
 job.postMessage('done', {
@@ -115,7 +98,6 @@
 });
 _context2.next = 19;
 break;
-
 case 15:
 _context2.prev = 15;
 _context2.t1 = _context2["catch"](8);
@@ -124,13 +106,10 @@
 id: id,
 error: message
 });
-
 case 19:
 return _context2.abrupt("break", 21);
-
 case 20:
 console.warn("parse-with-worker unknown message ".concat(type));
-
 case 21:
 case "end":
 return _context2.stop();
package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"parse-with-worker.js","names":["canParseWithWorker","loader","options","WorkerFarm","isSupported","isBrowser","_nodeWorkers","worker","parseWithWorker","data","context","parseOnMainThread","name","id","url","getWorkerURL","workerFarm","getWorkerFarm","workerPool","getWorkerPool","JSON","parse","stringify","startJob","onMessage","bind","job","postMessage","input","result","type","payload","done","error","Error","message","console","warn"],"sources":["../../../../src/lib/worker-loader-utils/parse-with-worker.ts"],"sourcesContent":["import {\n WorkerJob,\n WorkerMessageType,\n WorkerMessagePayload,\n isBrowser\n} from '@loaders.gl/worker-utils';\nimport type {Loader, LoaderOptions, LoaderContext} from '../../types';\nimport {WorkerFarm, getWorkerURL} from '@loaders.gl/worker-utils';\n\n/**\n * Determines if a loader can parse with worker\n * @param loader\n * @param options\n */\nexport function canParseWithWorker(loader: Loader, options?: LoaderOptions) {\n if (!WorkerFarm.isSupported()) {\n return false;\n }\n\n // Node workers are still experimental\n if (!isBrowser && !options?._nodeWorkers) {\n return false;\n }\n\n return loader.worker && options?.worker;\n}\n\n/**\n * this function expects that the worker function sends certain messages,\n * this can be automated if the worker is wrapper by a call to createLoaderWorker in @loaders.gl/loader-utils.\n */\nexport async function parseWithWorker(\n loader: Loader,\n data: any,\n options?: LoaderOptions,\n context?: LoaderContext,\n parseOnMainThread?: (arrayBuffer: ArrayBuffer, options: {[key: string]: any}) => Promise<void>\n) {\n const name = loader.id; // TODO\n const url = getWorkerURL(loader, options);\n\n const workerFarm = WorkerFarm.getWorkerFarm(options);\n const workerPool = workerFarm.getWorkerPool({name, url});\n\n // options.log object contains functions which cannot be transferred\n // context.fetch & context.parse functions cannot be transferred\n // TODO - decide how to handle logging on workers\n options = JSON.parse(JSON.stringify(options));\n context = JSON.parse(JSON.stringify(context || {}));\n\n const job = await workerPool.startJob(\n 'process-on-worker',\n // @ts-expect-error\n onMessage.bind(null, parseOnMainThread) // eslint-disable-line @typescript-eslint/no-misused-promises\n );\n\n job.postMessage('process', {\n // @ts-ignore\n input: data,\n options,\n context\n });\n\n const result = await job.result;\n // TODO - what is going on here?\n return await result.result;\n}\n\n/**\n * Handle worker's responses to the main thread\n * @param job\n * @param type\n * @param payload\n */\nasync function onMessage(\n parseOnMainThread: (arrayBuffer: ArrayBuffer, options?: {[key: string]: any}) => Promise<void>,\n job: WorkerJob,\n type: WorkerMessageType,\n payload: WorkerMessagePayload\n) {\n switch (type) {\n case 'done':\n job.done(payload);\n break;\n\n case 'error':\n job.error(new Error(payload.error));\n break;\n\n case 'process':\n // Worker is asking for main thread to parseO\n const {id, input, options} = payload;\n try {\n const result = await parseOnMainThread(input, options);\n job.postMessage('done', {id, result});\n } catch (error) {\n const message = error instanceof Error ? 
error.message : 'unknown error';\n job.postMessage('error', {id, error: message});\n }\n break;\n\n default:\n // eslint-disable-next-line\n console.warn(`parse-with-worker unknown message ${type}`);\n }\n}\n"],"mappings":";;;;;;;;;;AAAA;AAcO,SAASA,kBAAkB,CAACC,MAAc,EAAEC,OAAuB,EAAE;EAC1E,IAAI,CAACC,uBAAU,CAACC,WAAW,EAAE,EAAE;IAC7B,OAAO,KAAK;EACd;;EAGA,IAAI,CAACC,sBAAS,IAAI,EAACH,OAAO,aAAPA,OAAO,eAAPA,OAAO,CAAEI,YAAY,GAAE;IACxC,OAAO,KAAK;EACd;EAEA,OAAOL,MAAM,CAACM,MAAM,KAAIL,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEK,MAAM;AACzC;;AAAC,SAMqBC,eAAe;EAAA;AAAA;AAAA;EAAA,6EAA9B,iBACLP,MAAc,EACdQ,IAAS,EACTP,OAAuB,EACvBQ,OAAuB,EACvBC,iBAA8F;IAAA;IAAA;MAAA;QAAA;UAAA;YAExFC,IAAI,GAAGX,MAAM,CAACY,EAAE;YAChBC,GAAG,GAAG,IAAAC,yBAAY,EAACd,MAAM,EAAEC,OAAO,CAAC;YAEnCc,UAAU,GAAGb,uBAAU,CAACc,aAAa,CAACf,OAAO,CAAC;YAC9CgB,UAAU,GAAGF,UAAU,CAACG,aAAa,CAAC;cAACP,IAAI,EAAJA,IAAI;cAAEE,GAAG,EAAHA;YAAG,CAAC,CAAC;YAKxDZ,OAAO,GAAGkB,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACpB,OAAO,CAAC,CAAC;YAC7CQ,OAAO,GAAGU,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACZ,OAAO,IAAI,CAAC,CAAC,CAAC,CAAC;YAAC;YAAA,OAElCQ,UAAU,CAACK,QAAQ,CACnC,mBAAmB;YAEnBC,SAAS,CAACC,IAAI,CAAC,IAAI,EAAEd,iBAAiB,CAAC,CACxC;UAAA;YAJKe,GAAG;YAMTA,GAAG,CAACC,WAAW,CAAC,SAAS,EAAE;cAEzBC,KAAK,EAAEnB,IAAI;cACXP,OAAO,EAAPA,OAAO;cACPQ,OAAO,EAAPA;YACF,CAAC,CAAC;YAAC;YAAA,OAEkBgB,GAAG,CAACG,MAAM;UAAA;YAAzBA,MAAM;YAAA;YAAA,OAECA,MAAM,CAACA,MAAM;UAAA;YAAA;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CAC3B;EAAA;AAAA;AAAA,SAQcL,SAAS;EAAA;AAAA;AAAA;EAAA,uEAAxB,kBACEb,iBAA8F,EAC9Fe,GAAc,EACdI,IAAuB,EACvBC,OAA6B;IAAA;IAAA;MAAA;QAAA;UAAA;YAAA,eAErBD,IAAI;YAAA,kCACL,MAAM,wBAIN,OAAO,wBAIP,SAAS;YAAA;UAAA;YAPZJ,GAAG,CAACM,IAAI,CAACD,OAAO,CAAC;YAAC;UAAA;YAIlBL,GAAG,CAACO,KAAK,CAAC,IAAIC,KAAK,CAACH,OAAO,CAACE,KAAK,CAAC,CAAC;YAAC;UAAA;YAK7BpB,EAAE,GAAoBkB,OAAO,CAA7BlB,EAAE,EAAEe,KAAK,GAAaG,OAAO,CAAzBH,KAAK,EAAE1B,QAAO,GAAI6B,OAAO,CAAlB7B,OAAO;YAAA;YAAA;YAAA,OAEFS,iBAAiB,CAACiB,KAAK,EAAE1B,QAAO,CAAC;UAAA;YAAhD2B,MAAM;YACZH,GAAG,CAACC,WAAW,CAAC,MAAM,EAAE;cAACd,EAAE,EAAFA,EAAE;cAAEgB,MAAM,EAANA;YAAM,CAAC,CAAC;YAAC;YAAA;UAAA;YAAA;YAAA;YAEhCM,OAAO,GAAG,wBAAiBD,KAAK,GAAG,aAAMC,OAAO,GAAG,eAAe;YACxET,GAAG,CAACC,WAAW,CAAC,OAAO,EAAE;cAACd,EAAE,EAAFA,EAAE;cAAEoB,KAAK,EAAEE;YAAO,CAAC,CAAC;UAAC;YAAA;UAAA;YAMjDC,OAAO,CAACC,IAAI,6CAAsCP,IAAI,EAAG;UAAC;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CAE/D;EAAA;AAAA"}
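
The embedded source documents the main-thread half of the protocol: canParseWithWorker gates on WorkerFarm support plus the worker/_nodeWorkers options, and parseWithWorker starts a 'process-on-worker' job, posts the data, and resolves with the worker's result. A sketch of how calling code (for example @loaders.gl/core) might use the pair; the loader object and the parseOnMainThread callback are assumed to come from the surrounding parsing pipeline:

  import {canParseWithWorker, parseWithWorker} from '@loaders.gl/loader-utils';
  import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';

  async function parseMaybeOnWorker(
    loader: Loader,
    arrayBuffer: ArrayBuffer,
    options: LoaderOptions,
    parseOnMainThread: (data: ArrayBuffer, options?: {[key: string]: any}) => Promise<any>
  ) {
    if (canParseWithWorker(loader, options)) {
      // Options and context are JSON-serialized before being posted to the worker.
      return await parseWithWorker(loader, arrayBuffer, options, undefined, parseOnMainThread);
    }
    return await parseOnMainThread(arrayBuffer, options);
  }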
package/dist/es5/types.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"types.js","names":[],"sources":["../../src/types.ts"],"sourcesContent":["// Typed arrays\n\nexport type TypedIntArray =\n | Int8Array\n | Uint8Array\n | Uint8ClampedArray\n | Int16Array\n | Uint16Array\n | Int32Array\n | Uint32Array\n | Int32Array\n | Uint32Array;\n\nexport type TypedFloatArray = Uint16Array | Float32Array | Float64Array;\n\nexport type TypedArray = TypedIntArray | TypedFloatArray;\n\nexport type NumericArray = Array<number> | TypedIntArray | TypedFloatArray;\n\ntype FetchLike = (url: string, options?: RequestInit) => Promise<Response>;\n\n/**\n * Core Loader Options\n */\nexport type LoaderOptions = {\n /** fetch options or a custom fetch function */\n fetch?: typeof fetch | FetchLike | RequestInit | null;\n /** Do not throw on errors */\n nothrow?: boolean;\n\n /** loader selection, search first for supplied mimeType */\n mimeType?: string;\n /** loader selection, provide fallback mimeType is server does not provide */\n fallbackMimeType?: string;\n /** loader selection, avoid searching registered loaders */\n ignoreRegisteredLoaders?: boolean;\n\n // general\n /** Experimental: Supply a logger to the parser */\n log?: any;\n\n // batched parsing\n\n /** Size of each batch. `auto` matches batches to size of incoming chunks */\n batchSize?: number | 'auto';\n /** Minimal amount of time between batches */\n batchDebounceMs?: number;\n /** Stop loading after a given number of rows (compare SQL limit clause) */\n limit?: 0;\n /** Experimental: Stop loading after reaching */\n _limitMB?: 0;\n /** Generate metadata batches */\n metadata?: boolean;\n /** Transforms to run on incoming batches */\n transforms?: TransformBatches[];\n\n // workers\n\n /** CDN load workers from */\n CDN?: string;\n /** Set to `false` to disable workers */\n worker?: boolean;\n /** Number of concurrent workers (per loader) on desktop browser */\n maxConcurrency?: number;\n /** Number of concurrent workers (per loader) on mobile browsers */\n maxMobileConcurrency?: number;\n /** Set to `false` to prevent reuse workers */\n reuseWorkers?: boolean;\n /** Whether to use workers under Node.js (experimental) */\n _nodeWorkers?: boolean;\n /** set to 'test' to run local worker */\n _workerType?: string;\n\n /** @deprecated `options.batchType` removed, Use `options.<loader>.type` instead */\n batchType?: 'row' | 'columnar' | 'arrow';\n /** @deprecated `options.throw removed`, Use `options.nothrow` instead */\n throws?: boolean;\n /** @deprecated `options.dataType` no longer used */\n dataType?: any;\n /** @deprecated `options.uri` no longer used */\n uri?: any;\n /** @deprecated `options.method` removed. Use `options.fetch.method` */\n method?: any;\n /** @deprecated `options.headers` removed. Use `options.fetch.headers` */\n headers?: any;\n /** @deprecated `options.body` removed. Use `options.fetch.body` */\n body?: any;\n /** @deprecated `options.mode` removed. Use `options.fetch.mode` */\n mode?: any;\n /** @deprecated `options.credentials` removed. Use `options.fetch.credentials` */\n credentials?: any;\n /** @deprecated `options.cache` removed. Use `options.fetch.cache` */\n cache?: any;\n /** @deprecated `options.redirect` removed. Use `options.fetch.redirect` */\n redirect?: any;\n /** @deprecated `options.referrer` removed. Use `options.fetch.referrer` */\n referrer?: any;\n /** @deprecated `options.referrerPolicy` removed. Use `options.fetch.referrerPolicy` */\n referrerPolicy?: any;\n /** @deprecated `options.integrity` removed. 
Use `options.fetch.integrity` */\n integrity?: any;\n /** @deprecated `options.keepalive` removed. Use `options.fetch.keepalive` */\n keepalive?: any;\n /** @deprecated `options.signal` removed. Use `options.fetch.signal` */\n signal?: any;\n\n // Accept other keys (loader options objects, e.g. `options.csv`, `options.json` ...)\n [loaderId: string]: any;\n};\n\ntype PreloadOptions = {\n [key: string]: any;\n};\n\n/**\n * A worker loader definition that can be used with `@loaders.gl/core` functions\n */\nexport type Loader = {\n // Worker\n name: string;\n id: string;\n module: string;\n version: string;\n worker?: string | boolean;\n options: object;\n deprecatedOptions?: object;\n // end Worker\n\n category?: string;\n extensions: string[];\n mimeTypes: string[];\n\n binary?: boolean;\n text?: boolean;\n\n tests?: (((ArrayBuffer) => boolean) | ArrayBuffer | string)[];\n\n // TODO - deprecated\n supported?: boolean;\n testText?: (string) => boolean;\n};\n\n/**\n * A \"bundled\" loader definition that can be used with `@loaders.gl/core` functions\n * If a worker loader is supported it will also be supported.\n */\nexport type LoaderWithParser = Loader & {\n // TODO - deprecated\n testText?: (string) => boolean;\n\n parse: Parse;\n preload?: Preload;\n parseSync?: ParseSync;\n parseText?: ParseText;\n parseTextSync?: ParseTextSync;\n parseInBatches?: ParseInBatches;\n parseFileInBatches?: ParseFileInBatches;\n};\n\n/** Options for writers */\nexport type WriterOptions = {\n /** worker source. If is set will be used instead of loading worker from the Internet */\n souce?: string | null;\n /** writer-specific options */\n [writerId: string]: any;\n};\n\n/**\n * A writer definition that can be used with `@loaders.gl/core` functions\n */\nexport type Writer = {\n name: string;\n\n id: string;\n module: string;\n version: string;\n worker?: string | boolean;\n\n options: WriterOptions;\n deprecatedOptions?: object;\n\n // TODO - are these are needed?\n binary?: boolean;\n extensions?: string[];\n mimeTypes?: string[];\n text?: boolean;\n\n encode?: Encode;\n encodeSync?: EncodeSync;\n encodeInBatches?: EncodeInBatches;\n encodeURLtoURL?: EncodeURLtoURL;\n encodeText?: EncodeText;\n};\n\nexport type LoaderContext = {\n loaders?: Loader[] | null;\n url?: string;\n\n fetch: typeof fetch;\n response?: Response;\n parse: (\n arrayBuffer: ArrayBuffer,\n loaders?,\n options?: LoaderOptions,\n context?: LoaderContext\n ) => Promise<any>;\n parseSync?: (\n arrayBuffer: ArrayBuffer,\n loaders?,\n options?: LoaderOptions,\n context?: LoaderContext\n ) => any;\n parseInBatches?: (\n iterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n loaders?,\n options?: LoaderOptions,\n context?: LoaderContext\n ) => AsyncIterable<any> | Promise<AsyncIterable<any>>;\n};\n\ntype Parse = (\n arrayBuffer: ArrayBuffer,\n options?: LoaderOptions,\n context?: LoaderContext\n) => Promise<any>;\ntype ParseSync = (\n arrayBuffer: ArrayBuffer,\n options?: LoaderOptions,\n context?: LoaderContext\n) => any;\ntype ParseText = (text: string, options?: LoaderOptions) => Promise<any>;\ntype ParseTextSync = (text: string, options?: LoaderOptions) => any;\ntype ParseInBatches = (\n iterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>,\n options?: LoaderOptions,\n context?: LoaderContext\n) => AsyncIterable<any>;\ntype ParseFileInBatches = (\n file: Blob,\n options?: LoaderOptions,\n context?: LoaderContext\n) => AsyncIterable<any>;\n\ntype Encode = (data: any, options?: WriterOptions) => Promise<ArrayBuffer>;\ntype 
EncodeSync = (data: any, options?: WriterOptions) => ArrayBuffer;\n// TODO\ntype EncodeText = Function;\ntype EncodeInBatches = Function;\ntype EncodeURLtoURL = (\n inputUrl: string,\n outputUrl: string,\n options?: WriterOptions\n) => Promise<string>;\ntype Preload = (url: string, options?: PreloadOptions) => any;\n\nexport type TransformBatches = (\n asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>\n) => AsyncIterable<ArrayBuffer>;\n\n/** Types that can be synchronously parsed */\nexport type SyncDataType = string | ArrayBuffer; // TODO File | Blob can be read synchronously...\n\n/** Types that can be parsed async */\nexport type DataType =\n | string\n | ArrayBuffer\n | File\n | Blob\n | Response\n | ReadableStream\n | Iterable<ArrayBuffer>\n | AsyncIterable<ArrayBuffer>;\n\n/** Types that can be parsed in batches */\nexport type BatchableDataType =\n | DataType\n | Iterable<ArrayBuffer>\n | AsyncIterable<ArrayBuffer>\n | Promise<AsyncIterable<ArrayBuffer>>;\n\n/**\n * A FileSystem interface can encapsulate a FileList, a ZipFile, a GoogleDrive etc.\n */\nexport interface IFileSystem {\n /**\n * Return a list of file names\n * @param dirname directory name. file system root directory if omitted\n */\n readdir(dirname?: string, options?: {recursive?: boolean}): Promise<string[]>;\n\n /**\n * Gets information from a local file from the filesystem\n * @param filename file name to stat\n * @param options currently unused\n * @throws if filename is not in local filesystem\n */\n stat(filename: string, options?: object): Promise<{size: number}>;\n\n /**\n * Fetches a local file from the filesystem (or a URL)\n * @param filename\n * @param options\n */\n fetch(filename: string, options?: object): Promise<Response>;\n}\n\ntype ReadOptions = {buffer?: ArrayBuffer; offset?: number; length?: number; position?: number};\nexport interface IRandomAccessReadFileSystem extends IFileSystem {\n open(path: string, flags, mode?): Promise<any>;\n close(fd: any): Promise<void>;\n fstat(fd: any): Promise<object>;\n read(fd: any, options?: ReadOptions): Promise<{bytesRead: number; buffer: Buffer}>;\n}\n"],"mappings":""}
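
The types.ts source above documents the shared LoaderOptions shape; a small sketch of an options object built from those fields (the json key is a hypothetical loader-specific bucket allowed by the index signature):

  import type {LoaderOptions} from '@loaders.gl/loader-utils';

  const options: LoaderOptions = {
    worker: false,        // disable worker parsing
    batchSize: 'auto',    // match batches to the size of incoming chunks
    nothrow: true,        // do not throw on errors
    json: {}              // hypothetical loader-specific options bucket
  };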
package/dist/es5/workers/json-worker.js
CHANGED
@@ -1,8 +1,6 @@
 "use strict";
 
 var _createLoaderWorker = require("../lib/worker-loader-utils/create-loader-worker");
-
 var _jsonLoader = require("../json-loader");
-
 (0, _createLoaderWorker.createLoaderWorker)(_jsonLoader.JSONLoader);
 //# sourceMappingURL=json-worker.js.map
package/dist/es5/workers/json-worker.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"json-worker.js","names":["createLoaderWorker","JSONLoader"],"sources":["../../../src/workers/json-worker.ts"],"sourcesContent":["import {createLoaderWorker} from '../lib/worker-loader-utils/create-loader-worker';\nimport {JSONLoader} from '../json-loader';\n\ncreateLoaderWorker(JSONLoader);\n"],"mappings":";;AAAA;AACA;AAEA,IAAAA,sCAAkB,EAACC,sBAAU,CAAC"}
package/dist/esm/index.js
CHANGED
@@ -1,27 +1,41 @@
+
+
 export { assert } from './lib/env-utils/assert';
 export { isBrowser, isWorker, nodeVersion, self, window, global, document } from './lib/env-utils/globals';
+
 export { createLoaderWorker } from './lib/worker-loader-utils/create-loader-worker';
 export { parseWithWorker, canParseWithWorker } from './lib/worker-loader-utils/parse-with-worker';
 export { canEncodeWithWorker } from './lib/worker-loader-utils/encode-with-worker';
+
 export { parseJSON } from './lib/parser-utils/parse-json';
+
 export { toArrayBuffer, sliceArrayBuffer, concatenateArrayBuffers, concatenateTypedArrays, compareArrayBuffers } from './lib/binary-utils/array-buffer-utils';
 export { padToNBytes, copyToArray, copyArrayBuffer } from './lib/binary-utils/memory-copy-utils';
 export { copyPaddedArrayBufferToDataView, copyPaddedStringToDataView } from './lib/binary-utils/binary-copy-utils';
 export { padStringToByteAlignment, copyStringToDataView, copyBinaryToDataView } from './lib/binary-utils/encode-utils';
 export { getFirstCharacters, getMagicString } from './lib/binary-utils/get-first-characters';
+
 export { makeTextEncoderIterator, makeTextDecoderIterator, makeLineIterator, makeNumberedLineIterator } from './lib/iterators/text-iterators';
 export { forEach, concatenateArrayBuffersAsync } from './lib/iterators/async-iteration';
+
 export { default as RequestScheduler } from './lib/request-utils/request-scheduler';
+
 export { setPathPrefix, getPathPrefix, resolvePath } from './lib/path-utils/file-aliases';
 export { addAliases as _addAliases } from './lib/path-utils/file-aliases';
+
 export { JSONLoader } from './json-loader';
+
 import * as path from './lib/path-utils/path';
 export { path };
+
 export { isBuffer, toBuffer, bufferToArrayBuffer } from './lib/binary-utils/buffer-utils';
+
 import * as util from './lib/node/util';
 export { util };
 export { promisify } from './lib/node/util';
+
 import * as fs from './lib/node/fs';
 export { fs };
+
 export { default as _NodeFileSystem } from './lib/filesystems/node-filesystem';
 //# sourceMappingURL=index.js.map
package/dist/esm/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"index.js","names":["assert","isBrowser","isWorker","nodeVersion","self","window","global","document","createLoaderWorker","parseWithWorker","canParseWithWorker","canEncodeWithWorker","parseJSON","toArrayBuffer","sliceArrayBuffer","concatenateArrayBuffers","concatenateTypedArrays","compareArrayBuffers","padToNBytes","copyToArray","copyArrayBuffer","copyPaddedArrayBufferToDataView","copyPaddedStringToDataView","padStringToByteAlignment","copyStringToDataView","copyBinaryToDataView","getFirstCharacters","getMagicString","makeTextEncoderIterator","makeTextDecoderIterator","makeLineIterator","makeNumberedLineIterator","forEach","concatenateArrayBuffersAsync","default","RequestScheduler","setPathPrefix","getPathPrefix","resolvePath","addAliases","_addAliases","JSONLoader","path","isBuffer","toBuffer","bufferToArrayBuffer","util","promisify","fs","_NodeFileSystem"],"sources":["../../src/index.ts"],"sourcesContent":["// TYPES\nexport type {\n Loader,\n LoaderWithParser,\n LoaderContext,\n LoaderOptions,\n Writer,\n WriterOptions,\n DataType,\n SyncDataType,\n BatchableDataType,\n IFileSystem,\n IRandomAccessReadFileSystem\n} from './types';\n\n// GENERAL UTILS\nexport {assert} from './lib/env-utils/assert';\nexport {\n isBrowser,\n isWorker,\n nodeVersion,\n self,\n window,\n global,\n document\n} from './lib/env-utils/globals';\n\n// LOADERS.GL-SPECIFIC WORKER UTILS\nexport {createLoaderWorker} from './lib/worker-loader-utils/create-loader-worker';\nexport {parseWithWorker, canParseWithWorker} from './lib/worker-loader-utils/parse-with-worker';\nexport {canEncodeWithWorker} from './lib/worker-loader-utils/encode-with-worker';\n\n// PARSER UTILS\nexport {parseJSON} from './lib/parser-utils/parse-json';\n\n// MEMORY COPY UTILS\nexport {\n toArrayBuffer,\n sliceArrayBuffer,\n concatenateArrayBuffers,\n concatenateTypedArrays,\n compareArrayBuffers\n} from './lib/binary-utils/array-buffer-utils';\nexport {padToNBytes, copyToArray, copyArrayBuffer} from './lib/binary-utils/memory-copy-utils';\nexport {\n copyPaddedArrayBufferToDataView,\n copyPaddedStringToDataView\n} from './lib/binary-utils/binary-copy-utils';\nexport {\n padStringToByteAlignment,\n copyStringToDataView,\n copyBinaryToDataView\n} from './lib/binary-utils/encode-utils';\nexport {getFirstCharacters, getMagicString} from './lib/binary-utils/get-first-characters';\n\n// ITERATOR UTILS\nexport {\n makeTextEncoderIterator,\n makeTextDecoderIterator,\n makeLineIterator,\n makeNumberedLineIterator\n} from './lib/iterators/text-iterators';\nexport {forEach, concatenateArrayBuffersAsync} from './lib/iterators/async-iteration';\n\n// REQUEST UTILS\nexport {default as RequestScheduler} from './lib/request-utils/request-scheduler';\n\n// PATH HELPERS\nexport {setPathPrefix, getPathPrefix, resolvePath} from './lib/path-utils/file-aliases';\nexport {addAliases as _addAliases} from './lib/path-utils/file-aliases';\n\n// MICRO LOADERS\nexport {JSONLoader} from './json-loader';\n\n// NODE support\n\n// Node.js emulation (can be used in browser)\n\n// `path` replacement (avoids bundling big path polyfill)\nimport * as path from './lib/path-utils/path';\nexport {path};\n\n// Avoid direct use of `Buffer` which pulls in 50KB polyfill\nexport {isBuffer, toBuffer, bufferToArrayBuffer} from './lib/binary-utils/buffer-utils';\n\n// Note.js wrappers (can be safely imported, but not used in browser)\n\n// Use instead of importing 'util'\nimport * as util from './lib/node/util';\nexport {util};\n// TODO - remove\nexport {promisify} from 
'./lib/node/util';\n\n// Use instead of importing 'fs';`\nimport * as fs from './lib/node/fs';\nexport {fs};\n\n// EXPERIMENTAL\nexport {default as _NodeFileSystem} from './lib/filesystems/node-filesystem';\n"],"mappings":";;AAgBA,SAAQA,MAAM,QAAO,wBAAwB;AAC7C,SACEC,SAAS,EACTC,QAAQ,EACRC,WAAW,EACXC,IAAI,EACJC,MAAM,EACNC,MAAM,EACNC,QAAQ,QACH,yBAAyB;;AAGhC,SAAQC,kBAAkB,QAAO,gDAAgD;AACjF,SAAQC,eAAe,EAAEC,kBAAkB,QAAO,6CAA6C;AAC/F,SAAQC,mBAAmB,QAAO,8CAA8C;;AAGhF,SAAQC,SAAS,QAAO,+BAA+B;;AAGvD,SACEC,aAAa,EACbC,gBAAgB,EAChBC,uBAAuB,EACvBC,sBAAsB,EACtBC,mBAAmB,QACd,uCAAuC;AAC9C,SAAQC,WAAW,EAAEC,WAAW,EAAEC,eAAe,QAAO,sCAAsC;AAC9F,SACEC,+BAA+B,EAC/BC,0BAA0B,QACrB,sCAAsC;AAC7C,SACEC,wBAAwB,EACxBC,oBAAoB,EACpBC,oBAAoB,QACf,iCAAiC;AACxC,SAAQC,kBAAkB,EAAEC,cAAc,QAAO,yCAAyC;;AAG1F,SACEC,uBAAuB,EACvBC,uBAAuB,EACvBC,gBAAgB,EAChBC,wBAAwB,QACnB,gCAAgC;AACvC,SAAQC,OAAO,EAAEC,4BAA4B,QAAO,iCAAiC;;AAGrF,SAAQC,OAAO,IAAIC,gBAAgB,QAAO,uCAAuC;;AAGjF,SAAQC,aAAa,EAAEC,aAAa,EAAEC,WAAW,QAAO,+BAA+B;AACvF,SAAQC,UAAU,IAAIC,WAAW,QAAO,+BAA+B;;AAGvE,SAAQC,UAAU,QAAO,eAAe;;AAOxC,OAAO,KAAKC,IAAI,MAAM,uBAAuB;AAC7C,SAAQA,IAAI;;AAGZ,SAAQC,QAAQ,EAAEC,QAAQ,EAAEC,mBAAmB,QAAO,iCAAiC;;AAKvF,OAAO,KAAKC,IAAI,MAAM,iBAAiB;AACvC,SAAQA,IAAI;AAEZ,SAAQC,SAAS,QAAO,iBAAiB;;AAGzC,OAAO,KAAKC,EAAE,MAAM,eAAe;AACnC,SAAQA,EAAE;;AAGV,SAAQd,OAAO,IAAIe,eAAe,QAAO,mCAAmC"}
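
The export list above is the package's public ESM surface; a short consumption sketch (per the comments in the embedded source, the namespace exports stand in for Node built-ins so browser bundles can avoid the large polyfills):

  import {JSONLoader, isBrowser, nodeVersion, path, util, fs} from '@loaders.gl/loader-utils';

  // The Node-style namespaces can be imported safely in the browser,
  // even though fs is only usable under Node.js.
  console.log(isBrowser, nodeVersion, JSONLoader.name);
  console.log(Object.keys(path), Object.keys(util), Object.keys(fs));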
package/dist/esm/json-loader.js
CHANGED
@@ -1,4 +1,6 @@
-
+
+const VERSION = typeof "3.3.0-alpha.6" !== 'undefined' ? "3.3.0-alpha.6" : 'latest';
+
 export const JSONLoader = {
 name: 'JSON',
 id: 'json',
@@ -16,6 +18,5 @@ export const JSONLoader = {
 function parseTextSync(text) {
 return JSON.parse(text);
 }
-
 export const _typecheckJSONLoader = JSONLoader;
 //# sourceMappingURL=json-loader.js.map
package/dist/esm/json-loader.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"json-loader.js","names":["VERSION","JSONLoader","name","id","module","version","extensions","mimeTypes","category","text","parseTextSync","parse","arrayBuffer","TextDecoder","decode","options","JSON","_typecheckJSONLoader"],"sources":["../../src/json-loader.ts"],"sourcesContent":["import type {LoaderWithParser} from './types';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * A JSON Micro loader (minimal bundle size)\n * Alternative to `@loaders.gl/json`\n */\nexport const JSONLoader = {\n name: 'JSON',\n id: 'json',\n module: 'json',\n version: VERSION,\n extensions: ['json', 'geojson'],\n mimeTypes: ['application/json'],\n category: 'json',\n text: true,\n parseTextSync,\n parse: async (arrayBuffer) => parseTextSync(new TextDecoder().decode(arrayBuffer)),\n options: {}\n};\n\n// TODO - deprecated\nfunction parseTextSync(text) {\n return JSON.parse(text);\n}\n\nexport const _typecheckJSONLoader: LoaderWithParser = JSONLoader;\n"],"mappings":";AAIA,MAAMA,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;;AAM3E,OAAO,MAAMC,UAAU,GAAG;EACxBC,IAAI,EAAE,MAAM;EACZC,EAAE,EAAE,MAAM;EACVC,MAAM,EAAE,MAAM;EACdC,OAAO,EAAEL,OAAO;EAChBM,UAAU,EAAE,CAAC,MAAM,EAAE,SAAS,CAAC;EAC/BC,SAAS,EAAE,CAAC,kBAAkB,CAAC;EAC/BC,QAAQ,EAAE,MAAM;EAChBC,IAAI,EAAE,IAAI;EACVC,aAAa;EACbC,KAAK,EAAE,MAAOC,WAAW,IAAKF,aAAa,CAAC,IAAIG,WAAW,EAAE,CAACC,MAAM,CAACF,WAAW,CAAC,CAAC;EAClFG,OAAO,EAAE,CAAC;AACZ,CAAC;;AAGD,SAASL,aAAa,CAACD,IAAI,EAAE;EAC3B,OAAOO,IAAI,CAACL,KAAK,CAACF,IAAI,CAAC;AACzB;AAEA,OAAO,MAAMQ,oBAAsC,GAAGhB,UAAU"}
package/dist/esm/lib/binary-utils/array-buffer-utils.js
CHANGED
@@ -1,9 +1,9 @@
 import { isBuffer, bufferToArrayBuffer } from './buffer-utils';
+
 export function toArrayBuffer(data) {
 if (isBuffer(data)) {
 return bufferToArrayBuffer(data);
 }
-
 if (data instanceof ArrayBuffer) {
 return data;
 }
@@ -12,10 +12,8 @@ export function toArrayBuffer(data) {
 if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {
 return data.buffer;
 }
-
 return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
 }
-
 if (typeof data === 'string') {
 const text = data;
 const uint8Array = new TextEncoder().encode(text);
@@ -25,33 +23,35 @@ export function toArrayBuffer(data) {
 if (data && typeof data === 'object' && data._toArrayBuffer) {
 return data._toArrayBuffer();
 }
-
 throw new Error('toArrayBuffer');
 }
+
 export function compareArrayBuffers(arrayBuffer1, arrayBuffer2, byteLength) {
 byteLength = byteLength || arrayBuffer1.byteLength;
-
 if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {
 return false;
 }
-
 const array1 = new Uint8Array(arrayBuffer1);
 const array2 = new Uint8Array(arrayBuffer2);
-
 for (let i = 0; i < array1.length; ++i) {
 if (array1[i] !== array2[i]) {
 return false;
 }
 }
-
 return true;
 }
-
+
+export function concatenateArrayBuffers() {
+for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) {
+sources[_key] = arguments[_key];
+}
 const sourceArrays = sources.map(source2 => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2);
+
 const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
+
 const result = new Uint8Array(byteLength);
-let offset = 0;
 
+let offset = 0;
 for (const sourceArray of sourceArrays) {
 result.set(sourceArray, offset);
 offset += sourceArray.byteLength;
@@ -59,25 +59,26 @@ export function concatenateArrayBuffers(...sources) {
 
 return result.buffer;
 }
-
+
+export function concatenateTypedArrays() {
+for (var _len2 = arguments.length, typedArrays = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
+typedArrays[_key2] = arguments[_key2];
+}
 const arrays = typedArrays;
 const TypedArrayConstructor = arrays && arrays.length > 1 && arrays[0].constructor || null;
-
 if (!TypedArrayConstructor) {
 throw new Error('"concatenateTypedArrays" - incorrect quantity of arguments or arguments have incompatible data types');
 }
-
 const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);
 const result = new TypedArrayConstructor(sumLength);
 let offset = 0;
-
 for (const array of arrays) {
 result.set(array, offset);
 offset += array.length;
 }
-
 return result;
 }
+
 export function sliceArrayBuffer(arrayBuffer, byteOffset, byteLength) {
 const subArray = byteLength !== undefined ? new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength) : new Uint8Array(arrayBuffer).subarray(byteOffset);
 const arrayCopy = new Uint8Array(subArray);
package/dist/esm/lib/binary-utils/array-buffer-utils.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"array-buffer-utils.js","names":["isBuffer","bufferToArrayBuffer","toArrayBuffer","data","ArrayBuffer","isView","byteOffset","byteLength","buffer","slice","text","uint8Array","TextEncoder","encode","_toArrayBuffer","Error","compareArrayBuffers","arrayBuffer1","arrayBuffer2","array1","Uint8Array","array2","i","length","concatenateArrayBuffers","sources","sourceArrays","map","source2","reduce","typedArray","result","offset","sourceArray","set","concatenateTypedArrays","typedArrays","arrays","TypedArrayConstructor","constructor","sumLength","acc","value","array","sliceArrayBuffer","arrayBuffer","subArray","undefined","subarray","arrayCopy"],"sources":["../../../../src/lib/binary-utils/array-buffer-utils.ts"],"sourcesContent":["import {TypedArray} from '../../types';\nimport {isBuffer, bufferToArrayBuffer} from './buffer-utils';\n\n/**\n * Convert an object to an array buffer\n */\nexport function toArrayBuffer(data: any): ArrayBuffer {\n // Note: Should be called first, Buffers can trigger other detections below\n if (isBuffer(data)) {\n return bufferToArrayBuffer(data);\n }\n\n if (data instanceof ArrayBuffer) {\n return data;\n }\n\n // Careful - Node Buffers look like Uint8Arrays (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {\n return data.buffer;\n }\n return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n if (data && typeof data === 'object' && data._toArrayBuffer) {\n return data._toArrayBuffer();\n }\n\n throw new Error('toArrayBuffer');\n}\n\n/**\n * compare two binary arrays for equality\n * @param {ArrayBuffer} a\n * @param {ArrayBuffer} b\n * @param {number} byteLength\n */\nexport function compareArrayBuffers(\n arrayBuffer1: ArrayBuffer,\n arrayBuffer2: ArrayBuffer,\n byteLength?: number\n): boolean {\n byteLength = byteLength || arrayBuffer1.byteLength;\n if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {\n return false;\n }\n const array1 = new Uint8Array(arrayBuffer1);\n const array2 = new Uint8Array(arrayBuffer2);\n for (let i = 0; i < array1.length; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n */\nexport function concatenateArrayBuffers(...sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * Concatenate arbitrary count of typed arrays\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays\n * @param {...*} arrays - list of arrays. 
All arrays should be the same type\n * @return A concatenated TypedArray\n */\nexport function concatenateTypedArrays<T>(...typedArrays: T[]): T {\n // @ts-ignore\n const arrays = typedArrays as TypedArray[];\n // @ts-ignore\n const TypedArrayConstructor = (arrays && arrays.length > 1 && arrays[0].constructor) || null;\n if (!TypedArrayConstructor) {\n throw new Error(\n '\"concatenateTypedArrays\" - incorrect quantity of arguments or arguments have incompatible data types'\n );\n }\n\n const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);\n // @ts-ignore typescript does not like dynamic constructors\n const result = new TypedArrayConstructor(sumLength);\n let offset = 0;\n for (const array of arrays) {\n result.set(array, offset);\n offset += array.length;\n }\n return result;\n}\n\n/**\n * Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0\n * @param arrayBuffer\n * @param byteOffset\n * @param byteLength\n */\nexport function sliceArrayBuffer(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength?: number\n): ArrayBuffer {\n const subArray =\n byteLength !== undefined\n ? new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength)\n : new Uint8Array(arrayBuffer).subarray(byteOffset);\n const arrayCopy = new Uint8Array(subArray);\n return arrayCopy.buffer;\n}\n"],"mappings":"AACA,SAAQA,QAAQ,EAAEC,mBAAmB,QAAO,gBAAgB;;AAK5D,OAAO,SAASC,aAAa,CAACC,IAAS,EAAe;EAEpD,IAAIH,QAAQ,CAACG,IAAI,CAAC,EAAE;IAClB,OAAOF,mBAAmB,CAACE,IAAI,CAAC;EAClC;EAEA,IAAIA,IAAI,YAAYC,WAAW,EAAE;IAC/B,OAAOD,IAAI;EACb;;EAGA,IAAIC,WAAW,CAACC,MAAM,CAACF,IAAI,CAAC,EAAE;IAC5B,IAAIA,IAAI,CAACG,UAAU,KAAK,CAAC,IAAIH,IAAI,CAACI,UAAU,KAAKJ,IAAI,CAACK,MAAM,CAACD,UAAU,EAAE;MACvE,OAAOJ,IAAI,CAACK,MAAM;IACpB;IACA,OAAOL,IAAI,CAACK,MAAM,CAACC,KAAK,CAACN,IAAI,CAACG,UAAU,EAAEH,IAAI,CAACG,UAAU,GAAGH,IAAI,CAACI,UAAU,CAAC;EAC9E;EAEA,IAAI,OAAOJ,IAAI,KAAK,QAAQ,EAAE;IAC5B,MAAMO,IAAI,GAAGP,IAAI;IACjB,MAAMQ,UAAU,GAAG,IAAIC,WAAW,EAAE,CAACC,MAAM,CAACH,IAAI,CAAC;IACjD,OAAOC,UAAU,CAACH,MAAM;EAC1B;;EAGA,IAAIL,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAIA,IAAI,CAACW,cAAc,EAAE;IAC3D,OAAOX,IAAI,CAACW,cAAc,EAAE;EAC9B;EAEA,MAAM,IAAIC,KAAK,CAAC,eAAe,CAAC;AAClC;;AAQA,OAAO,SAASC,mBAAmB,CACjCC,YAAyB,EACzBC,YAAyB,EACzBX,UAAmB,EACV;EACTA,UAAU,GAAGA,UAAU,IAAIU,YAAY,CAACV,UAAU;EAClD,IAAIU,YAAY,CAACV,UAAU,GAAGA,UAAU,IAAIW,YAAY,CAACX,UAAU,GAAGA,UAAU,EAAE;IAChF,OAAO,KAAK;EACd;EACA,MAAMY,MAAM,GAAG,IAAIC,UAAU,CAACH,YAAY,CAAC;EAC3C,MAAMI,MAAM,GAAG,IAAID,UAAU,CAACF,YAAY,CAAC;EAC3C,KAAK,IAAII,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,MAAM,CAACI,MAAM,EAAE,EAAED,CAAC,EAAE;IACtC,IAAIH,MAAM,CAACG,CAAC,CAAC,KAAKD,MAAM,CAACC,CAAC,CAAC,EAAE;MAC3B,OAAO,KAAK;IACd;EACF;EACA,OAAO,IAAI;AACb;;AAMA,OAAO,SAASE,uBAAuB,GAAwD;EAAA,kCAApDC,OAAO;IAAPA,OAAO;EAAA;EAEhD,MAAMC,YAAY,GAAGD,OAAO,CAACE,GAAG,CAAEC,OAAO,IACvCA,OAAO,YAAYxB,WAAW,GAAG,IAAIgB,UAAU,CAACQ,OAAO,CAAC,GAAGA,OAAO,CACnE;;EAGD,MAAMrB,UAAU,GAAGmB,YAAY,CAACG,MAAM,CAAC,CAACN,MAAM,EAAEO,UAAU,KAAKP,MAAM,GAAGO,UAAU,CAACvB,UAAU,EAAE,CAAC,CAAC;;EAGjG,MAAMwB,MAAM,GAAG,IAAIX,UAAU,CAACb,UAAU,CAAC;;EAGzC,IAAIyB,MAAM,GAAG,CAAC;EACd,KAAK,MAAMC,WAAW,IAAIP,YAAY,EAAE;IACtCK,MAAM,CAACG,GAAG,CAACD,WAAW,EAAED,MAAM,CAAC;IAC/BA,MAAM,IAAIC,WAAW,CAAC1B,UAAU;EAClC;;EAGA,OAAOwB,MAAM,CAACvB,MAAM;AACtB;;AAQA,OAAO,SAAS2B,sBAAsB,GAA4B;EAAA,mCAArBC,WAAW;IAAXA,WAAW;EAAA;EAEtD,MAAMC,MAAM,GAAGD,WAA2B;EAE1C,MAAME,qBAAqB,GAAID,MAAM,IAAIA,MAAM,CAACd,MAAM,GAAG,CAAC,IAAIc,MAAM,CAAC,CAAC,CAAC,CAACE,WAAW,IAAK,IAAI;EAC5F,IAAI,CAACD,qBAAqB,EAAE;IAC1B,MAAM,IAAIvB,KAAK,CACb,sGAAsG,CACvG;EACH;EAEA,MAAMyB,SAAS,GAAGH,MAAM,CAACR,MAAM,CAA
C,CAACY,GAAG,EAAEC,KAAK,KAAKD,GAAG,GAAGC,KAAK,CAACnB,MAAM,EAAE,CAAC,CAAC;EAEtE,MAAMQ,MAAM,GAAG,IAAIO,qBAAqB,CAACE,SAAS,CAAC;EACnD,IAAIR,MAAM,GAAG,CAAC;EACd,KAAK,MAAMW,KAAK,IAAIN,MAAM,EAAE;IAC1BN,MAAM,CAACG,GAAG,CAACS,KAAK,EAAEX,MAAM,CAAC;IACzBA,MAAM,IAAIW,KAAK,CAACpB,MAAM;EACxB;EACA,OAAOQ,MAAM;AACf;;AAQA,OAAO,SAASa,gBAAgB,CAC9BC,WAAwB,EACxBvC,UAAkB,EAClBC,UAAmB,EACN;EACb,MAAMuC,QAAQ,GACZvC,UAAU,KAAKwC,SAAS,GACpB,IAAI3B,UAAU,CAACyB,WAAW,CAAC,CAACG,QAAQ,CAAC1C,UAAU,EAAEA,UAAU,GAAGC,UAAU,CAAC,GACzE,IAAIa,UAAU,CAACyB,WAAW,CAAC,CAACG,QAAQ,CAAC1C,UAAU,CAAC;EACtD,MAAM2C,SAAS,GAAG,IAAI7B,UAAU,CAAC0B,QAAQ,CAAC;EAC1C,OAAOG,SAAS,CAACzC,MAAM;AACzB"}
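
The helpers diffed above normalize and combine binary data; a sketch exercising the signatures shown in the embedded source:

  import {toArrayBuffer, concatenateArrayBuffers, sliceArrayBuffer, compareArrayBuffers} from '@loaders.gl/loader-utils';

  const a = toArrayBuffer('abc');               // string -> ArrayBuffer via TextEncoder
  const b = new Uint8Array([1, 2, 3]);          // typed arrays are accepted as-is
  const joined = concatenateArrayBuffers(a, b); // 6-byte ArrayBuffer
  const head = sliceArrayBuffer(joined, 0, 3);  // zero-offset copy of the first 3 bytes
  console.log(compareArrayBuffers(head, a));    // true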
package/dist/esm/lib/binary-utils/binary-copy-utils.js
CHANGED
@@ -1,8 +1,8 @@
 import { padToNBytes } from './memory-copy-utils';
+
 export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuffer, padding) {
 const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);
 const padLength = paddedLength - sourceBuffer.byteLength;
-
 if (dataView) {
 const targetArray = new Uint8Array(dataView.buffer, dataView.byteOffset + byteOffset, sourceBuffer.byteLength);
 const sourceArray = new Uint8Array(sourceBuffer);
@@ -12,10 +12,10 @@ export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuff
 dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);
 }
 }
-
 byteOffset += paddedLength;
 return byteOffset;
 }
+
 export function copyPaddedStringToDataView(dataView, byteOffset, string, padding) {
 const textEncoder = new TextEncoder();
 const stringBuffer = textEncoder.encode(string);
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"
|
|
1
|
+
{"version":3,"file":"binary-copy-utils.js","names":["padToNBytes","copyPaddedArrayBufferToDataView","dataView","byteOffset","sourceBuffer","padding","paddedLength","byteLength","padLength","targetArray","Uint8Array","buffer","sourceArray","set","i","setUint8","copyPaddedStringToDataView","string","textEncoder","TextEncoder","stringBuffer","encode"],"sources":["../../../../src/lib/binary-utils/binary-copy-utils.ts"],"sourcesContent":["import {TypedArray} from '../../types';\nimport {padToNBytes} from './memory-copy-utils';\n\n/**\n * Copy sourceBuffer to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {Array | TypedArray} sourceBuffer - source data buffer\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedArrayBufferToDataView(\n dataView: DataView | null,\n byteOffset: number,\n sourceBuffer: TypedArray,\n padding: number\n) {\n const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);\n const padLength = paddedLength - sourceBuffer.byteLength;\n\n if (dataView) {\n // Copy array\n const targetArray = new Uint8Array(\n dataView.buffer,\n dataView.byteOffset + byteOffset,\n sourceBuffer.byteLength\n );\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n\n // Add PADDING\n for (let i = 0; i < padLength; ++i) {\n // json chunk is padded with spaces (ASCII 0x20)\n dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);\n }\n }\n byteOffset += paddedLength;\n return byteOffset;\n}\n\n/**\n * Copy string to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {string} string - source string\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. 
Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedStringToDataView(\n dataView: DataView | null,\n byteOffset: number,\n string: string,\n padding: number\n): number {\n const textEncoder = new TextEncoder();\n // PERFORMANCE IDEA: We encode twice, once to get size and once to store\n // PERFORMANCE IDEA: Use TextEncoder.encodeInto() to avoid temporary copy\n const stringBuffer = textEncoder.encode(string);\n\n byteOffset = copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);\n\n return byteOffset;\n}\n"],"mappings":"AACA,SAAQA,WAAW,QAAO,qBAAqB;;AAY/C,OAAO,SAASC,+BAA+B,CAC7CC,QAAyB,EACzBC,UAAkB,EAClBC,YAAwB,EACxBC,OAAe,EACf;EACA,MAAMC,YAAY,GAAGN,WAAW,CAACI,YAAY,CAACG,UAAU,EAAEF,OAAO,CAAC;EAClE,MAAMG,SAAS,GAAGF,YAAY,GAAGF,YAAY,CAACG,UAAU;EAExD,IAAIL,QAAQ,EAAE;IAEZ,MAAMO,WAAW,GAAG,IAAIC,UAAU,CAChCR,QAAQ,CAACS,MAAM,EACfT,QAAQ,CAACC,UAAU,GAAGA,UAAU,EAChCC,YAAY,CAACG,UAAU,CACxB;IACD,MAAMK,WAAW,GAAG,IAAIF,UAAU,CAACN,YAAY,CAAC;IAChDK,WAAW,CAACI,GAAG,CAACD,WAAW,CAAC;;IAG5B,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGN,SAAS,EAAE,EAAEM,CAAC,EAAE;MAElCZ,QAAQ,CAACa,QAAQ,CAACZ,UAAU,GAAGC,YAAY,CAACG,UAAU,GAAGO,CAAC,EAAE,IAAI,CAAC;IACnE;EACF;EACAX,UAAU,IAAIG,YAAY;EAC1B,OAAOH,UAAU;AACnB;;AAYA,OAAO,SAASa,0BAA0B,CACxCd,QAAyB,EACzBC,UAAkB,EAClBc,MAAc,EACdZ,OAAe,EACP;EACR,MAAMa,WAAW,GAAG,IAAIC,WAAW,EAAE;EAGrC,MAAMC,YAAY,GAAGF,WAAW,CAACG,MAAM,CAACJ,MAAM,CAAC;EAE/Cd,UAAU,GAAGF,+BAA+B,CAACC,QAAQ,EAAEC,UAAU,EAAEiB,YAAY,EAAEf,OAAO,CAAC;EAEzF,OAAOF,UAAU;AACnB"}
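Usage sketch (not part of the package): the two-pass pattern described by the JSDoc embedded above, where passing a null DataView only advances the byte offset. The import path is an assumption.

```ts
// Assumed import path: the helper lives in lib/binary-utils/binary-copy-utils.
import {copyPaddedStringToDataView} from '@loaders.gl/loader-utils';

const json = JSON.stringify({type: 'example'});

// Pass 1: measure the 4-byte padded size without writing anything (dataView = null).
const byteLength = copyPaddedStringToDataView(null, 0, json, 4);

// Pass 2: allocate once, then write; padding bytes are 0x20 (ASCII space).
const dataView = new DataView(new ArrayBuffer(byteLength));
copyPaddedStringToDataView(dataView, 0, json, 4);
```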
package/dist/esm/lib/binary-utils/buffer-utils.js
@@ -1,16 +1,18 @@
 import * as node from '../node/buffer';
+
 export function isBuffer(value) {
   return value && typeof value === 'object' && value.isBuffer;
 }
+
 export function toBuffer(data) {
   return node.toBuffer ? node.toBuffer(data) : data;
 }
+
 export function bufferToArrayBuffer(buffer) {
   if (isBuffer(buffer)) {
     const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
     return typedArray.slice().buffer;
   }
-
   return buffer;
 }
 //# sourceMappingURL=buffer-utils.js.map

package/dist/esm/lib/binary-utils/buffer-utils.js.map
@@ -1 +1 @@
-{"version":3,"
+
{"version":3,"file":"buffer-utils.js","names":["node","isBuffer","value","toBuffer","data","bufferToArrayBuffer","buffer","typedArray","Uint8Array","byteOffset","length","slice"],"sources":["../../../../src/lib/binary-utils/buffer-utils.ts"],"sourcesContent":["import * as node from '../node/buffer';\n\n/**\n * Check for Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n */\nexport function isBuffer(value: any): boolean {\n return value && typeof value === 'object' && value.isBuffer;\n}\n\n/**\n * Converts to Node.js `Buffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function toBuffer(data: any): Buffer {\n return node.toBuffer ? node.toBuffer(data) : data;\n}\n\n/**\n * Converts Node.js `Buffer` to `ArrayBuffer` (without triggering bundler to include Buffer polyfill on browser)\n * @todo better data type\n */\nexport function bufferToArrayBuffer(buffer: any): ArrayBuffer {\n if (isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n"],"mappings":"AAAA,OAAO,KAAKA,IAAI,MAAM,gBAAgB;;AAKtC,OAAO,SAASC,QAAQ,CAACC,KAAU,EAAW;EAC5C,OAAOA,KAAK,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAIA,KAAK,CAACD,QAAQ;AAC7D;;AAMA,OAAO,SAASE,QAAQ,CAACC,IAAS,EAAU;EAC1C,OAAOJ,IAAI,CAACG,QAAQ,GAAGH,IAAI,CAACG,QAAQ,CAACC,IAAI,CAAC,GAAGA,IAAI;AACnD;;AAMA,OAAO,SAASC,mBAAmB,CAACC,MAAW,EAAe;EAC5D,IAAIL,QAAQ,CAACK,MAAM,CAAC,EAAE;IACpB,MAAMC,UAAU,GAAG,IAAIC,UAAU,CAACF,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACG,UAAU,EAAEH,MAAM,CAACI,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,EAAE,CAACL,MAAM;EAClC;EACA,OAAOA,MAAM;AACf"}
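Usage sketch (not part of the package): converting a Node.js Buffer into a standalone ArrayBuffer with the helper diffed above. The import path and the 'data.bin' file name are assumptions for illustration.

```ts
import {readFileSync} from 'fs';
// Assumed import path: the helper lives in lib/binary-utils/buffer-utils.
import {bufferToArrayBuffer} from '@loaders.gl/loader-utils';

// A Buffer can be a view onto a shared pool, so buffer.buffer may hold unrelated bytes;
// the helper copies only the buffer.byteOffset .. byteOffset + length range.
const nodeBuffer = readFileSync('data.bin'); // hypothetical input file
const arrayBuffer = bufferToArrayBuffer(nodeBuffer);

// Non-Buffer inputs (for example an existing ArrayBuffer) are returned unchanged.
console.log(arrayBuffer.byteLength === nodeBuffer.length); // true
```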
package/dist/esm/lib/binary-utils/encode-utils.js
@@ -1,13 +1,13 @@
+
+
 export function padStringToByteAlignment(string, byteAlignment) {
   const length = string.length;
   const paddedLength = Math.ceil(length / byteAlignment) * byteAlignment;
   const padding = paddedLength - length;
   let whitespace = '';
-
   for (let i = 0; i < padding; ++i) {
     whitespace += ' ';
   }
-
   return string + whitespace;
 }
 export function copyStringToDataView(dataView, byteOffset, string, byteLength) {
@@ -16,7 +16,6 @@ export function copyStringToDataView(dataView, byteOffset, string, byteLength) {
       dataView.setUint8(byteOffset + i, string.charCodeAt(i));
     }
   }
-
   return byteOffset + byteLength;
 }
 export function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {
@@ -25,7 +24,6 @@ export function copyBinaryToDataView(dataView, byteOffset, binary, byteLength) {
       dataView.setUint8(byteOffset + i, binary[i]);
     }
   }
-
   return byteOffset + byteLength;
 }
 //# sourceMappingURL=encode-utils.js.map
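Usage sketch (not part of the package): padding a string to a 4-byte boundary and writing it into a DataView with the encode-utils helpers diffed above. The import path is an assumption.

```ts
// Assumed import path: the helpers live in lib/binary-utils/encode-utils.
import {padStringToByteAlignment, copyStringToDataView} from '@loaders.gl/loader-utils';

// 'BIN' is 3 characters, so one trailing space brings it to a 4-byte boundary.
const padded = padStringToByteAlignment('BIN', 4); // 'BIN '

// Write one byte per character code and get back the offset just past the written range.
const dataView = new DataView(new ArrayBuffer(padded.length));
const nextOffset = copyStringToDataView(dataView, 0, padded, padded.length);
console.log(nextOffset); // 4
```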