@loaders.gl/worker-utils 3.4.13 → 3.4.15
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
- package/dist/es5/index.js +20 -20
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/async-queue/async-queue.js +55 -71
- package/dist/es5/lib/async-queue/async-queue.js.map +1 -1
- package/dist/es5/lib/env-utils/globals.js +10 -12
- package/dist/es5/lib/env-utils/globals.js.map +1 -1
- package/dist/es5/lib/env-utils/version.js +3 -3
- package/dist/es5/lib/env-utils/version.js.map +1 -1
- package/dist/es5/lib/library-utils/library-utils.js +33 -104
- package/dist/es5/lib/library-utils/library-utils.js.map +1 -1
- package/dist/es5/lib/node/require-utils.browser.js +15 -0
- package/dist/es5/lib/node/require-utils.browser.js.map +1 -0
- package/dist/es5/lib/node/require-utils.node.js +17 -44
- package/dist/es5/lib/node/require-utils.node.js.map +1 -1
- package/dist/es5/lib/node/worker_threads-browser.js +4 -14
- package/dist/es5/lib/node/worker_threads-browser.js.map +1 -1
- package/dist/es5/lib/node/worker_threads.js +4 -5
- package/dist/es5/lib/node/worker_threads.js.map +1 -1
- package/dist/es5/lib/process-utils/child-process-proxy.js +84 -163
- package/dist/es5/lib/process-utils/child-process-proxy.js.map +1 -1
- package/dist/es5/lib/process-utils/process-utils.js +8 -8
- package/dist/es5/lib/process-utils/process-utils.js.map +1 -1
- package/dist/es5/lib/worker-api/create-worker.js +47 -122
- package/dist/es5/lib/worker-api/create-worker.js.map +1 -1
- package/dist/es5/lib/worker-api/get-worker-url.js +9 -9
- package/dist/es5/lib/worker-api/get-worker-url.js.map +1 -1
- package/dist/es5/lib/worker-api/process-on-worker.js +59 -113
- package/dist/es5/lib/worker-api/process-on-worker.js.map +1 -1
- package/dist/es5/lib/worker-api/validate-worker-version.js +3 -3
- package/dist/es5/lib/worker-api/validate-worker-version.js.map +1 -1
- package/dist/es5/lib/worker-farm/worker-body.js +68 -84
- package/dist/es5/lib/worker-farm/worker-body.js.map +1 -1
- package/dist/es5/lib/worker-farm/worker-farm.js +55 -88
- package/dist/es5/lib/worker-farm/worker-farm.js.map +1 -1
- package/dist/es5/lib/worker-farm/worker-job.js +25 -37
- package/dist/es5/lib/worker-farm/worker-job.js.map +1 -1
- package/dist/es5/lib/worker-farm/worker-pool.js +96 -175
- package/dist/es5/lib/worker-farm/worker-pool.js.map +1 -1
- package/dist/es5/lib/worker-farm/worker-thread.js +76 -99
- package/dist/es5/lib/worker-farm/worker-thread.js.map +1 -1
- package/dist/es5/lib/worker-utils/get-loadable-worker-url.js +4 -4
- package/dist/es5/lib/worker-utils/get-loadable-worker-url.js.map +1 -1
- package/dist/es5/lib/worker-utils/get-transfer-list.js +8 -10
- package/dist/es5/lib/worker-utils/get-transfer-list.js.map +1 -1
- package/dist/es5/lib/worker-utils/remove-nontransferable-options.js +3 -5
- package/dist/es5/lib/worker-utils/remove-nontransferable-options.js.map +1 -1
- package/dist/es5/workers/null-worker.js +3 -19
- package/dist/es5/workers/null-worker.js.map +1 -1
- package/dist/esm/lib/env-utils/version.js +2 -2
- package/dist/esm/lib/library-utils/library-utils.js +1 -1
- package/dist/esm/lib/node/require-utils.browser.js +5 -0
- package/dist/esm/lib/node/require-utils.browser.js.map +1 -0
- package/dist/esm/lib/worker-api/get-worker-url.js +1 -1
- package/dist/lib/node/require-utils.browser.d.ts +5 -0
- package/dist/lib/node/require-utils.browser.d.ts.map +1 -0
- package/package.json +5 -5
- package/src/lib/node/require-utils.browser.ts +4 -0
package/dist/es5/lib/process-utils/process-utils.js
@@ -7,22 +7,22 @@ Object.defineProperty(exports, "__esModule", {
 exports.getAvailablePort = getAvailablePort;
 var _child_process = _interopRequireDefault(require("child_process"));
 function getAvailablePort() {
-
-  return new Promise(
-    _child_process.default.exec('lsof -i -P -n | grep LISTEN',
+  let defaultPort = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 3000;
+  return new Promise(resolve => {
+    _child_process.default.exec('lsof -i -P -n | grep LISTEN', (error, stdout) => {
       if (error) {
         resolve(defaultPort);
         return;
       }
-
-
-      stdout.split('\n').forEach(
-
+      const portsInUse = [];
+      const regex = /:(\d+) \(LISTEN\)/;
+      stdout.split('\n').forEach(line => {
+        const match = regex.exec(line);
         if (match) {
           portsInUse.push(Number(match[1]));
         }
       });
-
+      let port = defaultPort;
       while (portsInUse.includes(port)) {
        port++;
       }
package/dist/es5/lib/process-utils/process-utils.js.map
@@ -1 +1 @@
-{"version":3,"file":"process-utils.js", ...}  (previous source map, elided)
+{"version":3,"file":"process-utils.js", ...}  (regenerated source map, elided)
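
For context, getAvailablePort (shown above) shells out to lsof, collects every port reported as LISTEN, and walks upward from the default until it finds a free one; if lsof fails (for example on CI or a non-Unix system) it simply resolves with the default. A minimal usage sketch, assuming the helper is re-exported from the package entry point (otherwise import it from the module path shown in the diff):

    // Sketch: pick a free port for a local test server (Unix only, relies on `lsof`).
    import {getAvailablePort} from '@loaders.gl/worker-utils';

    async function startTestServer() {
      const port = await getAvailablePort(3000); // resolves with 3000 if `lsof` is unavailable
      console.log(`starting test server on port ${port}`);
      return port;
    }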
package/dist/es5/lib/worker-api/create-worker.js
@@ -5,140 +5,65 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.createWorker = createWorker;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var _asyncQueue = _interopRequireDefault(require("../async-queue/async-queue"));
 var _workerBody = _interopRequireDefault(require("../worker-farm/worker-body"));
-
-
-
-var inputBatches;
-var options;
+let requestId = 0;
+let inputBatches;
+let options;
 function createWorker(process, processInBatches) {
   if (!_workerBody.default.inWorkerThread()) {
     return;
   }
-
+  const context = {
     process: processOnMainThread
   };
-  _workerBody.default.onmessage =
-
-
-
-
-        case 0:
-          _context.prev = 0;
-          _context.t0 = type;
-          _context.next = _context.t0 === 'process' ? 4 : _context.t0 === 'process-in-batches' ? 11 : _context.t0 === 'input-batch' ? 46 : _context.t0 === 'input-done' ? 48 : 50;
-          break;
-        case 4:
-          if (process) {
-            _context.next = 6;
-            break;
-          }
+  _workerBody.default.onmessage = async (type, payload) => {
+    try {
+      switch (type) {
+        case 'process':
+          if (!process) {
            throw new Error('Worker does not support atomic processing');
-
-
-
-
-
-
-
-
-          return _context.abrupt("break", 50);
-        case 11:
-          if (processInBatches) {
-            _context.next = 13;
-            break;
-          }
+          }
+          const result = await process(payload.input, payload.options || {}, context);
+          _workerBody.default.postMessage('done', {
+            result
+          });
+          break;
+        case 'process-in-batches':
+          if (!processInBatches) {
            throw new Error('Worker does not support batched processing');
-
-
-
-
-
-          _didIteratorError = false;
-          _context.prev = 18;
-          _iterator = _asyncIterator(resultIterator);
-        case 20:
-          _context.next = 22;
-          return _iterator.next();
-        case 22:
-          if (!(_iteratorAbruptCompletion = !(_step = _context.sent).done)) {
-            _context.next = 28;
-            break;
-          }
-          batch = _step.value;
+          }
+          inputBatches = new _asyncQueue.default();
+          options = payload.options || {};
+          const resultIterator = processInBatches(inputBatches, options, context);
+          for await (const batch of resultIterator) {
            _workerBody.default.postMessage('output-batch', {
              result: batch
            });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          }
-          _context.next = 39;
-          return _iterator.return();
-        case 39:
-          _context.prev = 39;
-          if (!_didIteratorError) {
-            _context.next = 42;
-            break;
-          }
-          throw _iteratorError;
-        case 42:
-          return _context.finish(39);
-        case 43:
-          return _context.finish(34);
-        case 44:
-          _workerBody.default.postMessage('done', {});
-          return _context.abrupt("break", 50);
-        case 46:
-          inputBatches.push(payload.input);
-          return _context.abrupt("break", 50);
-        case 48:
-          inputBatches.close();
-          return _context.abrupt("break", 50);
-        case 50:
-          _context.next = 56;
-          break;
-        case 52:
-          _context.prev = 52;
-          _context.t2 = _context["catch"](0);
-          message = _context.t2 instanceof Error ? _context.t2.message : '';
-          _workerBody.default.postMessage('error', {
-            error: message
-          });
-        case 56:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee, null, [[0, 52], [18, 30, 34, 44], [35,, 39, 43]]);
-  }));
-  return function (_x, _x2) {
-    return _ref.apply(this, arguments);
-  };
-}();
+          }
+          _workerBody.default.postMessage('done', {});
+          break;
+        case 'input-batch':
+          inputBatches.push(payload.input);
+          break;
+        case 'input-done':
+          inputBatches.close();
+          break;
+        default:
+      }
+    } catch (error) {
+      const message = error instanceof Error ? error.message : '';
+      _workerBody.default.postMessage('error', {
+        error: message
+      });
+    }
+  };
 }
 function processOnMainThread(arrayBuffer) {
-
-  return new Promise(
-
-
+  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  return new Promise((resolve, reject) => {
+    const id = requestId++;
+    const onMessage = (type, payload) => {
       if (payload.id !== id) {
         return;
       }
@@ -155,10 +80,10 @@ function processOnMainThread(arrayBuffer) {
       }
     };
     _workerBody.default.addEventListener(onMessage);
-
-    id
+    const payload = {
+      id,
       input: arrayBuffer,
-    options
+      options
     };
     _workerBody.default.postMessage('process', payload);
   });
package/dist/es5/lib/worker-api/create-worker.js.map
@@ -1 +1 @@
-{"version":3,"file":"create-worker.js", ...}  (previous source map, elided)
+{"version":3,"file":"create-worker.js", ...}  (regenerated source map, elided)
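
The createWorker change above swaps the regenerator-based onmessage state machine for a native async handler; the message protocol it implements (process, process-in-batches, input-batch, input-done, done, output-batch, error) is unchanged. A sketch of how a worker bundle would typically use it, with a hypothetical transform() standing in for a real operation and assuming createWorker is re-exported from the package entry point:

    // Hypothetical worker entry point, bundled as e.g. example-worker.js.
    import {createWorker} from '@loaders.gl/worker-utils';

    // Answers a single 'process' message.
    async function process(data, options, context) {
      return transform(data, options);
    }

    // Answers 'process-in-batches': consumes the AsyncQueue that createWorker fills
    // from 'input-batch' messages and yields results, which are posted back as
    // 'output-batch' messages.
    async function* processInBatches(inputBatches, options, context) {
      for await (const batch of inputBatches) {
        yield transform(batch, options);
      }
    }

    createWorker(process, processInBatches);

    function transform(data, options) {
      return data; // placeholder for a real operation
    }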
package/dist/es5/lib/worker-api/get-worker-url.js
@@ -7,17 +7,17 @@ exports.getWorkerName = getWorkerName;
 exports.getWorkerURL = getWorkerURL;
 var _assert = require("../env-utils/assert");
 var _version = require("../env-utils/version");
-
-
+const NPM_TAG = 'latest';
+const VERSION = typeof "3.4.15" !== 'undefined' ? "3.4.15" : NPM_TAG;
 function getWorkerName(worker) {
-
+  const warning = worker.version !== VERSION ? " (worker-utils@".concat(VERSION, ")") : '';
   return "".concat(worker.name, "@").concat(worker.version).concat(warning);
 }
 function getWorkerURL(worker) {
-
-
-
-
+  let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  const workerOptions = options[worker.id] || {};
+  const workerFile = "".concat(worker.id, "-worker.js");
+  let url = workerOptions.workerUrl;
   if (!url && worker.id === 'compression') {
     url = options.workerUrl;
   }
@@ -25,11 +25,11 @@ function getWorkerURL(worker) {
     url = "modules/".concat(worker.module, "/dist/").concat(workerFile);
   }
   if (!url) {
-
+    let version = worker.version;
     if (version === 'latest') {
       version = NPM_TAG;
     }
-
+    const versionTag = version ? "@".concat(version) : '';
     url = "https://unpkg.com/@loaders.gl/".concat(worker.module).concat(versionTag, "/dist/").concat(workerFile);
   }
   (0, _assert.assert)(url);
package/dist/es5/lib/worker-api/get-worker-url.js.map
@@ -1 +1 @@
-{"version":3,"file":"get-worker-url.js", ...}  (previous source map, elided)
+{"version":3,"file":"get-worker-url.js", ...}  (regenerated source map, elided)
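
The rebuilt getWorkerURL above resolves a worker script in this order: an explicit per-worker workerUrl option, the top-level workerUrl for the compression worker, a local modules/ path when _workerType is 'test', and finally the published bundle on unpkg tagged with the worker's version (with 'latest' mapped to NPM_TAG). A sketch with a made-up worker descriptor, assuming getWorkerURL is re-exported from the package entry point:

    import {getWorkerURL} from '@loaders.gl/worker-utils';

    // Hypothetical worker descriptor; real loaders publish their own.
    const worker = {id: 'example', name: 'Example', module: 'example-module', version: '3.4.15'};

    getWorkerURL(worker);
    // -> 'https://unpkg.com/@loaders.gl/example-module@3.4.15/dist/example-worker.js'

    getWorkerURL(worker, {example: {workerUrl: '/static/example-worker.js'}});
    // -> '/static/example-worker.js' (an explicit workerUrl wins)

    getWorkerURL(worker, {_workerType: 'test'});
    // -> 'modules/example-module/dist/example-worker.js' (local test build)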
package/dist/es5/lib/worker-api/process-on-worker.js
@@ -6,8 +6,6 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.canProcessOnWorker = canProcessOnWorker;
 exports.processOnWorker = processOnWorker;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var _workerFarm = _interopRequireDefault(require("../worker-farm/worker-farm"));
 var _getWorkerUrl = require("./get-worker-url");
 var _getTransferList = require("../worker-utils/get-transfer-list");
@@ -17,121 +15,69 @@ function canProcessOnWorker(worker, options) {
   }
   return worker.worker && (options === null || options === void 0 ? void 0 : options.worker);
 }
-function processOnWorker(
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  workerPoolProps = {
-    name: name,
-    source: source
-  };
-  if (!source) {
-    workerPoolProps.url = (0, _getWorkerUrl.getWorkerURL)(worker, options);
-  }
-  workerPool = workerFarm.getWorkerPool(workerPoolProps);
-  jobName = options.jobName || worker.name;
-  _context.next = 11;
-  return workerPool.startJob(jobName, onMessage.bind(null, context));
-  case 11:
-  job = _context.sent;
-  transferableOptions = (0, _getTransferList.getTransferListForWriter)(options);
-  job.postMessage('process', {
-    input: data,
-    options: transferableOptions
-  });
-  _context.next = 16;
-  return job.result;
-  case 16:
-  result = _context.sent;
-  return _context.abrupt("return", result.result);
-  case 18:
-  case "end":
-  return _context.stop();
-  }
-  }, _callee);
-}));
-return _processOnWorker.apply(this, arguments);
-}
-function onMessage(_x3, _x4, _x5, _x6) {
-return _onMessage.apply(this, arguments);
+async function processOnWorker(worker, data) {
+  let options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
+  let context = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
+  const name = (0, _getWorkerUrl.getWorkerName)(worker);
+  const workerFarm = _workerFarm.default.getWorkerFarm(options);
+  const {
+    source
+  } = options;
+  const workerPoolProps = {
+    name,
+    source
+  };
+  if (!source) {
+    workerPoolProps.url = (0, _getWorkerUrl.getWorkerURL)(worker, options);
+  }
+  const workerPool = workerFarm.getWorkerPool(workerPoolProps);
+  const jobName = options.jobName || worker.name;
+  const job = await workerPool.startJob(jobName, onMessage.bind(null, context));
+  const transferableOptions = (0, _getTransferList.getTransferListForWriter)(options);
+  job.postMessage('process', {
+    input: data,
+    options: transferableOptions
+  });
+  const result = await job.result;
+  return result.result;
 }
-function
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  id = payload.id, input = payload.input, options = payload.options;
-  _context2.prev = 8;
-  if (context.process) {
-    _context2.next = 12;
-    break;
-  }
+async function onMessage(context, job, type, payload) {
+  switch (type) {
+    case 'done':
+      job.done(payload);
+      break;
+    case 'error':
+      job.error(new Error(payload.error));
+      break;
+    case 'process':
+      const {
+        id,
+        input,
+        options
+      } = payload;
+      try {
+        if (!context.process) {
          job.postMessage('error', {
-            id
+            id,
            error: 'Worker not set up to process on main thread'
          });
-  return
-
-
-
-
-  result
-
-
-
-
-
-
-
-  _context2.prev = 18;
-  _context2.t1 = _context2["catch"](8);
-  message = _context2.t1 instanceof Error ? _context2.t1.message : 'unknown error';
-  job.postMessage('error', {
-    id: id,
-    error: message
-  });
-  case 22:
-  return _context2.abrupt("break", 24);
-  case 23:
-  console.warn("process-on-worker: unknown message ".concat(type));
-  case 24:
-  case "end":
-  return _context2.stop();
+          return;
+        }
+        const result = await context.process(input, options);
+        job.postMessage('done', {
+          id,
+          result
+        });
+      } catch (error) {
+        const message = error instanceof Error ? error.message : 'unknown error';
+        job.postMessage('error', {
+          id,
+          error: message
+        });
       }
-
-
-
+      break;
+    default:
+      console.warn("process-on-worker: unknown message ".concat(type));
+  }
 }
 //# sourceMappingURL=process-on-worker.js.map
package/dist/es5/lib/worker-api/process-on-worker.js.map
@@ -1 +1 @@
-{"version":3,"file":"process-on-worker.js", ...}  (previous source map, elided)
+{"version":3,"file":"process-on-worker.js", ...}  (regenerated source map, elided)
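
processOnWorker above now awaits the worker pool directly instead of going through the asyncToGenerator helper, but its call signature (worker descriptor, data, options, context) and the done/error/process message handling are unchanged. A usage sketch with a hypothetical ExampleWorker descriptor, assuming processOnWorker and canProcessOnWorker are re-exported from the package entry point:

    import {canProcessOnWorker, processOnWorker} from '@loaders.gl/worker-utils';

    // Hypothetical descriptor for a worker published by a companion module.
    const ExampleWorker = {
      id: 'example',
      name: 'Example',
      module: 'example-module',
      version: '3.4.15',
      worker: true,
      options: {}
    };

    async function transform(data, options = {worker: true}) {
      if (canProcessOnWorker(ExampleWorker, options)) {
        // Runs on a pooled worker thread; the worker script is resolved by getWorkerURL
        // unless options.source or options.example.workerUrl is provided.
        return await processOnWorker(ExampleWorker, data, options);
      }
      return transformLocally(data, options);
    }

    function transformLocally(data, options) {
      return data; // placeholder for a real main-thread implementation
    }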
package/dist/es5/lib/worker-api/validate-worker-version.js
@@ -7,16 +7,16 @@ exports.validateWorkerVersion = validateWorkerVersion;
 var _assert = require("../env-utils/assert");
 var _version = require("../env-utils/version");
 function validateWorkerVersion(worker) {
-
+  let coreVersion = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : _version.VERSION;
   (0, _assert.assert)(worker, 'no worker provided');
-
+  const workerVersion = worker.version;
   if (!coreVersion || !workerVersion) {
     return false;
   }
   return true;
 }
 function parseVersion(version) {
-
+  const parts = version.split('.').map(Number);
   return {
     major: parts[0],
     minor: parts[1]