@parcel/workers 2.0.0-nightly.150 → 2.0.0-nightly.1500
- package/index.d.ts +23 -0
- package/lib/Handle.js +16 -58
- package/lib/Worker.js +103 -62
- package/lib/WorkerFarm.js +272 -192
- package/lib/backend.js +4 -6
- package/lib/bus.js +11 -13
- package/lib/child.js +140 -116
- package/lib/childState.js +2 -4
- package/lib/core-worker.browser.js +4 -0
- package/lib/core-worker.js +4 -0
- package/lib/cpuCount.js +36 -25
- package/lib/index.js +35 -32
- package/lib/process/ProcessChild.js +18 -24
- package/lib/process/ProcessWorker.js +27 -38
- package/lib/threads/ThreadsChild.js +26 -28
- package/lib/threads/ThreadsWorker.js +25 -31
- package/lib/web/WebChild.js +44 -0
- package/lib/web/WebWorker.js +85 -0
- package/package.json +19 -8
- package/src/Handle.js +10 -39
- package/src/Worker.js +95 -22
- package/src/WorkerFarm.js +267 -62
- package/src/backend.js +5 -0
- package/src/bus.js +3 -2
- package/src/child.js +95 -26
- package/src/core-worker.browser.js +3 -0
- package/src/core-worker.js +2 -0
- package/src/cpuCount.js +23 -10
- package/src/index.js +8 -2
- package/src/process/ProcessChild.js +2 -1
- package/src/process/ProcessWorker.js +1 -1
- package/src/threads/ThreadsWorker.js +2 -2
- package/src/types.js +1 -1
- package/src/web/WebChild.js +50 -0
- package/src/web/WebWorker.js +85 -0
- package/test/cpuCount.test.js +1 -1
- package/test/integration/workerfarm/console.js +1 -1
- package/test/integration/workerfarm/logging.js +1 -1
- package/test/integration/workerfarm/reverse-handle.js +2 -2
- package/test/workerfarm.js +5 -5
- package/lib/Profiler.js +0 -70
- package/lib/Trace.js +0 -126
- package/src/Profiler.js +0 -93
- package/src/Trace.js +0 -121
package/lib/WorkerFarm.js
CHANGED
@@ -10,226 +10,279 @@ Object.defineProperty(exports, "Handle", {
   }
 });
 exports.default = void 0;
-
-var
-
-
-
-
-
-
-
-
-
+var coreWorker = _interopRequireWildcard(require("./core-worker"));
+var bus = _interopRequireWildcard(require("./bus"));
+function _assert() {
+  const data = _interopRequireDefault(require("assert"));
+  _assert = function () {
+    return data;
+  };
+  return data;
+}
+function _nullthrows() {
+  const data = _interopRequireDefault(require("nullthrows"));
+  _nullthrows = function () {
+    return data;
+  };
+  return data;
+}
+function _events() {
+  const data = _interopRequireDefault(require("events"));
+  _events = function () {
+    return data;
+  };
+  return data;
+}
+function _core() {
+  const data = require("@parcel/core");
+  _core = function () {
+    return data;
+  };
+  return data;
+}
+function _diagnostic() {
+  const data = _interopRequireWildcard(require("@parcel/diagnostic"));
+  _diagnostic = function () {
+    return data;
+  };
+  return data;
+}
 var _Worker = _interopRequireDefault(require("./Worker"));
-
 var _cpuCount = _interopRequireDefault(require("./cpuCount"));
-
 var _Handle = _interopRequireDefault(require("./Handle"));
-
 var _childState = require("./childState");
-
 var _backend = require("./backend");
-
-
-
-
-
-
-
-
-
-
-
-
-
+function _profiler() {
+  const data = require("@parcel/profiler");
+  _profiler = function () {
+    return data;
+  };
+  return data;
+}
+function _fs() {
+  const data = _interopRequireDefault(require("fs"));
+  _fs = function () {
+    return data;
+  };
+  return data;
+}
+function _logger() {
+  const data = _interopRequireDefault(require("@parcel/logger"));
+  _logger = function () {
+    return data;
+  };
+  return data;
+}
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
-function
-
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-let profileId = 1;
+function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
+function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
 let referenceId = 1;
+const DEFAULT_MAX_CONCURRENT_CALLS = 30;
 
 /**
  * workerPath should always be defined inside farmOptions
  */
-
+
+class WorkerFarm extends _events().default {
+  callQueue = [];
+  ending = false;
+  warmWorkers = 0;
+  readyWorkers = 0;
+  workers = new Map();
+  handles = new Map();
+  sharedReferences = new Map();
+  sharedReferencesByValue = new Map();
+  serializedSharedReferences = new Map();
   constructor(farmOptions = {}) {
+    var _process$stdout;
     super();
-
-    _defineProperty(this, "callQueue", []);
-
-    _defineProperty(this, "ending", false);
-
-    _defineProperty(this, "localWorker", void 0);
-
-    _defineProperty(this, "options", void 0);
-
-    _defineProperty(this, "run", void 0);
-
-    _defineProperty(this, "warmWorkers", 0);
-
-    _defineProperty(this, "workers", new Map());
-
-    _defineProperty(this, "handles", new Map());
-
-    _defineProperty(this, "sharedReferences", new Map());
-
-    _defineProperty(this, "sharedReferencesByValue", new Map());
-
-    _defineProperty(this, "profiler", void 0);
-
-    _defineProperty(this, "workerApi", {
-      callMaster: async (request, awaitResponse = true) => {
-        // $FlowFixMe
-        let result = await this.processRequest(_objectSpread({}, request, {
-          awaitResponse
-        }));
-        return (0, _core.deserialize)((0, _core.serialize)(result));
-      },
-      createReverseHandle: fn => this.createReverseHandle(fn),
-      callChild: (childId, request) => new Promise((resolve, reject) => {
-        (0, _nullthrows.default)(this.workers.get(childId)).call(_objectSpread({}, request, {
-          resolve,
-          reject,
-          retries: 0
-        }));
-      }),
-      getSharedReference: ref => this.sharedReferences.get(ref),
-      resolveSharedReference: value => this.sharedReferencesByValue.get(value)
-    });
-
-    this.options = _objectSpread({
+    this.options = {
       maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
-      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS),
       forcedKillTime: 500,
       warmWorkers: false,
       useLocalWorker: true,
       // TODO: setting this to false makes some tests fail, figure out why
-      backend: (0, _backend.detectBackend)()
-
-
+      backend: (0, _backend.detectBackend)(),
+      ...farmOptions
+    };
     if (!this.options.workerPath) {
       throw new Error('Please provide a worker path!');
-    }
-
+    }
 
-
+    // $FlowFixMe
+    if (process.browser) {
+      if (this.options.workerPath === '@parcel/core/src/worker.js') {
+        this.localWorker = coreWorker;
+      } else {
+        throw new Error('No dynamic require possible: ' + this.options.workerPath);
+      }
+    } else {
+      // $FlowFixMe this must be dynamic
+      this.localWorker = require(this.options.workerPath);
+    }
+    this.localWorkerInit = this.localWorker.childInit != null ? this.localWorker.childInit() : null;
     this.run = this.createHandle('run');
+
+    // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+    // threads to exceed the default listener limit, then anything else piping into stdout will trigger
+    // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
+    // number of workers + 1 for the main thread.
+    //
+    // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
+    // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
+    // will give the warning with `<worker count + 1>` as the number of listeners.
+    (_process$stdout = process.stdout) === null || _process$stdout === void 0 || _process$stdout.setMaxListeners(Math.max(process.stdout.getMaxListeners(), WorkerFarm.getNumWorkers() + 1));
     this.startMaxWorkers();
   }
-
+  workerApi = {
+    callMaster: async (request, awaitResponse = true) => {
+      // $FlowFixMe
+      let result = await this.processRequest({
+        ...request,
+        awaitResponse
+      });
+      return (0, _core().deserialize)((0, _core().serialize)(result));
+    },
+    createReverseHandle: fn => this.createReverseHandle(fn),
+    callChild: (childId, request) => new Promise((resolve, reject) => {
+      (0, _nullthrows().default)(this.workers.get(childId)).call({
+        ...request,
+        resolve,
+        reject,
+        retries: 0
+      });
+    }),
+    runHandle: (handle, args) => this.workerApi.callChild((0, _nullthrows().default)(handle.childId), {
+      handle: handle.id,
+      args
+    }),
+    getSharedReference: ref => this.sharedReferences.get(ref),
+    resolveSharedReference: value => this.sharedReferencesByValue.get(value)
+  };
   warmupWorker(method, args) {
     // Workers are already stopping
     if (this.ending) {
       return;
-    }
+    }
+
+    // Workers are not warmed up yet.
     // Send the job to a remote worker in the background,
     // but use the result from the local worker - it will be faster.
-
-
     let promise = this.addCall(method, [...args, true]);
-
     if (promise) {
       promise.then(() => {
         this.warmWorkers++;
-
         if (this.warmWorkers >= this.workers.size) {
           this.emit('warmedup');
         }
       }).catch(() => {});
     }
   }
-
   shouldStartRemoteWorkers() {
     return this.options.maxConcurrentWorkers > 0 || !this.options.useLocalWorker;
   }
-
-
-
+  createHandle(method, useMainThread = false) {
+    if (!this.options.useLocalWorker) {
+      useMainThread = false;
+    }
+    return async (...args) => {
       // Child process workers are slow to start (~600ms).
       // While we're waiting, just run on the main thread.
       // This significantly speeds up startup time.
-      if (this.shouldUseRemoteWorkers()) {
+      if (this.shouldUseRemoteWorkers() && !useMainThread) {
         return this.addCall(method, [...args, false]);
       } else {
         if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
           this.warmupWorker(method, args);
         }
-
-
+        let processedArgs;
+        if (!useMainThread) {
+          processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
+        } else {
+          processedArgs = args;
+        }
+        if (this.localWorkerInit != null) {
+          await this.localWorkerInit;
+          this.localWorkerInit = null;
+        }
         return this.localWorker[method](this.workerApi, ...processedArgs);
       }
     };
   }
-
   onError(error, worker) {
     // Handle ipc errors
     if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
       return this.stopWorker(worker);
+    } else {
+      _logger().default.error(error, '@parcel/workers');
     }
   }
-
   startChild() {
     let worker = new _Worker.default({
       forcedKillTime: this.options.forcedKillTime,
       backend: this.options.backend,
-
+      shouldPatchConsole: this.options.shouldPatchConsole,
+      shouldTrace: this.options.shouldTrace,
+      sharedReferences: this.sharedReferences
     });
-    worker.fork((0, _nullthrows.default)(this.options.workerPath));
+    worker.fork((0, _nullthrows().default)(this.options.workerPath));
     worker.on('request', data => this.processRequest(data, worker));
-    worker.on('ready', () =>
+    worker.on('ready', () => {
+      this.readyWorkers++;
+      if (this.readyWorkers === this.options.maxConcurrentWorkers) {
+        this.emit('ready');
+      }
+      this.processQueue();
+    });
     worker.on('response', () => this.processQueue());
     worker.on('error', err => this.onError(err, worker));
     worker.once('exit', () => this.stopWorker(worker));
     this.workers.set(worker.id, worker);
   }
-
   async stopWorker(worker) {
     if (!worker.stopped) {
       this.workers.delete(worker.id);
       worker.isStopping = true;
-
       if (worker.calls.size) {
         for (let call of worker.calls.values()) {
           call.retries++;
           this.callQueue.unshift(call);
         }
       }
-
       worker.calls.clear();
-      await worker.stop();
+      await worker.stop();
 
+      // Process any requests that failed and start a new worker
       this.processQueue();
     }
   }
-
   processQueue() {
     if (this.ending || !this.callQueue.length) return;
-
     if (this.workers.size < this.options.maxConcurrentWorkers) {
       this.startChild();
     }
-
-    for (let worker of
+    let workers = [...this.workers.values()].sort((a, b) => a.calls.size - b.calls.size);
+    for (let worker of workers) {
       if (!this.callQueue.length) {
         break;
       }
-
       if (!worker.ready || worker.stopped || worker.isStopping) {
         continue;
       }
-
       if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
-
+        this.callWorker(worker, this.callQueue.shift());
       }
     }
   }
-
+  async callWorker(worker, call) {
+    for (let ref of this.sharedReferences.keys()) {
+      if (!worker.sentSharedReferences.has(ref)) {
+        await worker.sendSharedReference(ref, this.getSerializedSharedReference(ref));
+      }
+    }
+    worker.call(call);
+  }
   async processRequest(data, worker) {
     let {
       method,
@@ -240,73 +293,72 @@ class WorkerFarm extends _events.default {
       handle: handleId
     } = data;
     let mod;
-
     if (handleId != null) {
-
+      var _this$handles$get;
+      mod = (0, _nullthrows().default)((_this$handles$get = this.handles.get(handleId)) === null || _this$handles$get === void 0 ? void 0 : _this$handles$get.fn);
     } else if (location) {
-      // $FlowFixMe
-
+      // $FlowFixMe
+      if (process.browser) {
+        if (location === '@parcel/workers/src/bus.js') {
+          mod = bus;
+        } else {
+          throw new Error('No dynamic require possible: ' + location);
+        }
+      } else {
+        // $FlowFixMe this must be dynamic
+        mod = require(location);
+      }
     } else {
       throw new Error('Unknown request');
     }
-
     const responseFromContent = content => ({
       idx,
       type: 'response',
       contentType: 'data',
       content
     });
-
     const errorResponseFromError = e => ({
       idx,
       type: 'response',
       contentType: 'error',
-      content: (0, _diagnostic.anyToDiagnostic)(e)
+      content: (0, _diagnostic().anyToDiagnostic)(e)
     });
-
     let result;
-
     if (method == null) {
       try {
-        result = responseFromContent(
+        result = responseFromContent(await mod(...args));
       } catch (e) {
        result = errorResponseFromError(e);
       }
     } else {
       // ESModule default interop
-      // $FlowFixMe
       if (mod.__esModule && !mod[method] && mod.default) {
         mod = mod.default;
       }
-
       try {
         // $FlowFixMe
-        result = responseFromContent(
+        result = responseFromContent(await mod[method](...args));
       } catch (e) {
         result = errorResponseFromError(e);
       }
     }
-
     if (awaitResponse) {
       if (worker) {
         worker.send(result);
       } else {
         if (result.contentType === 'error') {
-          throw new _diagnostic.default({
+          throw new (_diagnostic().default)({
            diagnostic: result.content
          });
         }
-
        return result.content;
       }
     }
   }
-
   addCall(method, args) {
     if (this.ending) {
       throw new Error('Cannot add a worker call if workerfarm is ending.');
     }
-
     return new Promise((resolve, reject) => {
       this.callQueue.push({
         method,
@@ -318,91 +370,87 @@ class WorkerFarm extends _events.default {
       this.processQueue();
     });
   }
-
   async end() {
     this.ending = true;
-
+    await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
     for (let handle of this.handles.values()) {
       handle.dispose();
     }
-
     this.handles = new Map();
     this.sharedReferences = new Map();
     this.sharedReferencesByValue = new Map();
-    await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
     this.ending = false;
   }
-
   startMaxWorkers() {
     // Starts workers until the maximum is reached
     if (this.workers.size < this.options.maxConcurrentWorkers) {
       let toStart = this.options.maxConcurrentWorkers - this.workers.size;
-
       while (toStart--) {
         this.startChild();
       }
     }
   }
-
   shouldUseRemoteWorkers() {
     return !this.options.useLocalWorker || (this.warmWorkers >= this.workers.size || !this.options.warmWorkers) && this.options.maxConcurrentWorkers > 0;
   }
-
   createReverseHandle(fn) {
     let handle = new _Handle.default({
-      fn
-      workerApi: this.workerApi
+      fn
     });
     this.handles.set(handle.id, handle);
     return handle;
   }
-
-  async createSharedReference(value) {
+  createSharedReference(value, isCacheable = true) {
     let ref = referenceId++;
     this.sharedReferences.set(ref, value);
     this.sharedReferencesByValue.set(value, ref);
-
-
-    for (let worker of this.workers.values()) {
-      promises.push(new Promise((resolve, reject) => {
-        worker.call({
-          method: 'createSharedReference',
-          args: [ref, value],
-          resolve,
-          reject,
-          retries: 0
-        });
-      }));
+    if (!isCacheable) {
+      this.serializedSharedReferences.set(ref, null);
     }
-
-    await Promise.all(promises);
     return {
       ref,
       dispose: () => {
         this.sharedReferences.delete(ref);
         this.sharedReferencesByValue.delete(value);
+        this.serializedSharedReferences.delete(ref);
         let promises = [];
-
         for (let worker of this.workers.values()) {
+          if (!worker.sentSharedReferences.has(ref)) {
+            continue;
+          }
+          worker.sentSharedReferences.delete(ref);
          promises.push(new Promise((resolve, reject) => {
            worker.call({
              method: 'deleteSharedReference',
              args: [ref],
              resolve,
              reject,
+              skipReadyCheck: true,
              retries: 0
            });
          }));
        }
-
        return Promise.all(promises);
      }
    };
  }
+  getSerializedSharedReference(ref) {
+    let cached = this.serializedSharedReferences.get(ref);
+    if (cached) {
+      return cached;
+    }
+    let value = this.sharedReferences.get(ref);
+    let buf = (0, _core().serialize)(value).buffer;
 
+    // If the reference was created with the isCacheable option set to false,
+    // serializedSharedReferences will contain `null` as the value.
+    if (cached !== null) {
+      this.serializedSharedReferences.set(ref, buf);
+    }
+    return buf;
+  }
  async startProfile() {
    let promises = [];
-
    for (let worker of this.workers.values()) {
      promises.push(new Promise((resolve, reject) => {
        worker.call({
@@ -410,24 +458,21 @@ class WorkerFarm extends _events.default {
          args: [],
          resolve,
          reject,
-          retries: 0
+          retries: 0,
+          skipReadyCheck: true
        });
      }));
    }
-
-    this.profiler = new _Profiler.default();
+    this.profiler = new (_profiler().SamplingProfiler)();
    promises.push(this.profiler.startProfiling());
    await Promise.all(promises);
  }
-
  async endProfile() {
    if (!this.profiler) {
      return;
    }
-
    let promises = [this.profiler.stopProfiling()];
    let names = ['Master'];
-
    for (let worker of this.workers.values()) {
      names.push('Worker ' + worker.id);
      promises.push(new Promise((resolve, reject) => {
@@ -436,48 +481,83 @@ class WorkerFarm extends _events.default {
          args: [],
          resolve,
          reject,
-          retries: 0
+          retries: 0,
+          skipReadyCheck: true
        });
      }));
    }
-
    var profiles = await Promise.all(promises);
-    let trace = new
-    let filename = `profile-${
-    let stream = trace.pipe(_fs.default.createWriteStream(filename));
-
+    let trace = new (_profiler().Trace)();
+    let filename = `profile-${getTimeId()}.trace`;
+    let stream = trace.pipe(_fs().default.createWriteStream(filename));
    for (let profile of profiles) {
      trace.addCPUProfile(names.shift(), profile);
    }
-
    trace.flush();
    await new Promise(resolve => {
      stream.once('finish', resolve);
    });
-
-    _logger.default.info({
+    _logger().default.info({
      origin: '@parcel/workers',
-      message: `Wrote profile to ${filename}`
+      message: (0, _diagnostic().md)`Wrote profile to ${filename}`
    });
  }
-
+  async callAllWorkers(method, args) {
+    let promises = [];
+    for (let worker of this.workers.values()) {
+      promises.push(new Promise((resolve, reject) => {
+        worker.call({
+          method,
+          args,
+          resolve,
+          reject,
+          retries: 0
+        });
+      }));
+    }
+    promises.push(this.localWorker[method](this.workerApi, ...args));
+    await Promise.all(promises);
+  }
+  async takeHeapSnapshot() {
+    let snapshotId = getTimeId();
+    try {
+      let snapshotPaths = await Promise.all([...this.workers.values()].map(worker => new Promise((resolve, reject) => {
+        worker.call({
+          method: 'takeHeapSnapshot',
+          args: [snapshotId],
+          resolve,
+          reject,
+          retries: 0,
+          skipReadyCheck: true
+        });
+      })));
+      _logger().default.info({
+        origin: '@parcel/workers',
+        message: (0, _diagnostic().md)`Wrote heap snapshots to the following paths:\n${snapshotPaths.join('\n')}`
+      });
+    } catch {
+      _logger().default.error({
+        origin: '@parcel/workers',
+        message: 'Unable to take heap snapshots. Note: requires Node 11.13.0+'
+      });
+    }
+  }
  static getNumWorkers() {
-    return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : (0, _cpuCount.default)();
+    return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.min(4, Math.ceil((0, _cpuCount.default)() / 2));
  }
-
  static isWorker() {
    return !!_childState.child;
  }
-
  static getWorkerApi() {
-    (0, _assert.default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
+    (0, _assert().default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
    return _childState.child.workerApi;
  }
-
-
-    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 5;
+  static getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
+    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
  }
-
 }
-
-
+exports.default = WorkerFarm;
+function getTimeId() {
+  let now = new Date();
+  return String(now.getFullYear()) + String(now.getMonth() + 1).padStart(2, '0') + String(now.getDate()).padStart(2, '0') + '-' + String(now.getHours()).padStart(2, '0') + String(now.getMinutes()).padStart(2, '0') + String(now.getSeconds()).padStart(2, '0');
+}