@parcel/workers 2.0.0-nightly.137 → 2.0.0-nightly.1370
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +23 -0
- package/lib/Handle.js +16 -58
- package/lib/Worker.js +88 -53
- package/lib/WorkerFarm.js +240 -188
- package/lib/backend.js +0 -6
- package/lib/bus.js +8 -10
- package/lib/child.js +128 -114
- package/lib/childState.js +1 -2
- package/lib/cpuCount.js +25 -22
- package/lib/index.js +34 -30
- package/lib/process/ProcessChild.js +18 -24
- package/lib/process/ProcessWorker.js +27 -38
- package/lib/threads/ThreadsChild.js +26 -28
- package/lib/threads/ThreadsWorker.js +25 -31
- package/package.json +19 -8
- package/src/Handle.js +10 -39
- package/src/Worker.js +82 -15
- package/src/WorkerFarm.js +228 -55
- package/src/bus.js +1 -1
- package/src/child.js +83 -24
- package/src/cpuCount.js +9 -4
- package/src/index.js +8 -2
- package/src/process/ProcessChild.js +2 -1
- package/src/process/ProcessWorker.js +1 -1
- package/src/threads/ThreadsWorker.js +2 -2
- package/test/cpuCount.test.js +1 -1
- package/test/integration/workerfarm/console.js +1 -1
- package/test/integration/workerfarm/logging.js +1 -1
- package/test/integration/workerfarm/reverse-handle.js +2 -2
- package/test/workerfarm.js +5 -5
- package/lib/Profiler.js +0 -70
- package/lib/Trace.js +0 -126
- package/src/Profiler.js +0 -93
- package/src/Trace.js +0 -121
package/lib/WorkerFarm.js
CHANGED
@@ -10,177 +10,210 @@ Object.defineProperty(exports, "Handle", {
 }
 });
 exports.default = void 0;
-
-
-
-
-
-
-
-
-
-
-
+function _assert() {
+const data = _interopRequireDefault(require("assert"));
+_assert = function () {
+return data;
+};
+return data;
+}
+function _nullthrows() {
+const data = _interopRequireDefault(require("nullthrows"));
+_nullthrows = function () {
+return data;
+};
+return data;
+}
+function _events() {
+const data = _interopRequireDefault(require("events"));
+_events = function () {
+return data;
+};
+return data;
+}
+function _core() {
+const data = require("@parcel/core");
+_core = function () {
+return data;
+};
+return data;
+}
+function _diagnostic() {
+const data = _interopRequireWildcard(require("@parcel/diagnostic"));
+_diagnostic = function () {
+return data;
+};
+return data;
+}
 var _Worker = _interopRequireDefault(require("./Worker"));
-
 var _cpuCount = _interopRequireDefault(require("./cpuCount"));
-
 var _Handle = _interopRequireDefault(require("./Handle"));
-
 var _childState = require("./childState");
-
 var _backend = require("./backend");
-
-
-
-
-
-
-
-
-
-
-
-
-
+function _profiler() {
+const data = require("@parcel/profiler");
+_profiler = function () {
+return data;
+};
+return data;
+}
+function _fs() {
+const data = _interopRequireDefault(require("fs"));
+_fs = function () {
+return data;
+};
+return data;
+}
+function _logger() {
+const data = _interopRequireDefault(require("@parcel/logger"));
+_logger = function () {
+return data;
+};
+return data;
+}
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }
-
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
-function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
-let profileId = 1;
 let referenceId = 1;
+const DEFAULT_MAX_CONCURRENT_CALLS = 30;

 /**
 * workerPath should always be defined inside farmOptions
 */
-
+
+class WorkerFarm extends _events().default {
+callQueue = [];
+ending = false;
+warmWorkers = 0;
+workers = new Map();
+handles = new Map();
+sharedReferences = new Map();
+sharedReferencesByValue = new Map();
+serializedSharedReferences = new Map();
 constructor(farmOptions = {}) {
 super();
-
-_defineProperty(this, "callQueue", []);
-
-_defineProperty(this, "ending", false);
-
-_defineProperty(this, "localWorker", void 0);
-
-_defineProperty(this, "options", void 0);
-
-_defineProperty(this, "run", void 0);
-
-_defineProperty(this, "warmWorkers", 0);
-
-_defineProperty(this, "workers", new Map());
-
-_defineProperty(this, "handles", new Map());
-
-_defineProperty(this, "sharedReferences", new Map());
-
-_defineProperty(this, "sharedReferencesByValue", new Map());
-
-_defineProperty(this, "profiler", void 0);
-
-_defineProperty(this, "workerApi", {
-callMaster: async (request, awaitResponse = true) => {
-// $FlowFixMe
-let result = await this.processRequest(_objectSpread({}, request, {
-awaitResponse
-}));
-return (0, _core.deserialize)((0, _core.serialize)(result));
-},
-createReverseHandle: fn => this.createReverseHandle(fn),
-callChild: (childId, request) => new Promise((resolve, reject) => {
-(0, _nullthrows.default)(this.workers.get(childId)).call(_objectSpread({}, request, {
-resolve,
-reject,
-retries: 0
-}));
-}),
-getSharedReference: ref => this.sharedReferences.get(ref),
-resolveSharedReference: value => this.sharedReferencesByValue.get(value)
-});
-
-this.options = _objectSpread({
+this.options = {
 maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
-maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS),
 forcedKillTime: 500,
 warmWorkers: false,
 useLocalWorker: true,
 // TODO: setting this to false makes some tests fail, figure out why
-backend: (0, _backend.detectBackend)()
-
-
+backend: (0, _backend.detectBackend)(),
+...farmOptions
+};
 if (!this.options.workerPath) {
 throw new Error('Please provide a worker path!');
-}
-
+}

+// $FlowFixMe this must be dynamic
 this.localWorker = require(this.options.workerPath);
+this.localWorkerInit = this.localWorker.childInit != null ? this.localWorker.childInit() : null;
 this.run = this.createHandle('run');
+
+// Worker thread stdout is by default piped into the process stdout, if there are enough worker
+// threads to exceed the default listener limit, then anything else piping into stdout will trigger
+// the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
+// number of workers + 1 for the main thread.
+//
+// Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
+// threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
+// will give the warning with `<worker count + 1>` as the number of listeners.
+process.stdout.setMaxListeners(Math.max(process.stdout.getMaxListeners(), WorkerFarm.getNumWorkers() + 1));
 this.startMaxWorkers();
 }
-
+workerApi = {
+callMaster: async (request, awaitResponse = true) => {
+// $FlowFixMe
+let result = await this.processRequest({
+...request,
+awaitResponse
+});
+return (0, _core().deserialize)((0, _core().serialize)(result));
+},
+createReverseHandle: fn => this.createReverseHandle(fn),
+callChild: (childId, request) => new Promise((resolve, reject) => {
+(0, _nullthrows().default)(this.workers.get(childId)).call({
+...request,
+resolve,
+reject,
+retries: 0
+});
+}),
+runHandle: (handle, args) => this.workerApi.callChild((0, _nullthrows().default)(handle.childId), {
+handle: handle.id,
+args
+}),
+getSharedReference: ref => this.sharedReferences.get(ref),
+resolveSharedReference: value => this.sharedReferencesByValue.get(value)
+};
 warmupWorker(method, args) {
 // Workers are already stopping
 if (this.ending) {
 return;
-}
+}
+
+// Workers are not warmed up yet.
 // Send the job to a remote worker in the background,
 // but use the result from the local worker - it will be faster.
-
-
 let promise = this.addCall(method, [...args, true]);
-
 if (promise) {
 promise.then(() => {
 this.warmWorkers++;
-
 if (this.warmWorkers >= this.workers.size) {
 this.emit('warmedup');
 }
 }).catch(() => {});
 }
 }
-
 shouldStartRemoteWorkers() {
 return this.options.maxConcurrentWorkers > 0 || !this.options.useLocalWorker;
 }
-
-
-
+createHandle(method, useMainThread = false) {
+if (!this.options.useLocalWorker) {
+useMainThread = false;
+}
+return async (...args) => {
 // Child process workers are slow to start (~600ms).
 // While we're waiting, just run on the main thread.
 // This significantly speeds up startup time.
-if (this.shouldUseRemoteWorkers()) {
+if (this.shouldUseRemoteWorkers() && !useMainThread) {
 return this.addCall(method, [...args, false]);
 } else {
 if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
 this.warmupWorker(method, args);
 }
-
-
+let processedArgs;
+if (!useMainThread) {
+processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
+} else {
+processedArgs = args;
+}
+if (this.localWorkerInit != null) {
+await this.localWorkerInit;
+this.localWorkerInit = null;
+}
 return this.localWorker[method](this.workerApi, ...processedArgs);
 }
 };
 }
-
 onError(error, worker) {
 // Handle ipc errors
 if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
 return this.stopWorker(worker);
+} else {
+_logger().default.error(error, '@parcel/workers');
 }
 }
-
 startChild() {
 let worker = new _Worker.default({
 forcedKillTime: this.options.forcedKillTime,
 backend: this.options.backend,
-
+shouldPatchConsole: this.options.shouldPatchConsole,
+shouldTrace: this.options.shouldTrace,
+sharedReferences: this.sharedReferences
 });
-worker.fork((0, _nullthrows.default)(this.options.workerPath));
+worker.fork((0, _nullthrows().default)(this.options.workerPath));
 worker.on('request', data => this.processRequest(data, worker));
 worker.on('ready', () => this.processQueue());
 worker.on('response', () => this.processQueue());
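Note on the `_assert()` / `_nullthrows()` / `_profiler()` wrappers added in this hunk: they replace the old top-level `var _x = _interopRequireDefault(require(...))` statements so that each dependency is only loaded the first time it is used. A minimal standalone sketch of the pattern (CommonJS; the `assert` module here just mirrors the wrapper shown above and is not part of the package's API):

// First call loads the module; the wrapper then overwrites itself so that
// later calls return the cached module object without calling require again.
function _assert() {
  const data = require('assert');
  _assert = function () {
    return data;
  };
  return data;
}

// Call sites use `_assert().ok(...)` instead of a module-level variable,
// which defers the cost of loading the dependency until it is needed.
_assert().ok(true);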
@@ -188,48 +221,49 @@ class WorkerFarm extends _events.default {
 worker.once('exit', () => this.stopWorker(worker));
 this.workers.set(worker.id, worker);
 }
-
 async stopWorker(worker) {
 if (!worker.stopped) {
 this.workers.delete(worker.id);
 worker.isStopping = true;
-
 if (worker.calls.size) {
 for (let call of worker.calls.values()) {
 call.retries++;
 this.callQueue.unshift(call);
 }
 }
-
 worker.calls.clear();
-await worker.stop();
+await worker.stop();

+// Process any requests that failed and start a new worker
 this.processQueue();
 }
 }
-
 processQueue() {
 if (this.ending || !this.callQueue.length) return;
-
 if (this.workers.size < this.options.maxConcurrentWorkers) {
 this.startChild();
 }
-
-for (let worker of
+let workers = [...this.workers.values()].sort((a, b) => a.calls.size - b.calls.size);
+for (let worker of workers) {
 if (!this.callQueue.length) {
 break;
 }
-
 if (!worker.ready || worker.stopped || worker.isStopping) {
 continue;
 }
-
 if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
-
+this.callWorker(worker, this.callQueue.shift());
 }
 }
 }
-
+async callWorker(worker, call) {
+for (let ref of this.sharedReferences.keys()) {
+if (!worker.sentSharedReferences.has(ref)) {
+await worker.sendSharedReference(ref, this.getSerializedSharedReference(ref));
+}
+}
+worker.call(call);
+}
 async processRequest(data, worker) {
 let {
 method,
@@ -240,73 +274,63 @@ class WorkerFarm extends _events.default {
 handle: handleId
 } = data;
 let mod;
-
 if (handleId != null) {
-
+var _this$handles$get;
+mod = (0, _nullthrows().default)((_this$handles$get = this.handles.get(handleId)) === null || _this$handles$get === void 0 ? void 0 : _this$handles$get.fn);
 } else if (location) {
 // $FlowFixMe this must be dynamic
 mod = require(location);
 } else {
 throw new Error('Unknown request');
 }
-
 const responseFromContent = content => ({
 idx,
 type: 'response',
 contentType: 'data',
 content
 });
-
 const errorResponseFromError = e => ({
 idx,
 type: 'response',
 contentType: 'error',
-content: (0, _diagnostic.anyToDiagnostic)(e)
+content: (0, _diagnostic().anyToDiagnostic)(e)
 });
-
 let result;
-
 if (method == null) {
 try {
-result = responseFromContent(
+result = responseFromContent(await mod(...args));
 } catch (e) {
 result = errorResponseFromError(e);
 }
 } else {
 // ESModule default interop
-// $FlowFixMe
 if (mod.__esModule && !mod[method] && mod.default) {
 mod = mod.default;
 }
-
 try {
 // $FlowFixMe
-result = responseFromContent(
+result = responseFromContent(await mod[method](...args));
 } catch (e) {
 result = errorResponseFromError(e);
 }
 }
-
 if (awaitResponse) {
 if (worker) {
 worker.send(result);
 } else {
 if (result.contentType === 'error') {
-throw new _diagnostic.default({
+throw new (_diagnostic().default)({
 diagnostic: result.content
 });
 }
-
 return result.content;
 }
 }
 }
-
 addCall(method, args) {
 if (this.ending) {
 throw new Error('Cannot add a worker call if workerfarm is ending.');
 }
-
 return new Promise((resolve, reject) => {
 this.callQueue.push({
 method,
@@ -318,91 +342,87 @@ class WorkerFarm extends _events.default {
 this.processQueue();
 });
 }
-
 async end() {
 this.ending = true;
-
+await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
 for (let handle of this.handles.values()) {
 handle.dispose();
 }
-
 this.handles = new Map();
 this.sharedReferences = new Map();
 this.sharedReferencesByValue = new Map();
-await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
 this.ending = false;
 }
-
 startMaxWorkers() {
 // Starts workers until the maximum is reached
 if (this.workers.size < this.options.maxConcurrentWorkers) {
 let toStart = this.options.maxConcurrentWorkers - this.workers.size;
-
 while (toStart--) {
 this.startChild();
 }
 }
 }
-
 shouldUseRemoteWorkers() {
 return !this.options.useLocalWorker || (this.warmWorkers >= this.workers.size || !this.options.warmWorkers) && this.options.maxConcurrentWorkers > 0;
 }
-
 createReverseHandle(fn) {
 let handle = new _Handle.default({
-fn
-workerApi: this.workerApi
+fn
 });
 this.handles.set(handle.id, handle);
 return handle;
 }
-
-async createSharedReference(value) {
+createSharedReference(value, isCacheable = true) {
 let ref = referenceId++;
 this.sharedReferences.set(ref, value);
 this.sharedReferencesByValue.set(value, ref);
-
-
-for (let worker of this.workers.values()) {
-promises.push(new Promise((resolve, reject) => {
-worker.call({
-method: 'createSharedReference',
-args: [ref, value],
-resolve,
-reject,
-retries: 0
-});
-}));
+if (!isCacheable) {
+this.serializedSharedReferences.set(ref, null);
 }
-
-await Promise.all(promises);
 return {
 ref,
 dispose: () => {
 this.sharedReferences.delete(ref);
 this.sharedReferencesByValue.delete(value);
+this.serializedSharedReferences.delete(ref);
 let promises = [];
-
 for (let worker of this.workers.values()) {
+if (!worker.sentSharedReferences.has(ref)) {
+continue;
+}
+worker.sentSharedReferences.delete(ref);
 promises.push(new Promise((resolve, reject) => {
 worker.call({
 method: 'deleteSharedReference',
 args: [ref],
 resolve,
 reject,
+skipReadyCheck: true,
 retries: 0
 });
 }));
 }
-
 return Promise.all(promises);
 }
 };
 }
+getSerializedSharedReference(ref) {
+let cached = this.serializedSharedReferences.get(ref);
+if (cached) {
+return cached;
+}
+let value = this.sharedReferences.get(ref);
+let buf = (0, _core().serialize)(value).buffer;

+// If the reference was created with the isCacheable option set to false,
+// serializedSharedReferences will contain `null` as the value.
+if (cached !== null) {
+this.serializedSharedReferences.set(ref, buf);
+}
+return buf;
+}
 async startProfile() {
 let promises = [];
-
 for (let worker of this.workers.values()) {
 promises.push(new Promise((resolve, reject) => {
 worker.call({
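The `createSharedReference(value, isCacheable)` / `getSerializedSharedReference(ref)` pair added in this hunk caches the serialized form of a shared reference and reuses it each time the reference has to be sent to a newly started worker, unless the reference was registered as non-cacheable (marked with a `null` cache entry). A self-contained sketch of that caching rule in isolation (the `SerializedRefCache` class and the JSON-based serializer are illustrative only, not part of the package):

class SerializedRefCache {
  constructor(serialize) {
    this.serialize = serialize; // e.g. value => Buffer.from(JSON.stringify(value))
    this.values = new Map();    // ref -> original value
    this.cache = new Map();     // ref -> serialized Buffer, or null for "never cache"
  }

  add(ref, value, isCacheable = true) {
    this.values.set(ref, value);
    if (!isCacheable) this.cache.set(ref, null); // mirrors serializedSharedReferences.set(ref, null)
  }

  get(ref) {
    let cached = this.cache.get(ref);
    if (cached) return cached;                      // reuse the previous serialization
    let buf = this.serialize(this.values.get(ref)); // serialize on demand
    if (cached !== null) this.cache.set(ref, buf);  // only cache when allowed
    return buf;
  }
}

const refs = new SerializedRefCache(value => Buffer.from(JSON.stringify(value)));
refs.add(1, { config: 'static' });        // serialized once, then reused
refs.add(2, { stats: 'changing' }, false); // re-serialized on every get()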
@@ -410,24 +430,21 @@ class WorkerFarm extends _events.default {
 args: [],
 resolve,
 reject,
-retries: 0
+retries: 0,
+skipReadyCheck: true
 });
 }));
 }
-
-this.profiler = new _Profiler.default();
+this.profiler = new (_profiler().SamplingProfiler)();
 promises.push(this.profiler.startProfiling());
 await Promise.all(promises);
 }
-
 async endProfile() {
 if (!this.profiler) {
 return;
 }
-
 let promises = [this.profiler.stopProfiling()];
 let names = ['Master'];
-
 for (let worker of this.workers.values()) {
 names.push('Worker ' + worker.id);
 promises.push(new Promise((resolve, reject) => {
@@ -436,48 +453,83 @@ class WorkerFarm extends _events.default {
 args: [],
 resolve,
 reject,
-retries: 0
+retries: 0,
+skipReadyCheck: true
 });
 }));
 }
-
 var profiles = await Promise.all(promises);
-let trace = new
-let filename = `profile-${
-let stream = trace.pipe(_fs.default.createWriteStream(filename));
-
+let trace = new (_profiler().Trace)();
+let filename = `profile-${getTimeId()}.trace`;
+let stream = trace.pipe(_fs().default.createWriteStream(filename));
 for (let profile of profiles) {
 trace.addCPUProfile(names.shift(), profile);
 }
-
 trace.flush();
 await new Promise(resolve => {
 stream.once('finish', resolve);
 });
-
-_logger.default.info({
+_logger().default.info({
 origin: '@parcel/workers',
-message: `Wrote profile to ${filename}`
+message: (0, _diagnostic().md)`Wrote profile to ${filename}`
 });
 }
-
+async callAllWorkers(method, args) {
+let promises = [];
+for (let worker of this.workers.values()) {
+promises.push(new Promise((resolve, reject) => {
+worker.call({
+method,
+args,
+resolve,
+reject,
+retries: 0
+});
+}));
+}
+promises.push(this.localWorker[method](this.workerApi, ...args));
+await Promise.all(promises);
+}
+async takeHeapSnapshot() {
+let snapshotId = getTimeId();
+try {
+let snapshotPaths = await Promise.all([...this.workers.values()].map(worker => new Promise((resolve, reject) => {
+worker.call({
+method: 'takeHeapSnapshot',
+args: [snapshotId],
+resolve,
+reject,
+retries: 0,
+skipReadyCheck: true
+});
+})));
+_logger().default.info({
+origin: '@parcel/workers',
+message: (0, _diagnostic().md)`Wrote heap snapshots to the following paths:\n${snapshotPaths.join('\n')}`
+});
+} catch {
+_logger().default.error({
+origin: '@parcel/workers',
+message: 'Unable to take heap snapshots. Note: requires Node 11.13.0+'
+});
+}
+}
 static getNumWorkers() {
-return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : (0, _cpuCount.default)();
+return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.ceil((0, _cpuCount.default)() / 2);
 }
-
 static isWorker() {
 return !!_childState.child;
 }
-
 static getWorkerApi() {
-(0, _assert.default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
+(0, _assert().default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
 return _childState.child.workerApi;
 }
-
-
-return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 5;
+static getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
+return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
 }
-
 }
-
-
+exports.default = WorkerFarm;
+function getTimeId() {
+let now = new Date();
+return String(now.getFullYear()) + String(now.getMonth() + 1).padStart(2, '0') + String(now.getDate()).padStart(2, '0') + '-' + String(now.getHours()).padStart(2, '0') + String(now.getMinutes()).padStart(2, '0') + String(now.getSeconds()).padStart(2, '0');
+}
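For reference, the new concurrency defaults in this version resolve as sketched below. This is a standalone illustration mirroring the compiled `getNumWorkers` and `getConcurrentCallsPerWorker` above; the `cpuCount` argument stands in for the package's `./cpuCount` helper and is an assumption of the sketch, not the real module.

const DEFAULT_MAX_CONCURRENT_CALLS = 30;

function getNumWorkers(cpuCount) {
  // PARCEL_WORKERS wins; otherwise use half of the detected cores, rounded up.
  return process.env.PARCEL_WORKERS
    ? parseInt(process.env.PARCEL_WORKERS, 10)
    : Math.ceil(cpuCount / 2);
}

function getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
  // PARCEL_MAX_CONCURRENT_CALLS wins; otherwise the caller-supplied default.
  return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
}

// Without env overrides: 8 cores -> 4 workers, 30 calls per worker by default,
// and 1 call per worker when the farm is constructed with shouldTrace.
console.log(getNumWorkers(8));               // 4
console.log(getConcurrentCallsPerWorker());  // 30
console.log(getConcurrentCallsPerWorker(1)); // 1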