@parcel/workers 2.0.0-nightly.149 → 2.0.0-nightly.1491
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.d.ts +23 -0
- package/lib/Handle.js +16 -58
- package/lib/Worker.js +103 -62
- package/lib/WorkerFarm.js +272 -192
- package/lib/backend.js +4 -6
- package/lib/bus.js +11 -13
- package/lib/child.js +140 -116
- package/lib/childState.js +2 -4
- package/lib/core-worker.browser.js +4 -0
- package/lib/core-worker.js +4 -0
- package/lib/cpuCount.js +36 -25
- package/lib/index.js +35 -32
- package/lib/process/ProcessChild.js +18 -24
- package/lib/process/ProcessWorker.js +27 -38
- package/lib/threads/ThreadsChild.js +26 -28
- package/lib/threads/ThreadsWorker.js +25 -31
- package/lib/web/WebChild.js +44 -0
- package/lib/web/WebWorker.js +85 -0
- package/package.json +19 -8
- package/src/Handle.js +10 -39
- package/src/Worker.js +95 -22
- package/src/WorkerFarm.js +267 -62
- package/src/backend.js +5 -0
- package/src/bus.js +3 -2
- package/src/child.js +95 -26
- package/src/core-worker.browser.js +3 -0
- package/src/core-worker.js +2 -0
- package/src/cpuCount.js +23 -10
- package/src/index.js +8 -2
- package/src/process/ProcessChild.js +2 -1
- package/src/process/ProcessWorker.js +1 -1
- package/src/threads/ThreadsWorker.js +2 -2
- package/src/types.js +1 -1
- package/src/web/WebChild.js +50 -0
- package/src/web/WebWorker.js +85 -0
- package/test/cpuCount.test.js +1 -1
- package/test/integration/workerfarm/console.js +1 -1
- package/test/integration/workerfarm/logging.js +1 -1
- package/test/integration/workerfarm/reverse-handle.js +2 -2
- package/test/workerfarm.js +5 -5
- package/lib/Profiler.js +0 -70
- package/lib/Trace.js +0 -126
- package/src/Profiler.js +0 -93
- package/src/Trace.js +0 -121
package/src/WorkerFarm.js
CHANGED
@@ -11,6 +11,8 @@ import type {
 } from './types';
 import type {HandleFunction} from './Handle';
 
+import * as coreWorker from './core-worker';
+import * as bus from './bus';
 import invariant from 'assert';
 import nullthrows from 'nullthrows';
 import EventEmitter from 'events';
@@ -20,20 +22,20 @@ import {
   restoreDeserializedObject,
   serialize,
 } from '@parcel/core';
-import ThrowableDiagnostic, {anyToDiagnostic} from '@parcel/diagnostic';
+import ThrowableDiagnostic, {anyToDiagnostic, md} from '@parcel/diagnostic';
 import Worker, {type WorkerCall} from './Worker';
 import cpuCount from './cpuCount';
 import Handle from './Handle';
 import {child} from './childState';
 import {detectBackend} from './backend';
-import Profiler from './Profiler';
-import Trace from './Trace';
+import {SamplingProfiler, Trace} from '@parcel/profiler';
 import fs from 'fs';
 import logger from '@parcel/logger';
 
-let profileId = 1;
 let referenceId = 1;
 
+export opaque type SharedReference = number;
+
 export type FarmOptions = {|
   maxConcurrentWorkers: number,
   maxConcurrentCallsPerWorker: number,
@@ -42,23 +44,27 @@ export type FarmOptions = {|
   warmWorkers: boolean,
   workerPath?: FilePath,
   backend: BackendType,
-  patchConsole?: boolean,
+  shouldPatchConsole?: boolean,
+  shouldTrace?: boolean,
 |};
 
-type WorkerModule = {|
+type WorkerModule = {
   +[string]: (...args: Array<mixed>) => Promise<mixed>,
-|};
+  ...
+};
 
 export type WorkerApi = {|
   callMaster(CallRequest, ?boolean): Promise<mixed>,
   createReverseHandle(fn: HandleFunction): Handle,
-  getSharedReference(ref: number): mixed,
-  resolveSharedReference(value: mixed): ?number,
+  getSharedReference(ref: SharedReference): mixed,
+  resolveSharedReference(value: mixed): ?SharedReference,
   callChild?: (childId: number, request: HandleCallRequest) => Promise<mixed>,
 |};
 
 export {Handle};
 
+const DEFAULT_MAX_CONCURRENT_CALLS: number = 30;
+
 /**
  * workerPath should always be defined inside farmOptions
  */
@@ -67,20 +73,25 @@ export default class WorkerFarm extends EventEmitter {
   callQueue: Array<WorkerCall> = [];
   ending: boolean = false;
   localWorker: WorkerModule;
+  localWorkerInit: ?Promise<void>;
   options: FarmOptions;
   run: HandleFunction;
   warmWorkers: number = 0;
+  readyWorkers: number = 0;
   workers: Map<number, Worker> = new Map();
   handles: Map<number, Handle> = new Map();
-  sharedReferences: Map<number, mixed> = new Map();
-  sharedReferencesByValue: Map<mixed, number> = new Map();
-  profiler: ?Profiler;
+  sharedReferences: Map<SharedReference, mixed> = new Map();
+  sharedReferencesByValue: Map<mixed, SharedReference> = new Map();
+  serializedSharedReferences: Map<SharedReference, ?ArrayBuffer> = new Map();
+  profiler: ?SamplingProfiler;
 
   constructor(farmOptions: $Shape<FarmOptions> = {}) {
     super();
     this.options = {
       maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
-      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(
+        farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS,
+      ),
       forcedKillTime: 500,
       warmWorkers: false,
       useLocalWorker: true, // TODO: setting this to false makes some tests fail, figure out why
@@ -92,14 +103,54 @@ export default class WorkerFarm extends EventEmitter {
       throw new Error('Please provide a worker path!');
     }
 
-    // $FlowFixMe
-    this.localWorker = require(this.options.workerPath);
+    // $FlowFixMe
+    if (process.browser) {
+      if (this.options.workerPath === '@parcel/core/src/worker.js') {
+        this.localWorker = coreWorker;
+      } else {
+        throw new Error(
+          'No dynamic require possible: ' + this.options.workerPath,
+        );
+      }
+    } else {
+      // $FlowFixMe this must be dynamic
+      this.localWorker = require(this.options.workerPath);
+    }
+
+    this.localWorkerInit =
+      this.localWorker.childInit != null ? this.localWorker.childInit() : null;
+
     this.run = this.createHandle('run');
 
+    // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+    // threads to exceed the default listener limit, then anything else piping into stdout will trigger
+    // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
+    // number of workers + 1 for the main thread.
+    //
+    // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
+    // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
+    // will give the warning with `<worker count + 1>` as the number of listeners.
+    process.stdout?.setMaxListeners(
+      Math.max(
+        process.stdout.getMaxListeners(),
+        WorkerFarm.getNumWorkers() + 1,
+      ),
+    );
+
     this.startMaxWorkers();
   }
 
-  workerApi = {
+  workerApi: {|
+    callChild: (childId: number, request: HandleCallRequest) => Promise<mixed>,
+    callMaster: (
+      request: CallRequest,
+      awaitResponse?: ?boolean,
+    ) => Promise<mixed>,
+    createReverseHandle: (fn: HandleFunction) => Handle,
+    getSharedReference: (ref: SharedReference) => mixed,
+    resolveSharedReference: (value: mixed) => void | SharedReference,
+    runHandle: (handle: Handle, args: Array<any>) => Promise<mixed>,
+  |} = {
     callMaster: async (
       request: CallRequest,
       awaitResponse: ?boolean = true,
@@ -122,7 +173,13 @@ export default class WorkerFarm extends EventEmitter {
           retries: 0,
         });
       }),
-    getSharedReference: (ref: number) => this.sharedReferences.get(ref),
+    runHandle: (handle: Handle, args: Array<any>): Promise<mixed> =>
+      this.workerApi.callChild(nullthrows(handle.childId), {
+        handle: handle.id,
+        args,
+      }),
+    getSharedReference: (ref: SharedReference) =>
+      this.sharedReferences.get(ref),
     resolveSharedReference: (value: mixed) =>
       this.sharedReferencesByValue.get(value),
   };
@@ -155,30 +212,46 @@ export default class WorkerFarm extends EventEmitter {
     );
   }
 
-  createHandle(method: string): HandleFunction {
-    return (...args) => {
+  createHandle(method: string, useMainThread: boolean = false): HandleFunction {
+    if (!this.options.useLocalWorker) {
+      useMainThread = false;
+    }
+
+    return async (...args) => {
       // Child process workers are slow to start (~600ms).
       // While we're waiting, just run on the main thread.
       // This significantly speeds up startup time.
-      if (this.shouldUseRemoteWorkers()) {
+      if (this.shouldUseRemoteWorkers() && !useMainThread) {
        return this.addCall(method, [...args, false]);
      } else {
        if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
          this.warmupWorker(method, args);
        }
 
-        let processedArgs = restoreDeserializedObject(
-          prepareForSerialization([...args, false]),
-        );
+        let processedArgs;
+        if (!useMainThread) {
+          processedArgs = restoreDeserializedObject(
+            prepareForSerialization([...args, false]),
+          );
+        } else {
+          processedArgs = args;
+        }
+
+        if (this.localWorkerInit != null) {
+          await this.localWorkerInit;
+          this.localWorkerInit = null;
+        }
        return this.localWorker[method](this.workerApi, ...processedArgs);
      }
    };
  }
 
-  onError(error: ErrorWithCode, worker: Worker) {
+  onError(error: ErrorWithCode, worker: Worker): void | Promise<void> {
     // Handle ipc errors
     if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
       return this.stopWorker(worker);
+    } else {
+      logger.error(error, '@parcel/workers');
     }
   }
 
@@ -186,14 +259,22 @@ export default class WorkerFarm extends EventEmitter {
     let worker = new Worker({
       forcedKillTime: this.options.forcedKillTime,
       backend: this.options.backend,
-      patchConsole: this.options.patchConsole,
+      shouldPatchConsole: this.options.shouldPatchConsole,
+      shouldTrace: this.options.shouldTrace,
+      sharedReferences: this.sharedReferences,
     });
 
     worker.fork(nullthrows(this.options.workerPath));
 
     worker.on('request', data => this.processRequest(data, worker));
 
-    worker.on('ready', () => this.processQueue());
+    worker.on('ready', () => {
+      this.readyWorkers++;
+      if (this.readyWorkers === this.options.maxConcurrentWorkers) {
+        this.emit('ready');
+      }
+      this.processQueue();
+    });
     worker.on('response', () => this.processQueue());
 
     worker.on('error', err => this.onError(err, worker));
@@ -231,7 +312,11 @@ export default class WorkerFarm extends EventEmitter {
       this.startChild();
     }
 
-    for (let worker of this.workers.values()) {
+    let workers = [...this.workers.values()].sort(
+      (a, b) => a.calls.size - b.calls.size,
+    );
+
+    for (let worker of workers) {
       if (!this.callQueue.length) {
         break;
       }
@@ -241,9 +326,22 @@ export default class WorkerFarm extends EventEmitter {
       }
 
       if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
-        worker.call(this.callQueue.shift());
+        this.callWorker(worker, this.callQueue.shift());
+      }
+    }
+  }
+
+  async callWorker(worker: Worker, call: WorkerCall): Promise<void> {
+    for (let ref of this.sharedReferences.keys()) {
+      if (!worker.sentSharedReferences.has(ref)) {
+        await worker.sendSharedReference(
+          ref,
+          this.getSerializedSharedReference(ref),
+        );
       }
     }
+
+    worker.call(call);
   }
 
   async processRequest(
@@ -255,10 +353,19 @@ export default class WorkerFarm extends EventEmitter {
     let {method, args, location, awaitResponse, idx, handle: handleId} = data;
     let mod;
     if (handleId != null) {
-      mod = nullthrows(this.handles.get(handleId)).fn;
+      mod = nullthrows(this.handles.get(handleId)?.fn);
     } else if (location) {
-      // $FlowFixMe
-      mod = require(location);
+      // $FlowFixMe
+      if (process.browser) {
+        if (location === '@parcel/workers/src/bus.js') {
+          mod = (bus: any);
+        } else {
+          throw new Error('No dynamic require possible: ' + location);
+        }
+      } else {
+        // $FlowFixMe this must be dynamic
+        mod = require(location);
+      }
     } else {
       throw new Error('Unknown request');
     }
@@ -286,7 +393,6 @@ export default class WorkerFarm extends EventEmitter {
       }
     } else {
       // ESModule default interop
-      // $FlowFixMe
       if (mod.__esModule && !mod[method] && mod.default) {
         mod = mod.default;
       }
@@ -331,6 +437,10 @@ export default class WorkerFarm extends EventEmitter {
   async end(): Promise<void> {
     this.ending = true;
 
+    await Promise.all(
+      Array.from(this.workers.values()).map(worker => this.stopWorker(worker)),
+    );
+
     for (let handle of this.handles.values()) {
       handle.dispose();
     }
@@ -338,9 +448,6 @@ export default class WorkerFarm extends EventEmitter {
     this.sharedReferences = new Map();
     this.sharedReferencesByValue = new Map();
 
-    await Promise.all(
-      Array.from(this.workers.values()).map(worker => this.stopWorker(worker)),
-    );
     this.ending = false;
   }
 
@@ -362,40 +469,37 @@ export default class WorkerFarm extends EventEmitter {
     );
   }
 
-  createReverseHandle(fn: HandleFunction) {
-    let handle = new Handle({fn, workerApi: this.workerApi});
+  createReverseHandle(fn: HandleFunction): Handle {
+    let handle = new Handle({fn});
     this.handles.set(handle.id, handle);
     return handle;
   }
 
-  async createSharedReference(value: mixed) {
+  createSharedReference(
+    value: mixed,
+    isCacheable: boolean = true,
+  ): {|ref: SharedReference, dispose(): Promise<mixed>|} {
     let ref = referenceId++;
     this.sharedReferences.set(ref, value);
     this.sharedReferencesByValue.set(value, ref);
-    let promises = [];
-    for (let worker of this.workers.values()) {
-      promises.push(
-        new Promise((resolve, reject) => {
-          worker.call({
-            method: 'createSharedReference',
-            args: [ref, value],
-            resolve,
-            reject,
-            retries: 0,
-          });
-        }),
-      );
+    if (!isCacheable) {
+      this.serializedSharedReferences.set(ref, null);
     }
 
-    await Promise.all(promises);
-
     return {
       ref,
       dispose: () => {
        this.sharedReferences.delete(ref);
        this.sharedReferencesByValue.delete(value);
+        this.serializedSharedReferences.delete(ref);
+
        let promises = [];
        for (let worker of this.workers.values()) {
+          if (!worker.sentSharedReferences.has(ref)) {
+            continue;
+          }
+
+          worker.sentSharedReferences.delete(ref);
          promises.push(
            new Promise((resolve, reject) => {
              worker.call({
@@ -403,6 +507,7 @@ export default class WorkerFarm extends EventEmitter {
                 args: [ref],
                 resolve,
                 reject,
+                skipReadyCheck: true,
                 retries: 0,
               });
             }),
@@ -413,6 +518,24 @@ export default class WorkerFarm extends EventEmitter {
     };
   }
 
+  getSerializedSharedReference(ref: SharedReference): ArrayBuffer {
+    let cached = this.serializedSharedReferences.get(ref);
+    if (cached) {
+      return cached;
+    }
+
+    let value = this.sharedReferences.get(ref);
+    let buf = serialize(value).buffer;
+
+    // If the reference was created with the isCacheable option set to false,
+    // serializedSharedReferences will contain `null` as the value.
+    if (cached !== null) {
+      this.serializedSharedReferences.set(ref, buf);
+    }
+
+    return buf;
+  }
+
   async startProfile() {
     let promises = [];
     for (let worker of this.workers.values()) {
@@ -424,12 +547,13 @@ export default class WorkerFarm extends EventEmitter {
             resolve,
             reject,
             retries: 0,
+            skipReadyCheck: true,
           });
         }),
       );
     }
 
-    this.profiler = new Profiler();
+    this.profiler = new SamplingProfiler();
 
     promises.push(this.profiler.startProfiling());
     await Promise.all(promises);
@@ -453,6 +577,7 @@ export default class WorkerFarm extends EventEmitter {
             resolve,
             reject,
             retries: 0,
+            skipReadyCheck: true,
           });
         }),
       );
@@ -460,7 +585,7 @@ export default class WorkerFarm extends EventEmitter {
 
     var profiles = await Promise.all(promises);
     let trace = new Trace();
-    let filename = `profile-${profileId++}.trace`;
+    let filename = `profile-${getTimeId()}.trace`;
     let stream = trace.pipe(fs.createWriteStream(filename));
 
     for (let profile of profiles) {
@@ -474,21 +599,84 @@ export default class WorkerFarm extends EventEmitter {
 
     logger.info({
       origin: '@parcel/workers',
-      message: `Wrote profile to ${filename}`,
+      message: md`Wrote profile to ${filename}`,
     });
   }
 
-  static getNumWorkers() {
+  async callAllWorkers(method: string, args: Array<any>) {
+    let promises = [];
+    for (let worker of this.workers.values()) {
+      promises.push(
+        new Promise((resolve, reject) => {
+          worker.call({
+            method,
+            args,
+            resolve,
+            reject,
+            retries: 0,
+          });
+        }),
+      );
+    }
+
+    promises.push(this.localWorker[method](this.workerApi, ...args));
+    await Promise.all(promises);
+  }
+
+  async takeHeapSnapshot() {
+    let snapshotId = getTimeId();
+
+    try {
+      let snapshotPaths = await Promise.all(
+        [...this.workers.values()].map(
+          worker =>
+            new Promise((resolve, reject) => {
+              worker.call({
+                method: 'takeHeapSnapshot',
+                args: [snapshotId],
+                resolve,
+                reject,
+                retries: 0,
+                skipReadyCheck: true,
+              });
+            }),
+        ),
+      );
+
+      logger.info({
+        origin: '@parcel/workers',
+        message: md`Wrote heap snapshots to the following paths:\n${snapshotPaths.join(
+          '\n',
+        )}`,
+      });
+    } catch {
+      logger.error({
+        origin: '@parcel/workers',
+        message: 'Unable to take heap snapshots. Note: requires Node 11.13.0+',
+      });
+    }
+  }
+
+  static getNumWorkers(): number {
     return process.env.PARCEL_WORKERS
       ? parseInt(process.env.PARCEL_WORKERS, 10)
-      : cpuCount();
+      : Math.min(4, Math.ceil(cpuCount() / 2));
   }
 
-  static isWorker() {
+  static isWorker(): boolean {
     return !!child;
   }
 
-  static getWorkerApi() {
+  static getWorkerApi(): {|
+    callMaster: (
+      request: CallRequest,
+      awaitResponse?: ?boolean,
+    ) => Promise<mixed>,
+    createReverseHandle: (fn: (...args: Array<any>) => mixed) => Handle,
+    getSharedReference: (ref: SharedReference) => mixed,
+    resolveSharedReference: (value: mixed) => void | SharedReference,
+    runHandle: (handle: Handle, args: Array<any>) => Promise<mixed>,
+  |} {
     invariant(
       child != null,
       'WorkerFarm.getWorkerApi can only be called within workers',
@@ -496,7 +684,24 @@ export default class WorkerFarm extends EventEmitter {
     return child.workerApi;
   }
 
-  static getConcurrentCallsPerWorker(): number {
-    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 5;
+  static getConcurrentCallsPerWorker(
+    defaultValue?: number = DEFAULT_MAX_CONCURRENT_CALLS,
+  ): number {
+    return (
+      parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue
+    );
   }
 }
+
+function getTimeId() {
+  let now = new Date();
+  return (
+    String(now.getFullYear()) +
+    String(now.getMonth() + 1).padStart(2, '0') +
+    String(now.getDate()).padStart(2, '0') +
+    '-' +
+    String(now.getHours()).padStart(2, '0') +
+    String(now.getMinutes()).padStart(2, '0') +
+    String(now.getSeconds()).padStart(2, '0')
+  );
+}
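For orientation, here is a minimal usage sketch of the WorkerFarm surface touched by the hunks above: the `ready` event, `createHandle`, `createSharedReference` with its new `isCacheable` flag, and `end()`. This is not code from the package; the worker module path and the `runTask` method name are hypothetical placeholders.

```js
// Hypothetical sketch only: './my-worker.js' and 'runTask' are placeholders,
// not part of @parcel/workers.
import WorkerFarm from '@parcel/workers';

async function main() {
  let farm = new WorkerFarm({
    workerPath: require.resolve('./my-worker.js'),
  });

  // Emitted once readyWorkers === maxConcurrentWorkers (added in this diff).
  farm.once('ready', () => console.log('all workers ready'));

  // Shared references can now opt out of serialization caching via isCacheable.
  let {ref, dispose} = farm.createSharedReference({some: 'config'}, false);

  // createHandle(method) proxies calls to the worker module's exported method.
  let runTask = farm.createHandle('runTask');
  await runTask(ref);

  await dispose();
  await farm.end();
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
```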
package/src/backend.js
CHANGED
@@ -2,6 +2,9 @@
 import type {BackendType, WorkerImpl} from './types';
 
 export function detectBackend(): BackendType {
+  // $FlowFixMe
+  if (process.browser) return 'web';
+
   switch (process.env.PARCEL_WORKER_BACKEND) {
     case 'threads':
     case 'process':
@@ -22,6 +25,8 @@ export function getWorkerBackend(backend: BackendType): Class<WorkerImpl> {
       return require('./threads/ThreadsWorker').default;
     case 'process':
       return require('./process/ProcessWorker').default;
+    case 'web':
+      return require('./web/WebWorker').default;
     default:
       throw new Error(`Invalid backend: ${backend}`);
   }
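As context for the hunks above, a small sketch of how the backend selection might be exercised. The deep `lib/backend` require path is an assumption about the published layout (the compiled counterpart of `src/backend.js` listed in this diff), not a documented entry point.

```js
// Illustrative only: backend.js is an internal module of @parcel/workers.
const {detectBackend, getWorkerBackend} = require('@parcel/workers/lib/backend');

// In Node, detectBackend() honours PARCEL_WORKER_BACKEND ('threads' or 'process');
// in a browser bundle (process.browser) it now short-circuits to 'web'.
process.env.PARCEL_WORKER_BACKEND = 'threads';

const backend = detectBackend();
const WorkerImpl = getWorkerBackend(backend); // ThreadsWorker, ProcessWorker, or the new WebWorker
console.log(backend, WorkerImpl.name);
```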
package/src/bus.js
CHANGED
@@ -7,7 +7,8 @@ class Bus extends EventEmitter {
     if (child) {
       child.workerApi.callMaster(
         {
-          location: __filename,
+          // $FlowFixMe
+          location: process.browser ? '@parcel/workers/src/bus.js' : __filename,
           method: 'emit',
           args: [event, ...args],
         },
@@ -20,4 +21,4 @@ class Bus extends EventEmitter {
   }
 }
 
-export default new Bus();
+export default (new Bus(): Bus);
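The bus change above only affects how a worker locates the bus module in a browser build; its event-forwarding behaviour is unchanged. A short sketch of that pattern, assuming the `bus` re-export from the package index (`src/index.js` is also touched in this diff); the event name is a placeholder.

```js
// Sketch of the bus pattern: on the master, bus behaves like a plain
// EventEmitter; inside a worker, emit() is forwarded to the master via
// workerApi.callMaster using the location shown in the diff above.
import {bus} from '@parcel/workers';

// Master side: listen for events emitted from any worker.
bus.on('my-event', payload => {
  console.log('from worker:', payload);
});

// Worker side: the same call is proxied to the master process.
bus.emit('my-event', {ok: true});
```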