@parcel/workers 2.0.0-nightly.137 → 2.0.0-nightly.1370
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/index.d.ts +23 -0
- package/lib/Handle.js +16 -58
- package/lib/Worker.js +88 -53
- package/lib/WorkerFarm.js +240 -188
- package/lib/backend.js +0 -6
- package/lib/bus.js +8 -10
- package/lib/child.js +128 -114
- package/lib/childState.js +1 -2
- package/lib/cpuCount.js +25 -22
- package/lib/index.js +34 -30
- package/lib/process/ProcessChild.js +18 -24
- package/lib/process/ProcessWorker.js +27 -38
- package/lib/threads/ThreadsChild.js +26 -28
- package/lib/threads/ThreadsWorker.js +25 -31
- package/package.json +19 -8
- package/src/Handle.js +10 -39
- package/src/Worker.js +82 -15
- package/src/WorkerFarm.js +228 -55
- package/src/bus.js +1 -1
- package/src/child.js +83 -24
- package/src/cpuCount.js +9 -4
- package/src/index.js +8 -2
- package/src/process/ProcessChild.js +2 -1
- package/src/process/ProcessWorker.js +1 -1
- package/src/threads/ThreadsWorker.js +2 -2
- package/test/cpuCount.test.js +1 -1
- package/test/integration/workerfarm/console.js +1 -1
- package/test/integration/workerfarm/logging.js +1 -1
- package/test/integration/workerfarm/reverse-handle.js +2 -2
- package/test/workerfarm.js +5 -5
- package/lib/Profiler.js +0 -70
- package/lib/Trace.js +0 -126
- package/src/Profiler.js +0 -93
- package/src/Trace.js +0 -121
package/src/WorkerFarm.js
CHANGED
@@ -20,20 +20,20 @@ import {
   restoreDeserializedObject,
   serialize,
 } from '@parcel/core';
-import ThrowableDiagnostic, {anyToDiagnostic} from '@parcel/diagnostic';
+import ThrowableDiagnostic, {anyToDiagnostic, md} from '@parcel/diagnostic';
 import Worker, {type WorkerCall} from './Worker';
 import cpuCount from './cpuCount';
 import Handle from './Handle';
 import {child} from './childState';
 import {detectBackend} from './backend';
-import
-import Trace from './Trace';
+import {SamplingProfiler, Trace} from '@parcel/profiler';
 import fs from 'fs';
 import logger from '@parcel/logger';

-let profileId = 1;
 let referenceId = 1;

+export opaque type SharedReference = number;
+
 export type FarmOptions = {|
   maxConcurrentWorkers: number,
   maxConcurrentCallsPerWorker: number,
@@ -42,7 +42,8 @@ export type FarmOptions = {|
   warmWorkers: boolean,
   workerPath?: FilePath,
   backend: BackendType,
-
+  shouldPatchConsole?: boolean,
+  shouldTrace?: boolean,
 |};

 type WorkerModule = {|
@@ -52,13 +53,15 @@ type WorkerModule = {|
 export type WorkerApi = {|
   callMaster(CallRequest, ?boolean): Promise<mixed>,
   createReverseHandle(fn: HandleFunction): Handle,
-  getSharedReference(ref:
-  resolveSharedReference(value: mixed): ?
+  getSharedReference(ref: SharedReference): mixed,
+  resolveSharedReference(value: mixed): ?SharedReference,
   callChild?: (childId: number, request: HandleCallRequest) => Promise<mixed>,
 |};

 export {Handle};

+const DEFAULT_MAX_CONCURRENT_CALLS: number = 30;
+
 /**
  * workerPath should always be defined inside farmOptions
  */
@@ -67,20 +70,24 @@ export default class WorkerFarm extends EventEmitter {
   callQueue: Array<WorkerCall> = [];
   ending: boolean = false;
   localWorker: WorkerModule;
+  localWorkerInit: ?Promise<void>;
   options: FarmOptions;
   run: HandleFunction;
   warmWorkers: number = 0;
   workers: Map<number, Worker> = new Map();
   handles: Map<number, Handle> = new Map();
-  sharedReferences: Map<
-  sharedReferencesByValue: Map<mixed,
-
+  sharedReferences: Map<SharedReference, mixed> = new Map();
+  sharedReferencesByValue: Map<mixed, SharedReference> = new Map();
+  serializedSharedReferences: Map<SharedReference, ?ArrayBuffer> = new Map();
+  profiler: ?SamplingProfiler;

   constructor(farmOptions: $Shape<FarmOptions> = {}) {
     super();
     this.options = {
       maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
-      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(
+      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(
+        farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS,
+      ),
       forcedKillTime: 500,
       warmWorkers: false,
       useLocalWorker: true, // TODO: setting this to false makes some tests fail, figure out why
@@ -94,12 +101,39 @@ export default class WorkerFarm extends EventEmitter {

     // $FlowFixMe this must be dynamic
     this.localWorker = require(this.options.workerPath);
+    this.localWorkerInit =
+      this.localWorker.childInit != null ? this.localWorker.childInit() : null;
     this.run = this.createHandle('run');

+    // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+    // threads to exceed the default listener limit, then anything else piping into stdout will trigger
+    // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
+    // number of workers + 1 for the main thread.
+    //
+    // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
+    // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
+    // will give the warning with `<worker count + 1>` as the number of listeners.
+    process.stdout.setMaxListeners(
+      Math.max(
+        process.stdout.getMaxListeners(),
+        WorkerFarm.getNumWorkers() + 1,
+      ),
+    );
+
     this.startMaxWorkers();
   }

-  workerApi
+  workerApi: {|
+    callChild: (childId: number, request: HandleCallRequest) => Promise<mixed>,
+    callMaster: (
+      request: CallRequest,
+      awaitResponse?: ?boolean,
+    ) => Promise<mixed>,
+    createReverseHandle: (fn: HandleFunction) => Handle,
+    getSharedReference: (ref: SharedReference) => mixed,
+    resolveSharedReference: (value: mixed) => void | SharedReference,
+    runHandle: (handle: Handle, args: Array<any>) => Promise<mixed>,
+  |} = {
     callMaster: async (
       request: CallRequest,
       awaitResponse: ?boolean = true,
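The comment block added in the hunk above explains why the farm raises the listener limit on `process.stdout`: each worker thread's stdout is piped into the parent process, and once more than the default ten listeners are attached Node emits a `MaxListenersExceededWarning`. A rough standalone sketch of the same pattern follows; the `WORKER_COUNT` constant is illustrative and not part of the package.

```js
// Sketch: raise stdout's listener limit before many sources pipe into it.
// WORKER_COUNT stands in for however many worker threads will be started.
const WORKER_COUNT = 16;

process.stdout.setMaxListeners(
  // Only ever raise the limit; never lower one that was already raised.
  Math.max(process.stdout.getMaxListeners(), WORKER_COUNT + 1),
);
```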
@@ -122,7 +156,13 @@ export default class WorkerFarm extends EventEmitter {
           retries: 0,
         });
       }),
-
+    runHandle: (handle: Handle, args: Array<any>): Promise<mixed> =>
+      this.workerApi.callChild(nullthrows(handle.childId), {
+        handle: handle.id,
+        args,
+      }),
+    getSharedReference: (ref: SharedReference) =>
+      this.sharedReferences.get(ref),
     resolveSharedReference: (value: mixed) =>
       this.sharedReferencesByValue.get(value),
   };
@@ -155,30 +195,46 @@ export default class WorkerFarm extends EventEmitter {
     );
   }

-  createHandle(method: string): HandleFunction {
-
+  createHandle(method: string, useMainThread: boolean = false): HandleFunction {
+    if (!this.options.useLocalWorker) {
+      useMainThread = false;
+    }
+
+    return async (...args) => {
       // Child process workers are slow to start (~600ms).
       // While we're waiting, just run on the main thread.
       // This significantly speeds up startup time.
-      if (this.shouldUseRemoteWorkers()) {
+      if (this.shouldUseRemoteWorkers() && !useMainThread) {
         return this.addCall(method, [...args, false]);
       } else {
         if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
           this.warmupWorker(method, args);
         }

-        let processedArgs
-
-
+        let processedArgs;
+        if (!useMainThread) {
+          processedArgs = restoreDeserializedObject(
+            prepareForSerialization([...args, false]),
+          );
+        } else {
+          processedArgs = args;
+        }
+
+        if (this.localWorkerInit != null) {
+          await this.localWorkerInit;
+          this.localWorkerInit = null;
+        }
         return this.localWorker[method](this.workerApi, ...processedArgs);
       }
     };
   }

-  onError(error: ErrorWithCode, worker: Worker) {
+  onError(error: ErrorWithCode, worker: Worker): void | Promise<void> {
     // Handle ipc errors
     if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
       return this.stopWorker(worker);
+    } else {
+      logger.error(error, '@parcel/workers');
     }
   }

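In the hunk above, `createHandle` gains a `useMainThread` flag and, before running a method on the local worker, awaits the one-time `localWorkerInit` promise created in the constructor. A rough standalone sketch of that lazy-init pattern follows; the `LocalRunner` class is hypothetical and only illustrates the idea, it is not part of the package.

```js
// Sketch: await an optional async childInit() exactly once, on first use.
class LocalRunner {
  constructor(workerModule) {
    this.worker = workerModule;
    // Start optional initialisation immediately, but don't block construction.
    this.initPromise =
      typeof workerModule.childInit === 'function'
        ? workerModule.childInit()
        : null;
  }

  async run(method, ...args) {
    if (this.initPromise != null) {
      await this.initPromise; // the first call waits for init to finish
      this.initPromise = null; // later calls skip it
    }
    return this.worker[method](...args);
  }
}
```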
@@ -186,7 +242,9 @@ export default class WorkerFarm extends EventEmitter {
     let worker = new Worker({
       forcedKillTime: this.options.forcedKillTime,
       backend: this.options.backend,
-
+      shouldPatchConsole: this.options.shouldPatchConsole,
+      shouldTrace: this.options.shouldTrace,
+      sharedReferences: this.sharedReferences,
     });

     worker.fork(nullthrows(this.options.workerPath));
@@ -231,7 +289,11 @@ export default class WorkerFarm extends EventEmitter {
       this.startChild();
     }

-
+    let workers = [...this.workers.values()].sort(
+      (a, b) => a.calls.size - b.calls.size,
+    );
+
+    for (let worker of workers) {
       if (!this.callQueue.length) {
         break;
       }
@@ -241,11 +303,24 @@ export default class WorkerFarm extends EventEmitter {
       }

       if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
-
+        this.callWorker(worker, this.callQueue.shift());
       }
     }
   }

+  async callWorker(worker: Worker, call: WorkerCall): Promise<void> {
+    for (let ref of this.sharedReferences.keys()) {
+      if (!worker.sentSharedReferences.has(ref)) {
+        await worker.sendSharedReference(
+          ref,
+          this.getSerializedSharedReference(ref),
+        );
+      }
+    }
+
+    worker.call(call);
+  }
+
   async processRequest(
     data: {|
       location: FilePath,
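Two scheduling changes land in the hunks above: queued calls are now dispatched through `callWorker`, which first pushes any shared references the worker has not yet received, and the worker list is sorted so the least-busy worker (fewest in-flight calls) is served first. A rough sketch of that least-busy selection with plain objects follows; the names are illustrative only.

```js
// Sketch: drain a call queue onto workers in ascending order of in-flight calls.
function sortByLoad(workers) {
  return [...workers].sort((a, b) => a.calls.size - b.calls.size);
}

const queue = ['call-1', 'call-2', 'call-3'];
const workers = [
  {id: 1, calls: new Set(['busy-a', 'busy-b'])},
  {id: 2, calls: new Set()},
];

for (const worker of sortByLoad(workers)) {
  if (queue.length === 0) break;
  worker.calls.add(queue.shift()); // stands in for callWorker(worker, call)
}
// The idle worker 2 receives 'call-1' before the busier worker 1.
```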
@@ -255,7 +330,7 @@ export default class WorkerFarm extends EventEmitter {
     let {method, args, location, awaitResponse, idx, handle: handleId} = data;
     let mod;
     if (handleId != null) {
-      mod = nullthrows(this.handles.get(handleId))
+      mod = nullthrows(this.handles.get(handleId)?.fn);
     } else if (location) {
       // $FlowFixMe this must be dynamic
       mod = require(location);
@@ -286,7 +361,6 @@ export default class WorkerFarm extends EventEmitter {
       }
     } else {
       // ESModule default interop
-      // $FlowFixMe
       if (mod.__esModule && !mod[method] && mod.default) {
         mod = mod.default;
       }
@@ -331,6 +405,10 @@ export default class WorkerFarm extends EventEmitter {
   async end(): Promise<void> {
     this.ending = true;

+    await Promise.all(
+      Array.from(this.workers.values()).map(worker => this.stopWorker(worker)),
+    );
+
     for (let handle of this.handles.values()) {
       handle.dispose();
     }
@@ -338,9 +416,6 @@ export default class WorkerFarm extends EventEmitter {
     this.sharedReferences = new Map();
     this.sharedReferencesByValue = new Map();

-    await Promise.all(
-      Array.from(this.workers.values()).map(worker => this.stopWorker(worker)),
-    );
     this.ending = false;
   }

@@ -362,40 +437,37 @@ export default class WorkerFarm extends EventEmitter {
     );
   }

-  createReverseHandle(fn: HandleFunction) {
-    let handle = new Handle({fn
+  createReverseHandle(fn: HandleFunction): Handle {
+    let handle = new Handle({fn});
     this.handles.set(handle.id, handle);
     return handle;
   }

-
+  createSharedReference(
+    value: mixed,
+    isCacheable: boolean = true,
+  ): {|ref: SharedReference, dispose(): Promise<mixed>|} {
     let ref = referenceId++;
     this.sharedReferences.set(ref, value);
     this.sharedReferencesByValue.set(value, ref);
-
-
-      promises.push(
-        new Promise((resolve, reject) => {
-          worker.call({
-            method: 'createSharedReference',
-            args: [ref, value],
-            resolve,
-            reject,
-            retries: 0,
-          });
-        }),
-      );
+    if (!isCacheable) {
+      this.serializedSharedReferences.set(ref, null);
     }

-    await Promise.all(promises);
-
     return {
       ref,
       dispose: () => {
         this.sharedReferences.delete(ref);
         this.sharedReferencesByValue.delete(value);
+        this.serializedSharedReferences.delete(ref);
+
         let promises = [];
         for (let worker of this.workers.values()) {
+          if (!worker.sentSharedReferences.has(ref)) {
+            continue;
+          }
+
+          worker.sentSharedReferences.delete(ref);
           promises.push(
             new Promise((resolve, reject) => {
               worker.call({
@@ -403,6 +475,7 @@ export default class WorkerFarm extends EventEmitter {
                 args: [ref],
                 resolve,
                 reject,
+                skipReadyCheck: true,
                 retries: 0,
               });
             }),
@@ -413,6 +486,24 @@ export default class WorkerFarm extends EventEmitter {
     };
   }

+  getSerializedSharedReference(ref: SharedReference): ArrayBuffer {
+    let cached = this.serializedSharedReferences.get(ref);
+    if (cached) {
+      return cached;
+    }
+
+    let value = this.sharedReferences.get(ref);
+    let buf = serialize(value).buffer;
+
+    // If the reference was created with the isCacheable option set to false,
+    // serializedSharedReferences will contain `null` as the value.
+    if (cached !== null) {
+      this.serializedSharedReferences.set(ref, buf);
+    }
+
+    return buf;
+  }
+
   async startProfile() {
     let promises = [];
     for (let worker of this.workers.values()) {
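Taken together with `createSharedReference(value, isCacheable)` above, this hunk replaces the old eager `createSharedReference` broadcast: a reference is now serialized lazily the first time a worker needs it, the resulting buffer is cached unless the reference was created with `isCacheable` set to false, and `dispose()` only notifies workers that actually received the reference. A rough standalone sketch of the cache-or-opt-out part follows; it is not the package's code, only the same pattern in miniature.

```js
// Sketch: memoised serialization where `null` marks "never cache this ref".
const values = new Map();     // ref -> original value
const serialized = new Map(); // ref -> ArrayBuffer | null

function createRef(ref, value, isCacheable = true) {
  values.set(ref, value);
  if (!isCacheable) {
    serialized.set(ref, null); // sentinel: always re-serialize on demand
  }
}

function getSerialized(ref, serialize) {
  const cached = serialized.get(ref);
  if (cached) {
    return cached; // cache hit: reuse the buffer
  }
  const buf = serialize(values.get(ref));
  if (cached !== null) {
    serialized.set(ref, buf); // cache only when the ref wasn't opted out
  }
  return buf;
}
```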
@@ -424,12 +515,13 @@ export default class WorkerFarm extends EventEmitter {
             resolve,
             reject,
             retries: 0,
+            skipReadyCheck: true,
           });
         }),
       );
     }

-    this.profiler = new
+    this.profiler = new SamplingProfiler();

     promises.push(this.profiler.startProfiling());
     await Promise.all(promises);
@@ -453,6 +545,7 @@ export default class WorkerFarm extends EventEmitter {
             resolve,
             reject,
             retries: 0,
+            skipReadyCheck: true,
           });
         }),
       );
@@ -460,7 +553,7 @@ export default class WorkerFarm extends EventEmitter {

     var profiles = await Promise.all(promises);
     let trace = new Trace();
-    let filename = `profile-${
+    let filename = `profile-${getTimeId()}.trace`;
     let stream = trace.pipe(fs.createWriteStream(filename));

     for (let profile of profiles) {
@@ -474,21 +567,84 @@ export default class WorkerFarm extends EventEmitter {

     logger.info({
       origin: '@parcel/workers',
-      message: `Wrote profile to ${filename}`,
+      message: md`Wrote profile to ${filename}`,
     });
   }

-
+  async callAllWorkers(method: string, args: Array<any>) {
+    let promises = [];
+    for (let worker of this.workers.values()) {
+      promises.push(
+        new Promise((resolve, reject) => {
+          worker.call({
+            method,
+            args,
+            resolve,
+            reject,
+            retries: 0,
+          });
+        }),
+      );
+    }
+
+    promises.push(this.localWorker[method](this.workerApi, ...args));
+    await Promise.all(promises);
+  }
+
+  async takeHeapSnapshot() {
+    let snapshotId = getTimeId();
+
+    try {
+      let snapshotPaths = await Promise.all(
+        [...this.workers.values()].map(
+          worker =>
+            new Promise((resolve, reject) => {
+              worker.call({
+                method: 'takeHeapSnapshot',
+                args: [snapshotId],
+                resolve,
+                reject,
+                retries: 0,
+                skipReadyCheck: true,
+              });
+            }),
+        ),
+      );
+
+      logger.info({
+        origin: '@parcel/workers',
+        message: md`Wrote heap snapshots to the following paths:\n${snapshotPaths.join(
+          '\n',
+        )}`,
+      });
+    } catch {
+      logger.error({
+        origin: '@parcel/workers',
+        message: 'Unable to take heap snapshots. Note: requires Node 11.13.0+',
+      });
+    }
+  }
+
+  static getNumWorkers(): number {
     return process.env.PARCEL_WORKERS
       ? parseInt(process.env.PARCEL_WORKERS, 10)
-      : cpuCount();
+      : Math.ceil(cpuCount() / 2);
   }

-  static isWorker() {
+  static isWorker(): boolean {
     return !!child;
   }

-  static getWorkerApi() {
+  static getWorkerApi(): {|
+    callMaster: (
+      request: CallRequest,
+      awaitResponse?: ?boolean,
+    ) => Promise<mixed>,
+    createReverseHandle: (fn: (...args: Array<any>) => mixed) => Handle,
+    getSharedReference: (ref: SharedReference) => mixed,
+    resolveSharedReference: (value: mixed) => void | SharedReference,
+    runHandle: (handle: Handle, args: Array<any>) => Promise<mixed>,
+  |} {
     invariant(
       child != null,
       'WorkerFarm.getWorkerApi can only be called within workers',
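Besides the new `callAllWorkers` and `takeHeapSnapshot` helpers, the hunk above changes the default worker count: when `PARCEL_WORKERS` is unset, the farm now uses half the detected CPU count (rounded up) instead of every core. A rough sketch of the same resolution order, with `cpuCount` passed in as a plain number purely for illustration:

```js
// Sketch: worker-count resolution as in the hunk above.
function getNumWorkers(cpuCount) {
  return process.env.PARCEL_WORKERS
    ? parseInt(process.env.PARCEL_WORKERS, 10) // explicit override wins
    : Math.ceil(cpuCount / 2); // otherwise half the cores, rounded up
}

// getNumWorkers(8) === 4; getNumWorkers(7) === 4; with PARCEL_WORKERS=3 it returns 3.
```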
@@ -496,7 +652,24 @@ export default class WorkerFarm extends EventEmitter {
     return child.workerApi;
   }

-  static getConcurrentCallsPerWorker(
-
+  static getConcurrentCallsPerWorker(
+    defaultValue?: number = DEFAULT_MAX_CONCURRENT_CALLS,
+  ): number {
+    return (
+      parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue
+    );
   }
 }
+
+function getTimeId() {
+  let now = new Date();
+  return (
+    String(now.getFullYear()) +
+    String(now.getMonth() + 1).padStart(2, '0') +
+    String(now.getDate()).padStart(2, '0') +
+    '-' +
+    String(now.getHours()).padStart(2, '0') +
+    String(now.getMinutes()).padStart(2, '0') +
+    String(now.getSeconds()).padStart(2, '0')
+  );
+}
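The final hunk also lets `PARCEL_MAX_CONCURRENT_CALLS` override the per-worker concurrency cap (default 30, or 1 when `shouldTrace` is enabled, as set in the constructor), and the new `getTimeId` helper builds the timestamp used to name profile and heap-snapshot files: year, month, day, a dash, then hours, minutes, and seconds, all zero-padded in local time. For illustration, with an arbitrary fixed date:

```js
// Sketch: the string getTimeId() builds, shown for a fixed local time.
let now = new Date(2021, 1, 5, 9, 7, 3); // 5 Feb 2021, 09:07:03 local time
let id =
  String(now.getFullYear()) +
  String(now.getMonth() + 1).padStart(2, '0') +
  String(now.getDate()).padStart(2, '0') +
  '-' +
  String(now.getHours()).padStart(2, '0') +
  String(now.getMinutes()).padStart(2, '0') +
  String(now.getSeconds()).padStart(2, '0');
// id === '20210205-090703', so endProfile() would write 'profile-20210205-090703.trace'.
```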
package/src/bus.js
CHANGED