@parcel/workers 2.0.0-beta.3.1 → 2.0.0-dev.1510

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/WorkerFarm.js CHANGED
@@ -6,279 +6,210 @@ Object.defineProperty(exports, "__esModule", {
  Object.defineProperty(exports, "Handle", {
  enumerable: true,
  get: function () {
- return _Handle().default;
+ return _Handle.default;
  }
  });
  exports.default = void 0;
-
+ var coreWorker = _interopRequireWildcard(require("./core-worker"));
+ var bus = _interopRequireWildcard(require("./bus"));
  function _assert() {
  const data = _interopRequireDefault(require("assert"));
-
  _assert = function () {
  return data;
  };
-
  return data;
  }
-
  function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
-
  _nullthrows = function () {
  return data;
  };
-
  return data;
  }
-
  function _events() {
  const data = _interopRequireDefault(require("events"));
-
  _events = function () {
  return data;
  };
-
  return data;
  }
-
  function _core() {
  const data = require("@parcel/core");
-
  _core = function () {
  return data;
  };
-
  return data;
  }
-
  function _diagnostic() {
  const data = _interopRequireWildcard(require("@parcel/diagnostic"));
-
  _diagnostic = function () {
  return data;
  };
-
- return data;
- }
-
- function _Worker() {
- const data = _interopRequireDefault(require("./Worker"));
-
- _Worker = function () {
- return data;
- };
-
- return data;
- }
-
- function _cpuCount() {
- const data = _interopRequireDefault(require("./cpuCount"));
-
- _cpuCount = function () {
- return data;
- };
-
- return data;
- }
-
- function _Handle() {
- const data = _interopRequireDefault(require("./Handle"));
-
- _Handle = function () {
- return data;
- };
-
- return data;
- }
-
- function _childState() {
- const data = require("./childState");
-
- _childState = function () {
- return data;
- };
-
- return data;
- }
-
- function _backend() {
- const data = require("./backend");
-
- _backend = function () {
- return data;
- };
-
- return data;
- }
-
- function _Profiler() {
- const data = _interopRequireDefault(require("./Profiler"));
-
- _Profiler = function () {
- return data;
- };
-
  return data;
  }
-
- function _Trace() {
- const data = _interopRequireDefault(require("./Trace"));
-
- _Trace = function () {
+ var _Worker = _interopRequireDefault(require("./Worker"));
+ var _cpuCount = _interopRequireDefault(require("./cpuCount"));
+ var _Handle = _interopRequireDefault(require("./Handle"));
+ var _childState = require("./childState");
+ var _backend = require("./backend");
+ function _profiler() {
+ const data = require("@parcel/profiler");
+ _profiler = function () {
  return data;
  };
-
  return data;
  }
-
  function _fs() {
  const data = _interopRequireDefault(require("fs"));
-
  _fs = function () {
  return data;
  };
-
  return data;
  }
-
  function _logger() {
  const data = _interopRequireDefault(require("@parcel/logger"));
-
  _logger = function () {
  return data;
  };
-
  return data;
  }
-
- function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
-
- function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
- function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-
+ function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
+ function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
  let referenceId = 1;
+ const DEFAULT_MAX_CONCURRENT_CALLS = 30;

  /**
  * workerPath should always be defined inside farmOptions
  */
+
  class WorkerFarm extends _events().default {
+ callQueue = [];
+ ending = false;
+ warmWorkers = 0;
+ readyWorkers = 0;
+ workers = new Map();
+ handles = new Map();
+ sharedReferences = new Map();
+ sharedReferencesByValue = new Map();
+ serializedSharedReferences = new Map();
  constructor(farmOptions = {}) {
+ var _process$stdout;
  super();
-
- _defineProperty(this, "callQueue", []);
-
- _defineProperty(this, "ending", false);
-
- _defineProperty(this, "localWorker", void 0);
-
- _defineProperty(this, "options", void 0);
-
- _defineProperty(this, "run", void 0);
-
- _defineProperty(this, "warmWorkers", 0);
-
- _defineProperty(this, "workers", new Map());
-
- _defineProperty(this, "handles", new Map());
-
- _defineProperty(this, "sharedReferences", new Map());
-
- _defineProperty(this, "sharedReferencesByValue", new Map());
-
- _defineProperty(this, "profiler", void 0);
-
- _defineProperty(this, "workerApi", {
- callMaster: async (request, awaitResponse = true) => {
- // $FlowFixMe
- let result = await this.processRequest({ ...request,
- awaitResponse
- });
- return (0, _core().deserialize)((0, _core().serialize)(result));
- },
- createReverseHandle: fn => this.createReverseHandle(fn),
- callChild: (childId, request) => new Promise((resolve, reject) => {
- (0, _nullthrows().default)(this.workers.get(childId)).call({ ...request,
- resolve,
- reject,
- retries: 0
- });
- }),
- runHandle: (handle, args) => this.workerApi.callChild((0, _nullthrows().default)(handle.childId), {
- handle: handle.id,
- args
- }),
- getSharedReference: ref => this.sharedReferences.get(ref),
- resolveSharedReference: value => this.sharedReferencesByValue.get(value)
- });
-
  this.options = {
  maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
- maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+ maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS),
  forcedKillTime: 500,
  warmWorkers: false,
  useLocalWorker: true,
  // TODO: setting this to false makes some tests fail, figure out why
- backend: (0, _backend().detectBackend)(),
+ backend: (0, _backend.detectBackend)(),
  ...farmOptions
  };
-
  if (!this.options.workerPath) {
  throw new Error('Please provide a worker path!');
- } // $FlowFixMe this must be dynamic
-
+ }

- this.localWorker = require(this.options.workerPath);
+ // $FlowFixMe
+ if (process.browser) {
+ if (this.options.workerPath === '@parcel/core/src/worker.js') {
+ this.localWorker = coreWorker;
+ } else {
+ throw new Error('No dynamic require possible: ' + this.options.workerPath);
+ }
+ } else {
+ // $FlowFixMe this must be dynamic
+ this.localWorker = require(this.options.workerPath);
+ }
+ this.localWorkerInit = this.localWorker.childInit != null ? this.localWorker.childInit() : null;
  this.run = this.createHandle('run');
+
+ // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+ // threads to exceed the default listener limit, then anything else piping into stdout will trigger
+ // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
+ // number of workers + 1 for the main thread.
+ //
+ // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
+ // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
+ // will give the warning with `<worker count + 1>` as the number of listeners.
+ (_process$stdout = process.stdout) === null || _process$stdout === void 0 || _process$stdout.setMaxListeners(Math.max(process.stdout.getMaxListeners(), WorkerFarm.getNumWorkers() + 1));
  this.startMaxWorkers();
  }
-
+ workerApi = {
+ callMaster: async (request, awaitResponse = true) => {
+ // $FlowFixMe
+ let result = await this.processRequest({
+ ...request,
+ awaitResponse
+ });
+ return (0, _core().deserialize)((0, _core().serialize)(result));
+ },
+ createReverseHandle: fn => this.createReverseHandle(fn),
+ callChild: (childId, request) => new Promise((resolve, reject) => {
+ (0, _nullthrows().default)(this.workers.get(childId)).call({
+ ...request,
+ resolve,
+ reject,
+ retries: 0
+ });
+ }),
+ runHandle: (handle, args) => this.workerApi.callChild((0, _nullthrows().default)(handle.childId), {
+ handle: handle.id,
+ args
+ }),
+ getSharedReference: ref => this.sharedReferences.get(ref),
+ resolveSharedReference: value => this.sharedReferencesByValue.get(value)
+ };
  warmupWorker(method, args) {
  // Workers are already stopping
  if (this.ending) {
  return;
- } // Workers are not warmed up yet.
+ }
+
+ // Workers are not warmed up yet.
  // Send the job to a remote worker in the background,
  // but use the result from the local worker - it will be faster.
-
-
  let promise = this.addCall(method, [...args, true]);
-
  if (promise) {
  promise.then(() => {
  this.warmWorkers++;
-
  if (this.warmWorkers >= this.workers.size) {
  this.emit('warmedup');
  }
  }).catch(() => {});
  }
  }
-
  shouldStartRemoteWorkers() {
  return this.options.maxConcurrentWorkers > 0 || !this.options.useLocalWorker;
  }
-
- createHandle(method) {
- return (...args) => {
+ createHandle(method, useMainThread = false) {
+ if (!this.options.useLocalWorker) {
+ useMainThread = false;
+ }
+ return async (...args) => {
  // Child process workers are slow to start (~600ms).
  // While we're waiting, just run on the main thread.
  // This significantly speeds up startup time.
- if (this.shouldUseRemoteWorkers()) {
+ if (this.shouldUseRemoteWorkers() && !useMainThread) {
  return this.addCall(method, [...args, false]);
  } else {
  if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
  this.warmupWorker(method, args);
  }
-
- let processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
+ let processedArgs;
+ if (!useMainThread) {
+ processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
+ } else {
+ processedArgs = args;
+ }
+ if (this.localWorkerInit != null) {
+ await this.localWorkerInit;
+ this.localWorkerInit = null;
+ }
  return this.localWorker[method](this.workerApi, ...processedArgs);
  }
  };
  }
-
  onError(error, worker) {
  // Handle ipc errors
  if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
@@ -287,66 +218,71 @@ class WorkerFarm extends _events().default {
  _logger().default.error(error, '@parcel/workers');
  }
  }
-
  startChild() {
- let worker = new (_Worker().default)({
+ let worker = new _Worker.default({
  forcedKillTime: this.options.forcedKillTime,
  backend: this.options.backend,
  shouldPatchConsole: this.options.shouldPatchConsole,
+ shouldTrace: this.options.shouldTrace,
  sharedReferences: this.sharedReferences
  });
  worker.fork((0, _nullthrows().default)(this.options.workerPath));
  worker.on('request', data => this.processRequest(data, worker));
- worker.on('ready', () => this.processQueue());
+ worker.on('ready', () => {
+ this.readyWorkers++;
+ if (this.readyWorkers === this.options.maxConcurrentWorkers) {
+ this.emit('ready');
+ }
+ this.processQueue();
+ });
  worker.on('response', () => this.processQueue());
  worker.on('error', err => this.onError(err, worker));
  worker.once('exit', () => this.stopWorker(worker));
  this.workers.set(worker.id, worker);
  }
-
  async stopWorker(worker) {
  if (!worker.stopped) {
  this.workers.delete(worker.id);
  worker.isStopping = true;
-
  if (worker.calls.size) {
  for (let call of worker.calls.values()) {
  call.retries++;
  this.callQueue.unshift(call);
  }
  }
-
  worker.calls.clear();
- await worker.stop(); // Process any requests that failed and start a new worker
+ await worker.stop();

+ // Process any requests that failed and start a new worker
  this.processQueue();
  }
  }
-
  processQueue() {
  if (this.ending || !this.callQueue.length) return;
-
  if (this.workers.size < this.options.maxConcurrentWorkers) {
  this.startChild();
  }
-
  let workers = [...this.workers.values()].sort((a, b) => a.calls.size - b.calls.size);
-
  for (let worker of workers) {
  if (!this.callQueue.length) {
  break;
  }
-
  if (!worker.ready || worker.stopped || worker.isStopping) {
  continue;
  }
-
  if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
- worker.call(this.callQueue.shift());
+ this.callWorker(worker, this.callQueue.shift());
  }
  }
  }
-
+ async callWorker(worker, call) {
+ for (let ref of this.sharedReferences.keys()) {
+ if (!worker.sentSharedReferences.has(ref)) {
+ await worker.sendSharedReference(ref, this.getSerializedSharedReference(ref));
+ }
+ }
+ worker.call(call);
+ }
  async processRequest(data, worker) {
  let {
  method,
@@ -357,34 +293,37 @@ class WorkerFarm extends _events().default {
  handle: handleId
  } = data;
  let mod;
-
  if (handleId != null) {
  var _this$handles$get;
-
  mod = (0, _nullthrows().default)((_this$handles$get = this.handles.get(handleId)) === null || _this$handles$get === void 0 ? void 0 : _this$handles$get.fn);
  } else if (location) {
- // $FlowFixMe this must be dynamic
- mod = require(location);
+ // $FlowFixMe
+ if (process.browser) {
+ if (location === '@parcel/workers/src/bus.js') {
+ mod = bus;
+ } else {
+ throw new Error('No dynamic require possible: ' + location);
+ }
+ } else {
+ // $FlowFixMe this must be dynamic
+ mod = require(location);
+ }
  } else {
  throw new Error('Unknown request');
  }
-
  const responseFromContent = content => ({
  idx,
  type: 'response',
  contentType: 'data',
  content
  });
-
  const errorResponseFromError = e => ({
  idx,
  type: 'response',
  contentType: 'error',
  content: (0, _diagnostic().anyToDiagnostic)(e)
  });
-
  let result;
-
  if (method == null) {
  try {
  result = responseFromContent(await mod(...args));
@@ -396,7 +335,6 @@ class WorkerFarm extends _events().default {
  if (mod.__esModule && !mod[method] && mod.default) {
  mod = mod.default;
  }
-
  try {
  // $FlowFixMe
  result = responseFromContent(await mod[method](...args));
@@ -404,7 +342,6 @@ class WorkerFarm extends _events().default {
  result = errorResponseFromError(e);
  }
  }
-
  if (awaitResponse) {
  if (worker) {
  worker.send(result);
@@ -414,17 +351,14 @@ class WorkerFarm extends _events().default {
  diagnostic: result.content
  });
  }
-
  return result.content;
  }
  }
  }
-
  addCall(method, args) {
  if (this.ending) {
  throw new Error('Cannot add a worker call if workerfarm is ending.');
  }
-
  return new Promise((resolve, reject) => {
  this.callQueue.push({
  method,
@@ -436,68 +370,55 @@ class WorkerFarm extends _events().default {
  this.processQueue();
  });
  }
-
  async end() {
  this.ending = true;
  await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
-
  for (let handle of this.handles.values()) {
  handle.dispose();
  }
-
  this.handles = new Map();
  this.sharedReferences = new Map();
  this.sharedReferencesByValue = new Map();
  this.ending = false;
  }
-
  startMaxWorkers() {
  // Starts workers until the maximum is reached
  if (this.workers.size < this.options.maxConcurrentWorkers) {
  let toStart = this.options.maxConcurrentWorkers - this.workers.size;
-
  while (toStart--) {
  this.startChild();
  }
  }
  }
-
  shouldUseRemoteWorkers() {
  return !this.options.useLocalWorker || (this.warmWorkers >= this.workers.size || !this.options.warmWorkers) && this.options.maxConcurrentWorkers > 0;
  }
-
  createReverseHandle(fn) {
- let handle = new (_Handle().default)({
+ let handle = new _Handle.default({
  fn
  });
  this.handles.set(handle.id, handle);
  return handle;
  }
-
- async createSharedReference(value, // An optional, pre-serialized representation of the value to be used
- // in its place.
- buffer) {
+ createSharedReference(value, isCacheable = true) {
  let ref = referenceId++;
  this.sharedReferences.set(ref, value);
  this.sharedReferencesByValue.set(value, ref);
- let toSend = buffer ? buffer.buffer : value;
- let promises = [];
-
- for (let worker of this.workers.values()) {
- if (worker.ready) {
- promises.push(worker.sendSharedReference(ref, toSend));
- }
+ if (!isCacheable) {
+ this.serializedSharedReferences.set(ref, null);
  }
-
- await Promise.all(promises);
  return {
  ref,
  dispose: () => {
  this.sharedReferences.delete(ref);
  this.sharedReferencesByValue.delete(value);
+ this.serializedSharedReferences.delete(ref);
  let promises = [];
-
  for (let worker of this.workers.values()) {
+ if (!worker.sentSharedReferences.has(ref)) {
+ continue;
+ }
+ worker.sentSharedReferences.delete(ref);
  promises.push(new Promise((resolve, reject) => {
  worker.call({
  method: 'deleteSharedReference',
@@ -509,15 +430,27 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  return Promise.all(promises);
  }
  };
  }
+ getSerializedSharedReference(ref) {
+ let cached = this.serializedSharedReferences.get(ref);
+ if (cached) {
+ return cached;
+ }
+ let value = this.sharedReferences.get(ref);
+ let buf = (0, _core().serialize)(value).buffer;

+ // If the reference was created with the isCacheable option set to false,
+ // serializedSharedReferences will contain `null` as the value.
+ if (cached !== null) {
+ this.serializedSharedReferences.set(ref, buf);
+ }
+ return buf;
+ }
  async startProfile() {
  let promises = [];
-
  for (let worker of this.workers.values()) {
  promises.push(new Promise((resolve, reject) => {
  worker.call({
@@ -530,20 +463,16 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
- this.profiler = new (_Profiler().default)();
+ this.profiler = new (_profiler().SamplingProfiler)();
  promises.push(this.profiler.startProfiling());
  await Promise.all(promises);
  }
-
  async endProfile() {
  if (!this.profiler) {
  return;
  }
-
  let promises = [this.profiler.stopProfiling()];
  let names = ['Master'];
-
  for (let worker of this.workers.values()) {
  names.push('Worker ' + worker.id);
  promises.push(new Promise((resolve, reject) => {
@@ -557,30 +486,24 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  var profiles = await Promise.all(promises);
- let trace = new (_Trace().default)();
+ let trace = new (_profiler().Trace)();
  let filename = `profile-${getTimeId()}.trace`;
  let stream = trace.pipe(_fs().default.createWriteStream(filename));
-
  for (let profile of profiles) {
  trace.addCPUProfile(names.shift(), profile);
  }
-
  trace.flush();
  await new Promise(resolve => {
  stream.once('finish', resolve);
  });
-
  _logger().default.info({
  origin: '@parcel/workers',
  message: (0, _diagnostic().md)`Wrote profile to ${filename}`
  });
  }
-
  async callAllWorkers(method, args) {
  let promises = [];
-
  for (let worker of this.workers.values()) {
  promises.push(new Promise((resolve, reject) => {
  worker.call({
@@ -592,14 +515,11 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  promises.push(this.localWorker[method](this.workerApi, ...args));
  await Promise.all(promises);
  }
-
  async takeHeapSnapshot() {
  let snapshotId = getTimeId();
-
  try {
  let snapshotPaths = await Promise.all([...this.workers.values()].map(worker => new Promise((resolve, reject) => {
  worker.call({
@@ -611,7 +531,6 @@ class WorkerFarm extends _events().default {
  skipReadyCheck: true
  });
  })));
-
  _logger().default.info({
  origin: '@parcel/workers',
  message: (0, _diagnostic().md)`Wrote heap snapshots to the following paths:\n${snapshotPaths.join('\n')}`
@@ -623,28 +542,21 @@ class WorkerFarm extends _events().default {
  });
  }
  }
-
  static getNumWorkers() {
- return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.ceil((0, _cpuCount().default)() / 2);
+ return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.min(4, Math.ceil((0, _cpuCount.default)() / 2));
  }
-
  static isWorker() {
- return !!_childState().child;
+ return !!_childState.child;
  }
-
  static getWorkerApi() {
- (0, _assert().default)(_childState().child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
- return _childState().child.workerApi;
+ (0, _assert().default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
+ return _childState.child.workerApi;
  }
-
- static getConcurrentCallsPerWorker() {
- return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 30;
+ static getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
+ return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
  }
-
  }
-
  exports.default = WorkerFarm;
-
  function getTimeId() {
  let now = new Date();
  return String(now.getFullYear()) + String(now.getMonth() + 1).padStart(2, '0') + String(now.getDate()).padStart(2, '0') + '-' + String(now.getHours()).padStart(2, '0') + String(now.getMinutes()).padStart(2, '0') + String(now.getSeconds()).padStart(2, '0');