@parcel/workers 2.8.4-nightly.0 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/WorkerFarm.js CHANGED
@@ -10,108 +10,77 @@ Object.defineProperty(exports, "Handle", {
   }
 });
 exports.default = void 0;
-
 function _assert() {
   const data = _interopRequireDefault(require("assert"));
-
   _assert = function () {
     return data;
   };
-
   return data;
 }
-
 function _nullthrows() {
   const data = _interopRequireDefault(require("nullthrows"));
-
   _nullthrows = function () {
     return data;
   };
-
   return data;
 }
-
 function _events() {
   const data = _interopRequireDefault(require("events"));
-
   _events = function () {
     return data;
   };
-
   return data;
 }
-
 function _core() {
   const data = require("@parcel/core");
-
   _core = function () {
     return data;
   };
-
   return data;
 }
-
 function _diagnostic() {
   const data = _interopRequireWildcard(require("@parcel/diagnostic"));
-
   _diagnostic = function () {
     return data;
   };
-
   return data;
 }
-
 var _Worker = _interopRequireDefault(require("./Worker"));
-
 var _cpuCount = _interopRequireDefault(require("./cpuCount"));
-
 var _Handle = _interopRequireDefault(require("./Handle"));
-
 var _childState = require("./childState");
-
 var _backend = require("./backend");
-
 function _profiler() {
   const data = require("@parcel/profiler");
-
   _profiler = function () {
     return data;
   };
-
   return data;
 }
-
 function _fs() {
   const data = _interopRequireDefault(require("fs"));
-
   _fs = function () {
     return data;
   };
-
   return data;
 }
-
 function _logger() {
   const data = _interopRequireDefault(require("@parcel/logger"));
-
   _logger = function () {
     return data;
   };
-
   return data;
 }
-
 function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-
 function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
 let referenceId = 1;
+const DEFAULT_MAX_CONCURRENT_CALLS = 30;

 /**
  * workerPath should always be defined inside farmOptions
  */
+
 class WorkerFarm extends _events().default {
   callQueue = [];
   ending = false;
@@ -121,12 +90,11 @@ class WorkerFarm extends _events().default {
   sharedReferences = new Map();
   sharedReferencesByValue = new Map();
   serializedSharedReferences = new Map();
-
   constructor(farmOptions = {}) {
     super();
     this.options = {
       maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
-      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+      maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS),
       forcedKillTime: 500,
       warmWorkers: false,
       useLocalWorker: true,
@@ -134,15 +102,16 @@ class WorkerFarm extends _events().default {
       backend: (0, _backend.detectBackend)(),
       ...farmOptions
     };
-
     if (!this.options.workerPath) {
       throw new Error('Please provide a worker path!');
-    } // $FlowFixMe this must be dynamic
-
+    }

+    // $FlowFixMe this must be dynamic
     this.localWorker = require(this.options.workerPath);
     this.localWorkerInit = this.localWorker.childInit != null ? this.localWorker.childInit() : null;
-    this.run = this.createHandle('run'); // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+    this.run = this.createHandle('run');
+
+    // Worker thread stdout is by default piped into the process stdout, if there are enough worker
     // threads to exceed the default listener limit, then anything else piping into stdout will trigger
     // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
     // number of workers + 1 for the main thread.
@@ -150,22 +119,22 @@ class WorkerFarm extends _events().default {
     // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
     // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
     // will give the warning with `<worker count + 1>` as the number of listeners.
-
     process.stdout.setMaxListeners(Math.max(process.stdout.getMaxListeners(), WorkerFarm.getNumWorkers() + 1));
     this.startMaxWorkers();
   }
-
   workerApi = {
     callMaster: async (request, awaitResponse = true) => {
       // $FlowFixMe
-      let result = await this.processRequest({ ...request,
+      let result = await this.processRequest({
+        ...request,
         awaitResponse
       });
       return (0, _core().deserialize)((0, _core().serialize)(result));
     },
     createReverseHandle: fn => this.createReverseHandle(fn),
     callChild: (childId, request) => new Promise((resolve, reject) => {
-      (0, _nullthrows().default)(this.workers.get(childId)).call({ ...request,
+      (0, _nullthrows().default)(this.workers.get(childId)).call({
+        ...request,
         resolve,
         reject,
         retries: 0
@@ -178,38 +147,32 @@ class WorkerFarm extends _events().default {
     getSharedReference: ref => this.sharedReferences.get(ref),
     resolveSharedReference: value => this.sharedReferencesByValue.get(value)
   };
-
   warmupWorker(method, args) {
     // Workers are already stopping
    if (this.ending) {
       return;
-    } // Workers are not warmed up yet.
+    }
+
+    // Workers are not warmed up yet.
     // Send the job to a remote worker in the background,
     // but use the result from the local worker - it will be faster.
-
-
     let promise = this.addCall(method, [...args, true]);
-
     if (promise) {
       promise.then(() => {
         this.warmWorkers++;
-
         if (this.warmWorkers >= this.workers.size) {
           this.emit('warmedup');
         }
       }).catch(() => {});
     }
   }
-
   shouldStartRemoteWorkers() {
     return this.options.maxConcurrentWorkers > 0 || !this.options.useLocalWorker;
   }
-
   createHandle(method, useMainThread = false) {
     if (!this.options.useLocalWorker) {
       useMainThread = false;
     }
-
     return async (...args) => {
       // Child process workers are slow to start (~600ms).
       // While we're waiting, just run on the main thread.
@@ -220,25 +183,20 @@ class WorkerFarm extends _events().default {
       if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
         this.warmupWorker(method, args);
       }
-
       let processedArgs;
-
       if (!useMainThread) {
         processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
       } else {
         processedArgs = args;
       }
-
       if (this.localWorkerInit != null) {
         await this.localWorkerInit;
         this.localWorkerInit = null;
       }
-
       return this.localWorker[method](this.workerApi, ...processedArgs);
     }
    };
  }
-
   onError(error, worker) {
     // Handle ipc errors
     if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
@@ -247,7 +205,6 @@ class WorkerFarm extends _events().default {
       _logger().default.error(error, '@parcel/workers');
     }
   }
-
   startChild() {
     let worker = new _Worker.default({
       forcedKillTime: this.options.forcedKillTime,
@@ -264,60 +221,49 @@ class WorkerFarm extends _events().default {
     worker.once('exit', () => this.stopWorker(worker));
     this.workers.set(worker.id, worker);
   }
-
   async stopWorker(worker) {
     if (!worker.stopped) {
       this.workers.delete(worker.id);
       worker.isStopping = true;
-
       if (worker.calls.size) {
         for (let call of worker.calls.values()) {
           call.retries++;
           this.callQueue.unshift(call);
         }
       }
-
       worker.calls.clear();
-      await worker.stop(); // Process any requests that failed and start a new worker
+      await worker.stop();

+      // Process any requests that failed and start a new worker
       this.processQueue();
     }
   }
-
   processQueue() {
     if (this.ending || !this.callQueue.length) return;
-
     if (this.workers.size < this.options.maxConcurrentWorkers) {
       this.startChild();
     }
-
     let workers = [...this.workers.values()].sort((a, b) => a.calls.size - b.calls.size);
-
     for (let worker of workers) {
       if (!this.callQueue.length) {
         break;
       }
-
       if (!worker.ready || worker.stopped || worker.isStopping) {
         continue;
       }
-
       if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
         this.callWorker(worker, this.callQueue.shift());
       }
     }
   }
-
   async callWorker(worker, call) {
     for (let ref of this.sharedReferences.keys()) {
       if (!worker.sentSharedReferences.has(ref)) {
         await worker.sendSharedReference(ref, this.getSerializedSharedReference(ref));
       }
     }
-
     worker.call(call);
   }
-
   async processRequest(data, worker) {
     let {
       method,
@@ -328,10 +274,8 @@ class WorkerFarm extends _events().default {
       handle: handleId
     } = data;
     let mod;
-
     if (handleId != null) {
       var _this$handles$get;
-
       mod = (0, _nullthrows().default)((_this$handles$get = this.handles.get(handleId)) === null || _this$handles$get === void 0 ? void 0 : _this$handles$get.fn);
     } else if (location) {
       // $FlowFixMe this must be dynamic
@@ -339,23 +283,19 @@ class WorkerFarm extends _events().default {
     } else {
       throw new Error('Unknown request');
     }
-
     const responseFromContent = content => ({
       idx,
       type: 'response',
       contentType: 'data',
       content
     });
-
     const errorResponseFromError = e => ({
       idx,
       type: 'response',
       contentType: 'error',
       content: (0, _diagnostic().anyToDiagnostic)(e)
     });
-
     let result;
-
     if (method == null) {
       try {
         result = responseFromContent(await mod(...args));
@@ -367,7 +307,6 @@ class WorkerFarm extends _events().default {
       if (mod.__esModule && !mod[method] && mod.default) {
         mod = mod.default;
       }
-
       try {
         // $FlowFixMe
         result = responseFromContent(await mod[method](...args));
@@ -375,7 +314,6 @@ class WorkerFarm extends _events().default {
         result = errorResponseFromError(e);
       }
     }
-
     if (awaitResponse) {
       if (worker) {
         worker.send(result);
@@ -385,17 +323,14 @@ class WorkerFarm extends _events().default {
             diagnostic: result.content
           });
         }
-
         return result.content;
       }
     }
   }
-
   addCall(method, args) {
     if (this.ending) {
       throw new Error('Cannot add a worker call if workerfarm is ending.');
     }
-
     return new Promise((resolve, reject) => {
       this.callQueue.push({
         method,
@@ -407,36 +342,29 @@ class WorkerFarm extends _events().default {
       this.processQueue();
     });
   }
-
   async end() {
     this.ending = true;
     await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
-
     for (let handle of this.handles.values()) {
       handle.dispose();
     }
-
     this.handles = new Map();
     this.sharedReferences = new Map();
     this.sharedReferencesByValue = new Map();
     this.ending = false;
   }
-
   startMaxWorkers() {
     // Starts workers until the maximum is reached
     if (this.workers.size < this.options.maxConcurrentWorkers) {
       let toStart = this.options.maxConcurrentWorkers - this.workers.size;
-
       while (toStart--) {
         this.startChild();
       }
     }
   }
-
   shouldUseRemoteWorkers() {
     return !this.options.useLocalWorker || (this.warmWorkers >= this.workers.size || !this.options.warmWorkers) && this.options.maxConcurrentWorkers > 0;
   }
-
   createReverseHandle(fn) {
     let handle = new _Handle.default({
       fn
@@ -444,16 +372,13 @@ class WorkerFarm extends _events().default {
     this.handles.set(handle.id, handle);
     return handle;
   }
-
   createSharedReference(value, isCacheable = true) {
     let ref = referenceId++;
     this.sharedReferences.set(ref, value);
     this.sharedReferencesByValue.set(value, ref);
-
     if (!isCacheable) {
       this.serializedSharedReferences.set(ref, null);
     }
-
     return {
       ref,
       dispose: () => {
@@ -461,12 +386,10 @@ class WorkerFarm extends _events().default {
         this.sharedReferencesByValue.delete(value);
         this.serializedSharedReferences.delete(ref);
         let promises = [];
-
         for (let worker of this.workers.values()) {
           if (!worker.sentSharedReferences.has(ref)) {
             continue;
           }
-
           worker.sentSharedReferences.delete(ref);
           promises.push(new Promise((resolve, reject) => {
             worker.call({
@@ -479,33 +402,27 @@ class WorkerFarm extends _events().default {
             });
           }));
         }
-
         return Promise.all(promises);
       }
     };
   }
-
   getSerializedSharedReference(ref) {
     let cached = this.serializedSharedReferences.get(ref);
-
     if (cached) {
       return cached;
     }
-
     let value = this.sharedReferences.get(ref);
-    let buf = (0, _core().serialize)(value).buffer; // If the reference was created with the isCacheable option set to false,
-    // serializedSharedReferences will contain `null` as the value.
+    let buf = (0, _core().serialize)(value).buffer;

+    // If the reference was created with the isCacheable option set to false,
+    // serializedSharedReferences will contain `null` as the value.
     if (cached !== null) {
       this.serializedSharedReferences.set(ref, buf);
     }
-
     return buf;
   }
-
   async startProfile() {
     let promises = [];
-
     for (let worker of this.workers.values()) {
       promises.push(new Promise((resolve, reject) => {
         worker.call({
@@ -518,20 +435,16 @@ class WorkerFarm extends _events().default {
         });
       }));
     }
-
     this.profiler = new (_profiler().SamplingProfiler)();
     promises.push(this.profiler.startProfiling());
     await Promise.all(promises);
   }
-
   async endProfile() {
     if (!this.profiler) {
       return;
     }
-
     let promises = [this.profiler.stopProfiling()];
     let names = ['Master'];
-
     for (let worker of this.workers.values()) {
       names.push('Worker ' + worker.id);
       promises.push(new Promise((resolve, reject) => {
@@ -545,30 +458,24 @@ class WorkerFarm extends _events().default {
         });
       }));
     }
-
     var profiles = await Promise.all(promises);
     let trace = new (_profiler().Trace)();
     let filename = `profile-${getTimeId()}.trace`;
     let stream = trace.pipe(_fs().default.createWriteStream(filename));
-
     for (let profile of profiles) {
       trace.addCPUProfile(names.shift(), profile);
     }
-
     trace.flush();
     await new Promise(resolve => {
       stream.once('finish', resolve);
     });
-
     _logger().default.info({
       origin: '@parcel/workers',
       message: (0, _diagnostic().md)`Wrote profile to ${filename}`
     });
   }
-
   async callAllWorkers(method, args) {
     let promises = [];
-
     for (let worker of this.workers.values()) {
       promises.push(new Promise((resolve, reject) => {
         worker.call({
@@ -580,14 +487,11 @@ class WorkerFarm extends _events().default {
         });
       }));
     }
-
     promises.push(this.localWorker[method](this.workerApi, ...args));
     await Promise.all(promises);
   }
-
   async takeHeapSnapshot() {
     let snapshotId = getTimeId();
-
     try {
       let snapshotPaths = await Promise.all([...this.workers.values()].map(worker => new Promise((resolve, reject) => {
         worker.call({
@@ -599,7 +503,6 @@ class WorkerFarm extends _events().default {
           skipReadyCheck: true
         });
       })));
-
       _logger().default.info({
         origin: '@parcel/workers',
         message: (0, _diagnostic().md)`Wrote heap snapshots to the following paths:\n${snapshotPaths.join('\n')}`
@@ -611,28 +514,21 @@ class WorkerFarm extends _events().default {
       });
     }
   }
-
   static getNumWorkers() {
     return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.ceil((0, _cpuCount.default)() / 2);
   }
-
   static isWorker() {
     return !!_childState.child;
   }
-
   static getWorkerApi() {
     (0, _assert().default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
     return _childState.child.workerApi;
   }
-
-  static getConcurrentCallsPerWorker() {
-    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 30;
+  static getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
+    return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
   }
-
 }
-
 exports.default = WorkerFarm;
-
 function getTimeId() {
   let now = new Date();
   return String(now.getFullYear()) + String(now.getMonth() + 1).padStart(2, '0') + String(now.getDate()).padStart(2, '0') + '-' + String(now.getHours()).padStart(2, '0') + String(now.getMinutes()).padStart(2, '0') + String(now.getSeconds()).padStart(2, '0');
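
Editor's note: the only behavioral change in WorkerFarm.js is the per-worker concurrency default. With `farmOptions.shouldTrace` set, `maxConcurrentCallsPerWorker` now defaults to 1 instead of `DEFAULT_MAX_CONCURRENT_CALLS` (30); `PARCEL_MAX_CONCURRENT_CALLS` still overrides either default when it parses to a number. A minimal sketch, assuming the package's default export is this WorkerFarm class and that `./my-worker.js` is a hypothetical worker module:

    // Sketch only; './my-worker.js' is a placeholder, not part of this package.
    const WorkerFarm = require('@parcel/workers').default;

    // 2.9.0 default: up to DEFAULT_MAX_CONCURRENT_CALLS (30) calls per worker,
    // unless PARCEL_MAX_CONCURRENT_CALLS is set to a number.
    const farm = new WorkerFarm({workerPath: require.resolve('./my-worker.js')});

    // With shouldTrace, the default drops to 1 call per worker;
    // PARCEL_MAX_CONCURRENT_CALLS still takes precedence when set.
    const tracingFarm = new WorkerFarm({
      workerPath: require.resolve('./my-worker.js'),
      shouldTrace: true,
    });

    // Both farms should be shut down when done; end() returns a promise.
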
package/lib/backend.js CHANGED
@@ -5,31 +5,25 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.detectBackend = detectBackend;
 exports.getWorkerBackend = getWorkerBackend;
-
 function detectBackend() {
   switch (process.env.PARCEL_WORKER_BACKEND) {
     case 'threads':
     case 'process':
       return process.env.PARCEL_WORKER_BACKEND;
   }
-
   try {
     require('worker_threads');
-
     return 'threads';
   } catch (err) {
     return 'process';
   }
 }
-
 function getWorkerBackend(backend) {
   switch (backend) {
     case 'threads':
       return require('./threads/ThreadsWorker').default;
-
     case 'process':
       return require('./process/ProcessWorker').default;
-
     default:
       throw new Error(`Invalid backend: ${backend}`);
   }
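
Editor's note: backend.js only loses blank lines here, but the selection logic it shows is worth spelling out: `PARCEL_WORKER_BACKEND` forces `'threads'` or `'process'`, otherwise `'threads'` is chosen whenever `worker_threads` can be required, falling back to `'process'`. A sketch under those rules (the deep `lib/backend` require path is an assumption, shown only to point at this file):

    // Sketch of the selection rules in detectBackend()/getWorkerBackend() above.
    process.env.PARCEL_WORKER_BACKEND = 'process'; // or 'threads'; other values are ignored
    const {detectBackend, getWorkerBackend} = require('@parcel/workers/lib/backend');

    const backend = detectBackend();                 // 'process' here
    const WorkerBackend = getWorkerBackend(backend); // ProcessWorker or ThreadsWorker class
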
package/lib/bus.js CHANGED
@@ -4,21 +4,15 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.default = void 0;
-
 function _events() {
   const data = _interopRequireDefault(require("events"));
-
   _events = function () {
     return data;
   };
-
   return data;
 }
-
 var _childState = require("./childState");
-
 function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
 class Bus extends _events().default {
   emit(event, ...args) {
     if (_childState.child) {
@@ -27,15 +21,11 @@ class Bus extends _events().default {
         method: 'emit',
         args: [event, ...args]
       }, false);
-
       return true;
     } else {
       return super.emit(event, ...args);
     }
   }
-
 }
-
 var _default = new Bus();
-
 exports.default = _default;
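
Editor's note: bus.js is likewise only reformatted. The shared Bus instance behaves as a plain EventEmitter on the main thread and forwards `emit` calls through `workerApi.callMaster` when `_childState.child` is set inside a worker. A sketch of the round trip (assuming the package index re-exports this instance as `bus`; otherwise require `@parcel/workers/lib/bus` directly — the event name is made up):

    // Main process: subscribe on the shared bus like any EventEmitter.
    const {bus} = require('@parcel/workers');
    bus.on('status', payload => console.log('worker said:', payload));

    // Worker code: the same emit() call is forwarded to the main process via
    // callMaster when running in a child, and handled locally otherwise.
    bus.emit('status', {step: 'transform', done: true});
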