@parcel/workers 2.8.3 → 2.9.0

package/lib/WorkerFarm.js CHANGED
@@ -10,102 +10,77 @@ Object.defineProperty(exports, "Handle", {
  }
  });
  exports.default = void 0;
-
  function _assert() {
  const data = _interopRequireDefault(require("assert"));
-
  _assert = function () {
  return data;
  };
-
  return data;
  }
-
  function _nullthrows() {
  const data = _interopRequireDefault(require("nullthrows"));
-
  _nullthrows = function () {
  return data;
  };
-
  return data;
  }
-
  function _events() {
  const data = _interopRequireDefault(require("events"));
-
  _events = function () {
  return data;
  };
-
  return data;
  }
-
  function _core() {
  const data = require("@parcel/core");
-
  _core = function () {
  return data;
  };
-
  return data;
  }
-
  function _diagnostic() {
  const data = _interopRequireWildcard(require("@parcel/diagnostic"));
-
  _diagnostic = function () {
  return data;
  };
-
  return data;
  }
-
  var _Worker = _interopRequireDefault(require("./Worker"));
-
  var _cpuCount = _interopRequireDefault(require("./cpuCount"));
-
  var _Handle = _interopRequireDefault(require("./Handle"));
-
  var _childState = require("./childState");
-
  var _backend = require("./backend");
-
- var _Profiler = _interopRequireDefault(require("./Profiler"));
-
- var _Trace = _interopRequireDefault(require("./Trace"));
-
+ function _profiler() {
+ const data = require("@parcel/profiler");
+ _profiler = function () {
+ return data;
+ };
+ return data;
+ }
  function _fs() {
  const data = _interopRequireDefault(require("fs"));
-
  _fs = function () {
  return data;
  };
-
  return data;
  }
-
  function _logger() {
  const data = _interopRequireDefault(require("@parcel/logger"));
-
  _logger = function () {
  return data;
  };
-
  return data;
  }
-
  function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-
  function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
  let referenceId = 1;
+ const DEFAULT_MAX_CONCURRENT_CALLS = 30;
 
  /**
  * workerPath should always be defined inside farmOptions
  */
+
  class WorkerFarm extends _events().default {
  callQueue = [];
  ending = false;
@@ -115,12 +90,11 @@ class WorkerFarm extends _events().default {
  sharedReferences = new Map();
  sharedReferencesByValue = new Map();
  serializedSharedReferences = new Map();
-
  constructor(farmOptions = {}) {
  super();
  this.options = {
  maxConcurrentWorkers: WorkerFarm.getNumWorkers(),
- maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(),
+ maxConcurrentCallsPerWorker: WorkerFarm.getConcurrentCallsPerWorker(farmOptions.shouldTrace ? 1 : DEFAULT_MAX_CONCURRENT_CALLS),
  forcedKillTime: 500,
  warmWorkers: false,
  useLocalWorker: true,
@@ -128,15 +102,16 @@ class WorkerFarm extends _events().default {
  backend: (0, _backend.detectBackend)(),
  ...farmOptions
  };
-
  if (!this.options.workerPath) {
  throw new Error('Please provide a worker path!');
- } // $FlowFixMe this must be dynamic
-
+ }
 
+ // $FlowFixMe this must be dynamic
  this.localWorker = require(this.options.workerPath);
  this.localWorkerInit = this.localWorker.childInit != null ? this.localWorker.childInit() : null;
- this.run = this.createHandle('run'); // Worker thread stdout is by default piped into the process stdout, if there are enough worker
+ this.run = this.createHandle('run');
+
+ // Worker thread stdout is by default piped into the process stdout, if there are enough worker
  // threads to exceed the default listener limit, then anything else piping into stdout will trigger
  // the `MaxListenersExceededWarning`, so we should ensure the max listeners is at least equal to the
  // number of workers + 1 for the main thread.
@@ -144,22 +119,22 @@ class WorkerFarm extends _events().default {
  // Note this can't be fixed easily where other things pipe into stdout - even after starting > 10 worker
  // threads `process.stdout.getMaxListeners()` will still return 10, however adding another pipe into `stdout`
  // will give the warning with `<worker count + 1>` as the number of listeners.
-
  process.stdout.setMaxListeners(Math.max(process.stdout.getMaxListeners(), WorkerFarm.getNumWorkers() + 1));
  this.startMaxWorkers();
  }
-
  workerApi = {
  callMaster: async (request, awaitResponse = true) => {
  // $FlowFixMe
- let result = await this.processRequest({ ...request,
+ let result = await this.processRequest({
+ ...request,
  awaitResponse
  });
  return (0, _core().deserialize)((0, _core().serialize)(result));
  },
  createReverseHandle: fn => this.createReverseHandle(fn),
  callChild: (childId, request) => new Promise((resolve, reject) => {
- (0, _nullthrows().default)(this.workers.get(childId)).call({ ...request,
+ (0, _nullthrows().default)(this.workers.get(childId)).call({
+ ...request,
  resolve,
  reject,
  retries: 0
@@ -172,34 +147,32 @@ class WorkerFarm extends _events().default {
  getSharedReference: ref => this.sharedReferences.get(ref),
  resolveSharedReference: value => this.sharedReferencesByValue.get(value)
  };
-
  warmupWorker(method, args) {
  // Workers are already stopping
  if (this.ending) {
  return;
- } // Workers are not warmed up yet.
+ }
+
+ // Workers are not warmed up yet.
  // Send the job to a remote worker in the background,
  // but use the result from the local worker - it will be faster.
-
-
  let promise = this.addCall(method, [...args, true]);
-
  if (promise) {
  promise.then(() => {
  this.warmWorkers++;
-
  if (this.warmWorkers >= this.workers.size) {
  this.emit('warmedup');
  }
  }).catch(() => {});
  }
  }
-
  shouldStartRemoteWorkers() {
  return this.options.maxConcurrentWorkers > 0 || !this.options.useLocalWorker;
  }
-
  createHandle(method, useMainThread = false) {
+ if (!this.options.useLocalWorker) {
+ useMainThread = false;
+ }
  return async (...args) => {
  // Child process workers are slow to start (~600ms).
  // While we're waiting, just run on the main thread.
@@ -210,25 +183,20 @@ class WorkerFarm extends _events().default {
  if (this.options.warmWorkers && this.shouldStartRemoteWorkers()) {
  this.warmupWorker(method, args);
  }
-
  let processedArgs;
-
  if (!useMainThread) {
  processedArgs = (0, _core().restoreDeserializedObject)((0, _core().prepareForSerialization)([...args, false]));
  } else {
  processedArgs = args;
  }
-
  if (this.localWorkerInit != null) {
  await this.localWorkerInit;
  this.localWorkerInit = null;
  }
-
  return this.localWorker[method](this.workerApi, ...processedArgs);
  }
  };
  }
-
  onError(error, worker) {
  // Handle ipc errors
  if (error.code === 'ERR_IPC_CHANNEL_CLOSED') {
@@ -237,12 +205,12 @@ class WorkerFarm extends _events().default {
  _logger().default.error(error, '@parcel/workers');
  }
  }
-
  startChild() {
  let worker = new _Worker.default({
  forcedKillTime: this.options.forcedKillTime,
  backend: this.options.backend,
  shouldPatchConsole: this.options.shouldPatchConsole,
+ shouldTrace: this.options.shouldTrace,
  sharedReferences: this.sharedReferences
  });
  worker.fork((0, _nullthrows().default)(this.options.workerPath));
@@ -253,60 +221,49 @@ class WorkerFarm extends _events().default {
  worker.once('exit', () => this.stopWorker(worker));
  this.workers.set(worker.id, worker);
  }
-
  async stopWorker(worker) {
  if (!worker.stopped) {
  this.workers.delete(worker.id);
  worker.isStopping = true;
-
  if (worker.calls.size) {
  for (let call of worker.calls.values()) {
  call.retries++;
  this.callQueue.unshift(call);
  }
  }
-
  worker.calls.clear();
- await worker.stop(); // Process any requests that failed and start a new worker
+ await worker.stop();
 
+ // Process any requests that failed and start a new worker
  this.processQueue();
  }
  }
-
  processQueue() {
  if (this.ending || !this.callQueue.length) return;
-
  if (this.workers.size < this.options.maxConcurrentWorkers) {
  this.startChild();
  }
-
  let workers = [...this.workers.values()].sort((a, b) => a.calls.size - b.calls.size);
-
  for (let worker of workers) {
  if (!this.callQueue.length) {
  break;
  }
-
  if (!worker.ready || worker.stopped || worker.isStopping) {
  continue;
  }
-
  if (worker.calls.size < this.options.maxConcurrentCallsPerWorker) {
  this.callWorker(worker, this.callQueue.shift());
  }
  }
  }
-
  async callWorker(worker, call) {
  for (let ref of this.sharedReferences.keys()) {
  if (!worker.sentSharedReferences.has(ref)) {
  await worker.sendSharedReference(ref, this.getSerializedSharedReference(ref));
  }
  }
-
  worker.call(call);
  }
-
  async processRequest(data, worker) {
  let {
  method,
@@ -317,10 +274,8 @@ class WorkerFarm extends _events().default {
  handle: handleId
  } = data;
  let mod;
-
  if (handleId != null) {
  var _this$handles$get;
-
  mod = (0, _nullthrows().default)((_this$handles$get = this.handles.get(handleId)) === null || _this$handles$get === void 0 ? void 0 : _this$handles$get.fn);
  } else if (location) {
  // $FlowFixMe this must be dynamic
@@ -328,23 +283,19 @@ class WorkerFarm extends _events().default {
  } else {
  throw new Error('Unknown request');
  }
-
  const responseFromContent = content => ({
  idx,
  type: 'response',
  contentType: 'data',
  content
  });
-
  const errorResponseFromError = e => ({
  idx,
  type: 'response',
  contentType: 'error',
  content: (0, _diagnostic().anyToDiagnostic)(e)
  });
-
  let result;
-
  if (method == null) {
  try {
  result = responseFromContent(await mod(...args));
@@ -356,7 +307,6 @@ class WorkerFarm extends _events().default {
  if (mod.__esModule && !mod[method] && mod.default) {
  mod = mod.default;
  }
-
  try {
  // $FlowFixMe
  result = responseFromContent(await mod[method](...args));
@@ -364,7 +314,6 @@ class WorkerFarm extends _events().default {
  result = errorResponseFromError(e);
  }
  }
-
  if (awaitResponse) {
  if (worker) {
  worker.send(result);
@@ -374,17 +323,14 @@ class WorkerFarm extends _events().default {
  diagnostic: result.content
  });
  }
-
  return result.content;
  }
  }
  }
-
  addCall(method, args) {
  if (this.ending) {
  throw new Error('Cannot add a worker call if workerfarm is ending.');
  }
-
  return new Promise((resolve, reject) => {
  this.callQueue.push({
  method,
@@ -396,36 +342,29 @@ class WorkerFarm extends _events().default {
  this.processQueue();
  });
  }
-
  async end() {
  this.ending = true;
  await Promise.all(Array.from(this.workers.values()).map(worker => this.stopWorker(worker)));
-
  for (let handle of this.handles.values()) {
  handle.dispose();
  }
-
  this.handles = new Map();
  this.sharedReferences = new Map();
  this.sharedReferencesByValue = new Map();
  this.ending = false;
  }
-
  startMaxWorkers() {
  // Starts workers until the maximum is reached
  if (this.workers.size < this.options.maxConcurrentWorkers) {
  let toStart = this.options.maxConcurrentWorkers - this.workers.size;
-
  while (toStart--) {
  this.startChild();
  }
  }
  }
-
  shouldUseRemoteWorkers() {
  return !this.options.useLocalWorker || (this.warmWorkers >= this.workers.size || !this.options.warmWorkers) && this.options.maxConcurrentWorkers > 0;
  }
-
  createReverseHandle(fn) {
  let handle = new _Handle.default({
  fn
@@ -433,16 +372,13 @@ class WorkerFarm extends _events().default {
  this.handles.set(handle.id, handle);
  return handle;
  }
-
  createSharedReference(value, isCacheable = true) {
  let ref = referenceId++;
  this.sharedReferences.set(ref, value);
  this.sharedReferencesByValue.set(value, ref);
-
  if (!isCacheable) {
  this.serializedSharedReferences.set(ref, null);
  }
-
  return {
  ref,
  dispose: () => {
@@ -450,12 +386,10 @@ class WorkerFarm extends _events().default {
  this.sharedReferencesByValue.delete(value);
  this.serializedSharedReferences.delete(ref);
  let promises = [];
-
  for (let worker of this.workers.values()) {
  if (!worker.sentSharedReferences.has(ref)) {
  continue;
  }
-
  worker.sentSharedReferences.delete(ref);
  promises.push(new Promise((resolve, reject) => {
  worker.call({
@@ -468,33 +402,27 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  return Promise.all(promises);
  }
  };
  }
-
  getSerializedSharedReference(ref) {
  let cached = this.serializedSharedReferences.get(ref);
-
  if (cached) {
  return cached;
  }
-
  let value = this.sharedReferences.get(ref);
- let buf = (0, _core().serialize)(value).buffer; // If the reference was created with the isCacheable option set to false,
- // serializedSharedReferences will contain `null` as the value.
+ let buf = (0, _core().serialize)(value).buffer;
 
+ // If the reference was created with the isCacheable option set to false,
+ // serializedSharedReferences will contain `null` as the value.
  if (cached !== null) {
  this.serializedSharedReferences.set(ref, buf);
  }
-
  return buf;
  }
-
  async startProfile() {
  let promises = [];
-
  for (let worker of this.workers.values()) {
  promises.push(new Promise((resolve, reject) => {
  worker.call({
@@ -507,20 +435,16 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
- this.profiler = new _Profiler.default();
+ this.profiler = new (_profiler().SamplingProfiler)();
  promises.push(this.profiler.startProfiling());
  await Promise.all(promises);
  }
-
  async endProfile() {
  if (!this.profiler) {
  return;
  }
-
  let promises = [this.profiler.stopProfiling()];
  let names = ['Master'];
-
  for (let worker of this.workers.values()) {
  names.push('Worker ' + worker.id);
  promises.push(new Promise((resolve, reject) => {
@@ -534,30 +458,24 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  var profiles = await Promise.all(promises);
- let trace = new _Trace.default();
+ let trace = new (_profiler().Trace)();
  let filename = `profile-${getTimeId()}.trace`;
  let stream = trace.pipe(_fs().default.createWriteStream(filename));
-
  for (let profile of profiles) {
  trace.addCPUProfile(names.shift(), profile);
  }
-
  trace.flush();
  await new Promise(resolve => {
  stream.once('finish', resolve);
  });
-
  _logger().default.info({
  origin: '@parcel/workers',
  message: (0, _diagnostic().md)`Wrote profile to ${filename}`
  });
  }
-
  async callAllWorkers(method, args) {
  let promises = [];
-
  for (let worker of this.workers.values()) {
  promises.push(new Promise((resolve, reject) => {
  worker.call({
@@ -569,14 +487,11 @@ class WorkerFarm extends _events().default {
  });
  }));
  }
-
  promises.push(this.localWorker[method](this.workerApi, ...args));
  await Promise.all(promises);
  }
-
  async takeHeapSnapshot() {
  let snapshotId = getTimeId();
-
  try {
  let snapshotPaths = await Promise.all([...this.workers.values()].map(worker => new Promise((resolve, reject) => {
  worker.call({
@@ -588,7 +503,6 @@ class WorkerFarm extends _events().default {
  skipReadyCheck: true
  });
  })));
-
  _logger().default.info({
  origin: '@parcel/workers',
  message: (0, _diagnostic().md)`Wrote heap snapshots to the following paths:\n${snapshotPaths.join('\n')}`
@@ -600,28 +514,21 @@ class WorkerFarm extends _events().default {
  });
  }
  }
-
  static getNumWorkers() {
  return process.env.PARCEL_WORKERS ? parseInt(process.env.PARCEL_WORKERS, 10) : Math.ceil((0, _cpuCount.default)() / 2);
  }
-
  static isWorker() {
  return !!_childState.child;
  }
-
  static getWorkerApi() {
  (0, _assert().default)(_childState.child != null, 'WorkerFarm.getWorkerApi can only be called within workers');
  return _childState.child.workerApi;
  }
-
- static getConcurrentCallsPerWorker() {
- return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || 30;
+ static getConcurrentCallsPerWorker(defaultValue = DEFAULT_MAX_CONCURRENT_CALLS) {
+ return parseInt(process.env.PARCEL_MAX_CONCURRENT_CALLS, 10) || defaultValue;
  }
-
  }
-
  exports.default = WorkerFarm;
-
  function getTimeId() {
  let now = new Date();
  return String(now.getFullYear()) + String(now.getMonth() + 1).padStart(2, '0') + String(now.getDate()).padStart(2, '0') + '-' + String(now.getHours()).padStart(2, '0') + String(now.getMinutes()).padStart(2, '0') + String(now.getSeconds()).padStart(2, '0');
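Note on the WorkerFarm.js changes above: profiling now uses SamplingProfiler and Trace from @parcel/profiler instead of the bundled ./Profiler and ./Trace modules, a new shouldTrace option is forwarded to each worker, and getConcurrentCallsPerWorker now accepts a default value (1 when tracing, otherwise DEFAULT_MAX_CONCURRENT_CALLS = 30). A minimal sketch of the concurrency behaviour, assuming a hypothetical worker module (./my-worker.js) that exports the run/childInit shape the farm expects:

const WorkerFarm = require('@parcel/workers/lib/WorkerFarm').default;

// Hypothetical worker module path; not part of this package.
const farm = new WorkerFarm({
  workerPath: require.resolve('./my-worker.js'),
  shouldTrace: true
});

// With shouldTrace the constructor passes 1 as the default, so unless
// PARCEL_MAX_CONCURRENT_CALLS is set, each worker handles one call at a time:
WorkerFarm.getConcurrentCallsPerWorker(1); // => 1
// Without shouldTrace, the previous default of 30 still applies:
WorkerFarm.getConcurrentCallsPerWorker(); // => 30, or PARCEL_MAX_CONCURRENT_CALLS if set

// farm.run(...) proxies to the worker module's exported run() via createHandle('run').
farm.end(); // returns a Promise; shuts all workers down when finished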
package/lib/backend.js CHANGED
@@ -5,31 +5,25 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.detectBackend = detectBackend;
  exports.getWorkerBackend = getWorkerBackend;
-
  function detectBackend() {
  switch (process.env.PARCEL_WORKER_BACKEND) {
  case 'threads':
  case 'process':
  return process.env.PARCEL_WORKER_BACKEND;
  }
-
  try {
  require('worker_threads');
-
  return 'threads';
  } catch (err) {
  return 'process';
  }
  }
-
  function getWorkerBackend(backend) {
  switch (backend) {
  case 'threads':
  return require('./threads/ThreadsWorker').default;
-
  case 'process':
  return require('./process/ProcessWorker').default;
-
  default:
  throw new Error(`Invalid backend: ${backend}`);
  }
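The backend.js changes above are formatting-only; selection still works as shown: PARCEL_WORKER_BACKEND forces 'threads' or 'process', otherwise detectBackend() probes for worker_threads and falls back to 'process'. A small sketch, assuming the compiled module is required directly:

process.env.PARCEL_WORKER_BACKEND = 'process'; // force child-process workers
const { detectBackend, getWorkerBackend } = require('@parcel/workers/lib/backend');

detectBackend(); // => 'process' (env override); otherwise 'threads' where worker_threads is available
const WorkerBackend = getWorkerBackend(detectBackend()); // ProcessWorker or ThreadsWorker class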
package/lib/bus.js CHANGED
@@ -4,21 +4,15 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = void 0;
-
  function _events() {
  const data = _interopRequireDefault(require("events"));
-
  _events = function () {
  return data;
  };
-
  return data;
  }
-
  var _childState = require("./childState");
-
  function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
  class Bus extends _events().default {
  emit(event, ...args) {
  if (_childState.child) {
@@ -27,15 +21,11 @@ class Bus extends _events().default {
  method: 'emit',
  args: [event, ...args]
  }, false);
-
  return true;
  } else {
  return super.emit(event, ...args);
  }
  }
-
  }
-
  var _default = new Bus();
-
  exports.default = _default;
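The bus.js changes are likewise formatting-only. The bus remains a singleton EventEmitter whose emit() is proxied to the main process when called inside a worker (via childState.child.workerApi.callMaster), so listeners registered in the main process also receive events emitted from workers. A small sketch, assuming the compiled module is required directly:

const bus = require('@parcel/workers/lib/bus').default;

// In the main process: listen for events forwarded from workers.
bus.on('report', payload => console.log('worker said:', payload));

// In a worker: this call is forwarded to the main process instead of emitting locally.
bus.emit('report', { ok: true });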