@platformatic/runtime 3.13.1 → 3.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/runtime.js CHANGED
@@ -16,7 +16,6 @@ import { existsSync } from 'node:fs'
  import { readFile } from 'node:fs/promises'
  import { STATUS_CODES } from 'node:http'
  import { createRequire } from 'node:module'
- import os from 'node:os'
  import { dirname, isAbsolute, join } from 'node:path'
  import { setImmediate as immediate, setTimeout as sleep } from 'node:timers/promises'
  import { pathToFileURL } from 'node:url'
@@ -25,29 +24,30 @@ import SonicBoom from 'sonic-boom'
  import { Agent, request, interceptors as undiciInterceptors } from 'undici'
  import { createThreadInterceptor } from 'undici-thread-interceptor'
  import { pprofCapturePreloadPath } from './config.js'
+ import { DynamicWorkersScaler } from './dynamic-workers-scaler.js'
  import {
  ApplicationAlreadyStartedError,
  ApplicationNotFoundError,
  ApplicationNotStartedError,
  ApplicationStartTimeoutError,
+ CannotRemoveEntrypointError,
+ GetHeapStatisticUnavailable,
  InvalidArgumentError,
  MessagingError,
  MissingEntrypointError,
  MissingPprofCapture,
  RuntimeAbortedError,
  RuntimeExitedError,
- WorkerNotFoundError,
- GetHeapStatisticUnavailable
+ WorkerNotFoundError
  } from './errors.js'
  import { abstractLogger, createLogger } from './logger.js'
  import { startManagementApi } from './management-api.js'
- import { getMemoryInfo } from './metrics.js'
  import { createChannelCreationHook } from './policies.js'
  import { startPrometheusServer } from './prom-server.js'
- import ScalingAlgorithm from './scaling-algorithm.js'
  import { startScheduler } from './scheduler.js'
  import { createSharedStore } from './shared-http-cache.js'
  import { version } from './version.js'
+ import { HealthSignalsQueue } from './worker/health-signals.js'
  import { sendViaITC, waitEventFromITC } from './worker/itc.js'
  import { RoundRobinMap } from './worker/round-robin-map.js'
  import {
@@ -55,11 +55,12 @@ import {
  kConfig,
  kFullId,
  kHealthCheckTimer,
+ kHealthMetricsTimer,
  kId,
  kITC,
  kLastHealthCheckELU,
- kLastVerticalScalerELU,
  kStderrMarker,
+ kWorkerHealthSignals,
  kWorkerId,
  kWorkersBroadcast,
  kWorkerStartTime,
@@ -83,6 +84,7 @@ export class Runtime extends EventEmitter {
  error

  #loggerDestination
+ #loggerContext
  #stdio

  #status // starting, started, stopping, stopped, closed
@@ -108,11 +110,13 @@ export class Runtime extends EventEmitter {
  #metricsLabelName

  #applicationsConfigsPatches
+ #applications
  #workers
- #workersConfigs
  #workersBroadcastChannel
  #workerITCHandlers
+ #restartingApplications
  #restartingWorkers
+ #dynamicWorkersScaler

  #sharedHttpCache
  #scheduler
@@ -129,6 +133,7 @@ export class Runtime extends EventEmitter {
  this.#context = context ?? {}
  this.#isProduction = this.#context.isProduction ?? this.#context.production ?? false
  this.#concurrency = this.#context.concurrency ?? MAX_CONCURRENCY
+ this.#applications = new Map()
  this.#workers = new RoundRobinMap()
  this.#url = undefined
  this.#channelCreationHook = createChannelCreationHook(this.#config)
@@ -139,6 +144,7 @@ export class Runtime extends EventEmitter {
  })
  this.logger = abstractLogger // This is replaced by the real logger in init() and eventually removed in close()
  this.#status = undefined
+ this.#restartingApplications = new Set()
  this.#restartingWorkers = new Map()
  this.#sharedHttpCache = null
  this.#applicationsConfigsPatches = new Map()
@@ -161,9 +167,18 @@ export class Runtime extends EventEmitter {
  deleteHttpCacheValue: this.#deleteHttpCacheValue.bind(this),
  invalidateHttpCache: this.invalidateHttpCache.bind(this),
  updateSharedContext: this.updateSharedContext.bind(this),
- getSharedContext: this.getSharedContext.bind(this)
+ getSharedContext: this.getSharedContext.bind(this),
+ sendHealthSignals: this.#processHealthSignals.bind(this)
  }
  this.#sharedContext = {}
+
+ if (this.#isProduction) {
+ this.#env.PLT_DEV = 'false'
+ this.#env.PLT_ENVIRONMENT = 'production'
+ } else {
+ this.#env.PLT_DEV = 'true'
+ this.#env.PLT_ENVIRONMENT = 'development'
+ }
  }

  async init () {
@@ -187,34 +202,24 @@ export class Runtime extends EventEmitter {
  }

  // Create the logger
- const [logger, destination] = await createLogger(config)
+ const [logger, destination, context] = await createLogger(config)
  this.logger = logger
  this.#loggerDestination = destination
+ this.#loggerContext = context

  this.#createWorkersBroadcastChannel()

- this.#workersConfigs = {}
- for (const application of this.#config.applications) {
- let count = application.workers ?? this.#config.workers ?? 1
- if (count > 1 && application.entrypoint && !features.node.reusePort) {
+ if (this.#config.workers.dynamic) {
+ if (this.#config.workers.dynamic === false) {
  this.logger.warn(
- `"${application.id}" is set as the entrypoint, but reusePort is not available in your OS; setting workers to 1 instead of ${count}`
+ `Vertical scaler disabled because the "workers" configuration is set to ${this.#config.workers.static}.`
  )
- count = 1
+ } else {
+ this.#dynamicWorkersScaler = new DynamicWorkersScaler(this, this.#config.workers)
  }
- this.#workersConfigs[application.id] = { count }
- }
-
- if (this.#isProduction) {
- this.#env.PLT_DEV = 'false'
- this.#env.PLT_ENVIRONMENT = 'production'
- } else {
- this.#env.PLT_DEV = 'true'
- this.#env.PLT_ENVIRONMENT = 'development'
  }

- await this.#setupApplications()
-
+ await this.addApplications(this.#config.applications)
  await this.#setDispatcher(config.undici)

  if (config.scheduler) {
@@ -236,12 +241,7 @@ export class Runtime extends EventEmitter {
  this.#createWorkersBroadcastChannel()

  try {
- const startInvocations = []
- for (const application of this.getApplicationsIds()) {
- startInvocations.push([application, silent])
- }
-
- await executeInParallel(this.startApplication.bind(this), startInvocations, this.#concurrency)
+ await this.startApplications(this.getApplicationsIds(), silent)

  if (this.#config.inspectorOptions) {
  const { port } = this.#config.inspectorOptions
@@ -287,66 +287,34 @@ export class Runtime extends EventEmitter {
  this.startCollectingMetrics()
  }

- if (this.#config.verticalScaler?.enabled) {
- await this.#setupVerticalScaler()
- }
-
+ await this.#dynamicWorkersScaler?.start()
  this.#showUrl()
  return this.#url
  }

  async stop (silent = false) {
- if (this.#scheduler) {
- await this.#scheduler.stop()
- }
-
  if (this.#status === 'starting') {
  await once(this, 'started')
  }

  this.#updateStatus('stopping')

+ if (this.#scheduler) {
+ await this.#scheduler.stop()
+ }
+
  if (this.#inspectorServer) {
  await this.#inspectorServer.close()
  }

+ await this.#dynamicWorkersScaler?.stop()
+
  // Stop the entrypoint first so that no new requests are accepted
  if (this.#entrypointId) {
  await this.stopApplication(this.#entrypointId, silent)
  }

- const stopInvocations = []
-
- // Construct the reverse dependency graph
- const dependents = {}
-
- try {
- const allApplications = await this.getApplications(true)
- for (const application of allApplications.applications) {
- for (const dependency of application.dependencies ?? []) {
- let applicationDependents = dependents[dependency]
- if (!applicationDependents) {
- applicationDependents = new Set()
- dependents[dependency] = applicationDependents
- }
-
- applicationDependents.add(application.id)
- }
- }
- } catch (e) {
- // Noop - This only happens if stop is invoked after a failed start, in which case we don't care about deps
- }
-
- for (const application of this.getApplicationsIds()) {
- // The entrypoint has been stopped above
- if (application === this.#entrypointId) {
- continue
- }
-
- stopInvocations.push([application, silent, Array.from(dependents[application] ?? [])])
- }
-
- await executeInParallel(this.stopApplication.bind(this), stopInvocations, this.#concurrency)
+ await this.stopApplications(this.getApplicationsIds(), silent)

  await this.#meshInterceptor.close()
  this.#workersBroadcastChannel?.close()
@@ -357,14 +325,13 @@ export class Runtime extends EventEmitter {
  async restart (applications = []) {
  this.emitAndNotify('restarting')

- const restartInvocations = []
+ const toRestart = []
  for (const application of this.getApplicationsIds()) {
  if (applications.length === 0 || applications.includes(application)) {
- restartInvocations.push([application])
+ toRestart.push(application)
  }
  }
-
- await executeInParallel(this.restartApplication.bind(this), restartInvocations, this.#concurrency)
+ await this.restartApplications(toRestart)

  this.emitAndNotify('restarted')

@@ -393,6 +360,7 @@ export class Runtime extends EventEmitter {

  this.logger = abstractLogger
  this.#loggerDestination = null
+ this.#loggerContext = null
  }

  this.#updateStatus('closed')
@@ -478,17 +446,126 @@ export class Runtime extends EventEmitter {
  }
  }

+ async addApplications (applications, start = false) {
+ const setupInvocations = []
+
+ const toStart = []
+ for (const application of applications) {
+ const workers = application.workers
+
+ if ((workers.static > 1 || workers.minimum > 1) && application.entrypoint && !features.node.reusePort) {
+ this.logger.warn(
+ `"${application.id}" is set as the entrypoint, but reusePort is not available in your OS; setting workers to 1 instead of ${workers.static}`
+ )
+ workers.static = 1
+ workers.minimum = 1
+ }
+
+ this.#applications.set(application.id, application)
+ setupInvocations.push([application])
+ toStart.push(application.id)
+ }
+
+ await executeInParallel(this.#setupApplication.bind(this), setupInvocations, this.#concurrency)
+
+ for (const application of applications) {
+ this.logger.info(`Added application "${application.id}"${application.entrypoint ? ' (entrypoint)' : ''}.`)
+ this.emitAndNotify('application:added', application)
+ }
+
+ if (start) {
+ await this.startApplications(toStart)
+ }
+
+ this.#updateLoggingPrefixes()
+ }
+
+ async removeApplications (applications, silent = false) {
+ if (applications.includes(this.#entrypointId)) {
+ throw new CannotRemoveEntrypointError()
+ }
+
+ await this.stopApplications(applications, silent, true)
+
+ for (const application of applications) {
+ this.#dynamicWorkersScaler?.remove(application)
+ this.#applications.delete(application)
+ }
+
+ for (const application of applications) {
+ this.logger.info(`Removed application "${application}".`)
+ this.emitAndNotify('application:removed', application)
+ }
+
+ this.#updateLoggingPrefixes()
+ }
+
+ async startApplications (applicationsToStart, silent = false) {
+ const startInvocations = []
+ for (const application of applicationsToStart) {
+ startInvocations.push([application, silent])
+ }
+
+ return executeInParallel(this.startApplication.bind(this), startInvocations, this.#concurrency)
+ }
+
+ async stopApplications (applicationsToStop, silent = false, skipDependencies = false) {
+ const stopInvocations = []
+
+ // Construct the reverse dependency graph
+ const dependents = {}
+
+ if (!skipDependencies) {
+ try {
+ const { applications } = await this.getApplications(true)
+ for (const application of applications) {
+ for (const dependency of application.dependencies ?? []) {
+ let applicationDependents = dependents[dependency]
+ if (!applicationDependents) {
+ applicationDependents = new Set()
+ dependents[dependency] = applicationDependents
+ }
+
+ applicationDependents.add(application.id)
+ }
+ }
+ } catch (e) {
+ // Noop - This only happens if stop is invoked after a failed start, in which case we don't care about deps
+ }
+ }
+
+ for (const application of applicationsToStop) {
+ // The entrypoint has been stopped above
+ if (application === this.#entrypointId) {
+ continue
+ }
+
+ stopInvocations.push([application, silent, Array.from(dependents[application] ?? [])])
+ }
+
+ return executeInParallel(this.stopApplication.bind(this), stopInvocations, this.#concurrency)
+ }
+
+ async restartApplications (applicationsToRestart) {
+ const restartInvocations = []
+
+ for (const application of applicationsToRestart) {
+ restartInvocations.push([application])
+ }
+
+ return executeInParallel(this.restartApplication.bind(this), restartInvocations, this.#concurrency)
+ }
+
  async startApplication (id, silent = false) {
  const config = this.#config
- const applicationConfig = config.applications.find(s => s.id === id)
+ const applicationConfig = this.#applications.get(id)

  if (!applicationConfig) {
  throw new ApplicationNotFoundError(id, this.getApplicationsIds().join(', '))
  }

- const workersConfigs = this.#workersConfigs[id]
-
- for (let i = 0; i < workersConfigs.count; i++) {
+ const workers = applicationConfig.workers.static
+ for (let i = 0; i < workers; i++) {
  const worker = this.#workers.get(`${id}:${i}`)
  const status = worker?.[kWorkerStatus]

@@ -499,18 +576,15 @@ export class Runtime extends EventEmitter {

  this.emitAndNotify('application:starting', id)

- for (let i = 0; i < workersConfigs.count; i++) {
- await this.#startWorker(config, applicationConfig, workersConfigs.count, id, i, silent)
+ for (let i = 0; i < workers; i++) {
+ await this.#startWorker(config, applicationConfig, workers, id, i, silent)
  }

  this.emitAndNotify('application:started', id)
  }

  async stopApplication (id, silent = false, dependents = []) {
- const config = this.#config
- const applicationConfig = config.applications.find(s => s.id === id)
-
- if (!applicationConfig) {
+ if (!this.#applications.has(id)) {
  throw new ApplicationNotFoundError(id, this.getApplicationsIds().join(', '))
  }

@@ -533,26 +607,39 @@ export class Runtime extends EventEmitter {
  }

  async restartApplication (id) {
- const config = this.#config
- const applicationConfig = this.#config.applications.find(s => s.id === id)
+ const applicationConfig = this.#applications.get(id)

- const workersIds = await this.#workers.getKeys(id)
- const workersCount = workersIds.length
+ if (!applicationConfig) {
+ throw new ApplicationNotFoundError(id, this.getApplicationsIds().join(', '))
+ }

- this.emitAndNotify('application:restarting', id)
+ if (this.#restartingApplications.has(id)) {
+ return
+ }
+ this.#restartingApplications.add(id)

- for (let i = 0; i < workersCount; i++) {
- const workerId = workersIds[i]
- const worker = this.#workers.get(workerId)
+ try {
+ const config = this.#config
+ const workersIds = await this.#workers.getKeys(id)
+ const workersCount = workersIds.length
+
+ this.emitAndNotify('application:restarting', id)

- if (i > 0 && config.workersRestartDelay > 0) {
- await sleep(config.workersRestartDelay)
+ for (let i = 0; i < workersCount; i++) {
+ const workerId = workersIds[i]
+ const worker = this.#workers.get(workerId)
+
+ if (i > 0 && config.workersRestartDelay > 0) {
+ await sleep(config.workersRestartDelay)
+ }
+
+ await this.#replaceWorker(config, applicationConfig, workersCount, id, i, worker, true)
  }

- await this.#replaceWorker(config, applicationConfig, workersCount, id, i, worker, true)
+ this.emitAndNotify('application:restarted', id)
+ } finally {
+ this.#restartingApplications.delete(id)
  }
-
- this.emitAndNotify('application:restarted', id)
  }

  async buildApplication (id) {
@@ -804,7 +891,7 @@ export class Runtime extends EventEmitter {
  this.#concurrency = concurrency
  }

- async getUrl () {
+ getUrl () {
  return this.#url
  }

@@ -867,8 +954,8 @@ export class Runtime extends EventEmitter {
  async getCustomHealthChecks () {
  const status = {}

- for (const application of this.#config.applications) {
- const workersIds = this.#workers.getKeys(application.id)
+ for (const id of this.#applications.keys()) {
+ const workersIds = this.#workers.getKeys(id)
  for (const workerId of workersIds) {
  const worker = this.#workers.get(workerId)
  status[workerId] = await sendViaITC(worker, 'getCustomHealthCheck')
@@ -881,8 +968,8 @@ export class Runtime extends EventEmitter {
  async getCustomReadinessChecks () {
  const status = {}

- for (const application of this.#config.applications) {
- const workersIds = this.#workers.getKeys(application.id)
+ for (const id of this.#applications.keys()) {
+ const workersIds = this.#workers.getKeys(id)
  for (const workerId of workersIds) {
  const worker = this.#workers.get(workerId)
  status[workerId] = await sendViaITC(worker, 'getCustomReadinessCheck')
@@ -1063,7 +1150,7 @@ export class Runtime extends EventEmitter {
  }

  getApplicationsIds () {
- return this.#config.applications.map(application => application.id)
+ return Array.from(this.#applications.keys())
  }

  async getApplications (allowUnloaded = false) {
@@ -1076,22 +1163,6 @@ export class Runtime extends EventEmitter {
  }
  }

- async getWorkers () {
- const status = {}
-
- for (const [key, worker] of this.#workers.entries()) {
- const [application, index] = key.split(':')
- status[key] = {
- application,
- worker: index,
- status: worker[kWorkerStatus],
- thread: worker.threadId
- }
- }
-
- return status
- }
-
  async getApplicationMeta (id) {
  const application = await this.#getApplicationById(id)

@@ -1174,6 +1245,45 @@ export class Runtime extends EventEmitter {
  return sendViaITC(application, 'getApplicationGraphQLSchema')
  }

+ async getWorkers (includeRaw = false) {
+ const status = {}
+
+ for (const [key, worker] of this.#workers.entries()) {
+ const [application, index] = key.split(':')
+
+ status[key] = {
+ application,
+ worker: index,
+ status: worker[kWorkerStatus],
+ thread: worker.threadId,
+ raw: includeRaw ? worker : undefined
+ }
+ }
+
+ return status
+ }
+
+ async getWorkerHealth (worker, options = {}) {
+ if (!features.node.worker.getHeapStatistics) {
+ throw new GetHeapStatisticUnavailable()
+ }
+
+ const currentELU = worker.performance.eventLoopUtilization()
+ const previousELU = options.previousELU
+
+ let elu = currentELU
+ if (previousELU) {
+ elu = worker.performance.eventLoopUtilization(elu, previousELU)
+ }
+
+ const { used_heap_size: heapUsed, total_heap_size: heapTotal } = await worker.getHeapStatistics()
+ return { elu: elu.utilization, heapUsed, heapTotal, currentELU }
+ }
+
+ getDynamicWorkersScaler () {
+ return this.#dynamicWorkersScaler
+ }
+
  #getHttpCacheValue ({ request }) {
  if (!this.#sharedHttpCache) {
  return
@@ -1225,60 +1335,49 @@ export class Runtime extends EventEmitter {
  this.logger.info(`Platformatic is now listening at ${this.#url}`)
  }

- async #setupApplications () {
+ async #setupApplication (applicationConfig) {
+ if (this.#status === 'stopping' || this.#status === 'closed') {
+ return
+ }
+
+ const id = applicationConfig.id
  const config = this.#config
- const setupInvocations = []

- // Parse all applications and verify we're not missing any path or resolved application
- for (const applicationConfig of config.applications) {
+ if (!applicationConfig.path) {
  // If there is no application path, check if the application was resolved
- if (!applicationConfig.path) {
- if (applicationConfig.url) {
- // Try to backfill the path for external applications
- applicationConfig.path = join(this.#root, config.resolvedApplicationsBasePath, applicationConfig.id)
-
- if (!existsSync(applicationConfig.path)) {
- const executable = globalThis.platformatic?.executable ?? 'platformatic'
- this.logger.error(
- `The path for application "%s" does not exist. Please run "${executable} resolve" and try again.`,
- applicationConfig.id
- )
+ if (applicationConfig.url) {
+ // Try to backfill the path for external applications
+ applicationConfig.path = join(this.#root, config.resolvedApplicationsBasePath, id)

- await this.closeAndThrow(new RuntimeAbortedError())
- }
- } else {
+ if (!existsSync(applicationConfig.path)) {
+ const executable = globalThis.platformatic?.executable ?? 'platformatic'
  this.logger.error(
- 'The application "%s" has no path defined. Please check your configuration and try again.',
- applicationConfig.id
+ `The path for application "%s" does not exist. Please run "${executable} resolve" and try again.`,
+ id
  )

  await this.closeAndThrow(new RuntimeAbortedError())
  }
- }
-
- setupInvocations.push([applicationConfig])
- }
-
- await executeInParallel(this.#setupApplication.bind(this), setupInvocations, this.#concurrency)
- }
+ } else {
+ this.logger.error(
+ 'The application "%s" has no path defined. Please check your configuration and try again.',
+ id
+ )

- async #setupApplication (applicationConfig) {
- if (this.#status === 'stopping' || this.#status === 'closed') {
- return
+ await this.closeAndThrow(new RuntimeAbortedError())
+ }
  }

- const config = this.#config
-
- const workersConfigs = this.#workersConfigs[applicationConfig.id]
- const id = applicationConfig.id
+ const workers = applicationConfig.workers.static
  const setupInvocations = []

- for (let i = 0; i < workersConfigs.count; i++) {
- setupInvocations.push([config, applicationConfig, workersConfigs.count, id, i])
+ for (let i = 0; i < workers; i++) {
+ setupInvocations.push([config, applicationConfig, workers, id, i])
  }

  await executeInParallel(this.#setupWorker.bind(this), setupInvocations, this.#concurrency)

+ await this.#dynamicWorkersScaler?.add(applicationConfig)
  this.emitAndNotify('application:init', id)
  }

@@ -1472,6 +1571,14 @@ export class Runtime extends EventEmitter {
  this.logger.trace({ event, payload }, 'Runtime event')
  })

+ worker[kITC].on('request:restart', async () => {
+ try {
+ await this.restartApplication(applicationId)
+ } catch (e) {
+ this.logger.error(e)
+ }
+ })
+
  // Only activate watch for the first instance
  if (index === 0) {
  // Handle applications changes
@@ -1523,100 +1630,135 @@ export class Runtime extends EventEmitter {
  return worker
  }

- async #getHealth (worker, options = {}) {
- if (!features.node.worker.getHeapStatistics) {
- throw new GetHeapStatisticUnavailable()
- }
-
- const currentELU = worker.performance.eventLoopUtilization()
- const previousELU = options.previousELU
-
- let elu = currentELU
- if (previousELU) {
- elu = worker.performance.eventLoopUtilization(elu, previousELU)
- }
-
- const { used_heap_size: heapUsed, total_heap_size: heapTotal } = await worker.getHeapStatistics()
- return { elu: elu.utilization, heapUsed, heapTotal, currentELU }
- }
-
- #setupHealthCheck (config, applicationConfig, workersCount, id, index, worker, errorLabel) {
+ #setupHealthMetrics (id, index, worker, errorLabel) {
  // Clear the timeout when exiting
- worker.on('exit', () => clearTimeout(worker[kHealthCheckTimer]))
-
- const { maxELU, maxHeapUsed, maxHeapTotal, maxUnhealthyChecks, interval } = worker[kConfig].health
- const maxHeapTotalNumber = typeof maxHeapTotal === 'string' ? parseMemorySize(maxHeapTotal) : maxHeapTotal
-
- let unhealthyChecks = 0
+ worker.on('exit', () => clearTimeout(worker[kHealthMetricsTimer]))

- worker[kHealthCheckTimer] = setTimeout(async () => {
- if (worker[kWorkerStatus] !== 'started') {
- return
- }
+ worker[kHealthMetricsTimer] = setTimeout(async () => {
+ if (worker[kWorkerStatus] !== 'started') return

- let health, unhealthy, memoryUsage
+ let health = null
  try {
- health = await this.#getHealth(worker, {
+ health = await this.getWorkerHealth(worker, {
  previousELU: worker[kLastHealthCheckELU]
  })
- worker[kLastHealthCheckELU] = health.currentELU
- memoryUsage = health.heapUsed / maxHeapTotalNumber
- unhealthy = health.elu > maxELU || memoryUsage > maxHeapUsed
  } catch (err) {
  this.logger.error({ err }, `Failed to get health for ${errorLabel}.`)
- worker[kLastHealthCheckELU] = null
- unhealthy = true
- memoryUsage = -1
- health = { elu: -1, heapUsed: -1, heapTotal: -1 }
+ } finally {
+ worker[kLastHealthCheckELU] = health?.currentELU ?? null
  }

- this.emitAndNotify('application:worker:health', {
+ const healthSignals = worker[kWorkerHealthSignals]?.getAll() ?? []
+
+ this.emitAndNotify('application:worker:health:metrics', {
  id: worker[kId],
  application: id,
  worker: index,
  currentHealth: health,
- unhealthy,
- healthConfig: worker[kConfig].health
+ healthSignals
  })

- if (unhealthy) {
+ worker[kHealthMetricsTimer].refresh()
+ }, 1000).unref()
+ }
+
+ #setupHealthCheck (config, applicationConfig, workersCount, id, index, worker, errorLabel) {
+ let healthMetricsListener = null
+
+ // Clear the timeout and listener when exiting
+ worker.on('exit', () => {
+ clearTimeout(worker[kHealthCheckTimer])
+ if (healthMetricsListener) {
+ this.removeListener('application:worker:health:metrics', healthMetricsListener)
+ }
+ })
+
+ const healthConfig = worker[kConfig].health
+
+ let { maxELU, maxHeapUsed, maxHeapTotal, maxUnhealthyChecks, interval } = worker[kConfig].health
+
+ if (typeof maxHeapTotal === 'string') {
+ maxHeapTotal = parseMemorySize(maxHeapTotal)
+ }
+
+ if (interval < 1000) {
+ interval = 1000
+ this.logger.warn(
+ `The health check interval for the "${errorLabel}" is set to ${healthConfig.interval}ms. ` +
+ 'The minimum health check interval is 1s. It will be set to 1000ms.'
+ )
+ }
+
+ let lastHealthMetrics = null
+
+ healthMetricsListener = healthCheck => {
+ if (healthCheck.id === worker[kId]) {
+ lastHealthMetrics = healthCheck
+ }
+ }
+
+ this.on('application:worker:health:metrics', healthMetricsListener)
+
+ let unhealthyChecks = 0
+
+ worker[kHealthCheckTimer] = setTimeout(async () => {
+ if (worker[kWorkerStatus] !== 'started') return
+
+ if (lastHealthMetrics) {
+ const health = lastHealthMetrics.currentHealth
+ const memoryUsage = health.heapUsed / maxHeapTotal
+ const unhealthy = health.elu > maxELU || memoryUsage > maxHeapUsed
+
+ this.emitAndNotify('application:worker:health', {
+ id: worker[kId],
+ application: id,
+ worker: index,
+ currentHealth: health,
+ unhealthy,
+ healthConfig
+ })
+
  if (health.elu > maxELU) {
  this.logger.error(
- `The ${errorLabel} has an ELU of ${(health.elu * 100).toFixed(2)} %, above the maximum allowed usage of ${(maxELU * 100).toFixed(2)} %.`
+ `The ${errorLabel} has an ELU of ${(health.elu * 100).toFixed(2)} %, ` +
+ `above the maximum allowed usage of ${(maxELU * 100).toFixed(2)} %.`
  )
  }

  if (memoryUsage > maxHeapUsed) {
  this.logger.error(
- `The ${errorLabel} is using ${(memoryUsage * 100).toFixed(2)} % of the memory, above the maximum allowed usage of ${(maxHeapUsed * 100).toFixed(2)} %.`
+ `The ${errorLabel} is using ${(memoryUsage * 100).toFixed(2)} % of the memory, ` +
+ `above the maximum allowed usage of ${(maxHeapUsed * 100).toFixed(2)} %.`
  )
  }

- unhealthyChecks++
- } else {
- unhealthyChecks = 0
- }
+ if (unhealthy) {
+ unhealthyChecks++
+ } else {
+ unhealthyChecks = 0
+ }

- if (unhealthyChecks === maxUnhealthyChecks) {
- try {
- this.emitAndNotify('application:worker:unhealthy', { application: id, worker: index })
+ if (unhealthyChecks === maxUnhealthyChecks) {
+ try {
+ this.emitAndNotify('application:worker:unhealthy', { application: id, worker: index })

- this.logger.error(
- { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
- `The ${errorLabel} is unhealthy. Replacing it ...`
- )
+ this.logger.error(
+ { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
+ `The ${errorLabel} is unhealthy. Replacing it ...`
+ )

- await this.#replaceWorker(config, applicationConfig, workersCount, id, index, worker)
- } catch (e) {
- this.logger.error(
- { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
- `Cannot replace the ${errorLabel}. Forcefully terminating it ...`
- )
+ await this.#replaceWorker(config, applicationConfig, workersCount, id, index, worker)
+ } catch (e) {
+ this.logger.error(
+ { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
+ `Cannot replace the ${errorLabel}. Forcefully terminating it ...`
+ )

- worker.terminate()
+ worker.terminate()
+ }
+ } else {
+ worker[kHealthCheckTimer].refresh()
  }
- } else {
- worker[kHealthCheckTimer].refresh()
  }
  }, interval).unref()
  }
@@ -1684,6 +1826,8 @@ export class Runtime extends EventEmitter {
  this.logger.info(`Started the ${label}...`)
  }

+ this.#setupHealthMetrics(id, index, worker, label)
+
  const { enabled, gracePeriod } = worker[kConfig].health
  if (enabled && config.restartOnError > 0) {
  // if gracePeriod is 0, it will be set to 1 to start health checks immediately
@@ -1934,6 +2078,10 @@ export class Runtime extends EventEmitter {
  workerId = matched[2]
  }

+ if (!this.#applications.has(applicationId)) {
+ throw new ApplicationNotFoundError(applicationId, this.getApplicationsIds().join(', '))
+ }
+
  return this.#getWorkerByIdOrNext(applicationId, workerId, ensureStarted, mustExist)
  }

@@ -2158,9 +2306,7 @@ export class Runtime extends EventEmitter {
  async #updateApplicationConfigWorkers (applicationId, workers) {
  this.logger.info(`Updating application "${applicationId}" config workers to ${workers}`)

- this.#config.applications.find(s => s.id === applicationId).workers = workers
- const application = await this.#getApplicationById(applicationId)
- application[kConfig].workers = workers
+ this.#applications.get(applicationId).workers.static = workers

  const workersIds = this.#workers.getKeys(applicationId)
  const promises = []
@@ -2177,13 +2323,15 @@ export class Runtime extends EventEmitter {
  throw result.reason
  }
  }
+
+ this.#updateLoggingPrefixes()
  }

  async #updateApplicationConfigHealth (applicationId, health) {
  this.logger.info(`Updating application "${applicationId}" config health heap to ${JSON.stringify(health)}`)
  const { maxHeapTotal, maxYoungGeneration } = health

- const application = this.#config.applications.find(s => s.id === applicationId)
+ const application = this.#applications.get(applicationId)
  if (maxHeapTotal) {
  application.health.maxHeapTotal = maxHeapTotal
  }
@@ -2200,7 +2348,6 @@ export class Runtime extends EventEmitter {
  throw new InvalidArgumentError('updates', 'must have at least one element')
  }

- const config = this.#config
  const validatedUpdates = []
  for (const update of updates) {
  const { application: applicationId } = update
@@ -2208,7 +2355,7 @@ export class Runtime extends EventEmitter {
  if (!applicationId) {
  throw new InvalidArgumentError('application', 'must be a string')
  }
- const applicationConfig = config.applications.find(s => s.id === applicationId)
+ const applicationConfig = this.#applications.get(applicationId)
  if (!applicationConfig) {
  throw new ApplicationNotFoundError(applicationId, Array.from(this.getApplicationsIds()).join(', '))
  }
@@ -2411,6 +2558,10 @@ export class Runtime extends EventEmitter {
  )
  }
  report.success = true
+
+ if (report.success) {
+ this.emitAndNotify('application:resources:health:updated', { application: applicationId, health })
+ }
  } catch (err) {
  if (report.updated.length < 1) {
  this.logger.error({ err }, 'Cannot update application health heap, no worker updated')
@@ -2483,6 +2634,10 @@ export class Runtime extends EventEmitter {
  await this.#updateApplicationConfigWorkers(applicationId, newWorkersCount)
  }

+ if (report.success) {
+ this.emitAndNotify('application:resources:workers:updated', { application: applicationId, workers })
+ }
+
  return report
  }

@@ -2494,203 +2649,6 @@ export class Runtime extends EventEmitter {
  }
  }

- async #setupVerticalScaler () {
- const fixedWorkersCount = this.#config.workers
- if (fixedWorkersCount !== undefined) {
- this.logger.warn(`Vertical scaler disabled because the "workers" configuration is set to ${fixedWorkersCount}`)
- return
- }
-
- const scalerConfig = this.#config.verticalScaler
- const memInfo = await getMemoryInfo()
- const memScope = memInfo.scope
-
- scalerConfig.maxTotalWorkers ??= os.availableParallelism()
- scalerConfig.maxTotalMemory ??= memInfo.total * 0.9
- scalerConfig.maxWorkers ??= scalerConfig.maxTotalWorkers
- scalerConfig.minWorkers ??= 1
- scalerConfig.cooldownSec ??= 60
- scalerConfig.scaleUpELU ??= 0.8
- scalerConfig.scaleDownELU ??= 0.2
- scalerConfig.scaleIntervalSec ??= 60
- scalerConfig.timeWindowSec ??= 10
- scalerConfig.scaleDownTimeWindowSec ??= 60
- scalerConfig.gracePeriod ??= 30 * 1000
- scalerConfig.applications ??= {}
-
- const maxTotalWorkers = scalerConfig.maxTotalWorkers
- const maxTotalMemory = scalerConfig.maxTotalMemory
- const maxWorkers = scalerConfig.maxWorkers
- const minWorkers = scalerConfig.minWorkers
- const cooldown = scalerConfig.cooldownSec
- const scaleUpELU = scalerConfig.scaleUpELU
- const scaleDownELU = scalerConfig.scaleDownELU
- const scaleIntervalSec = scalerConfig.scaleIntervalSec
- const timeWindowSec = scalerConfig.timeWindowSec
- const scaleDownTimeWindowSec = scalerConfig.scaleDownTimeWindowSec
- const applicationsConfigs = scalerConfig.applications
- const gracePeriod = scalerConfig.gracePeriod
- const healthCheckInterval = 1000
-
- const initialResourcesUpdates = []
-
- for (const application of this.#config.applications) {
- if (application.entrypoint && !features.node.reusePort) {
- this.logger.warn(
- `The "${application.id}" application cannot be scaled because it is an entrypoint` +
- ' and the "reusePort" feature is not available in your OS.'
- )
-
- applicationsConfigs[application.id] = {
- minWorkers: 1,
- maxWorkers: 1
- }
- continue
- }
- if (application.workers !== undefined) {
- this.logger.warn(
- `The "${application.id}" application cannot be scaled because` +
- ` it has a fixed number of workers (${application.workers}).`
- )
- applicationsConfigs[application.id] = {
- minWorkers: application.workers,
- maxWorkers: application.workers
- }
- continue
- }
-
- applicationsConfigs[application.id] ??= {}
- applicationsConfigs[application.id].minWorkers ??= minWorkers
- applicationsConfigs[application.id].maxWorkers ??= maxWorkers
-
- const appMinWorkers = applicationsConfigs[application.id].minWorkers
- if (appMinWorkers > 1) {
- initialResourcesUpdates.push({
- application: application.id,
- workers: minWorkers
- })
- }
- }
-
- if (initialResourcesUpdates.length > 0) {
- await this.updateApplicationsResources(initialResourcesUpdates)
- }
-
- for (const applicationId in applicationsConfigs) {
- const application = this.#config.applications.find(app => app.id === applicationId)
- if (!application) {
- delete applicationsConfigs[applicationId]
-
- this.logger.warn(
- `Vertical scaler configuration has a configuration for non-existing application "${applicationId}"`
- )
- }
- }
-
- const scalingAlgorithm = new ScalingAlgorithm({
- maxTotalWorkers,
- scaleUpELU,
- scaleDownELU,
- scaleUpTimeWindowSec: timeWindowSec,
- scaleDownTimeWindowSec,
- applications: applicationsConfigs
- })
-
- const healthCheckTimeout = setTimeout(async () => {
- let shouldCheckForScaling = false
-
- const now = Date.now()
-
- for (const worker of this.#workers.values()) {
- if (worker[kWorkerStatus] !== 'started' || worker[kWorkerStartTime] + gracePeriod > now) {
- continue
- }
-
- try {
- const health = await this.#getHealth(worker, {
- previousELU: worker[kLastVerticalScalerELU]
- })
- worker[kLastVerticalScalerELU] = health.currentELU
-
- if (!health) continue
-
- scalingAlgorithm.addWorkerHealthInfo({
- workerId: worker[kId],
- applicationId: worker[kApplicationId],
- elu: health.elu,
- heapUsed: health.heapUsed,
- heapTotal: health.heapTotal
- })
-
- if (health.elu > scaleUpELU) {
- shouldCheckForScaling = true
- }
- } catch (err) {
- this.logger.error({ err }, 'Failed to get health for worker')
- }
- }
-
- if (shouldCheckForScaling) {
- await checkForScaling()
- }
-
- healthCheckTimeout.refresh()
- }, healthCheckInterval).unref()
-
- let isScaling = false
- let lastScaling = 0
-
- const checkForScaling = async () => {
- const isInCooldown = Date.now() < lastScaling + cooldown * 1000
- if (isScaling || isInCooldown) return
- isScaling = true
-
- try {
- const workersInfo = await this.getWorkers()
- const mem = await getMemoryInfo({ scope: memScope })
-
- const appsWorkersInfo = {}
- for (const worker of Object.values(workersInfo)) {
- if (worker.status === 'exited') continue
-
- const applicationId = worker.application
- appsWorkersInfo[applicationId] ??= 0
- appsWorkersInfo[applicationId]++
- }
-
- const availableMemory = maxTotalMemory - mem.used
- const recommendations = scalingAlgorithm.getRecommendations(appsWorkersInfo, {
- availableMemory
- })
- if (recommendations.length > 0) {
- await applyRecommendations(recommendations)
- lastScaling = Date.now()
- }
- } catch (err) {
- this.logger.error({ err }, 'Failed to scale applications')
- } finally {
- isScaling = false
- }
- }
-
- const applyRecommendations = async recommendations => {
- const resourcesUpdates = []
- for (const recommendation of recommendations) {
- const { applicationId, workersCount, direction } = recommendation
- this.logger.info(`Scaling ${direction} the "${applicationId}" app to ${workersCount} workers`)
-
- resourcesUpdates.push({
- application: applicationId,
- workers: workersCount
- })
- }
- await this.updateApplicationsResources(resourcesUpdates)
- }
-
- // Interval for periodic scaling checks
- setInterval(checkForScaling, scaleIntervalSec * 1000).unref()
- }
-
  #setupPermissions (applicationConfig) {
  const argv = []
  const allows = new Set()
@@ -2734,4 +2692,24 @@ export class Runtime extends EventEmitter {
  argv.push('--permission', ...allows)
  return argv
  }
+
+ #processHealthSignals ({ workerId, signals }) {
+ const worker = this.#workers.get(workerId)
+
+ worker[kWorkerHealthSignals] ??= new HealthSignalsQueue()
+ worker[kWorkerHealthSignals].add(signals)
+ }
+
+ #updateLoggingPrefixes () {
+ if (!this.#loggerContext) {
+ return
+ }
+
+ const ids = []
+ for (const worker of this.#workers.values()) {
+ ids.push(`${worker[kFullId]}`)
+ }
+
+ this.#loggerContext.updatePrefixes(ids)
+ }
  }
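The hunks above replace the verticalScaler-based #setupVerticalScaler with the new DynamicWorkersScaler and expose application lifecycle helpers on Runtime: addApplications, removeApplications, startApplications, stopApplications, restartApplications, plus the now-public getWorkers(includeRaw) and getWorkerHealth(worker, options). A minimal usage sketch, assuming an already-initialized Runtime instance and an application entry shaped like the fields this diff reads (id, path, workers.static, workers.minimum); the exact application schema and how the instance is obtained are assumptions, not part of this diff:

// Sketch only: `runtime` is assumed to be an initialized Runtime from lib/runtime.js.
async function manageApplications (runtime) {
  // addApplications(applications, start = false) registers each application,
  // runs its worker setup, and optionally starts it right away.
  await runtime.addApplications(
    [{ id: 'reports', path: './applications/reports', workers: { static: 1, minimum: 1 } }],
    true
  )

  // getWorkers() returns a status map keyed by "<application>:<index>";
  // passing true also exposes the raw worker handle under `raw`.
  const workers = await runtime.getWorkers()
  for (const [key, info] of Object.entries(workers)) {
    console.log(key, info.status, info.thread)
  }

  // Removing the entrypoint throws CannotRemoveEntrypointError; other
  // applications are stopped (dependency ordering skipped) and removed.
  await runtime.removeApplications(['reports'])
}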