@platformatic/runtime 3.0.0-alpha.4 → 3.0.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/runtime.js CHANGED
@@ -1,55 +1,63 @@
1
- 'use strict'
2
-
3
- const { ITC } = require('@platformatic/itc')
4
- const {
5
- features,
6
- ensureLoggableError,
1
+ import {
2
+ deepmerge,
7
3
  ensureError,
4
+ ensureLoggableError,
8
5
  executeWithTimeout,
9
- deepmerge,
10
- parseMemorySize,
6
+ features,
7
+ kMetadata,
11
8
  kTimeout,
12
- kMetadata
13
- } = require('@platformatic/foundation')
14
- const { once, EventEmitter } = require('node:events')
15
- const { existsSync } = require('node:fs')
16
- const { readFile } = require('node:fs/promises')
17
- const { STATUS_CODES } = require('node:http')
18
- const { join } = require('node:path')
19
- const { pathToFileURL } = require('node:url')
20
- const { setTimeout: sleep, setImmediate: immediate } = require('node:timers/promises')
21
- const { Worker } = require('node:worker_threads')
22
- const { Agent, interceptors: undiciInterceptors, request } = require('undici')
23
- const { createThreadInterceptor } = require('undici-thread-interceptor')
24
- const SonicBoom = require('sonic-boom')
25
- const { checkDependencies, topologicalSort } = require('./dependencies')
26
- const errors = require('./errors')
27
- const { abstractLogger, createLogger } = require('./logger')
28
- const { startManagementApi } = require('./management-api')
29
- const { startPrometheusServer } = require('./prom-server')
30
- const { startScheduler } = require('./scheduler')
31
- const { createSharedStore } = require('./shared-http-cache')
32
- const { getRuntimeTmpDir } = require('./utils')
33
- const { sendViaITC, waitEventFromITC } = require('./worker/itc')
34
- const { RoundRobinMap } = require('./worker/round-robin-map.js')
35
- const {
36
- kId,
9
+ parseMemorySize
10
+ } from '@platformatic/foundation'
11
+ import { ITC } from '@platformatic/itc'
12
+ import fastify from 'fastify'
13
+ import { EventEmitter, once } from 'node:events'
14
+ import { existsSync } from 'node:fs'
15
+ import { readFile } from 'node:fs/promises'
16
+ import { STATUS_CODES } from 'node:http'
17
+ import { createRequire } from 'node:module'
18
+ import { join } from 'node:path'
19
+ import { setImmediate as immediate, setTimeout as sleep } from 'node:timers/promises'
20
+ import { pathToFileURL } from 'node:url'
21
+ import { Worker } from 'node:worker_threads'
22
+ import SonicBoom from 'sonic-boom'
23
+ import { Agent, request, interceptors as undiciInterceptors } from 'undici'
24
+ import { createThreadInterceptor } from 'undici-thread-interceptor'
25
+ import { checkDependencies, topologicalSort } from './dependencies.js'
26
+ import {
27
+ ApplicationAlreadyStartedError,
28
+ ApplicationNotFoundError,
29
+ ApplicationNotStartedError,
30
+ ApplicationStartTimeoutError,
31
+ InvalidArgumentError,
32
+ MessagingError,
33
+ MissingEntrypointError,
34
+ RuntimeAbortedError,
35
+ RuntimeExitedError,
36
+ WorkerNotFoundError
37
+ } from './errors.js'
38
+ import { abstractLogger, createLogger } from './logger.js'
39
+ import { startManagementApi } from './management-api.js'
40
+ import { startPrometheusServer } from './prom-server.js'
41
+ import { startScheduler } from './scheduler.js'
42
+ import { createSharedStore } from './shared-http-cache.js'
43
+ import { version } from './version.js'
44
+ import { sendViaITC, waitEventFromITC } from './worker/itc.js'
45
+ import { RoundRobinMap } from './worker/round-robin-map.js'
46
+ import {
47
+ kApplicationId,
48
+ kConfig,
37
49
  kFullId,
38
- kServiceId,
39
- kWorkerId,
40
- kITC,
41
50
  kHealthCheckTimer,
42
- kConfig,
43
- kWorkerStatus,
44
- kStderrMarker,
51
+ kId,
52
+ kITC,
45
53
  kLastELU,
46
- kWorkersBroadcast
47
- } = require('./worker/symbols')
48
- const fastify = require('fastify')
49
-
50
- const platformaticVersion = require('../package.json').version
51
- const kWorkerFile = join(__dirname, 'worker/main.js')
54
+ kStderrMarker,
55
+ kWorkerId,
56
+ kWorkersBroadcast,
57
+ kWorkerStatus
58
+ } from './worker/symbols.js'
52
59
 
60
+ const kWorkerFile = join(import.meta.dirname, 'worker/main.js')
53
61
  const kInspectorOptions = Symbol('plt.runtime.worker.inspectorOptions')
54
62
  const kForwardEvents = Symbol('plt.runtime.worker.forwardEvents')
55
63
 
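
The hunk above shows the module moving from CommonJS to ESM: `require` calls become `import` statements, `__dirname` becomes `import.meta.dirname`, and the `../package.json` version lookup is replaced by a generated `./version.js` module. A minimal sketch of the equivalences (illustrative only; it assumes a Node.js version that supports `import.meta.dirname`):

// CommonJS idioms removed above, rewritten with the ESM idioms added in this release.
import { createRequire } from 'node:module'
import { join } from 'node:path'

const require = createRequire(import.meta.url)                    // stand-in for the ambient CJS require
const kWorkerFile = join(import.meta.dirname, 'worker/main.js')   // replaces join(__dirname, 'worker/main.js')
const telemetryPath = require.resolve('@platformatic/telemetry')  // require.resolve still works through createRequire
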
@@ -61,37 +69,39 @@ const MAX_BOOTSTRAP_ATTEMPTS = 5
61
69
  const IMMEDIATE_RESTART_MAX_THRESHOLD = 10
62
70
  const MAX_WORKERS = 100
63
71
 
64
- const telemetryPath = require.resolve('@platformatic/telemetry')
65
- const openTelemetrySetupPath = join(telemetryPath, '..', 'lib', 'node-telemetry.js')
72
+ export class Runtime extends EventEmitter {
73
+ logger
74
+ #loggerDestination
75
+ #stdio
66
76
 
67
- class Runtime extends EventEmitter {
77
+ #status // starting, started, stopping, stopped, closed
68
78
  #root
69
79
  #config
70
80
  #env
71
81
  #context
82
+ #sharedContext
72
83
  #isProduction
73
- #runtimeTmpDir
74
- #servicesIds
75
84
  #entrypointId
76
85
  #url
77
- #loggerDestination
86
+
78
87
  #metrics
79
88
  #metricsTimeout
80
- #status // starting, started, stopping, stopped, closed
89
+
81
90
  #meshInterceptor
82
91
  #dispatcher
92
+
83
93
  #managementApi
84
94
  #prometheusServer
85
95
  #inspectorServer
96
+
97
+ #applicationsConfigsPatches
86
98
  #workers
87
99
  #workersBroadcastChannel
88
100
  #workerITCHandlers
89
101
  #restartingWorkers
102
+
90
103
  #sharedHttpCache
91
- servicesConfigsPatches
92
104
  #scheduler
93
- #stdio
94
- #sharedContext
95
105
 
96
106
  constructor (config, context) {
97
107
  super()
@@ -102,19 +112,14 @@ class Runtime extends EventEmitter {
102
112
  this.#env = config[kMetadata].env
103
113
  this.#context = context ?? {}
104
114
  this.#isProduction = this.#context.isProduction ?? this.#context.production ?? false
105
- this.#runtimeTmpDir = getRuntimeTmpDir(this.#root)
106
115
  this.#workers = new RoundRobinMap()
107
- this.#servicesIds = []
108
116
  this.#url = undefined
109
- this.#meshInterceptor = createThreadInterceptor({
110
- domain: '.plt.local',
111
- timeout: this.#config.serviceTimeout
112
- })
117
+ this.#meshInterceptor = createThreadInterceptor({ domain: '.plt.local', timeout: this.#config.applicationTimeout })
113
118
  this.logger = abstractLogger // This is replaced by the real logger in init() and eventually removed in close()
114
119
  this.#status = undefined
115
120
  this.#restartingWorkers = new Map()
116
121
  this.#sharedHttpCache = null
117
- this.servicesConfigsPatches = new Map()
122
+ this.#applicationsConfigsPatches = new Map()
118
123
 
119
124
  if (!this.#config.logger.captureStdio) {
120
125
  this.#stdio = {
@@ -124,9 +129,9 @@ class Runtime extends EventEmitter {
124
129
  }
125
130
 
126
131
  this.#workerITCHandlers = {
127
- getServiceMeta: this.getServiceMeta.bind(this),
128
- listServices: () => this.#servicesIds,
129
- getServices: this.getServices.bind(this),
132
+ getApplicationMeta: this.getApplicationMeta.bind(this),
133
+ listApplications: this.getApplicationsIds.bind(this),
134
+ getApplications: this.getApplications.bind(this),
130
135
  getWorkers: this.getWorkers.bind(this),
131
136
  getWorkerMessagingChannel: this.#getWorkerMessagingChannel.bind(this),
132
137
  getHttpCacheValue: this.#getHttpCacheValue.bind(this),
@@ -147,9 +152,6 @@ class Runtime extends EventEmitter {
147
152
  const config = this.#config
148
153
  const autoloadEnabled = config.autoload
149
154
 
150
- // This cannot be transferred to worker threads
151
- delete config.configManager
152
-
153
155
  if (config.managementApi) {
154
156
  this.#managementApi = await startManagementApi(this, this.#root)
155
157
  }
@@ -163,19 +165,18 @@ class Runtime extends EventEmitter {
163
165
  this.logger = logger
164
166
  this.#loggerDestination = destination
165
167
 
166
- this.#servicesIds = config.services.map(service => service.id)
167
168
  this.#createWorkersBroadcastChannel()
168
169
 
169
170
  const workersConfig = []
170
- for (const service of config.services) {
171
- const count = service.workers ?? this.#config.workers
172
- if (count > 1 && service.entrypoint && !features.node.reusePort) {
171
+ for (const application of config.applications) {
172
+ const count = application.workers ?? this.#config.workers
173
+ if (count > 1 && application.entrypoint && !features.node.reusePort) {
173
174
  this.logger.warn(
174
- `"${service.id}" is set as the entrypoint, but reusePort is not available in your OS; setting workers to 1 instead of ${count}`
175
+ `"${application.id}" is set as the entrypoint, but reusePort is not available in your OS; setting workers to 1 instead of ${count}`
175
176
  )
176
- workersConfig.push({ id: service.id, workers: 1 })
177
+ workersConfig.push({ id: application.id, workers: 1 })
177
178
  } else {
178
- workersConfig.push({ id: service.id, workers: count })
179
+ workersConfig.push({ id: application.id, workers: count })
179
180
  }
180
181
  }
181
182
 
@@ -189,56 +190,53 @@ class Runtime extends EventEmitter {
189
190
  this.#env['PLT_ENVIRONMENT'] = 'development'
190
191
  }
191
192
 
192
- // Create all services, each in is own worker thread
193
- for (const serviceConfig of config.services) {
194
- // If there is no service path, check if the service was resolved
195
- if (!serviceConfig.path) {
196
- if (serviceConfig.url) {
197
- // Try to backfill the path for external services
198
- serviceConfig.path = join(this.#root, config.resolvedServicesBasePath, serviceConfig.id)
193
+ // Create all applications, each in its own worker thread
194
+ for (const applicationConfig of config.applications) {
195
+ // If there is no application path, check if the application was resolved
196
+ if (!applicationConfig.path) {
197
+ if (applicationConfig.url) {
198
+ // Try to backfill the path for external applications
199
+ applicationConfig.path = join(this.#root, config.resolvedApplicationsBasePath, applicationConfig.id)
199
200
 
200
- if (!existsSync(serviceConfig.path)) {
201
+ if (!existsSync(applicationConfig.path)) {
201
202
  const executable = globalThis.platformatic?.executable ?? 'platformatic'
202
203
  this.logger.error(
203
- `The path for service "%s" does not exist. Please run "${executable} resolve" and try again.`,
204
- serviceConfig.id
204
+ `The path for application "%s" does not exist. Please run "${executable} resolve" and try again.`,
205
+ applicationConfig.id
205
206
  )
206
207
 
207
- await this.closeAndThrow(new errors.RuntimeAbortedError())
208
+ await this.closeAndThrow(new RuntimeAbortedError())
208
209
  }
209
210
  } else {
210
211
  this.logger.error(
211
- 'The service "%s" has no path defined. Please check your configuration and try again.',
212
- serviceConfig.id
212
+ 'The application "%s" has no path defined. Please check your configuration and try again.',
213
+ applicationConfig.id
213
214
  )
214
215
 
215
- await this.closeAndThrow(new errors.RuntimeAbortedError())
216
+ await this.closeAndThrow(new RuntimeAbortedError())
216
217
  }
217
218
  }
218
219
 
219
- await this.#setupService(serviceConfig)
220
+ await this.#setupApplication(applicationConfig)
220
221
  }
221
222
 
222
223
  try {
223
- checkDependencies(config.services)
224
+ checkDependencies(config.applications)
224
225
 
225
- // Make sure the list exists before computing the dependencies, otherwise some services might not be stopped
226
+ // Make sure the list exists before computing the dependencies, otherwise some applications might not be stopped
226
227
  if (autoloadEnabled) {
227
228
  this.#workers = topologicalSort(this.#workers, config)
228
229
  }
229
230
 
230
- // Recompute the list of services after sorting
231
- this.#servicesIds = config.services.map(service => service.id)
232
-
233
- // When autoloading is disabled, add a warning if a service is defined before its dependencies
231
+ // When autoloading is disabled, add a warning if an application is defined before its dependencies
234
232
  if (!autoloadEnabled) {
235
- for (let i = 0; i < config.services.length; i++) {
236
- const current = config.services[i]
233
+ for (let i = 0; i < config.applications.length; i++) {
234
+ const current = config.applications[i]
237
235
 
238
236
  for (const dep of current.dependencies ?? []) {
239
- if (config.services.findIndex(s => s.id === dep.id) > i) {
237
+ if (config.applications.findIndex(s => s.id === dep.id) > i) {
240
238
  this.logger.warn(
241
- `Service "${current.id}" depends on service "${dep.id}", but it is defined and it will be started before it. Please check your configuration file.`
239
+ `Application "${current.id}" depends on application "${dep.id}", but it is defined and it will be started before it. Please check your configuration file.`
242
240
  )
243
241
  }
244
242
  }
@@ -263,15 +261,15 @@ class Runtime extends EventEmitter {
263
261
  }
264
262
 
265
263
  if (typeof this.#config.entrypoint === 'undefined') {
266
- throw new errors.MissingEntrypointError()
264
+ throw new MissingEntrypointError()
267
265
  }
268
266
  this.#updateStatus('starting')
269
267
  this.#createWorkersBroadcastChannel()
270
268
 
271
269
  // Important: do not use Promise.all here since it won't properly manage dependencies
272
270
  try {
273
- for (const service of this.#servicesIds) {
274
- await this.startService(service, silent)
271
+ for (const application of this.getApplicationsIds()) {
272
+ await this.startApplication(application, silent)
275
273
  }
276
274
 
277
275
  if (this.#config.inspectorOptions) {
@@ -304,7 +302,7 @@ class Runtime extends EventEmitter {
304
302
 
305
303
  await server.listen({ port })
306
304
  this.logger.info(
307
- 'The inspector server is now listening for all services. Open `chrome://inspect` in Google Chrome to connect.'
305
+ 'The inspector server is now listening for all applications. Open `chrome://inspect` in Google Chrome to connect.'
308
306
  )
309
307
  this.#inspectorServer = server
310
308
  }
@@ -339,17 +337,17 @@ class Runtime extends EventEmitter {
339
337
 
340
338
  // Stop the entrypoint first so that no new requests are accepted
341
339
  if (this.#entrypointId) {
342
- await this.stopService(this.#entrypointId, silent)
340
+ await this.stopApplication(this.#entrypointId, silent)
343
341
  }
344
342
 
345
- // Stop services in reverse order to ensure services which depend on others are stopped first
346
- for (const service of this.#servicesIds.reverse()) {
343
+ // Stop applications in reverse order to ensure applications which depend on others are stopped first
344
+ for (const application of this.getApplicationsIds().reverse()) {
347
345
  // The entrypoint has been stopped above
348
- if (service === this.#entrypointId) {
346
+ if (application === this.#entrypointId) {
349
347
  continue
350
348
  }
351
349
 
352
- await this.stopService(service, silent)
350
+ await this.stopApplication(application, silent)
353
351
  }
354
352
 
355
353
  await this.#meshInterceptor.close()
@@ -370,16 +368,11 @@ class Runtime extends EventEmitter {
370
368
  return this.#url
371
369
  }
372
370
 
373
- getRuntimeStatus () {
374
- return this.#status
375
- }
376
-
377
371
  async close (silent = false) {
378
- this.#updateStatus('closing')
379
-
380
372
  clearInterval(this.#metricsTimeout)
381
373
 
382
374
  await this.stop(silent)
375
+ this.#updateStatus('closing')
383
376
 
384
377
  // The management API autocloses by itself via event in management-api.js.
385
378
  // This is needed to let management API stop endpoint to reply.
@@ -412,71 +405,9 @@ class Runtime extends EventEmitter {
412
405
  throw error
413
406
  }
414
407
 
415
- async startService (id, silent = false) {
416
- // Since when a service is stopped the worker is deleted, we consider a service start if its first service
417
- // is no longer in the init phase
418
- const firstWorker = this.#workers.get(`${id}:0`)
419
- if (firstWorker && firstWorker[kWorkerStatus] !== 'boot' && firstWorker[kWorkerStatus] !== 'init') {
420
- throw new errors.ApplicationAlreadyStartedError()
421
- }
422
-
423
- const config = this.#config
424
- const serviceConfig = config.services.find(s => s.id === id)
425
-
426
- if (!serviceConfig) {
427
- throw new errors.ServiceNotFoundError(id, Array.from(this.#servicesIds).join(', '))
428
- }
429
-
430
- const workersCount = await this.#workers.getCount(serviceConfig.id)
431
-
432
- this.emit('service:starting', id)
433
-
434
- for (let i = 0; i < workersCount; i++) {
435
- await this.#startWorker(config, serviceConfig, workersCount, id, i, silent)
436
- }
437
-
438
- this.emit('service:started', id)
439
- }
440
-
441
- async stopService (id, silent = false) {
442
- const config = this.#config
443
- const serviceConfig = config.services.find(s => s.id === id)
444
-
445
- if (!serviceConfig) {
446
- throw new errors.ServiceNotFoundError(id, Array.from(this.#servicesIds).join(', '))
447
- }
448
-
449
- const workersCount = await this.#workers.getCount(serviceConfig.id)
450
-
451
- this.emit('service:stopping', id)
452
-
453
- for (let i = 0; i < workersCount; i++) {
454
- await this.#stopWorker(workersCount, id, i, silent)
455
- }
456
-
457
- this.emit('service:stopped', id)
458
- }
459
-
460
- async buildService (id) {
461
- const service = await this.#getServiceById(id)
462
-
463
- this.emit('service:building', id)
464
- try {
465
- await sendViaITC(service, 'build')
466
- this.emit('service:built', id)
467
- } catch (e) {
468
- // The service exports no meta, return an empty object
469
- if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
470
- return {}
471
- }
472
-
473
- throw e
474
- }
475
- }
476
-
477
408
  async inject (id, injectParams) {
478
- // Make sure the service exists
479
- await this.#getServiceById(id, true)
409
+ // Make sure the application exists
410
+ await this.#getApplicationById(id, true)
480
411
 
481
412
  if (typeof injectParams === 'string') {
482
413
  injectParams = { url: injectParams }
@@ -519,6 +450,96 @@ class Runtime extends EventEmitter {
519
450
  }
520
451
  }
521
452
 
453
+ emit (event, payload) {
454
+ for (const worker of this.#workers.values()) {
455
+ if (worker[kForwardEvents]) {
456
+ worker[kITC].notify('runtime:event', { event, payload })
457
+ }
458
+ }
459
+
460
+ this.logger.trace({ event, payload }, 'Runtime event')
461
+ return super.emit(event, payload)
462
+ }
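
The overridden `emit` above fans every runtime event out to worker threads that enabled event forwarding (`kForwardEvents`), traces it, and then falls back to the normal `EventEmitter` behaviour. A short usage sketch, assuming `runtime` is an already-initialized instance of this `Runtime` class:

// Local listeners behave as with any EventEmitter ...
runtime.on('application:started', id => {
  console.log(`application ${id} is up`)
})

// ... and the same event is also delivered to opted-in workers as a 'runtime:event' ITC notification.
runtime.emit('application:started', 'main')
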
463
+
464
+ async sendCommandToApplication (id, name, message) {
465
+ const application = await this.#getApplicationById(id)
466
+
467
+ try {
468
+ return await sendViaITC(application, name, message)
469
+ } catch (e) {
470
+ // The application exposes no handler for this command, return an empty object
471
+ if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
472
+ return {}
473
+ }
474
+
475
+ throw e
476
+ }
477
+ }
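
`sendCommandToApplication` resolves the worker for the given application and forwards an arbitrary ITC command, resolving to an empty object when the worker registers no handler for it. Hypothetical usage (the `migrate` command and its payload are placeholders, not part of this diff; `runtime` is an initialized instance):

// Returns the handler result, or {} if the 'db' application does not expose a 'migrate' handler.
const result = await runtime.sendCommandToApplication('db', 'migrate', { dryRun: true })
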
478
+
479
+ async startApplication (id, silent = false) {
480
+ // Since when an application is stopped the worker is deleted, we consider an application started if its first worker
481
+ // is no longer in the init phase
482
+ const firstWorker = this.#workers.get(`${id}:0`)
483
+ if (firstWorker && firstWorker[kWorkerStatus] !== 'boot' && firstWorker[kWorkerStatus] !== 'init') {
484
+ throw new ApplicationAlreadyStartedError()
485
+ }
486
+
487
+ const config = this.#config
488
+ const applicationConfig = config.applications.find(s => s.id === id)
489
+
490
+ if (!applicationConfig) {
491
+ throw new ApplicationNotFoundError(id, this.getApplicationsIds().join(', '))
492
+ }
493
+
494
+ const workersCount = await this.#workers.getCount(applicationConfig.id)
495
+
496
+ this.emit('application:starting', id)
497
+
498
+ for (let i = 0; i < workersCount; i++) {
499
+ await this.#startWorker(config, applicationConfig, workersCount, id, i, silent)
500
+ }
501
+
502
+ this.emit('application:started', id)
503
+ }
504
+
505
+ async stopApplication (id, silent = false) {
506
+ const config = this.#config
507
+ const applicationConfig = config.applications.find(s => s.id === id)
508
+
509
+ if (!applicationConfig) {
510
+ throw new ApplicationNotFoundError(id, this.getApplicationsIds().join(', '))
511
+ }
512
+
513
+ const workersCount = await this.#workers.getCount(applicationConfig.id)
514
+
515
+ this.emit('application:stopping', id)
516
+
517
+ if (typeof workersCount === 'number') {
518
+ for (let i = 0; i < workersCount; i++) {
519
+ await this.#stopWorker(workersCount, id, i, silent)
520
+ }
521
+ }
522
+
523
+ this.emit('application:stopped', id)
524
+ }
525
+
526
+ async buildApplication (id) {
527
+ const application = await this.#getApplicationById(id)
528
+
529
+ this.emit('application:building', id)
530
+ try {
531
+ await sendViaITC(application, 'build')
532
+ this.emit('application:built', id)
533
+ } catch (e) {
534
+ // The application exposes no build step, return an empty object
535
+ if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
536
+ return {}
537
+ }
538
+
539
+ throw e
540
+ }
541
+ }
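
These three methods replace the former `startService`, `stopService` and `buildService`: the per-worker loops are the same, but they emit `application:*` events instead of `service:*` ones, and `stopApplication` now also guards against a missing workers count. Illustrative lifecycle sketch (assuming an initialized `runtime` and a placeholder application id `'main'`):

await runtime.buildApplication('main')   // emits application:building / application:built
await runtime.startApplication('main')   // starts each worker, emits application:starting / application:started
await runtime.stopApplication('main')    // stops each worker, emits application:stopping / application:stopped
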
542
+
522
543
  async updateUndiciInterceptors (undiciConfig) {
523
544
  this.#config.undici = undiciConfig
524
545
 
@@ -546,7 +567,7 @@ class Runtime extends EventEmitter {
546
567
  try {
547
568
  metrics = await this.getFormattedMetrics()
548
569
  } catch (error) {
549
- if (!(error instanceof errors.RuntimeExitedError)) {
570
+ if (!(error instanceof RuntimeExitedError)) {
550
571
  this.logger.error({ err: ensureLoggableError(error) }, 'Error collecting metrics')
551
572
  }
552
573
  return
@@ -560,6 +581,25 @@ class Runtime extends EventEmitter {
560
581
  }, COLLECT_METRICS_TIMEOUT).unref()
561
582
  }
562
583
 
584
+ invalidateHttpCache (options = {}) {
585
+ const { keys, tags } = options
586
+
587
+ if (!this.#sharedHttpCache) {
588
+ return
589
+ }
590
+
591
+ const promises = []
592
+ if (keys && keys.length > 0) {
593
+ promises.push(this.#sharedHttpCache.deleteKeys(keys))
594
+ }
595
+
596
+ if (tags && tags.length > 0) {
597
+ promises.push(this.#sharedHttpCache.deleteTags(tags))
598
+ }
599
+
600
+ return Promise.all(promises)
601
+ }
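
`invalidateHttpCache` keeps its previous behaviour in its new position: it is a no-op when no shared HTTP cache is configured, and otherwise deletes by keys and/or tags in parallel. A minimal sketch (the tag name is a placeholder; key objects follow whatever shape the shared cache store expects):

// Invalidate every cached response tagged 'products'.
await runtime.invalidateHttpCache({ tags: ['products'] })
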
602
+
563
603
  async addLoggerDestination (writableStream) {
564
604
  // Add the stream - We output everything we get
565
605
  this.#loggerDestination.add({ stream: writableStream, level: 1 })
@@ -579,10 +619,149 @@ class Runtime extends EventEmitter {
579
619
  this.on('closed', onClose)
580
620
  }
581
621
 
622
+ async updateSharedContext (options = {}) {
623
+ const { context, overwrite = false } = options
624
+
625
+ const sharedContext = overwrite ? {} : this.#sharedContext
626
+ Object.assign(sharedContext, context)
627
+
628
+ this.#sharedContext = sharedContext
629
+
630
+ const promises = []
631
+ for (const worker of this.#workers.values()) {
632
+ promises.push(sendViaITC(worker, 'setSharedContext', sharedContext))
633
+ }
634
+
635
+ const results = await Promise.allSettled(promises)
636
+ for (const result of results) {
637
+ if (result.status === 'rejected') {
638
+ this.logger.error({ err: result.reason }, 'Cannot update shared context')
639
+ }
640
+ }
641
+
642
+ return sharedContext
643
+ }
644
+
645
+ setApplicationConfigPatch (id, patch) {
646
+ this.#applicationsConfigsPatches.set(id, patch)
647
+ }
648
+
649
+ removeApplicationConfigPatch (id) {
650
+ this.#applicationsConfigsPatches.delete(id)
651
+ }
652
+
653
+ /**
654
+ * Updates the resources of the applications, such as the number of workers and health configurations (e.g., heap memory settings).
655
+ *
656
+ * This function handles three update scenarios for each application:
657
+ * 1. **Updating workers only**: Adjusts the number of workers for the application.
658
+ * 2. **Updating health configurations only**: Updates health parameters like `maxHeapTotal` or `maxYoungGeneration`.
659
+ * 3. **Updating both workers and health configurations**: Scales the workers and also applies health settings.
660
+ *
661
+ * When updating both workers and health:
662
+ * - **Scaling down workers**: Stops extra workers, then restarts the remaining workers with the previous settings.
663
+ * - **Scaling up workers**: Starts new workers with the updated heap settings, then restarts the old workers with the updated settings.
664
+ *
665
+ * Scaling up new resources (workers and/or heap memory) may fail due to insufficient memory; in this case the operation may fail partially or entirely.
666
+ * Scaling down is expected to succeed without issues.
667
+ *
668
+ * @param {Array<Object>} updates - An array of objects that define the updates for each application.
669
+ * @param {string} updates[].application - The ID of the application to update.
670
+ * @param {number} [updates[].workers] - The desired number of workers for the application. If omitted, workers will not be updated.
671
+ * @param {Object} [updates[].health] - The health configuration to update for the application, which may include:
672
+ * @param {string|number} [updates[].health.maxHeapTotal] - The maximum heap memory for the application. Can be a valid memory string (e.g., '1G', '512MB') or a number representing bytes.
673
+ * @param {string|number} [updates[].health.maxYoungGeneration] - The maximum young generation memory for the application. Can be a valid memory string (e.g., '128MB') or a number representing bytes.
674
+ *
675
+ * @returns {Promise<Array<Object>>} - A promise that resolves to an array of reports for each application, detailing the success or failure of the operations:
676
+ * - `application`: The application ID.
677
+ * - `workers`: The workers' update report, including the current and new number of workers, the started workers, and the success status.
678
+ * - `health`: The health update report, showing the current and new heap settings, updated workers, and success status.
679
+ *
680
+ * @example
681
+ * await runtime.updateApplicationsResources([
682
+ * { application: 'application-1', workers: 2, health: { maxHeapTotal: '1G', maxYoungGeneration: '128 MB' } },
683
+ * { application: 'application-2', health: { maxHeapTotal: '1G' } },
684
+ * { application: 'application-3', workers: 2 },
685
+ * ])
686
+ *
687
+ * In this example:
688
+ * - `application-1` will have 2 workers and updated heap memory configurations.
689
+ * - `application-2` will have updated heap memory settings (without changing workers).
690
+ * - `application-3` will have its workers set to 2 but no change in memory settings.
691
+ *
692
+ * @throws {InvalidArgumentError} - Throws if any update parameter is invalid, such as:
693
+ * - Missing application ID.
694
+ * - Invalid worker count (not a positive integer).
695
+ * - Invalid memory size format for `maxHeapTotal` or `maxYoungGeneration`.
696
+ * @throws {ApplicationNotFoundError} - Throws if the specified application ID does not exist in the current application configuration.
697
+ */
698
+ async updateApplicationsResources (updates) {
699
+ if (this.#status === 'stopping' || this.#status === 'closed') {
700
+ this.logger.warn('Cannot update application resources when the runtime is stopping or closed')
701
+ return
702
+ }
703
+
704
+ const ups = await this.#validateUpdateApplicationResources(updates)
705
+ const config = this.#config
706
+
707
+ const report = []
708
+ for (const update of ups) {
709
+ const { applicationId, config: applicationConfig, workers, health, currentWorkers, currentHealth } = update
710
+
711
+ if (workers && health) {
712
+ const r = await this.#updateApplicationWorkersAndHealth(
713
+ applicationId,
714
+ config,
715
+ applicationConfig,
716
+ workers,
717
+ health,
718
+ currentWorkers,
719
+ currentHealth
720
+ )
721
+ report.push({
722
+ application: applicationId,
723
+ workers: r.workers,
724
+ health: r.health
725
+ })
726
+ } else if (health) {
727
+ const r = await this.#updateApplicationHealth(
728
+ applicationId,
729
+ config,
730
+ applicationConfig,
731
+ currentWorkers,
732
+ currentHealth,
733
+ health
734
+ )
735
+ report.push({
736
+ application: applicationId,
737
+ health: r.health
738
+ })
739
+ } else if (workers) {
740
+ const r = await this.#updateApplicationWorkers(
741
+ applicationId,
742
+ config,
743
+ applicationConfig,
744
+ workers,
745
+ currentWorkers
746
+ )
747
+ report.push({
748
+ application: applicationId,
749
+ workers: r.workers
750
+ })
751
+ }
752
+ }
753
+
754
+ return report
755
+ }
756
+
582
757
  async getUrl () {
583
758
  return this.#url
584
759
  }
585
760
 
761
+ getRuntimeStatus () {
762
+ return this.#status
763
+ }
764
+
586
765
  async getRuntimeMetadata () {
587
766
  const packageJson = await this.#getRuntimePackageJson()
588
767
  const entrypointDetails = await this.getEntrypointDetails()
@@ -598,7 +777,7 @@ class Runtime extends EventEmitter {
598
777
  packageName: packageJson.name ?? null,
599
778
  packageVersion: packageJson.version ?? null,
600
779
  url: entrypointDetails?.url ?? null,
601
- platformaticVersion
780
+ platformaticVersion: version
602
781
  }
603
782
  }
604
783
 
@@ -619,145 +798,50 @@ class Runtime extends EventEmitter {
619
798
  return this.#meshInterceptor
620
799
  }
621
800
 
622
- getDispatcher () {
623
- return this.#dispatcher
624
- }
625
-
626
- getManagementApi () {
627
- return this.#managementApi
628
- }
629
-
630
- getManagementApiUrl () {
631
- return this.#managementApi?.server.address()
632
- }
633
-
634
- async getEntrypointDetails () {
635
- return this.getServiceDetails(this.#entrypointId)
636
- }
637
-
638
- async getServices () {
639
- return {
640
- entrypoint: this.#entrypointId,
641
- production: this.#isProduction,
642
- services: await Promise.all(this.#servicesIds.map(id => this.getServiceDetails(id)))
643
- }
644
- }
645
-
646
- async getWorkers () {
647
- const status = {}
648
-
649
- for (const [service, { count }] of Object.entries(this.#workers.configuration)) {
650
- for (let i = 0; i < count; i++) {
651
- const label = `${service}:${i}`
652
- const worker = this.#workers.get(label)
653
-
654
- status[label] = {
655
- service,
656
- worker: i,
657
- status: worker?.[kWorkerStatus] ?? 'exited',
658
- thread: worker?.threadId
659
- }
660
- }
661
- }
662
-
663
- return status
664
- }
665
-
666
- async getCustomHealthChecks () {
667
- const status = {}
668
-
669
- for (const [service, { count }] of Object.entries(this.#workers.configuration)) {
670
- for (let i = 0; i < count; i++) {
671
- const label = `${service}:${i}`
672
- const worker = this.#workers.get(label)
673
-
674
- status[label] = await sendViaITC(worker, 'getCustomHealthCheck')
675
- }
676
- }
677
-
678
- return status
679
- }
680
-
681
- async getCustomReadinessChecks () {
682
- const status = {}
683
-
684
- for (const [service, { count }] of Object.entries(this.#workers.configuration)) {
685
- for (let i = 0; i < count; i++) {
686
- const label = `${service}:${i}`
687
- const worker = this.#workers.get(label)
688
-
689
- status[label] = await sendViaITC(worker, 'getCustomReadinessCheck')
690
- }
691
- }
692
-
693
- return status
694
- }
695
-
696
- async getServiceDetails (id, allowUnloaded = false) {
697
- let service
698
-
699
- try {
700
- service = await this.#getServiceById(id)
701
- } catch (e) {
702
- if (allowUnloaded) {
703
- return { id, status: 'stopped' }
704
- }
705
-
706
- throw e
707
- }
708
-
709
- const { entrypoint, dependencies, localUrl } = service[kConfig]
710
-
711
- const status = await sendViaITC(service, 'getStatus')
712
- const { type, version } = await sendViaITC(service, 'getServiceInfo')
713
-
714
- const serviceDetails = {
715
- id,
716
- type,
717
- status,
718
- version,
719
- localUrl,
720
- entrypoint,
721
- dependencies
722
- }
723
-
724
- if (this.#isProduction) {
725
- serviceDetails.workers = this.#workers.getCount(id)
726
- }
801
+ getDispatcher () {
802
+ return this.#dispatcher
803
+ }
727
804
 
728
- if (entrypoint) {
729
- serviceDetails.url = status === 'started' ? this.#url : null
730
- }
805
+ getManagementApi () {
806
+ return this.#managementApi
807
+ }
731
808
 
732
- return serviceDetails
809
+ getManagementApiUrl () {
810
+ return this.#managementApi?.server.address() ?? null
733
811
  }
734
812
 
735
- async getService (id, ensureStarted = true) {
736
- return this.#getServiceById(id, ensureStarted)
813
+ async getEntrypointDetails () {
814
+ return this.getApplicationDetails(this.#entrypointId)
737
815
  }
738
816
 
739
- async getServiceConfig (id, ensureStarted = true) {
740
- const service = await this.#getServiceById(id, ensureStarted)
817
+ async getCustomHealthChecks () {
818
+ const status = {}
741
819
 
742
- return sendViaITC(service, 'getServiceConfig')
743
- }
820
+ for (const [application, { count }] of Object.entries(this.#workers.configuration)) {
821
+ for (let i = 0; i < count; i++) {
822
+ const label = `${application}:${i}`
823
+ const worker = this.#workers.get(label)
744
824
 
745
- async getServiceEnv (id, ensureStarted = true) {
746
- const service = await this.#getServiceById(id, ensureStarted)
825
+ status[label] = await sendViaITC(worker, 'getCustomHealthCheck')
826
+ }
827
+ }
747
828
 
748
- return sendViaITC(service, 'getServiceEnv')
829
+ return status
749
830
  }
750
831
 
751
- async getServiceOpenapiSchema (id) {
752
- const service = await this.#getServiceById(id, true)
832
+ async getCustomReadinessChecks () {
833
+ const status = {}
753
834
 
754
- return sendViaITC(service, 'getServiceOpenAPISchema')
755
- }
835
+ for (const [application, { count }] of Object.entries(this.#workers.configuration)) {
836
+ for (let i = 0; i < count; i++) {
837
+ const label = `${application}:${i}`
838
+ const worker = this.#workers.get(label)
756
839
 
757
- async getServiceGraphqlSchema (id) {
758
- const service = await this.#getServiceById(id, true)
840
+ status[label] = await sendViaITC(worker, 'getCustomReadinessCheck')
841
+ }
842
+ }
759
843
 
760
- return sendViaITC(service, 'getServiceGraphQLSchema')
844
+ return status
761
845
  }
762
846
 
763
847
  async getMetrics (format = 'json') {
@@ -765,26 +849,30 @@ class Runtime extends EventEmitter {
765
849
 
766
850
  for (const worker of this.#workers.values()) {
767
851
  try {
768
- // The service might be temporarily unavailable
852
+ // The application might be temporarily unavailable
769
853
  if (worker[kWorkerStatus] !== 'started') {
770
854
  continue
771
855
  }
772
856
 
773
- const serviceMetrics = await sendViaITC(worker, 'getMetrics', format)
774
- if (serviceMetrics) {
857
+ const applicationMetrics = await sendViaITC(worker, 'getMetrics', format)
858
+ if (applicationMetrics) {
775
859
  if (metrics === null) {
776
860
  metrics = format === 'json' ? [] : ''
777
861
  }
778
862
 
779
863
  if (format === 'json') {
780
- metrics.push(...serviceMetrics)
864
+ metrics.push(...applicationMetrics)
781
865
  } else {
782
- metrics += serviceMetrics
866
+ metrics += applicationMetrics
783
867
  }
784
868
  }
785
869
  } catch (e) {
786
- // The service exited while we were sending the ITC, skip it
787
- if (e.code === 'PLT_RUNTIME_SERVICE_NOT_STARTED' || e.code === 'PLT_RUNTIME_SERVICE_EXIT') {
870
+ // The application exited while we were sending the ITC, skip it
871
+ if (
872
+ e.code === 'PLT_RUNTIME_APPLICATION_NOT_STARTED' ||
873
+ e.code === 'PLT_RUNTIME_APPLICATION_EXIT' ||
874
+ e.code === 'PLT_RUNTIME_APPLICATION_WORKER_EXIT'
875
+ ) {
788
876
  continue
789
877
  }
790
878
 
@@ -817,7 +905,7 @@ class Runtime extends EventEmitter {
817
905
  'http_request_all_summary_seconds'
818
906
  ]
819
907
 
820
- const servicesMetrics = {}
908
+ const applicationsMetrics = {}
821
909
 
822
910
  for (const metric of metrics) {
823
911
  const { name, values } = metric
@@ -826,15 +914,15 @@ class Runtime extends EventEmitter {
826
914
  if (!values || values.length === 0) continue
827
915
 
828
916
  const labels = values[0].labels
829
- const serviceId = labels?.serviceId
917
+ const applicationId = labels?.applicationId
830
918
 
831
- if (!serviceId) {
832
- throw new Error('Missing serviceId label in metrics')
919
+ if (!applicationId) {
920
+ throw new Error('Missing applicationId label in metrics')
833
921
  }
834
922
 
835
- let serviceMetrics = servicesMetrics[serviceId]
836
- if (!serviceMetrics) {
837
- serviceMetrics = {
923
+ let applicationMetrics = applicationsMetrics[applicationId]
924
+ if (!applicationMetrics) {
925
+ applicationMetrics = {
838
926
  cpu: 0,
839
927
  rss: 0,
840
928
  totalHeapSize: 0,
@@ -849,45 +937,45 @@ class Runtime extends EventEmitter {
849
937
  p99: 0
850
938
  }
851
939
  }
852
- servicesMetrics[serviceId] = serviceMetrics
940
+ applicationsMetrics[applicationId] = applicationMetrics
853
941
  }
854
942
 
855
- parsePromMetric(serviceMetrics, metric)
943
+ parsePromMetric(applicationMetrics, metric)
856
944
  }
857
945
 
858
- function parsePromMetric (serviceMetrics, promMetric) {
946
+ function parsePromMetric (applicationMetrics, promMetric) {
859
947
  const { name } = promMetric
860
948
 
861
949
  if (name === 'process_cpu_percent_usage') {
862
- serviceMetrics.cpu = promMetric.values[0].value
950
+ applicationMetrics.cpu = promMetric.values[0].value
863
951
  return
864
952
  }
865
953
  if (name === 'process_resident_memory_bytes') {
866
- serviceMetrics.rss = promMetric.values[0].value
954
+ applicationMetrics.rss = promMetric.values[0].value
867
955
  return
868
956
  }
869
957
  if (name === 'nodejs_heap_size_total_bytes') {
870
- serviceMetrics.totalHeapSize = promMetric.values[0].value
958
+ applicationMetrics.totalHeapSize = promMetric.values[0].value
871
959
  return
872
960
  }
873
961
  if (name === 'nodejs_heap_size_used_bytes') {
874
- serviceMetrics.usedHeapSize = promMetric.values[0].value
962
+ applicationMetrics.usedHeapSize = promMetric.values[0].value
875
963
  return
876
964
  }
877
965
  if (name === 'nodejs_heap_space_size_total_bytes') {
878
966
  const newSpaceSize = promMetric.values.find(value => value.labels.space === 'new')
879
967
  const oldSpaceSize = promMetric.values.find(value => value.labels.space === 'old')
880
968
 
881
- serviceMetrics.newSpaceSize = newSpaceSize.value
882
- serviceMetrics.oldSpaceSize = oldSpaceSize.value
969
+ applicationMetrics.newSpaceSize = newSpaceSize.value
970
+ applicationMetrics.oldSpaceSize = oldSpaceSize.value
883
971
  return
884
972
  }
885
973
  if (name === 'nodejs_eventloop_utilization') {
886
- serviceMetrics.elu = promMetric.values[0].value
974
+ applicationMetrics.elu = promMetric.values[0].value
887
975
  return
888
976
  }
889
977
  if (name === 'http_request_all_summary_seconds') {
890
- serviceMetrics.latency = {
978
+ applicationMetrics.latency = {
891
979
  p50: promMetric.values.find(value => value.labels.quantile === 0.5)?.value || 0,
892
980
  p90: promMetric.values.find(value => value.labels.quantile === 0.9)?.value || 0,
893
981
  p95: promMetric.values.find(value => value.labels.quantile === 0.95)?.value || 0,
@@ -899,7 +987,7 @@ class Runtime extends EventEmitter {
899
987
  return {
900
988
  version: 1,
901
989
  date: new Date().toISOString(),
902
- services: servicesMetrics
990
+ applications: applicationsMetrics
903
991
  }
904
992
  } catch (err) {
905
993
  // If any metric is missing, return nothing
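
Given the fields accumulated above, the object returned by `getFormattedMetrics` now groups per-application metrics under `applications` (previously `services`). A shape sketch assembled from the code above, with illustrative values:

const formatted = {
  version: 1,
  date: new Date().toISOString(),
  applications: {
    main: {
      cpu: 12.5,
      rss: 104857600,
      totalHeapSize: 52428800,
      usedHeapSize: 31457280,
      newSpaceSize: 1048576,
      oldSpaceSize: 20971520,
      elu: 0.42,
      latency: { p50: 0.004, p90: 0.012, p95: 0.02, p99: 0.05 }
    }
  }
}
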
@@ -909,123 +997,155 @@ class Runtime extends EventEmitter {
909
997
  }
910
998
  }
911
999
 
912
- async getServiceMeta (id) {
913
- const service = await this.#getServiceById(id)
1000
+ getSharedContext () {
1001
+ return this.#sharedContext
1002
+ }
1003
+
1004
+ async getApplicationResourcesInfo (id) {
1005
+ const workers = this.#workers.getCount(id)
914
1006
 
915
- try {
916
- return await sendViaITC(service, 'getServiceMeta')
917
- } catch (e) {
918
- // The service exports no meta, return an empty object
919
- if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
920
- return {}
921
- }
1007
+ const worker = await this.#getWorkerById(id, 0, false, false)
1008
+ const health = worker[kConfig].health
922
1009
 
923
- throw e
924
- }
1010
+ return { workers, health }
925
1011
  }
926
1012
 
927
- setServiceConfigPatch (id, patch) {
928
- this.servicesConfigsPatches.set(id, patch)
1013
+ getApplicationsIds () {
1014
+ return this.#config.applications.map(application => application.id)
929
1015
  }
930
1016
 
931
- removeServiceConfigPatch (id) {
932
- this.servicesConfigsPatches.delete(id)
1017
+ async getApplications () {
1018
+ return {
1019
+ entrypoint: this.#entrypointId,
1020
+ production: this.#isProduction,
1021
+ applications: await Promise.all(this.getApplicationsIds().map(id => this.getApplicationDetails(id)))
1022
+ }
933
1023
  }
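
Combined with `getApplicationDetails` further down, the `getApplications` payload looks roughly like this (a sketch with illustrative values; `workers` is only included in production and `url` only for a started entrypoint):

const applications = {
  entrypoint: 'main',
  production: false,
  applications: [
    {
      id: 'main',
      type: 'nodejs',              // whatever getApplicationInfo reports for the application
      status: 'started',
      version: '1.0.0',
      localUrl: 'http://main.plt.local',
      entrypoint: true,
      dependencies: [],
      url: 'http://127.0.0.1:3042'
    }
  ]
}
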
934
1024
 
935
- #getHttpCacheValue ({ request }) {
936
- if (!this.#sharedHttpCache) {
937
- return
1025
+ async getWorkers () {
1026
+ const status = {}
1027
+
1028
+ for (const [application, { count }] of Object.entries(this.#workers.configuration)) {
1029
+ for (let i = 0; i < count; i++) {
1030
+ const label = `${application}:${i}`
1031
+ const worker = this.#workers.get(label)
1032
+
1033
+ status[label] = {
1034
+ application,
1035
+ worker: i,
1036
+ status: worker?.[kWorkerStatus] ?? 'exited',
1037
+ thread: worker?.threadId
1038
+ }
1039
+ }
938
1040
  }
939
1041
 
940
- return this.#sharedHttpCache.getValue(request)
1042
+ return status
941
1043
  }
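
`getWorkers` keys its result by `<application>:<index>` labels, reporting each worker's status and thread id (or `'exited'` with no thread when the worker is gone). A sketch with illustrative values:

const workers = {
  'main:0': { application: 'main', worker: 0, status: 'started', thread: 2 },
  'main:1': { application: 'main', worker: 1, status: 'exited', thread: undefined }
}
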
942
1044
 
943
- #setHttpCacheValue ({ request, response, payload }) {
944
- if (!this.#sharedHttpCache) {
945
- return
946
- }
1045
+ async getApplicationMeta (id) {
1046
+ const application = await this.#getApplicationById(id)
947
1047
 
948
- return this.#sharedHttpCache.setValue(request, response, payload)
1048
+ try {
1049
+ return await sendViaITC(application, 'getApplicationMeta')
1050
+ } catch (e) {
1051
+ // The application exports no meta, return an empty object
1052
+ if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
1053
+ return {}
1054
+ }
1055
+
1056
+ throw e
1057
+ }
949
1058
  }
950
1059
 
951
- #deleteHttpCacheValue ({ request }) {
952
- if (!this.#sharedHttpCache) {
953
- return
1060
+ async getApplicationDetails (id, allowUnloaded = false) {
1061
+ let application
1062
+
1063
+ try {
1064
+ application = await this.#getApplicationById(id)
1065
+ } catch (e) {
1066
+ if (allowUnloaded) {
1067
+ return { id, status: 'stopped' }
1068
+ }
1069
+
1070
+ throw e
954
1071
  }
955
1072
 
956
- return this.#sharedHttpCache.delete(request)
957
- }
1073
+ const { entrypoint, dependencies, localUrl } = application[kConfig]
958
1074
 
959
- invalidateHttpCache (options = {}) {
960
- const { keys, tags } = options
1075
+ const status = await sendViaITC(application, 'getStatus')
1076
+ const { type, version } = await sendViaITC(application, 'getApplicationInfo')
961
1077
 
962
- if (!this.#sharedHttpCache) {
963
- return
1078
+ const applicationDetails = {
1079
+ id,
1080
+ type,
1081
+ status,
1082
+ version,
1083
+ localUrl,
1084
+ entrypoint,
1085
+ dependencies
964
1086
  }
965
1087
 
966
- const promises = []
967
- if (keys && keys.length > 0) {
968
- promises.push(this.#sharedHttpCache.deleteKeys(keys))
1088
+ if (this.#isProduction) {
1089
+ applicationDetails.workers = this.#workers.getCount(id)
969
1090
  }
970
1091
 
971
- if (tags && tags.length > 0) {
972
- promises.push(this.#sharedHttpCache.deleteTags(tags))
1092
+ if (entrypoint) {
1093
+ applicationDetails.url = status === 'started' ? this.#url : null
973
1094
  }
974
1095
 
975
- return Promise.all(promises)
1096
+ return applicationDetails
976
1097
  }
977
1098
 
978
- async sendCommandToService (id, name, message) {
979
- const service = await this.#getServiceById(id)
1099
+ async getApplication (id, ensureStarted = true) {
1100
+ return this.#getApplicationById(id, ensureStarted)
1101
+ }
980
1102
 
981
- try {
982
- return await sendViaITC(service, name, message)
983
- } catch (e) {
984
- // The service exports no meta, return an empty object
985
- if (e.code === 'PLT_ITC_HANDLER_NOT_FOUND') {
986
- return {}
987
- }
1103
+ async getApplicationConfig (id, ensureStarted = true) {
1104
+ const application = await this.#getApplicationById(id, ensureStarted)
988
1105
 
989
- throw e
990
- }
1106
+ return sendViaITC(application, 'getApplicationConfig')
991
1107
  }
992
1108
 
993
- emit (event, payload) {
994
- for (const worker of this.#workers.values()) {
995
- if (worker[kForwardEvents]) {
996
- worker[kITC].notify('runtime:event', { event, payload })
997
- }
998
- }
1109
+ async getApplicationEnv (id, ensureStarted = true) {
1110
+ const application = await this.#getApplicationById(id, ensureStarted)
999
1111
 
1000
- this.logger.trace({ event, payload }, 'Runtime event')
1001
- return super.emit(event, payload)
1112
+ return sendViaITC(application, 'getApplicationEnv')
1002
1113
  }
1003
1114
 
1004
- async updateSharedContext (options = {}) {
1005
- const { context, overwrite = false } = options
1115
+ async getApplicationOpenapiSchema (id) {
1116
+ const application = await this.#getApplicationById(id, true)
1006
1117
 
1007
- const sharedContext = overwrite ? {} : this.#sharedContext
1008
- Object.assign(sharedContext, context)
1118
+ return sendViaITC(application, 'getApplicationOpenAPISchema')
1119
+ }
1009
1120
 
1010
- this.#sharedContext = sharedContext
1121
+ async getApplicationGraphqlSchema (id) {
1122
+ const application = await this.#getApplicationById(id, true)
1011
1123
 
1012
- const promises = []
1013
- for (const worker of this.#workers.values()) {
1014
- promises.push(sendViaITC(worker, 'setSharedContext', sharedContext))
1124
+ return sendViaITC(application, 'getApplicationGraphQLSchema')
1125
+ }
1126
+
1127
+ #getHttpCacheValue ({ request }) {
1128
+ if (!this.#sharedHttpCache) {
1129
+ return
1015
1130
  }
1016
1131
 
1017
- const results = await Promise.allSettled(promises)
1018
- for (const result of results) {
1019
- if (result.status === 'rejected') {
1020
- this.logger.error({ err: result.reason }, 'Cannot update shared context')
1021
- }
1132
+ return this.#sharedHttpCache.getValue(request)
1133
+ }
1134
+
1135
+ #setHttpCacheValue ({ request, response, payload }) {
1136
+ if (!this.#sharedHttpCache) {
1137
+ return
1022
1138
  }
1023
1139
 
1024
- return sharedContext
1140
+ return this.#sharedHttpCache.setValue(request, response, payload)
1025
1141
  }
1026
1142
 
1027
- getSharedContext () {
1028
- return this.#sharedContext
1143
+ #deleteHttpCacheValue ({ request }) {
1144
+ if (!this.#sharedHttpCache) {
1145
+ return
1146
+ }
1147
+
1148
+ return this.#sharedHttpCache.delete(request)
1029
1149
  }
1030
1150
 
1031
1151
  async #setDispatcher (undiciConfig) {
@@ -1055,23 +1175,23 @@ class Runtime extends EventEmitter {
1055
1175
  this.logger.info(`Platformatic is now listening at ${this.#url}`)
1056
1176
  }
1057
1177
 
1058
- async #setupService (serviceConfig) {
1178
+ async #setupApplication (applicationConfig) {
1059
1179
  if (this.#status === 'stopping' || this.#status === 'closed') return
1060
1180
 
1061
1181
  const config = this.#config
1062
- const workersCount = await this.#workers.getCount(serviceConfig.id)
1063
- const id = serviceConfig.id
1182
+ const workersCount = await this.#workers.getCount(applicationConfig.id)
1183
+ const id = applicationConfig.id
1064
1184
 
1065
1185
  for (let i = 0; i < workersCount; i++) {
1066
- await this.#setupWorker(config, serviceConfig, workersCount, id, i)
1186
+ await this.#setupWorker(config, applicationConfig, workersCount, id, i)
1067
1187
  }
1068
1188
 
1069
- this.emit('service:init', id)
1189
+ this.emit('application:init', id)
1070
1190
  }
1071
1191
 
1072
- async #setupWorker (config, serviceConfig, workersCount, serviceId, index, enabled = true) {
1192
+ async #setupWorker (config, applicationConfig, workersCount, applicationId, index, enabled = true) {
1073
1193
  const { restartOnError } = config
1074
- const workerId = `${serviceId}:${index}`
1194
+ const workerId = `${applicationId}:${index}`
1075
1195
 
1076
1196
  // Handle inspector
1077
1197
  let inspectorOptions
@@ -1085,36 +1205,40 @@ class Runtime extends EventEmitter {
1085
1205
  }
1086
1206
 
1087
1207
  if (config.telemetry) {
1088
- serviceConfig.telemetry = {
1208
+ applicationConfig.telemetry = {
1089
1209
  ...config.telemetry,
1090
- ...serviceConfig.telemetry,
1091
- serviceName: `${config.telemetry.serviceName}-${serviceConfig.id}`
1210
+ ...applicationConfig.telemetry,
1211
+ applicationName: `${config.telemetry.applicationName}-${applicationConfig.id}`
1092
1212
  }
1093
1213
  }
1094
1214
 
1095
- const errorLabel = this.#workerExtendedLabel(serviceId, index, workersCount)
1096
- const health = deepmerge(config.health ?? {}, serviceConfig.health ?? {})
1215
+ const errorLabel = this.#workerExtendedLabel(applicationId, index, workersCount)
1216
+ const health = deepmerge(config.health ?? {}, applicationConfig.health ?? {})
1097
1217
 
1098
1218
  const execArgv = []
1099
1219
 
1100
- if (!serviceConfig.skipTelemetryHooks && config.telemetry && config.telemetry.enabled !== false) {
1220
+ if (!applicationConfig.skipTelemetryHooks && config.telemetry && config.telemetry.enabled !== false) {
1221
+ const require = createRequire(import.meta.url)
1222
+ const telemetryPath = require.resolve('@platformatic/telemetry')
1223
+ const openTelemetrySetupPath = join(telemetryPath, '..', 'lib', 'node-telemetry.js')
1101
1224
  const hookUrl = pathToFileURL(require.resolve('@opentelemetry/instrumentation/hook.mjs'))
1225
+
1102
1226
  // We need the following because otherwise some open telemetry instrumentations won't work with ESM (like express)
1103
1227
  // see: https://github.com/open-telemetry/opentelemetry-js/blob/main/doc/esm-support.md#instrumentation-hook-required-for-esm
1104
1228
  execArgv.push('--import', `data:text/javascript, import { register } from 'node:module'; register('${hookUrl}')`)
1105
1229
  execArgv.push('--import', pathToFileURL(openTelemetrySetupPath))
1106
1230
  }
1107
1231
 
1108
- if ((serviceConfig.sourceMaps ?? config.sourceMaps) === true) {
1232
+ if ((applicationConfig.sourceMaps ?? config.sourceMaps) === true) {
1109
1233
  execArgv.push('--enable-source-maps')
1110
1234
  }
1111
1235
 
1112
1236
  const workerEnv = structuredClone(this.#env)
1113
1237
 
1114
- if (serviceConfig.nodeOptions?.trim().length > 0) {
1238
+ if (applicationConfig.nodeOptions?.trim().length > 0) {
1115
1239
  const originalNodeOptions = workerEnv['NODE_OPTIONS'] ?? ''
1116
1240
 
1117
- workerEnv['NODE_OPTIONS'] = `${originalNodeOptions} ${serviceConfig.nodeOptions}`.trim()
1241
+ workerEnv['NODE_OPTIONS'] = `${originalNodeOptions} ${applicationConfig.nodeOptions}`.trim()
1118
1242
  }
1119
1243
 
1120
1244
  const maxHeapTotal =
@@ -1132,10 +1256,10 @@ class Runtime extends EventEmitter {
1132
1256
  const worker = new Worker(kWorkerFile, {
1133
1257
  workerData: {
1134
1258
  config,
1135
- serviceConfig: {
1136
- ...serviceConfig,
1259
+ applicationConfig: {
1260
+ ...applicationConfig,
1137
1261
  isProduction: this.#isProduction,
1138
- configPatch: this.servicesConfigsPatches.get(serviceId)
1262
+ configPatch: this.#applicationsConfigsPatches.get(applicationId)
1139
1263
  },
1140
1264
  worker: {
1141
1265
  id: workerId,
@@ -1145,7 +1269,7 @@ class Runtime extends EventEmitter {
1145
1269
  inspectorOptions,
1146
1270
  dirname: this.#root
1147
1271
  },
1148
- argv: serviceConfig.arguments,
1272
+ argv: applicationConfig.arguments,
1149
1273
  execArgv,
1150
1274
  env: workerEnv,
1151
1275
  resourceLimits: {
@@ -1156,13 +1280,13 @@ class Runtime extends EventEmitter {
1156
1280
  stderr: true
1157
1281
  })
1158
1282
 
1159
- this.#handleWorkerStandardStreams(worker, serviceId, workersCount > 1 ? index : undefined)
1283
+ this.#handleWorkerStandardStreams(worker, applicationId, workersCount > 1 ? index : undefined)
1160
1284
 
1161
1285
  // Make sure the listener can handle a lot of API requests at once before raising a warning
1162
1286
  worker.setMaxListeners(1e3)
1163
1287
 
1164
- // Track service exiting
1165
- const eventPayload = { service: serviceId, worker: index, workersCount }
1288
+ // Track application exiting
1289
+ const eventPayload = { application: applicationId, worker: index, workersCount }
1166
1290
  worker.once('exit', code => {
1167
1291
  if (worker[kWorkerStatus] === 'exited') {
1168
1292
  return
@@ -1170,7 +1294,7 @@ class Runtime extends EventEmitter {
1170
1294
 
1171
1295
  const started = worker[kWorkerStatus] === 'started'
1172
1296
  worker[kWorkerStatus] = 'exited'
1173
- this.emit('service:worker:exited', eventPayload)
1297
+ this.emit('application:worker:exited', eventPayload)
1174
1298
 
1175
1299
  this.#cleanupWorker(worker)
1176
1300
 
@@ -1181,13 +1305,13 @@ class Runtime extends EventEmitter {
1181
1305
  // Wait for the next tick so that crashed from the thread are logged first
1182
1306
  setImmediate(() => {
1183
1307
  if (started && (!config.watch || code !== 0)) {
1184
- this.emit('service:worker:error', { ...eventPayload, code })
1308
+ this.emit('application:worker:error', { ...eventPayload, code })
1185
1309
  this.#broadcastWorkers()
1186
1310
 
1187
1311
  this.logger.warn(`The ${errorLabel} unexpectedly exited with code ${code}.`)
1188
1312
  }
1189
1313
 
1190
- // Restart the service if it was started
1314
+ // Restart the application if it was started
1191
1315
  if (started && this.#status === 'started') {
1192
1316
  if (restartOnError > 0) {
1193
1317
  if (restartOnError < IMMEDIATE_RESTART_MAX_THRESHOLD) {
@@ -1196,20 +1320,22 @@ class Runtime extends EventEmitter {
1196
1320
  this.logger.warn(`The ${errorLabel} will be restarted in ${restartOnError}ms ...`)
1197
1321
  }
1198
1322
 
1199
- this.#restartCrashedWorker(config, serviceConfig, workersCount, serviceId, index, false, 0).catch(err => {
1200
- this.logger.error({ err: ensureLoggableError(err) }, `${errorLabel} could not be restarted.`)
1201
- })
1323
+ this.#restartCrashedWorker(config, applicationConfig, workersCount, applicationId, index, false, 0).catch(
1324
+ err => {
1325
+ this.logger.error({ err: ensureLoggableError(err) }, `${errorLabel} could not be restarted.`)
1326
+ }
1327
+ )
1202
1328
  } else {
1203
- this.emit('service:worker:unvailable', eventPayload)
1329
+ this.emit('application:worker:unvailable', eventPayload)
1204
1330
  this.logger.warn(`The ${errorLabel} is no longer available.`)
1205
1331
  }
1206
1332
  }
1207
1333
  })
1208
1334
  })
1209
1335
 
1210
- worker[kId] = workersCount > 1 ? workerId : serviceId
1336
+ worker[kId] = workersCount > 1 ? workerId : applicationId
1211
1337
  worker[kFullId] = workerId
1212
- worker[kServiceId] = serviceId
1338
+ worker[kApplicationId] = applicationId
1213
1339
  worker[kWorkerId] = workersCount > 1 ? index : undefined
1214
1340
  worker[kWorkerStatus] = 'boot'
1215
1341
  worker[kForwardEvents] = false
@@ -1217,7 +1343,7 @@ class Runtime extends EventEmitter {
1217
1343
  if (inspectorOptions) {
1218
1344
  worker[kInspectorOptions] = {
1219
1345
  port: inspectorOptions.port,
1220
- id: serviceId,
1346
+ id: applicationId,
1221
1347
  dirname: this.#root
1222
1348
  }
1223
1349
  }
@@ -1237,30 +1363,30 @@ class Runtime extends EventEmitter {
1237
1363
 
1238
1364
  // Forward events from the worker
1239
1365
  worker[kITC].on('event', ({ event, payload }) => {
1240
- this.emit(`service:worker:event:${event}`, { ...eventPayload, payload })
1366
+ this.emit(`application:worker:event:${event}`, { ...eventPayload, payload })
1241
1367
  })
1242
1368
 
1243
1369
  // Only activate watch for the first instance
1244
1370
  if (index === 0) {
1245
- // Handle services changes
1371
+ // Handle applications changes
1246
1372
  // This is purposely not activated only when this.#config.watch === true
1247
- // so that services can eventually manually trigger a restart. This mechanism is current
1248
- // used by the composer.
1373
+ // so that applications can eventually manually trigger a restart. This mechanism is currently
1374
+ // used by the gateway.
1249
1375
  worker[kITC].on('changed', async () => {
1250
- this.emit('service:worker:changed', eventPayload)
1376
+ this.emit('application:worker:changed', eventPayload)
1251
1377
 
1252
1378
  try {
1253
1379
  const wasStarted = worker[kWorkerStatus].startsWith('start')
1254
- await this.stopService(serviceId)
1380
+ await this.stopApplication(applicationId)
1255
1381
 
1256
1382
  if (wasStarted) {
1257
- await this.startService(serviceId)
1383
+ await this.startApplication(applicationId)
1258
1384
  }
1259
1385
 
1260
- this.logger.info(`The service "${serviceId}" has been successfully reloaded ...`)
1261
- this.emit('service:worker:reloaded', eventPayload)
1386
+ this.logger.info(`The application "${applicationId}" has been successfully reloaded ...`)
1387
+ this.emit('application:worker:reloaded', eventPayload)
1262
1388
 
1263
- if (serviceConfig.entrypoint) {
1389
+ if (applicationConfig.entrypoint) {
1264
1390
  this.#showUrl()
1265
1391
  }
1266
1392
  } catch (e) {
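The hunk above renames the worker lifecycle events from the `service:worker:*` prefix to `application:worker:*` and keeps forwarding custom worker events under `application:worker:event:<name>`. Since `Runtime` extends `EventEmitter`, listeners migrate mechanically; a minimal sketch using only payload fields visible in this diff (the `metrics` event name is a hypothetical example of a forwarded worker event):

    // Sketch: subscribing to the renamed events on an existing Runtime instance.
    function observeWorkers (runtime) {
      runtime.on('application:worker:changed', ({ application, worker }) => {
        console.log(`worker ${worker} of "${application}" reported a change`)
      })

      runtime.on('application:worker:reloaded', ({ application, worker }) => {
        console.log(`worker ${worker} of "${application}" was reloaded`)
      })

      // Events emitted by a worker are re-emitted with this prefix and the original payload.
      runtime.on('application:worker:event:metrics', ({ application, worker, payload }) => {
        console.log(`metrics from "${application}" worker ${worker}`, payload)
      })
    }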
@@ -1274,27 +1400,21 @@ class Runtime extends EventEmitter {
1274
1400
  this.#workers.set(workerId, worker)
1275
1401
 
1276
1402
  // Setup the interceptor
1277
- this.#meshInterceptor.route(serviceId, worker)
1403
+ this.#meshInterceptor.route(applicationId, worker)
1278
1404
  }
1279
1405
 
1280
1406
  // Store dependencies
1281
1407
  const [{ dependencies }] = await waitEventFromITC(worker, 'init')
1408
+ applicationConfig.dependencies = dependencies
1282
1409
 
1283
- if (serviceConfig.entrypoint) {
1284
- this.#entrypointId = serviceId
1285
- }
1286
-
1287
- serviceConfig.dependencies = dependencies
1288
- for (const { envVar, url } of dependencies) {
1289
- if (envVar) {
1290
- serviceConfig.localServiceEnvVars.set(envVar, url)
1291
- }
1410
+ if (applicationConfig.entrypoint) {
1411
+ this.#entrypointId = applicationId
1292
1412
  }
1293
1413
 
1294
1414
  // This must be done here as the dependencies are filled above
1295
- worker[kConfig] = { ...serviceConfig, health, workers: workersCount }
1415
+ worker[kConfig] = { ...applicationConfig, health, workers: workersCount }
1296
1416
  worker[kWorkerStatus] = 'init'
1297
- this.emit('service:worker:init', eventPayload)
1417
+ this.emit('application:worker:init', eventPayload)
1298
1418
 
1299
1419
  return worker
1300
1420
  }
@@ -1312,7 +1432,7 @@ class Runtime extends EventEmitter {
1312
1432
  return health
1313
1433
  }
1314
1434
 
1315
- #setupHealthCheck (config, serviceConfig, workersCount, id, index, worker, errorLabel, timeout) {
1435
+ #setupHealthCheck (config, applicationConfig, workersCount, id, index, worker, errorLabel, timeout) {
1316
1436
  // Clear the timeout when exiting
1317
1437
  worker.on('exit', () => clearTimeout(worker[kHealthCheckTimer]))
1318
1438
 
@@ -1338,9 +1458,9 @@ class Runtime extends EventEmitter {
1338
1458
  health = { elu: -1, heapUsed: -1, heapTotal: -1 }
1339
1459
  }
1340
1460
 
1341
- this.emit('service:worker:health', {
1461
+ this.emit('application:worker:health', {
1342
1462
  id: worker[kId],
1343
- service: id,
1463
+ application: id,
1344
1464
  worker: index,
1345
1465
  currentHealth: health,
1346
1466
  unhealthy,
@@ -1367,14 +1487,14 @@ class Runtime extends EventEmitter {
1367
1487
 
1368
1488
  if (unhealthyChecks === maxUnhealthyChecks) {
1369
1489
  try {
1370
- this.emit('service:worker:unhealthy', { service: id, worker: index })
1490
+ this.emit('application:worker:unhealthy', { application: id, worker: index })
1371
1491
 
1372
1492
  this.logger.error(
1373
1493
  { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
1374
1494
  `The ${errorLabel} is unhealthy. Replacing it ...`
1375
1495
  )
1376
1496
 
1377
- await this.#replaceWorker(config, serviceConfig, workersCount, id, index, worker)
1497
+ await this.#replaceWorker(config, applicationConfig, workersCount, id, index, worker)
1378
1498
  } catch (e) {
1379
1499
  this.logger.error(
1380
1500
  { elu: health.elu, maxELU, memoryUsage: health.heapUsed, maxMemoryUsage: maxHeapUsed },
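The health check above compares each worker's event-loop utilisation and heap usage against per-application thresholds and replaces the worker once `maxUnhealthyChecks` consecutive checks fail. A sketch of the `health` fragment these checks read; the field names appear in this diff, but the values and units shown are assumptions:

    // Illustrative only: per-application health thresholds used by the checks above.
    const applicationConfig = {
      id: 'main',
      health: {
        maxELU: 0.99,               // event-loop utilisation threshold
        maxHeapUsed: 0.99,          // heap-used threshold (assumed to be a ratio)
        maxHeapTotal: '1G',         // parsed with parseMemorySize elsewhere in this file
        maxYoungGeneration: '128MB',
        maxUnhealthyChecks: 10      // consecutive failed checks before the worker is replaced
      }
    }

    console.log(applicationConfig.health)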
@@ -1391,7 +1511,7 @@ class Runtime extends EventEmitter {
1391
1511
 
1392
1512
  async #startWorker (
1393
1513
  config,
1394
- serviceConfig,
1514
+ applicationConfig,
1395
1515
  workersCount,
1396
1516
  id,
1397
1517
  index,
@@ -1410,16 +1530,16 @@ class Runtime extends EventEmitter {
1410
1530
  worker = await this.#getWorkerById(id, index, false, false)
1411
1531
  }
1412
1532
 
1413
- const eventPayload = { service: id, worker: index, workersCount }
1533
+ const eventPayload = { application: id, worker: index, workersCount }
1414
1534
 
1415
- // The service was stopped, recreate the thread
1535
+ // The application was stopped, recreate the thread
1416
1536
  if (!worker) {
1417
- await this.#setupService(serviceConfig, index)
1537
+ await this.#setupApplication(applicationConfig, index)
1418
1538
  worker = await this.#getWorkerById(id, index)
1419
1539
  }
1420
1540
 
1421
1541
  worker[kWorkerStatus] = 'starting'
1422
- this.emit('service:worker:starting', eventPayload)
1542
+ this.emit('application:worker:starting', eventPayload)
1423
1543
 
1424
1544
  try {
1425
1545
  let workerUrl
@@ -1427,10 +1547,10 @@ class Runtime extends EventEmitter {
1427
1547
  workerUrl = await executeWithTimeout(sendViaITC(worker, 'start'), config.startTimeout)
1428
1548
 
1429
1549
  if (workerUrl === kTimeout) {
1430
- this.emit('service:worker:startTimeout', eventPayload)
1550
+ this.emit('application:worker:startTimeout', eventPayload)
1431
1551
  this.logger.info(`The ${label} failed to start in ${config.startTimeout}ms. Forcefully killing the thread.`)
1432
1552
  worker.terminate()
1433
- throw new errors.ServiceStartTimeoutError(id, config.startTimeout)
1553
+ throw new ApplicationStartTimeoutError(id, config.startTimeout)
1434
1554
  }
1435
1555
  } else {
1436
1556
  workerUrl = await sendViaITC(worker, 'start')
@@ -1443,7 +1563,7 @@ class Runtime extends EventEmitter {
1443
1563
  }
1444
1564
 
1445
1565
  worker[kWorkerStatus] = 'started'
1446
- this.emit('service:worker:started', eventPayload)
1566
+ this.emit('application:worker:started', eventPayload)
1447
1567
  this.#broadcastWorkers()
1448
1568
 
1449
1569
  if (!silent) {
@@ -1456,7 +1576,7 @@ class Runtime extends EventEmitter {
1456
1576
  // however, the health event will start when the worker is started
1457
1577
  this.#setupHealthCheck(
1458
1578
  config,
1459
- serviceConfig,
1579
+ applicationConfig,
1460
1580
  workersCount,
1461
1581
  id,
1462
1582
  index,
@@ -1474,20 +1594,20 @@ class Runtime extends EventEmitter {
1474
1594
  this.#cleanupWorker(worker)
1475
1595
 
1476
1596
  if (worker[kWorkerStatus] !== 'exited') {
1477
- // This prevent the exit handler to restart service
1597
+ // This prevents the exit handler from restarting the application
1478
1598
  worker[kWorkerStatus] = 'exited'
1479
1599
 
1480
1600
  // Wait for the worker to exit gracefully, otherwise we terminate it
1481
- const waitTimeout = await executeWithTimeout(once(worker, 'exit'), config.gracefulShutdown.service)
1601
+ const waitTimeout = await executeWithTimeout(once(worker, 'exit'), config.gracefulShutdown.application)
1482
1602
 
1483
1603
  if (waitTimeout === kTimeout) {
1484
1604
  await worker.terminate()
1485
1605
  }
1486
1606
  }
1487
1607
 
1488
- this.emit('service:worker:start:error', { ...eventPayload, error })
1608
+ this.emit('application:worker:start:error', { ...eventPayload, error })
1489
1609
 
1490
- if (error.code !== 'PLT_RUNTIME_SERVICE_START_TIMEOUT') {
1610
+ if (error.code !== 'PLT_RUNTIME_APPLICATION_START_TIMEOUT') {
1491
1611
  this.logger.error({ err: ensureLoggableError(error) }, `Failed to start ${label}.`)
1492
1612
  }
1493
1613
 
@@ -1512,7 +1632,7 @@ class Runtime extends EventEmitter {
1512
1632
  )
1513
1633
  }
1514
1634
 
1515
- await this.#restartCrashedWorker(config, serviceConfig, workersCount, id, index, silent, bootstrapAttempt)
1635
+ await this.#restartCrashedWorker(config, applicationConfig, workersCount, id, index, silent, bootstrapAttempt)
1516
1636
  }
1517
1637
  }
1518
1638
 
@@ -1530,11 +1650,11 @@ class Runtime extends EventEmitter {
1530
1650
  return this.#discardWorker(worker)
1531
1651
  }
1532
1652
 
1533
- const eventPayload = { service: id, worker: index, workersCount }
1653
+ const eventPayload = { application: id, worker: index, workersCount }
1534
1654
 
1535
1655
  worker[kWorkerStatus] = 'stopping'
1536
1656
  worker[kITC].removeAllListeners('changed')
1537
- this.emit('service:worker:stopping', eventPayload)
1657
+ this.emit('application:worker:stopping', eventPayload)
1538
1658
 
1539
1659
  const label = this.#workerExtendedLabel(id, index, workersCount)
1540
1660
 
@@ -1549,7 +1669,7 @@ class Runtime extends EventEmitter {
1549
1669
  try {
1550
1670
  await executeWithTimeout(sendViaITC(worker, 'stop'), exitTimeout)
1551
1671
  } catch (error) {
1552
- this.emit('service:worker:stop:timeout', eventPayload)
1672
+ this.emit('application:worker:stop:timeout', eventPayload)
1553
1673
  this.logger.info({ error: ensureLoggableError(error) }, `Failed to stop ${label}. Killing a worker thread.`)
1554
1674
  } finally {
1555
1675
  worker[kITC].close()
@@ -1559,19 +1679,19 @@ class Runtime extends EventEmitter {
1559
1679
  this.logger.info(`Stopped the ${label}...`)
1560
1680
  }
1561
1681
 
1562
- // Wait for the worker thread to finish, we're going to create a new one if the service is ever restarted
1682
+ // Wait for the worker thread to finish, we're going to create a new one if the application is ever restarted
1563
1683
  const res = await executeWithTimeout(exitPromise, exitTimeout)
1564
1684
 
1565
1685
  // If the worker didn't exit in time, kill it
1566
1686
  if (res === kTimeout) {
1567
- this.emit('service:worker:exit:timeout', eventPayload)
1687
+ this.emit('application:worker:exit:timeout', eventPayload)
1568
1688
  await worker.terminate()
1569
1689
  }
1570
1690
 
1571
1691
  await this.#avoidOutOfOrderThreadLogs()
1572
1692
 
1573
1693
  worker[kWorkerStatus] = 'stopped'
1574
- this.emit('service:worker:stopped', eventPayload)
1694
+ this.emit('application:worker:stopped', eventPayload)
1575
1695
  this.#broadcastWorkers()
1576
1696
  }
1577
1697
 
@@ -1588,18 +1708,20 @@ class Runtime extends EventEmitter {
1588
1708
  }
1589
1709
 
1590
1710
  async #discardWorker (worker) {
1591
- this.#meshInterceptor.unroute(worker[kServiceId], worker, true)
1711
+ this.#meshInterceptor.unroute(worker[kApplicationId], worker, true)
1592
1712
  worker.removeAllListeners('exit')
1593
1713
  await worker.terminate()
1594
1714
 
1595
1715
  return this.#cleanupWorker(worker)
1596
1716
  }
1597
1717
 
1598
- #workerExtendedLabel (serviceId, workerId, workersCount) {
1599
- return workersCount > 1 ? `worker ${workerId} of the service "${serviceId}"` : `service "${serviceId}"`
1718
+ #workerExtendedLabel (applicationId, workerId, workersCount) {
1719
+ return workersCount > 1
1720
+ ? `worker ${workerId} of the application "${applicationId}"`
1721
+ : `application "${applicationId}"`
1600
1722
  }
1601
1723
 
1602
- async #restartCrashedWorker (config, serviceConfig, workersCount, id, index, silent, bootstrapAttempt) {
1724
+ async #restartCrashedWorker (config, applicationConfig, workersCount, id, index, silent, bootstrapAttempt) {
1603
1725
  const workerId = `${id}:${index}`
1604
1726
 
1605
1727
  let restartPromise = this.#restartingWorkers.get(workerId)
@@ -1619,8 +1741,8 @@ class Runtime extends EventEmitter {
1619
1741
  }
1620
1742
 
1621
1743
  try {
1622
- await this.#setupWorker(config, serviceConfig, workersCount, id, index)
1623
- await this.#startWorker(config, serviceConfig, workersCount, id, index, silent, bootstrapAttempt)
1744
+ await this.#setupWorker(config, applicationConfig, workersCount, id, index)
1745
+ await this.#startWorker(config, applicationConfig, workersCount, id, index, silent, bootstrapAttempt)
1624
1746
 
1625
1747
  this.logger.info(
1626
1748
  `The ${this.#workerExtendedLabel(id, index, workersCount)} has been successfully restarted ...`
@@ -1647,13 +1769,13 @@ class Runtime extends EventEmitter {
1647
1769
  await restartPromise
1648
1770
  }
1649
1771
 
1650
- async #replaceWorker (config, serviceConfig, workersCount, serviceId, index, worker) {
1651
- const workerId = `${serviceId}:${index}`
1772
+ async #replaceWorker (config, applicationConfig, workersCount, applicationId, index, worker) {
1773
+ const workerId = `${applicationId}:${index}`
1652
1774
  let newWorker
1653
1775
 
1654
1776
  try {
1655
1777
  // Create a new worker
1656
- newWorker = await this.#setupWorker(config, serviceConfig, workersCount, serviceId, index, false)
1778
+ newWorker = await this.#setupWorker(config, applicationConfig, workersCount, applicationId, index, false)
1657
1779
 
1658
1780
  // Make sure the runtime hasn't been stopped in the meanwhile
1659
1781
  if (this.#status !== 'started') {
@@ -1661,7 +1783,7 @@ class Runtime extends EventEmitter {
1661
1783
  }
1662
1784
 
1663
1785
  // Add the worker to the mesh
1664
- await this.#startWorker(config, serviceConfig, workersCount, serviceId, index, false, 0, newWorker, true)
1786
+ await this.#startWorker(config, applicationConfig, workersCount, applicationId, index, false, 0, newWorker, true)
1665
1787
 
1666
1788
  // Make sure the runtime hasn't been stopped in the meanwhile
1667
1789
  if (this.#status !== 'started') {
@@ -1669,7 +1791,7 @@ class Runtime extends EventEmitter {
1669
1791
  }
1670
1792
 
1671
1793
  this.#workers.set(workerId, newWorker)
1672
- this.#meshInterceptor.route(serviceId, newWorker)
1794
+ this.#meshInterceptor.route(applicationId, newWorker)
1673
1795
 
1674
1796
  // Remove the old worker and then kill it
1675
1797
  await sendViaITC(worker, 'removeFromMesh')
@@ -1678,52 +1800,54 @@ class Runtime extends EventEmitter {
1678
1800
  throw e
1679
1801
  }
1680
1802
 
1681
- await this.#stopWorker(workersCount, serviceId, index, false, worker)
1803
+ await this.#stopWorker(workersCount, applicationId, index, false, worker)
1682
1804
  }
1683
1805
 
1684
- async #getServiceById (serviceId, ensureStarted = false, mustExist = true) {
1685
- // If the serviceId includes the worker, properly split
1806
+ async #getApplicationById (applicationId, ensureStarted = false, mustExist = true) {
1807
+ // If the applicationId includes the worker, properly split
1686
1808
  let workerId
1687
- const matched = serviceId.match(/^(.+):(\d+)$/)
1809
+ const matched = applicationId.match(/^(.+):(\d+)$/)
1688
1810
 
1689
1811
  if (matched) {
1690
- serviceId = matched[1]
1812
+ applicationId = matched[1]
1691
1813
  workerId = matched[2]
1692
1814
  }
1693
1815
 
1694
- return this.#getWorkerById(serviceId, workerId, ensureStarted, mustExist)
1816
+ return this.#getWorkerById(applicationId, workerId, ensureStarted, mustExist)
1695
1817
  }
1696
1818
 
1697
- async #getWorkerById (serviceId, workerId, ensureStarted = false, mustExist = true) {
1819
+ async #getWorkerById (applicationId, workerId, ensureStarted = false, mustExist = true) {
1698
1820
  let worker
1699
1821
 
1700
1822
  if (typeof workerId !== 'undefined') {
1701
- worker = this.#workers.get(`${serviceId}:${workerId}`)
1823
+ worker = this.#workers.get(`${applicationId}:${workerId}`)
1702
1824
  } else {
1703
- worker = this.#workers.next(serviceId)
1825
+ worker = this.#workers.next(applicationId)
1704
1826
  }
1705
1827
 
1828
+ const applicationsIds = this.getApplicationsIds()
1829
+
1706
1830
  if (!worker) {
1707
- if (!mustExist && this.#servicesIds.includes(serviceId)) {
1831
+ if (!mustExist && applicationsIds.includes(applicationId)) {
1708
1832
  return null
1709
1833
  }
1710
1834
 
1711
- if (this.#servicesIds.includes(serviceId)) {
1835
+ if (applicationsIds.includes(applicationId)) {
1712
1836
  const availableWorkers = Array.from(this.#workers.keys())
1713
- .filter(key => key.startsWith(serviceId + ':'))
1837
+ .filter(key => key.startsWith(applicationId + ':'))
1714
1838
  .map(key => key.split(':')[1])
1715
1839
  .join(', ')
1716
- throw new errors.WorkerNotFoundError(workerId, serviceId, availableWorkers)
1840
+ throw new WorkerNotFoundError(workerId, applicationId, availableWorkers)
1717
1841
  } else {
1718
- throw new errors.ServiceNotFoundError(serviceId, Array.from(this.#servicesIds).join(', '))
1842
+ throw new ApplicationNotFoundError(applicationId, applicationsIds.join(', '))
1719
1843
  }
1720
1844
  }
1721
1845
 
1722
1846
  if (ensureStarted) {
1723
- const serviceStatus = await sendViaITC(worker, 'getStatus')
1847
+ const applicationStatus = await sendViaITC(worker, 'getStatus')
1724
1848
 
1725
- if (serviceStatus !== 'started') {
1726
- throw new errors.ServiceNotStartedError(serviceId)
1849
+ if (applicationStatus !== 'started') {
1850
+ throw new ApplicationNotStartedError(applicationId)
1727
1851
  }
1728
1852
  }
1729
1853
 
@@ -1744,17 +1868,17 @@ class Runtime extends EventEmitter {
1744
1868
  continue
1745
1869
  }
1746
1870
 
1747
- const service = worker[kServiceId]
1748
- let serviceWorkers = workers.get(service)
1871
+ const application = worker[kApplicationId]
1872
+ let applicationWorkers = workers.get(application)
1749
1873
 
1750
- if (!serviceWorkers) {
1751
- serviceWorkers = []
1752
- workers.set(service, serviceWorkers)
1874
+ if (!applicationWorkers) {
1875
+ applicationWorkers = []
1876
+ workers.set(application, applicationWorkers)
1753
1877
  }
1754
1878
 
1755
- serviceWorkers.push({
1879
+ applicationWorkers.push({
1756
1880
  id: worker[kId],
1757
- service: worker[kServiceId],
1881
+ application: worker[kApplicationId],
1758
1882
  worker: worker[kWorkerId],
1759
1883
  thread: worker.threadId
1760
1884
  })
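The renamed `#getApplicationById` shown above still accepts the combined `<applicationId>:<workerIndex>` form and splits it with a regular expression before delegating to `#getWorkerById`. A standalone sketch of that addressing convention (the application name is just an example):

    // Sketch: splitting a combined "application:worker" id, mirroring the regular
    // expression used by #getApplicationById above.
    function parseWorkerAddress (id) {
      const matched = id.match(/^(.+):(\d+)$/)

      if (!matched) {
        return { applicationId: id, workerId: undefined }
      }

      return { applicationId: matched[1], workerId: matched[2] }
    }

    console.log(parseWorkerAddress('movies'))   // { applicationId: 'movies', workerId: undefined }
    console.log(parseWorkerAddress('movies:2')) // { applicationId: 'movies', workerId: '2' }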
@@ -1767,8 +1891,8 @@ class Runtime extends EventEmitter {
1767
1891
  }
1768
1892
  }
1769
1893
 
1770
- async #getWorkerMessagingChannel ({ service, worker }, context) {
1771
- const target = await this.#getWorkerById(service, worker, true, true)
1894
+ async #getWorkerMessagingChannel ({ application, worker }, context) {
1895
+ const target = await this.#getWorkerById(application, worker, true, true)
1772
1896
 
1773
1897
  const { port1, port2 } = new MessageChannel()
1774
1898
 
@@ -1779,11 +1903,11 @@ class Runtime extends EventEmitter {
1779
1903
  )
1780
1904
 
1781
1905
  if (response === kTimeout) {
1782
- throw new errors.MessagingError(service, 'Timeout while establishing a communication channel.')
1906
+ throw new MessagingError(application, 'Timeout while establishing a communication channel.')
1783
1907
  }
1784
1908
 
1785
1909
  context.transferList = [port2]
1786
- this.emit('service:worker:messagingChannel', { service, worker })
1910
+ this.emit('application:worker:messagingChannel', { application, worker })
1787
1911
  return port2
1788
1912
  }
1789
1913
 
@@ -1795,8 +1919,8 @@ class Runtime extends EventEmitter {
1795
1919
  return packageJson
1796
1920
  }
1797
1921
 
1798
- #handleWorkerStandardStreams (worker, serviceId, workerId) {
1799
- const binding = { name: serviceId }
1922
+ #handleWorkerStandardStreams (worker, applicationId, workerId) {
1923
+ const binding = { name: applicationId }
1800
1924
 
1801
1925
  if (typeof workerId !== 'undefined') {
1802
1926
  binding.worker = workerId
@@ -1893,187 +2017,83 @@ class Runtime extends EventEmitter {
1893
2017
  }
1894
2018
  }
1895
2019
 
1896
- async getServiceResourcesInfo (id) {
1897
- const workers = this.#workers.getCount(id)
1898
-
1899
- const worker = await this.#getWorkerById(id, 0, false, false)
1900
- const health = worker[kConfig].health
1901
-
1902
- return { workers, health }
1903
- }
1904
-
1905
- async #updateServiceConfigWorkers (serviceId, workers) {
1906
- this.logger.info(`Updating service "${serviceId}" config workers to ${workers}`)
2020
+ async #updateApplicationConfigWorkers (applicationId, workers) {
2021
+ this.logger.info(`Updating application "${applicationId}" config workers to ${workers}`)
1907
2022
 
1908
- this.#config.services.find(s => s.id === serviceId).workers = workers
1909
- const service = await this.#getServiceById(serviceId)
1910
- this.#workers.setCount(serviceId, workers)
1911
- service[kConfig].workers = workers
2023
+ this.#config.applications.find(s => s.id === applicationId).workers = workers
2024
+ const application = await this.#getApplicationById(applicationId)
2025
+ this.#workers.setCount(applicationId, workers)
2026
+ application[kConfig].workers = workers
1912
2027
 
1913
2028
  const promises = []
1914
2029
  for (const [workerId, worker] of this.#workers.entries()) {
1915
- if (workerId.startsWith(`${serviceId}:`)) {
1916
- promises.push(sendViaITC(worker, 'updateWorkersCount', { serviceId, workers }))
2030
+ if (workerId.startsWith(`${applicationId}:`)) {
2031
+ promises.push(sendViaITC(worker, 'updateWorkersCount', { applicationId, workers }))
1917
2032
  }
1918
2033
  }
1919
2034
 
1920
2035
  const results = await Promise.allSettled(promises)
1921
2036
  for (const result of results) {
1922
2037
  if (result.status === 'rejected') {
1923
- this.logger.error({ err: result.reason }, `Cannot update service "${serviceId}" workers`)
2038
+ this.logger.error({ err: result.reason }, `Cannot update application "${applicationId}" workers`)
1924
2039
  throw result.reason
1925
2040
  }
1926
2041
  }
1927
2042
  }
1928
2043
 
1929
- async #updateServiceConfigHealth (serviceId, health) {
1930
- this.logger.info(`Updating service "${serviceId}" config health heap to ${JSON.stringify(health)}`)
2044
+ async #updateApplicationConfigHealth (applicationId, health) {
2045
+ this.logger.info(`Updating application "${applicationId}" config health heap to ${JSON.stringify(health)}`)
1931
2046
  const { maxHeapTotal, maxYoungGeneration } = health
1932
2047
 
1933
- const service = this.#config.services.find(s => s.id === serviceId)
2048
+ const application = this.#config.applications.find(s => s.id === applicationId)
1934
2049
  if (maxHeapTotal) {
1935
- service.health.maxHeapTotal = maxHeapTotal
2050
+ application.health.maxHeapTotal = maxHeapTotal
1936
2051
  }
1937
2052
  if (maxYoungGeneration) {
1938
- service.health.maxYoungGeneration = maxYoungGeneration
1939
- }
1940
- }
1941
-
1942
- /**
1943
- * Updates the resources of the services, such as the number of workers and health configurations (e.g., heap memory settings).
1944
- *
1945
- * This function handles three update scenarios for each service:
1946
- * 1. **Updating workers only**: Adjusts the number of workers for the service.
1947
- * 2. **Updating health configurations only**: Updates health parameters like `maxHeapTotal` or `maxYoungGeneration`.
1948
- * 3. **Updating both workers and health configurations**: Scales the workers and also applies health settings.
1949
- *
1950
- * When updating both workers and health:
1951
- * - **Scaling down workers**: Stops extra workers, then restarts the remaining workers with the previous settings.
1952
- * - **Scaling up workers**: Starts new workers with the updated heap settings, then restarts the old workers with the updated settings.
1953
- *
1954
- * Scaling up new resources (workers and/or heap memory) may fails due to insufficient memory, in this case the operation may fail partially or entirely.
1955
- * Scaling down is expected to succeed without issues.
1956
- *
1957
- * @param {Array<Object>} updates - An array of objects that define the updates for each service.
1958
- * @param {string} updates[].service - The ID of the service to update.
1959
- * @param {number} [updates[].workers] - The desired number of workers for the service. If omitted, workers will not be updated.
1960
- * @param {Object} [updates[].health] - The health configuration to update for the service, which may include:
1961
- * @param {string|number} [updates[].health.maxHeapTotal] - The maximum heap memory for the service. Can be a valid memory string (e.g., '1G', '512MB') or a number representing bytes.
1962
- * @param {string|number} [updates[].health.maxYoungGeneration] - The maximum young generation memory for the service. Can be a valid memory string (e.g., '128MB') or a number representing bytes.
1963
- *
1964
- * @returns {Promise<Array<Object>>} - A promise that resolves to an array of reports for each service, detailing the success or failure of the operations:
1965
- * - `service`: The service ID.
1966
- * - `workers`: The workers' update report, including the current, new number of workers, started workers, and success status.
1967
- * - `health`: The health update report, showing the current and new heap settings, updated workers, and success status.
1968
- *
1969
- * @example
1970
- * await runtime.updateServicesResources([
1971
- * { service: 'service-1', workers: 2, health: { maxHeapTotal: '1G', maxYoungGeneration: '128 MB' } },
1972
- * { service: 'service-2', health: { maxHeapTotal: '1G' } },
1973
- * { service: 'service-3', workers: 2 },
1974
- * ])
1975
- *
1976
- * In this example:
1977
- * - `service-1` will have 2 workers and updated heap memory configurations.
1978
- * - `service-2` will have updated heap memory settings (without changing workers).
1979
- * - `service-3` will have its workers set to 2 but no change in memory settings.
1980
- *
1981
- * @throws {InvalidArgumentError} - Throws if any update parameter is invalid, such as:
1982
- * - Missing service ID.
1983
- * - Invalid worker count (not a positive integer).
1984
- * - Invalid memory size format for `maxHeapTotal` or `maxYoungGeneration`.
1985
- * @throws {ServiceNotFoundError} - Throws if the specified service ID does not exist in the current service configuration.
1986
- */
1987
- async updateServicesResources (updates) {
1988
- if (this.#status === 'stopping' || this.#status === 'closed') {
1989
- this.logger.warn('Cannot update service resources when the runtime is stopping or closed')
1990
- return
1991
- }
1992
-
1993
- const ups = await this.#validateUpdateServiceResources(updates)
1994
- const config = this.#config
1995
-
1996
- const report = []
1997
- for (const update of ups) {
1998
- const { serviceId, config: serviceConfig, workers, health, currentWorkers, currentHealth } = update
1999
-
2000
- if (workers && health) {
2001
- const r = await this.#updateServiceWorkersAndHealth(
2002
- serviceId,
2003
- config,
2004
- serviceConfig,
2005
- workers,
2006
- health,
2007
- currentWorkers,
2008
- currentHealth
2009
- )
2010
- report.push({
2011
- service: serviceId,
2012
- workers: r.workers,
2013
- health: r.health
2014
- })
2015
- } else if (health) {
2016
- const r = await this.#updateServiceHealth(
2017
- serviceId,
2018
- config,
2019
- serviceConfig,
2020
- currentWorkers,
2021
- currentHealth,
2022
- health
2023
- )
2024
- report.push({
2025
- service: serviceId,
2026
- health: r.health
2027
- })
2028
- } else if (workers) {
2029
- const r = await this.#updateServiceWorkers(serviceId, config, serviceConfig, workers, currentWorkers)
2030
- report.push({
2031
- service: serviceId,
2032
- workers: r.workers
2033
- })
2034
- }
2053
+ application.health.maxYoungGeneration = maxYoungGeneration
2035
2054
  }
2036
-
2037
- return report
2038
2055
  }
2039
2056
 
2040
- async #validateUpdateServiceResources (updates) {
2057
+ async #validateUpdateApplicationResources (updates) {
2041
2058
  if (!Array.isArray(updates)) {
2042
- throw new errors.InvalidArgumentError('updates', 'must be an array')
2059
+ throw new InvalidArgumentError('updates', 'must be an array')
2043
2060
  }
2044
2061
  if (updates.length === 0) {
2045
- throw new errors.InvalidArgumentError('updates', 'must have at least one element')
2062
+ throw new InvalidArgumentError('updates', 'must have at least one element')
2046
2063
  }
2047
2064
 
2048
2065
  const config = this.#config
2049
2066
  const validatedUpdates = []
2050
2067
  for (const update of updates) {
2051
- const { service: serviceId } = update
2068
+ const { application: applicationId } = update
2052
2069
 
2053
- if (!serviceId) {
2054
- throw new errors.InvalidArgumentError('service', 'must be a string')
2070
+ if (!applicationId) {
2071
+ throw new InvalidArgumentError('application', 'must be a string')
2055
2072
  }
2056
- const serviceConfig = config.services.find(s => s.id === serviceId)
2057
- if (!serviceConfig) {
2058
- throw new errors.ServiceNotFoundError(serviceId, Array.from(this.#servicesIds).join(', '))
2073
+ const applicationConfig = config.applications.find(s => s.id === applicationId)
2074
+ if (!applicationConfig) {
2075
+ throw new ApplicationNotFoundError(applicationId, Array.from(this.getApplicationsIds()).join(', '))
2059
2076
  }
2060
2077
 
2061
- const { workers: currentWorkers, health: currentHealth } = await this.getServiceResourcesInfo(serviceId)
2078
+ const { workers: currentWorkers, health: currentHealth } = await this.getApplicationResourcesInfo(applicationId)
2062
2079
 
2063
2080
  let workers
2064
2081
  if (update.workers !== undefined) {
2065
2082
  if (typeof update.workers !== 'number') {
2066
- throw new errors.InvalidArgumentError('workers', 'must be a number')
2083
+ throw new InvalidArgumentError('workers', 'must be a number')
2067
2084
  }
2068
2085
  if (update.workers <= 0) {
2069
- throw new errors.InvalidArgumentError('workers', 'must be greater than 0')
2086
+ throw new InvalidArgumentError('workers', 'must be greater than 0')
2070
2087
  }
2071
2088
  if (update.workers > MAX_WORKERS) {
2072
- throw new errors.InvalidArgumentError('workers', `must be less than ${MAX_WORKERS}`)
2089
+ throw new InvalidArgumentError('workers', `must be less than ${MAX_WORKERS}`)
2073
2090
  }
2074
2091
 
2075
2092
  if (currentWorkers === update.workers) {
2076
- this.logger.warn({ serviceId, workers: update.workers }, 'No change in the number of workers for service')
2093
+ this.logger.warn(
2094
+ { applicationId, workers: update.workers },
2095
+ 'No change in the number of workers for application'
2096
+ )
2077
2097
  } else {
2078
2098
  workers = update.workers
2079
2099
  }
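The large hunk above drops the old `updateServicesResources` JSDoc and method body from this section and reshapes the private helpers around applications; the validation that follows reads each update's `application`, `workers`, and `health` fields. A heavily hedged sketch of the expected update payload; the renamed public entry point is not visible in this diff, so `updateApplicationsResources` is assumed from the old name:

    // Sketch under assumptions: the update objects validated by
    // #validateUpdateApplicationResources above.
    const updates = [
      { application: 'app-1', workers: 2, health: { maxHeapTotal: '1G', maxYoungGeneration: '128 MB' } },
      { application: 'app-2', health: { maxHeapTotal: '1G' } },
      { application: 'app-3', workers: 2 }
    ]

    // Presumed usage; the method name mirrors the removed updateServicesResources:
    // const report = await runtime.updateApplicationsResources(updates)
    console.log(updates)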
@@ -2086,22 +2106,19 @@ class Runtime extends EventEmitter {
2086
2106
  try {
2087
2107
  maxHeapTotal = parseMemorySize(update.health.maxHeapTotal)
2088
2108
  } catch {
2089
- throw new errors.InvalidArgumentError('maxHeapTotal', 'must be a valid memory size')
2109
+ throw new InvalidArgumentError('maxHeapTotal', 'must be a valid memory size')
2090
2110
  }
2091
2111
  } else if (typeof update.health.maxHeapTotal === 'number') {
2092
2112
  maxHeapTotal = update.health.maxHeapTotal
2093
2113
  if (update.health.maxHeapTotal <= 0) {
2094
- throw new errors.InvalidArgumentError('maxHeapTotal', 'must be greater than 0')
2114
+ throw new InvalidArgumentError('maxHeapTotal', 'must be greater than 0')
2095
2115
  }
2096
2116
  } else {
2097
- throw new errors.InvalidArgumentError(
2098
- 'maxHeapTotal',
2099
- 'must be a number or a string representing a memory size'
2100
- )
2117
+ throw new InvalidArgumentError('maxHeapTotal', 'must be a number or a string representing a memory size')
2101
2118
  }
2102
2119
 
2103
2120
  if (currentHealth.maxHeapTotal === maxHeapTotal) {
2104
- this.logger.warn({ serviceId, maxHeapTotal }, 'No change in the max heap total for service')
2121
+ this.logger.warn({ applicationId, maxHeapTotal }, 'No change in the max heap total for application')
2105
2122
  maxHeapTotal = undefined
2106
2123
  }
2107
2124
  }
@@ -2111,22 +2128,25 @@ class Runtime extends EventEmitter {
2111
2128
  try {
2112
2129
  maxYoungGeneration = parseMemorySize(update.health.maxYoungGeneration)
2113
2130
  } catch {
2114
- throw new errors.InvalidArgumentError('maxYoungGeneration', 'must be a valid memory size')
2131
+ throw new InvalidArgumentError('maxYoungGeneration', 'must be a valid memory size')
2115
2132
  }
2116
2133
  } else if (typeof update.health.maxYoungGeneration === 'number') {
2117
2134
  maxYoungGeneration = update.health.maxYoungGeneration
2118
2135
  if (update.health.maxYoungGeneration <= 0) {
2119
- throw new errors.InvalidArgumentError('maxYoungGeneration', 'must be greater than 0')
2136
+ throw new InvalidArgumentError('maxYoungGeneration', 'must be greater than 0')
2120
2137
  }
2121
2138
  } else {
2122
- throw new errors.InvalidArgumentError(
2139
+ throw new InvalidArgumentError(
2123
2140
  'maxYoungGeneration',
2124
2141
  'must be a number or a string representing a memory size'
2125
2142
  )
2126
2143
  }
2127
2144
 
2128
2145
  if (currentHealth.maxYoungGeneration && currentHealth.maxYoungGeneration === maxYoungGeneration) {
2129
- this.logger.warn({ serviceId, maxYoungGeneration }, 'No change in the max young generation for service')
2146
+ this.logger.warn(
2147
+ { applicationId, maxYoungGeneration },
2148
+ 'No change in the max young generation for application'
2149
+ )
2130
2150
  maxYoungGeneration = undefined
2131
2151
  }
2132
2152
  }
@@ -2143,17 +2163,24 @@ class Runtime extends EventEmitter {
2143
2163
  health.maxYoungGeneration = maxYoungGeneration
2144
2164
  }
2145
2165
  }
2146
- validatedUpdates.push({ serviceId, config: serviceConfig, workers, health, currentWorkers, currentHealth })
2166
+ validatedUpdates.push({
2167
+ applicationId,
2168
+ config: applicationConfig,
2169
+ workers,
2170
+ health,
2171
+ currentWorkers,
2172
+ currentHealth
2173
+ })
2147
2174
  }
2148
2175
  }
2149
2176
 
2150
2177
  return validatedUpdates
2151
2178
  }
2152
2179
 
2153
- async #updateServiceWorkersAndHealth (
2154
- serviceId,
2180
+ async #updateApplicationWorkersAndHealth (
2181
+ applicationId,
2155
2182
  config,
2156
- serviceConfig,
2183
+ applicationConfig,
2157
2184
  workers,
2158
2185
  health,
2159
2186
  currentWorkers,
@@ -2161,12 +2188,18 @@ class Runtime extends EventEmitter {
2161
2188
  ) {
2162
2189
  if (currentWorkers > workers) {
2163
2190
  // stop workers
2164
- const reportWorkers = await this.#updateServiceWorkers(serviceId, config, serviceConfig, workers, currentWorkers)
2191
+ const reportWorkers = await this.#updateApplicationWorkers(
2192
+ applicationId,
2193
+ config,
2194
+ applicationConfig,
2195
+ workers,
2196
+ currentWorkers
2197
+ )
2165
2198
  // update heap for current workers
2166
- const reportHealth = await this.#updateServiceHealth(
2167
- serviceId,
2199
+ const reportHealth = await this.#updateApplicationHealth(
2200
+ applicationId,
2168
2201
  config,
2169
- serviceConfig,
2202
+ applicationConfig,
2170
2203
  workers,
2171
2204
  currentHealth,
2172
2205
  health
@@ -2174,15 +2207,21 @@ class Runtime extends EventEmitter {
2174
2207
 
2175
2208
  return { workers: reportWorkers, health: reportHealth }
2176
2209
  } else {
2177
- // update service heap
2178
- await this.#updateServiceConfigHealth(serviceId, health)
2210
+ // update application heap
2211
+ await this.#updateApplicationConfigHealth(applicationId, health)
2179
2212
  // start new workers with new heap
2180
- const reportWorkers = await this.#updateServiceWorkers(serviceId, config, serviceConfig, workers, currentWorkers)
2213
+ const reportWorkers = await this.#updateApplicationWorkers(
2214
+ applicationId,
2215
+ config,
2216
+ applicationConfig,
2217
+ workers,
2218
+ currentWorkers
2219
+ )
2181
2220
  // update heap for current workers
2182
- const reportHealth = await this.#updateServiceHealth(
2183
- serviceId,
2221
+ const reportHealth = await this.#updateApplicationHealth(
2222
+ applicationId,
2184
2223
  config,
2185
- serviceConfig,
2224
+ applicationConfig,
2186
2225
  currentWorkers,
2187
2226
  currentHealth,
2188
2227
  health,
@@ -2193,10 +2232,10 @@ class Runtime extends EventEmitter {
2193
2232
  }
2194
2233
  }
2195
2234
 
2196
- async #updateServiceHealth (
2197
- serviceId,
2235
+ async #updateApplicationHealth (
2236
+ applicationId,
2198
2237
  config,
2199
- serviceConfig,
2238
+ applicationConfig,
2200
2239
  currentWorkers,
2201
2240
  currentHealth,
2202
2241
  health,
@@ -2209,16 +2248,16 @@ class Runtime extends EventEmitter {
2209
2248
  }
2210
2249
  try {
2211
2250
  if (updateConfig) {
2212
- await this.#updateServiceConfigHealth(serviceId, health)
2251
+ await this.#updateApplicationConfigHealth(applicationId, health)
2213
2252
  }
2214
2253
 
2215
2254
  for (let i = 0; i < currentWorkers; i++) {
2216
2255
  this.logger.info(
2217
2256
  { health: { current: currentHealth, new: health } },
2218
- `Restarting service "${serviceId}" worker ${i} to update config health heap...`
2257
+ `Restarting application "${applicationId}" worker ${i} to update config health heap...`
2219
2258
  )
2220
2259
 
2221
- const worker = await this.#getWorkerById(serviceId, i)
2260
+ const worker = await this.#getWorkerById(applicationId, i)
2222
2261
  if (health.maxHeapTotal) {
2223
2262
  worker[kConfig].health.maxHeapTotal = health.maxHeapTotal
2224
2263
  }
@@ -2226,22 +2265,22 @@ class Runtime extends EventEmitter {
2226
2265
  worker[kConfig].health.maxYoungGeneration = health.maxYoungGeneration
2227
2266
  }
2228
2267
 
2229
- await this.#replaceWorker(config, serviceConfig, currentWorkers, serviceId, i, worker)
2268
+ await this.#replaceWorker(config, applicationConfig, currentWorkers, applicationId, i, worker)
2230
2269
  report.updated.push(i)
2231
2270
  this.logger.info(
2232
2271
  { health: { current: currentHealth, new: health } },
2233
- `Restarted service "${serviceId}" worker ${i}`
2272
+ `Restarted application "${applicationId}" worker ${i}`
2234
2273
  )
2235
2274
  }
2236
2275
  report.success = true
2237
2276
  } catch (err) {
2238
2277
  if (report.updated.length < 1) {
2239
- this.logger.error({ err }, 'Cannot update service health heap, no worker updated')
2240
- await this.#updateServiceConfigHealth(serviceId, currentHealth)
2278
+ this.logger.error({ err }, 'Cannot update application health heap, no worker updated')
2279
+ await this.#updateApplicationConfigHealth(applicationId, currentHealth)
2241
2280
  } else {
2242
2281
  this.logger.error(
2243
2282
  { err },
2244
- `Cannot update service health heap, updated workers: ${report.updated.length} out of ${currentWorkers}`
2283
+ `Cannot update application health heap, updated workers: ${report.updated.length} out of ${currentWorkers}`
2245
2284
  )
2246
2285
  }
2247
2286
  report.success = false
@@ -2249,7 +2288,7 @@ class Runtime extends EventEmitter {
2249
2288
  return report
2250
2289
  }
2251
2290
 
2252
- async #updateServiceWorkers (serviceId, config, serviceConfig, workers, currentWorkers) {
2291
+ async #updateApplicationWorkers (applicationId, config, applicationConfig, workers, currentWorkers) {
2253
2292
  const report = {
2254
2293
  current: currentWorkers,
2255
2294
  new: workers
@@ -2257,47 +2296,47 @@ class Runtime extends EventEmitter {
2257
2296
  if (currentWorkers < workers) {
2258
2297
  report.started = []
2259
2298
  try {
2260
- await this.#updateServiceConfigWorkers(serviceId, workers)
2299
+ await this.#updateApplicationConfigWorkers(applicationId, workers)
2261
2300
  for (let i = currentWorkers; i < workers; i++) {
2262
- await this.#setupWorker(config, serviceConfig, workers, serviceId, i)
2263
- await this.#startWorker(config, serviceConfig, workers, serviceId, i, false, 0)
2301
+ await this.#setupWorker(config, applicationConfig, workers, applicationId, i)
2302
+ await this.#startWorker(config, applicationConfig, workers, applicationId, i, false, 0)
2264
2303
  report.started.push(i)
2265
2304
  }
2266
2305
  report.success = true
2267
2306
  } catch (err) {
2268
2307
  if (report.started.length < 1) {
2269
- this.logger.error({ err }, 'Cannot start service workers, no worker started')
2270
- await this.#updateServiceConfigWorkers(serviceId, currentWorkers)
2308
+ this.logger.error({ err }, 'Cannot start application workers, no worker started')
2309
+ await this.#updateApplicationConfigWorkers(applicationId, currentWorkers)
2271
2310
  } else {
2272
2311
  this.logger.error(
2273
2312
  { err },
2274
- `Cannot start service workers, started workers: ${report.started.length} out of ${workers}`
2313
+ `Cannot start application workers, started workers: ${report.started.length} out of ${workers}`
2275
2314
  )
2276
- await this.#updateServiceConfigWorkers(serviceId, currentWorkers + report.started.length)
2315
+ await this.#updateApplicationConfigWorkers(applicationId, currentWorkers + report.started.length)
2277
2316
  }
2278
2317
  report.success = false
2279
2318
  }
2280
2319
  } else {
2281
- // keep the current workers count until all the service workers are all stopped
2320
+ // keep the current workers count until all the application workers are stopped
2282
2321
  report.stopped = []
2283
2322
  try {
2284
2323
  for (let i = currentWorkers - 1; i >= workers; i--) {
2285
- const worker = await this.#getWorkerById(serviceId, i, false, false)
2324
+ const worker = await this.#getWorkerById(applicationId, i, false, false)
2286
2325
  await sendViaITC(worker, 'removeFromMesh')
2287
- await this.#stopWorker(currentWorkers, serviceId, i, false, worker)
2326
+ await this.#stopWorker(currentWorkers, applicationId, i, false, worker)
2288
2327
  report.stopped.push(i)
2289
2328
  }
2290
- await this.#updateServiceConfigWorkers(serviceId, workers)
2329
+ await this.#updateApplicationConfigWorkers(applicationId, workers)
2291
2330
  report.success = true
2292
2331
  } catch (err) {
2293
2332
  if (report.stopped.length < 1) {
2294
- this.logger.error({ err }, 'Cannot stop service workers, no worker stopped')
2333
+ this.logger.error({ err }, 'Cannot stop application workers, no worker stopped')
2295
2334
  } else {
2296
2335
  this.logger.error(
2297
2336
  { err },
2298
- `Cannot stop service workers, stopped workers: ${report.stopped.length} out of ${workers}`
2337
+ `Cannot stop application workers, stopped workers: ${report.stopped.length} out of ${workers}`
2299
2338
  )
2300
- await this.#updateServiceConfigWorkers(serviceId, currentWorkers - report.stopped)
2339
+ await this.#updateApplicationConfigWorkers(applicationId, currentWorkers - report.stopped)
2301
2340
  }
2302
2341
  report.success = false
2303
2342
  }
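For reference, the scaling helper above returns a small report whose shape can be read directly from the hunk: the `current` and `new` worker counts, a `started` or `stopped` array of worker indices, and a `success` flag. Illustrative, hand-written examples of both shapes:

    // Illustrative values only, matching the report built above.
    const scaleUpReport = {
      current: 1,
      new: 3,
      started: [1, 2], // indices of the newly started workers
      success: true
    }

    const scaleDownReport = {
      current: 3,
      new: 1,
      stopped: [2, 1], // workers are stopped from the highest index down
      success: true
    }

    console.log(scaleUpReport, scaleDownReport)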
@@ -2305,5 +2344,3 @@ class Runtime extends EventEmitter {
2305
2344
  return report
2306
2345
  }
2307
2346
  }
2308
-
2309
- module.exports = { Runtime }
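Finally, the trailing `module.exports = { Runtime }` is removed, consistent with the file's move from CommonJS to ES modules. CommonJS consumers can no longer `require()` this file directly; a dynamic `import()` is the usual workaround. The exact specifier depends on the package's export map, which is outside this diff, so the path below is an assumption:

    // Sketch: loading the ESM Runtime class from a CommonJS consumer.
    async function loadRuntime () {
      const { Runtime } = await import('@platformatic/runtime/lib/runtime.js') // assumed specifier
      return Runtime
    }

    loadRuntime()
      .then(Runtime => console.log(typeof Runtime))
      .catch(err => console.error(err.message))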