@platformatic/runtime 3.10.0 → 3.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/config.d.ts CHANGED
@@ -365,12 +365,13 @@ export type PlatformaticRuntimeConfig = {
  verticalScaler?: {
  enabled?: boolean;
  maxTotalWorkers?: number;
+ maxTotalMemory?: number;
  minWorkers?: number;
  maxWorkers?: number;
  scaleUpELU?: number;
  scaleDownELU?: number;
- minELUDiff?: number;
  timeWindowSec?: number;
+ scaleDownTimeWindowSec?: number;
  cooldownSec?: number;
  scaleIntervalSec?: number;
  gracePeriod?: number;
@@ -412,4 +413,13 @@ export type PlatformaticRuntimeConfig = {
  maxRetries?: number;
  [k: string]: unknown;
  }[];
+ policies?: {
+ deny: {
+ /**
+ * This interface was referenced by `undefined`'s JSON-Schema definition
+ * via the `patternProperty` "^.*$".
+ */
+ [k: string]: string | [string, ...string[]];
+ };
+ };
  };
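For illustration, a runtime configuration that exercises the new fields could look like the sketch below. The application names are hypothetical and the rest of the configuration is elided; maxTotalMemory is expressed in bytes and, as shown later in lib/runtime.js, defaults to 90% of the detected total memory when omitted.

{
  "verticalScaler": {
    "enabled": true,
    "maxTotalWorkers": 8,
    "maxTotalMemory": 4294967296,
    "timeWindowSec": 10,
    "scaleDownTimeWindowSec": 60
  },
  "policies": {
    "deny": {
      "payments": ["jobs"],
      "frontend": "jobs"
    }
  }
}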
package/index.js CHANGED
@@ -44,7 +44,7 @@ function handleSignal (runtime, config) {
 
  const cwg = closeWithGrace({ delay: config.gracefulShutdown?.runtime ?? 10000, onTimeout }, async event => {
  if (event.err instanceof Error) {
- console.error(event.err)
+ console.error(new Error('@platformatic/runtime threw an unexpected error', { cause: event.err }))
  }
  await runtime.close()
  })
package/lib/config.js CHANGED
@@ -335,13 +335,21 @@ export async function transform (config, _, context) {
  // like adding other applications.
  }
 
- if (config.metrics === true) {
+ if (typeof config.metrics === 'boolean') {
  config.metrics = {
- enabled: true,
+ enabled: config.metrics,
  timeout: 1000
  }
  }
 
+ if (config.policies?.deny) {
+ for (const [from, to] of Object.entries(config.policies.deny)) {
+ if (typeof to === 'string') {
+ config.policies.deny[from] = [to]
+ }
+ }
+ }
+
  config.applications = applications
  config.web = undefined
  config.services = undefined
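A sketch of what these two normalizations do to an authored configuration (hypothetical application names; the transform rewrites the object in place):

Before transform(): { "metrics": false, "policies": { "deny": { "frontend": "jobs" } } }
After transform():  { "metrics": { "enabled": false, "timeout": 1000 }, "policies": { "deny": { "frontend": ["jobs"] } } }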
package/lib/management-api.js CHANGED
@@ -132,6 +132,17 @@ export async function managementApiPlugin (app, opts) {
  })
 
  app.get('/metrics', { logLevel: 'debug' }, async (req, reply) => {
+ const config = await runtime.getRuntimeConfig()
+
+ if (config.metrics?.enabled === false) {
+ reply.code(501)
+ return {
+ statusCode: 501,
+ error: 'Not Implemented',
+ message: 'Metrics are disabled.'
+ }
+ }
+
  const accepts = req.accepts()
 
  if (!accepts.type('text/plain') && accepts.type('application/json')) {
@@ -145,6 +156,23 @@ export async function managementApiPlugin (app, opts) {
  })
 
  app.get('/metrics/live', { websocket: true }, async socket => {
+ const config = await runtime.getRuntimeConfig()
+
+ if (config.metrics?.enabled === false) {
+ socket.send(
+ JSON.stringify({
+ statusCode: 501,
+ error: 'Not Implemented',
+ message: 'Metrics are disabled.'
+ }),
+ () => {
+ socket.close()
+ }
+ )
+
+ return
+ }
+
  const cachedMetrics = runtime.getCachedMetrics()
  if (cachedMetrics.length > 0) {
  const serializedMetrics = cachedMetrics.map(metric => JSON.stringify(metric)).join('\n')
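With metrics disabled, both management API routes now answer with the same payload instead of collecting anything: the HTTP route responds with status 501, and the WebSocket route sends the payload once and then closes the socket.

{ "statusCode": 501, "error": "Not Implemented", "message": "Metrics are disabled." }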
package/lib/metrics.js ADDED
@@ -0,0 +1,73 @@
+ import { readFile } from 'node:fs/promises'
+ import si from 'systeminformation'
+
+ async function readNumberFromCgroupFile (path) {
+ try {
+ const raw = (await readFile(path, 'utf8')).trim()
+ if (raw === 'max') return null
+ return Number(raw)
+ } catch {
+ return null
+ }
+ }
+
+ async function getCgroupV2MemoryInfo () {
+ let [total, used] = await Promise.all([
+ readNumberFromCgroupFile('/sys/fs/cgroup/memory.max'),
+ readNumberFromCgroupFile('/sys/fs/cgroup/memory.current')
+ ])
+ if (total == null && used == null) return null
+
+ if (total === null) {
+ const mem = await si.mem()
+ total = mem.total
+ }
+
+ return { scope: 'cgroup-v2', used, total }
+ }
+
+ async function getCgroupV1MemoryInfo () {
+ let [total, used] = await Promise.all([
+ readNumberFromCgroupFile('/sys/fs/cgroup/memory/memory.limit_in_bytes'),
+ readNumberFromCgroupFile('/sys/fs/cgroup/memory/memory.usage_in_bytes')
+ ])
+ if (total == null && used == null) return null
+
+ // Some v1 setups report 9.22e18 (≈unlimited)
+ if (total === null || total > 1e18) {
+ const mem = await si.mem()
+ total = mem.total
+ }
+
+ return { scope: 'cgroup-v1', used, total }
+ }
+
+ async function readHostMemoryInfo () {
+ const mem = await si.mem()
+ return { scope: 'host', used: mem.active, total: mem.total }
+ }
+
+ export async function getMemoryInfo (options = {}) {
+ const scope = options.scope
+
+ if (scope === 'cgroup-v2') {
+ return getCgroupV2MemoryInfo()
+ }
+ if (scope === 'cgroup-v1') {
+ return getCgroupV1MemoryInfo()
+ }
+ if (scope === 'host') {
+ return readHostMemoryInfo()
+ }
+
+ let memInfo = await getCgroupV2MemoryInfo()
+
+ if (!memInfo) {
+ memInfo = await getCgroupV1MemoryInfo()
+ }
+ if (!memInfo) {
+ memInfo = await readHostMemoryInfo()
+ }
+
+ return memInfo
+ }
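A minimal usage sketch of the new helper; the relative import path refers to the internal module added above, and the values in the comments are illustrative.

import { getMemoryInfo } from './metrics.js'

// Auto-detection order: cgroup v2, then cgroup v1, then host memory via systeminformation.
const memInfo = await getMemoryInfo()
console.log(memInfo) // e.g. { scope: 'cgroup-v2', used: 268435456, total: 2147483648 }

// A specific scope can also be forced:
const hostInfo = await getMemoryInfo({ scope: 'host' }) // { scope: 'host', used, total } from si.mem()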
package/lib/policies.js ADDED
@@ -0,0 +1,23 @@
+ export function createChannelCreationHook (config) {
+ const denyList = config.policies?.deny
+
+ if (typeof denyList === 'undefined') {
+ return undefined
+ }
+
+ const forbidden = new Set()
+
+ for (let [first, unalloweds] of Object.entries(denyList)) {
+ for (let second of unalloweds) {
+ first = first.toLowerCase()
+ second = second.toLowerCase()
+
+ forbidden.add(`${first}:${second}`)
+ forbidden.add(`${second}:${first}`)
+ }
+ }
+
+ return function channelCreationHook (first, second) {
+ return !forbidden.has(`${first.toLowerCase()}:${second.toLowerCase()}`)
+ }
+ }
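A sketch of how the new hook behaves in isolation (application names are hypothetical). Note that the hook iterates the deny values as arrays, which is why transform() in lib/config.js normalizes single strings into arrays first.

import { createChannelCreationHook } from './policies.js'

const hook = createChannelCreationHook({
  policies: { deny: { payments: ['jobs'] } }
})

hook('payments', 'jobs') // false: channel creation between the two applications is refused
hook('JOBS', 'Payments') // false: the check is symmetric and case-insensitive
hook('payments', 'api')  // true: this pair is not in the deny list

// With no policies configured, createChannelCreationHook(config) returns undefined,
// so the thread interceptor keeps its default behaviour.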
package/lib/runtime.js CHANGED
@@ -40,6 +40,8 @@ import {
  } from './errors.js'
  import { abstractLogger, createLogger } from './logger.js'
  import { startManagementApi } from './management-api.js'
+ import { getMemoryInfo } from './metrics.js'
+ import { createChannelCreationHook } from './policies.js'
  import { startPrometheusServer } from './prom-server.js'
  import ScalingAlgorithm from './scaling-algorithm.js'
  import { startScheduler } from './scheduler.js'
@@ -58,8 +60,8 @@ import {
  kStderrMarker,
  kWorkerId,
  kWorkersBroadcast,
- kWorkerStatus,
- kWorkerStartTime
+ kWorkerStartTime,
+ kWorkerStatus
  } from './worker/symbols.js'
 
  const kWorkerFile = join(import.meta.dirname, 'worker/main.js')
@@ -112,6 +114,8 @@ export class Runtime extends EventEmitter {
  #sharedHttpCache
  #scheduler
 
+ #channelCreationHook
+
  constructor (config, context) {
  super()
  this.setMaxListeners(MAX_LISTENERS_COUNT)
@@ -124,7 +128,12 @@
  this.#concurrency = this.#context.concurrency ?? MAX_CONCURRENCY
  this.#workers = new RoundRobinMap()
  this.#url = undefined
- this.#meshInterceptor = createThreadInterceptor({ domain: '.plt.local', timeout: this.#config.applicationTimeout })
+ this.#channelCreationHook = createChannelCreationHook(this.#config)
+ this.#meshInterceptor = createThreadInterceptor({
+ domain: '.plt.local',
+ timeout: this.#config.applicationTimeout,
+ onChannelCreation: this.#channelCreationHook
+ })
  this.logger = abstractLogger // This is replaced by the real logger in init() and eventually removed in close()
  this.#status = undefined
  this.#restartingWorkers = new Map()
@@ -274,7 +283,7 @@
 
  this.#updateStatus('started')
 
- if (this.#managementApi && typeof this.#metrics === 'undefined') {
+ if (this.#config.metrics?.enabled !== false && typeof this.#metrics === 'undefined') {
  this.startCollectingMetrics()
  }
 
@@ -1605,7 +1614,7 @@
  } else {
  worker[kHealthCheckTimer].refresh()
  }
- }, interval)
+ }, interval).unref()
  }
 
  async #startWorker (
@@ -2002,7 +2011,14 @@
  }
  }
 
- async #getWorkerMessagingChannel ({ application, worker }, context) {
+ async #getWorkerMessagingChannel ({ id, application, worker }, context) {
+ if (this.#channelCreationHook?.(id, application) === false) {
+ throw new MessagingError(
+ application,
+ `Communication channels are disabled between applications "${id}" and "${application}".`
+ )
+ }
+
  const target = await this.#getWorkerById(application, worker, true, true)
 
  const { port1, port2 } = new MessageChannel()
@@ -2084,8 +2100,15 @@
  }
  }
 
- const pinoLog =
- typeof message?.level === 'number' && typeof message?.time === 'number' && typeof message?.msg === 'string'
+ let pinoLog
+
+ if (typeof message === 'object') {
+ pinoLog =
+ typeof message.level === 'number' &&
+ // We want to accept both pino raw time (number) and time as formatted string
+ (typeof message.time === 'number' || typeof message.time === 'string') &&
+ typeof message.msg === 'string'
+ }
 
  // Directly write to the Pino destination
  if (pinoLog) {
@@ -2466,35 +2489,37 @@
  async #setupVerticalScaler () {
  const fixedWorkersCount = this.#config.workers
  if (fixedWorkersCount !== undefined) {
- this.logger.warn(
- `Vertical scaler disabled because the "workers" configuration is set to ${fixedWorkersCount}`
- )
+ this.logger.warn(`Vertical scaler disabled because the "workers" configuration is set to ${fixedWorkersCount}`)
  return
  }
 
  const scalerConfig = this.#config.verticalScaler
+ const memInfo = await getMemoryInfo()
+ const memScope = memInfo.scope
 
  scalerConfig.maxTotalWorkers ??= os.availableParallelism()
+ scalerConfig.maxTotalMemory ??= memInfo.total * 0.9
  scalerConfig.maxWorkers ??= scalerConfig.maxTotalWorkers
  scalerConfig.minWorkers ??= 1
  scalerConfig.cooldownSec ??= 60
  scalerConfig.scaleUpELU ??= 0.8
  scalerConfig.scaleDownELU ??= 0.2
- scalerConfig.minELUDiff ??= 0.2
  scalerConfig.scaleIntervalSec ??= 60
- scalerConfig.timeWindowSec ??= 60
+ scalerConfig.timeWindowSec ??= 10
+ scalerConfig.scaleDownTimeWindowSec ??= 60
  scalerConfig.gracePeriod ??= 30 * 1000
  scalerConfig.applications ??= {}
 
  const maxTotalWorkers = scalerConfig.maxTotalWorkers
+ const maxTotalMemory = scalerConfig.maxTotalMemory
  const maxWorkers = scalerConfig.maxWorkers
  const minWorkers = scalerConfig.minWorkers
  const cooldown = scalerConfig.cooldownSec
  const scaleUpELU = scalerConfig.scaleUpELU
  const scaleDownELU = scalerConfig.scaleDownELU
- const minELUDiff = scalerConfig.minELUDiff
  const scaleIntervalSec = scalerConfig.scaleIntervalSec
  const timeWindowSec = scalerConfig.timeWindowSec
+ const scaleDownTimeWindowSec = scalerConfig.scaleDownTimeWindowSec
  const applicationsConfigs = scalerConfig.applications
  const gracePeriod = scalerConfig.gracePeriod
  const healthCheckInterval = 1000
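For reference, a sketch of the defaults that #setupVerticalScaler now resolves when nothing is configured; memory values are in bytes and come from the detection helper in lib/metrics.js.

import os from 'node:os'
import { getMemoryInfo } from './metrics.js'

const memInfo = await getMemoryInfo() // cgroup v2 → cgroup v1 → host fallback
const resolvedDefaults = {
  maxTotalWorkers: os.availableParallelism(),
  maxTotalMemory: memInfo.total * 0.9, // 90% of the detected total memory
  maxWorkers: os.availableParallelism(), // same default as maxTotalWorkers
  minWorkers: 1,
  cooldownSec: 60,
  scaleUpELU: 0.8,
  scaleDownELU: 0.2,
  scaleIntervalSec: 60,
  timeWindowSec: 10, // observation window for scale-up decisions
  scaleDownTimeWindowSec: 60, // observation window for scale-down decisions
  gracePeriod: 30 * 1000
}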
@@ -2505,7 +2530,7 @@
  if (application.entrypoint && !features.node.reusePort) {
  this.logger.warn(
  `The "${application.id}" application cannot be scaled because it is an entrypoint` +
- ' and the "reusePort" feature is not available in your OS.'
+ ' and the "reusePort" feature is not available in your OS.'
  )
 
  applicationsConfigs[application.id] = {
@@ -2517,7 +2542,7 @@
  if (application.workers !== undefined) {
  this.logger.warn(
  `The "${application.id}" application cannot be scaled because` +
- ` it has a fixed number of workers (${application.workers}).`
+ ` it has a fixed number of workers (${application.workers}).`
  )
  applicationsConfigs[application.id] = {
  minWorkers: application.workers,
@@ -2558,8 +2583,8 @@
  maxTotalWorkers,
  scaleUpELU,
  scaleDownELU,
- minELUDiff,
- timeWindowSec,
+ scaleUpTimeWindowSec: timeWindowSec,
+ scaleDownTimeWindowSec,
  applications: applicationsConfigs
  })
 
@@ -2569,10 +2594,7 @@
  const now = Date.now()
 
  for (const worker of this.#workers.values()) {
- if (
- worker[kWorkerStatus] !== 'started' ||
- worker[kWorkerStartTime] + gracePeriod > now
- ) {
+ if (worker[kWorkerStatus] !== 'started' || worker[kWorkerStartTime] + gracePeriod > now) {
  continue
  }
 
@@ -2583,7 +2605,9 @@
  scalingAlgorithm.addWorkerHealthInfo({
  workerId: worker[kId],
  applicationId: worker[kApplicationId],
- elu: health.elu
+ elu: health.elu,
+ heapUsed: health.heapUsed,
+ heapTotal: health.heapTotal
  })
 
  if (health.elu > scaleUpELU) {
@@ -2611,6 +2635,7 @@
 
  try {
  const workersInfo = await this.getWorkers()
+ const mem = await getMemoryInfo({ scope: memScope })
 
  const appsWorkersInfo = {}
  for (const worker of Object.values(workersInfo)) {
@@ -2621,7 +2646,10 @@
  appsWorkersInfo[applicationId]++
  }
 
- const recommendations = scalingAlgorithm.getRecommendations(appsWorkersInfo)
+ const availableMemory = maxTotalMemory - mem.used
+ const recommendations = scalingAlgorithm.getRecommendations(appsWorkersInfo, {
+ availableMemory
+ })
  if (recommendations.length > 0) {
  await applyRecommendations(recommendations)
  lastScaling = Date.now()
package/lib/scaling-algorithm.js CHANGED
@@ -2,60 +2,63 @@ class ScalingAlgorithm {
  #scaleUpELU
  #scaleDownELU
  #maxTotalWorkers
- #timeWindowSec
- #appsELUs
- #minELUDiff
+ #scaleUpTimeWindowSec
+ #scaleDownTimeWindowSec
+ #appsMetrics
  #appsConfigs
 
  constructor (options = {}) {
  this.#scaleUpELU = options.scaleUpELU ?? 0.8
  this.#scaleDownELU = options.scaleDownELU ?? 0.2
- this.#maxTotalWorkers = options.maxTotalWorkers
- this.#minELUDiff = options.minELUDiff ?? 0.2
- this.#timeWindowSec = options.timeWindowSec ?? 60
+ this.#maxTotalWorkers = options.maxTotalWorkers ?? Infinity
+ this.#scaleUpTimeWindowSec = options.scaleUpTimeWindowSec ?? 10
+ this.#scaleDownTimeWindowSec = options.scaleDownTimeWindowSec ?? 60
  this.#appsConfigs = options.applications ?? {}
 
- this.#appsELUs = {}
+ this.#appsMetrics = {}
  }
 
  addWorkerHealthInfo (healthInfo) {
- const { workerId, applicationId, elu } = healthInfo
+ const { workerId, applicationId, elu, heapUsed } = healthInfo
  const timestamp = Date.now()
 
- if (!this.#appsELUs[applicationId]) {
- this.#appsELUs[applicationId] = {}
+ if (!this.#appsMetrics[applicationId]) {
+ this.#appsMetrics[applicationId] = {}
  }
- if (!this.#appsELUs[applicationId][workerId]) {
- this.#appsELUs[applicationId][workerId] = []
+ if (!this.#appsMetrics[applicationId][workerId]) {
+ this.#appsMetrics[applicationId][workerId] = []
  }
- this.#appsELUs[applicationId][workerId].push({ elu, timestamp })
+ this.#appsMetrics[applicationId][workerId].push({
+ elu,
+ timestamp,
+ heapUsed
+ })
  this.#removeOutdatedAppELUs(applicationId)
  }
 
- getRecommendations (appsWorkersInfo) {
+ getRecommendations (appsWorkersInfo, options = {}) {
  let totalWorkersCount = 0
- let appsInfo = []
+ let totalAvailableMemory = options.availableMemory ?? Infinity
+
+ const appsInfo = []
 
  for (const applicationId in appsWorkersInfo) {
  const workersCount = appsWorkersInfo[applicationId]
- const elu = this.#calculateAppAvgELU(applicationId)
- appsInfo.push({ applicationId, workersCount, elu })
+
+ const { heapUsed } = this.#calculateAppAvgMetrics(applicationId)
+
+ appsInfo.push({
+ applicationId,
+ workersCount,
+ avgHeapUsed: heapUsed,
+ })
+
  totalWorkersCount += workersCount
  }
 
- appsInfo = appsInfo.sort(
- (app1, app2) => {
- if (app1.elu > app2.elu) return 1
- if (app1.elu < app2.elu) return -1
- if (app1.workersCount < app2.workersCount) return 1
- if (app1.workersCount > app2.workersCount) return -1
- return 0
- }
- )
-
  const recommendations = []
 
- for (const { applicationId, elu, workersCount } of appsInfo) {
+ for (const { applicationId, workersCount, avgHeapUsed } of appsInfo) {
  const appMinWorkers = this.#appsConfigs[applicationId]?.minWorkers ?? 1
  const appMaxWorkers = this.#appsConfigs[applicationId]?.maxWorkers ?? this.#maxTotalWorkers
 
@@ -65,7 +68,10 @@ class ScalingAlgorithm {
  workersCount: appMinWorkers,
  direction: 'up'
  })
- totalWorkersCount += appMinWorkers - workersCount
+
+ const newWorkersCount = appMinWorkers - workersCount
+ totalWorkersCount += newWorkersCount
+ totalAvailableMemory += newWorkersCount * avgHeapUsed
  continue
  }
 
@@ -75,103 +81,122 @@ class ScalingAlgorithm {
  workersCount: appMaxWorkers,
  direction: 'down'
  })
- totalWorkersCount -= workersCount - appMaxWorkers
+
+ const removedWorkersCount = workersCount - appMaxWorkers
+ totalWorkersCount -= removedWorkersCount
+ totalAvailableMemory -= removedWorkersCount * avgHeapUsed
  continue
  }
 
- if (elu < this.#scaleDownELU && workersCount > appMinWorkers) {
- recommendations.push({
- applicationId,
- workersCount: workersCount - 1,
- direction: 'down'
- })
- totalWorkersCount--
+ if (workersCount > appMinWorkers) {
+ const recommendation = this.#getApplicationScaleRecommendation(applicationId)
+ if (recommendation.recommendation === 'scaleDown') {
+ recommendations.push({
+ applicationId,
+ workersCount: workersCount - 1,
+ direction: 'down'
+ })
+
+ const removedWorkersCount = 1
+ totalWorkersCount -= removedWorkersCount
+ totalAvailableMemory -= removedWorkersCount * avgHeapUsed
+ }
  }
  }
 
- for (const scaleUpCandidate of appsInfo.toReversed()) {
- if (scaleUpCandidate.elu < this.#scaleUpELU) break
-
- const { applicationId, workersCount } = scaleUpCandidate
-
- const isScaled = recommendations.some(
- r => r.applicationId === applicationId &&
- r.direction === 'up'
- )
- if (isScaled) continue
-
- const appMaxWorkers = this.#appsConfigs[applicationId]?.maxWorkers ?? this.#maxTotalWorkers
- if (workersCount >= appMaxWorkers) continue
-
- if (totalWorkersCount >= this.#maxTotalWorkers) {
- let scaleDownCandidate = null
- for (const app of appsInfo) {
- const appMinWorkers = this.#appsConfigs[app.applicationId]?.minWorkers ?? 1
- if (app.workersCount > appMinWorkers) {
- scaleDownCandidate = app
- break
+ if (totalWorkersCount < this.#maxTotalWorkers) {
+ let scaleUpCandidate = null
+
+ for (const { applicationId, workersCount, avgHeapUsed } of appsInfo) {
+ const appMaxWorkers = this.#appsConfigs[applicationId]?.maxWorkers ?? this.#maxTotalWorkers
+ if (workersCount >= appMaxWorkers) continue
+ if (avgHeapUsed >= totalAvailableMemory) continue
+
+ const isScaled = recommendations.some(
+ r => r.applicationId === applicationId
+ )
+ if (isScaled) continue
+
+ const recommendation = this.#getApplicationScaleRecommendation(applicationId)
+ if (recommendation.recommendation !== 'scaleUp') continue
+
+ if (
+ !scaleUpCandidate ||
+ (recommendation.scaleUpELU > scaleUpCandidate.scaleUpELU) ||
+ (recommendation.scaleUpELU === scaleUpCandidate.scaleUpELU &&
+ workersCount < scaleUpCandidate.workersCount
+ )
+ ) {
+ scaleUpCandidate = {
+ applicationId,
+ workersCount,
+ heapUsed: recommendation.avgHeapUsage,
+ elu: recommendation.scaleUpELU
  }
  }
+ }
 
- if (scaleDownCandidate) {
- const eluDiff = scaleUpCandidate.elu - scaleDownCandidate.elu
- const workersDiff = scaleDownCandidate.workersCount - scaleUpCandidate.workersCount
-
- if (eluDiff >= this.#minELUDiff || workersDiff >= 2) {
- recommendations.push({
- applicationId: scaleDownCandidate.applicationId,
- workersCount: scaleDownCandidate.workersCount - 1,
- direction: 'down'
- })
- recommendations.push({
- applicationId,
- workersCount: workersCount + 1,
- direction: 'up'
- })
- }
- }
- } else {
+ if (scaleUpCandidate) {
  recommendations.push({
- applicationId,
- workersCount: workersCount + 1,
+ applicationId: scaleUpCandidate.applicationId,
+ workersCount: scaleUpCandidate.workersCount + 1,
  direction: 'up'
  })
  totalWorkersCount++
+ totalAvailableMemory -= scaleUpCandidate.heapUsed
  }
- break
  }
 
  return recommendations
  }
 
- #calculateAppAvgELU (applicationId) {
+ #calculateAppAvgMetrics (applicationId, options = {}) {
  this.#removeOutdatedAppELUs(applicationId)
 
- const appELUs = this.#appsELUs[applicationId]
- if (!appELUs) return 0
+ const appMetrics = this.#appsMetrics[applicationId]
+ if (!appMetrics) return { elu: 0, heapUsed: 0 }
+
+ const defaultTimeWindow = this.#getMetricsTimeWindow()
+ const timeWindow = options.timeWindow ?? defaultTimeWindow
 
  let eluSum = 0
- let eluCount = 0
+ let heapUsedSum = 0
+ let count = 0
 
- for (const workerId in appELUs) {
- const workerELUs = appELUs[workerId]
- const workerELUSum = workerELUs.reduce(
- (sum, workerELU) => sum + workerELU.elu, 0
- )
- eluSum += workerELUSum / workerELUs.length
- eluCount++
- }
+ const now = Date.now()
+
+ for (const workerId in appMetrics) {
+ const workerMetrics = appMetrics[workerId]
+
+ let workerELUSum = 0
+ let workerHeapUsedSum = 0
+ let metricCount = 0
+
+ for (const metric of workerMetrics) {
+ if (metric.timestamp < now - timeWindow) continue
+ workerELUSum += metric.elu
+ workerHeapUsedSum += metric.heapUsed
+ metricCount++
+ }
 
- if (eluCount === 0) return 0
+ if (metricCount === 0) continue
 
- return Math.round(eluSum / eluCount * 100) / 100
+ eluSum += workerELUSum / metricCount
+ heapUsedSum += workerHeapUsedSum / metricCount
+ count++
+ }
+
+ const elu = Math.round(eluSum / count * 100) / 100
+ const heapUsed = Math.round(heapUsedSum / count * 100) / 100
+ return { elu, heapUsed }
  }
 
  #removeOutdatedAppELUs (applicationId) {
- const appELUs = this.#appsELUs[applicationId]
+ const appELUs = this.#appsMetrics[applicationId]
  if (!appELUs) return
 
  const now = Date.now()
+ const timeWindow = this.#getMetricsTimeWindow()
 
  for (const workerId in appELUs) {
  const workerELUs = appELUs[workerId]
@@ -179,7 +204,7 @@ class ScalingAlgorithm {
  let firstValidIndex = -1
  for (let i = 0; i < workerELUs.length; i++) {
  const timestamp = workerELUs[i].timestamp
- if (timestamp >= now - this.#timeWindowSec * 1000) {
+ if (timestamp >= now - timeWindow) {
  firstValidIndex = i
  break
  }
@@ -199,6 +224,30 @@ class ScalingAlgorithm {
  }
  }
  }
+
+ #getMetricsTimeWindow () {
+ return Math.max(this.#scaleUpTimeWindowSec, this.#scaleDownTimeWindowSec) * 1000
+ }
+
+ #getApplicationScaleRecommendation (applicationId) {
+ const { elu: scaleUpELU } = this.#calculateAppAvgMetrics(applicationId, {
+ timeWindow: this.#scaleUpTimeWindowSec * 1000
+ })
+ const { elu: scaleDownELU } = this.#calculateAppAvgMetrics(applicationId, {
+ timeWindow: this.#scaleDownTimeWindowSec * 1000
+ })
+ const { heapUsed: avgHeapUsage } = this.#calculateAppAvgMetrics(applicationId)
+
+ let recommendation = null
+ if (scaleUpELU > this.#scaleUpELU) {
+ recommendation = 'scaleUp'
+ }
+ if (scaleDownELU < this.#scaleDownELU) {
+ recommendation = 'scaleDown'
+ }
+
+ return { recommendation, scaleUpELU, scaleDownELU, avgHeapUsage }
+ }
  }
 
  export default ScalingAlgorithm
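A minimal sketch of how the reworked algorithm could be exercised in isolation; the application ID and numbers are illustrative and the import points at the internal module.

import ScalingAlgorithm from './scaling-algorithm.js'

const algorithm = new ScalingAlgorithm({
  maxTotalWorkers: 4,
  scaleUpELU: 0.8,
  scaleDownELU: 0.2,
  scaleUpTimeWindowSec: 10,
  scaleDownTimeWindowSec: 60
})

// Feed health samples collected from the workers (ELU plus heap usage in bytes).
algorithm.addWorkerHealthInfo({ workerId: 1, applicationId: 'api', elu: 0.95, heapUsed: 150e6 })

// Ask for recommendations, capping scale-ups by the memory still available.
const recommendations = algorithm.getRecommendations(
  { api: 1 }, // current number of workers per application
  { availableMemory: 1024 * 1024 * 1024 }
)
// Should yield something like [{ applicationId: 'api', workersCount: 2, direction: 'up' }]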
@@ -5,6 +5,7 @@ import { pathToFileURL } from 'node:url'
  import { parentPort, workerData } from 'node:worker_threads'
  import { Agent, Client, Pool, setGlobalDispatcher } from 'undici'
  import { wire } from 'undici-thread-interceptor'
+ import { createChannelCreationHook } from '../policies.js'
  import { RemoteCacheStore, httpCacheInterceptor } from './http-cache.js'
  import { kInterceptors } from './symbols.js'
 
@@ -171,6 +172,7 @@ function createThreadInterceptor (runtimeConfig) {
  domain: '.plt.local',
  port: parentPort,
  timeout: runtimeConfig.applicationTimeout,
+ onChannelCreation: createChannelCreationHook(runtimeConfig),
  ...telemetryHooks
  })
  return threadDispatcher
@@ -1,5 +1,5 @@
- import { executeWithTimeout, ensureLoggableError, kTimeout } from '@platformatic/foundation'
- import { ITC, parseRequest, generateRequest, generateResponse, sanitize, errors } from '@platformatic/itc'
+ import { ensureLoggableError, executeWithTimeout, kTimeout } from '@platformatic/foundation'
+ import { errors, generateRequest, generateResponse, ITC, parseRequest, sanitize } from '@platformatic/itc'
  import { MessagingError } from '../errors.js'
  import { RoundRobinMap } from './round-robin-map.js'
  import { kITC, kWorkersBroadcast } from './symbols.js'
@@ -7,6 +7,7 @@ import { kITC, kWorkersBroadcast } from './symbols.js'
  const kPendingResponses = Symbol('plt.messaging.pendingResponses')
 
  export class MessagingITC extends ITC {
+ #id
  #timeout
  #listener
  #closeResolvers
@@ -22,6 +23,7 @@
  name: `${id}-messaging`
  })
 
+ this.#id = id
  this.#timeout = runtimeConfig.messagingTimeout
  this.#workers = new RoundRobinMap()
  this.#sources = new Set()
@@ -67,7 +69,11 @@
  // Use twice the value here as a fallback measure. The target handler in the main thread is forwarding
  // the request to the worker, using executeWithTimeout with the user set timeout value.
  const channel = await executeWithTimeout(
- globalThis[kITC].send('getWorkerMessagingChannel', { application: worker.application, worker: worker.worker }),
+ globalThis[kITC].send('getWorkerMessagingChannel', {
+ id: this.#id,
+ application: worker.application,
+ worker: worker.worker
+ }),
  this.#timeout * 2
  )
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@platformatic/runtime",
- "version": "3.10.0",
+ "version": "3.12.0",
  "description": "",
  "main": "index.js",
  "type": "module",
@@ -18,8 +18,8 @@
  "@fastify/compress": "^8.0.0",
  "@fastify/express": "^4.0.0",
  "@fastify/formbody": "^8.0.0",
- "autocannon": "^8.0.0",
  "atomic-sleep": "^1.0.0",
+ "autocannon": "^8.0.0",
  "c8": "^10.0.0",
  "cleaner-spec-reporter": "^0.5.0",
  "eslint": "9",
@@ -35,14 +35,14 @@
  "typescript": "^5.5.4",
  "undici-oidc-interceptor": "^0.5.0",
  "why-is-node-running": "^2.2.2",
- "@platformatic/composer": "3.10.0",
- "@platformatic/db": "3.10.0",
- "@platformatic/node": "3.10.0",
- "@platformatic/gateway": "3.10.0",
- "@platformatic/service": "3.10.0",
- "@platformatic/sql-graphql": "3.10.0",
- "@platformatic/sql-mapper": "3.10.0",
- "@platformatic/wattpm-pprof-capture": "3.10.0"
+ "@platformatic/composer": "3.12.0",
+ "@platformatic/db": "3.12.0",
+ "@platformatic/node": "3.12.0",
+ "@platformatic/gateway": "3.12.0",
+ "@platformatic/sql-graphql": "3.12.0",
+ "@platformatic/service": "3.12.0",
+ "@platformatic/sql-mapper": "3.12.0",
+ "@platformatic/wattpm-pprof-capture": "3.12.0"
  },
  "dependencies": {
  "@fastify/accepts": "^5.0.0",
@@ -69,15 +69,16 @@
  "prom-client": "^15.1.2",
  "semgrator": "^0.3.0",
  "sonic-boom": "^4.2.0",
+ "systeminformation": "^5.27.11",
  "undici": "^7.0.0",
- "undici-thread-interceptor": "^0.14.0",
+ "undici-thread-interceptor": "^0.15.0",
  "ws": "^8.16.0",
- "@platformatic/basic": "3.10.0",
- "@platformatic/foundation": "3.10.0",
- "@platformatic/itc": "3.10.0",
- "@platformatic/generators": "3.10.0",
- "@platformatic/metrics": "3.10.0",
- "@platformatic/telemetry": "3.10.0"
+ "@platformatic/foundation": "3.12.0",
+ "@platformatic/generators": "3.12.0",
+ "@platformatic/basic": "3.12.0",
+ "@platformatic/itc": "3.12.0",
+ "@platformatic/metrics": "3.12.0",
+ "@platformatic/telemetry": "3.12.0"
  },
  "engines": {
  "node": ">=22.19.0"
package/schema.json CHANGED
@@ -1,5 +1,5 @@
  {
- "$id": "https://schemas.platformatic.dev/@platformatic/runtime/3.10.0.json",
+ "$id": "https://schemas.platformatic.dev/@platformatic/runtime/3.12.0.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Platformatic Runtime Config",
  "type": "object",
@@ -2001,6 +2001,10 @@
  "type": "number",
  "minimum": 1
  },
+ "maxTotalMemory": {
+ "type": "number",
+ "minimum": 0
+ },
  "minWorkers": {
  "type": "number",
  "minimum": 1
@@ -2019,12 +2023,11 @@
  "minimum": 0,
  "maximum": 1
  },
- "minELUDiff": {
+ "timeWindowSec": {
  "type": "number",
- "minimum": 0,
- "maximum": 1
+ "minimum": 0
  },
- "timeWindowSec": {
+ "scaleDownTimeWindowSec": {
  "type": "number",
  "minimum": 0
  },
@@ -2180,6 +2183,34 @@
  "callbackUrl"
  ]
  }
+ },
+ "policies": {
+ "type": "object",
+ "properties": {
+ "deny": {
+ "type": "object",
+ "patternProperties": {
+ "^.*$": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "minItems": 1
+ }
+ ]
+ }
+ }
+ }
+ },
+ "required": [
+ "deny"
+ ],
+ "additionalProperties": false
  }
  },
  "anyOf": [