@platformatic/runtime 2.74.3 → 2.75.0-alpha.1

package/README.md CHANGED
@@ -1,6 +1,6 @@
  # @platformatic/runtime
 
- Check out the full documentation for Platformatic Runtime on [our website](https://docs.platformatic.dev/docs/runtime/overview).
+ Check out the full documentation for Platformatic Runtime on [our website](https://docs.platformatic.dev/docs/reference/runtime/overview).
 
  ## Install
 

package/config.d.ts CHANGED
@@ -5,7 +5,7 @@
   * and run json-schema-to-typescript to regenerate this file.
   */
 
- export type HttpsSchemasPlatformaticDevPlatformaticRuntime2743Json = {
+ export type HttpsSchemasPlatformaticDevPlatformaticRuntime2750Alpha1Json = {
    [k: string]: unknown;
  } & {
    $schema?: string;
@@ -129,6 +129,7 @@ export type HttpsSchemasPlatformaticDevPlatformaticRuntime2743Json = {
    };
    startTimeout?: number;
    restartOnError?: boolean | number;
+   exitOnUnhandledErrors?: boolean;
    gracefulShutdown?: {
      runtime: number | string;
      service: number | string;

package/lib/config.js CHANGED
@@ -16,6 +16,33 @@ const { schema } = require('./schema')
  const upgrade = require('./upgrade')
  const { parseArgs } = require('node:util')
 
+ function autoDetectPprofCapture (config) {
+   // Check if package is installed
+   try {
+     let pprofCapturePath
+     try {
+       pprofCapturePath = require.resolve('@platformatic/watt-pprof-capture')
+     } catch (err) {
+       pprofCapturePath = require.resolve('../../watt-pprof-capture/index.js')
+     }
+
+     // Add to preload if not already present
+     if (!config.preload) {
+       config.preload = []
+     } else if (typeof config.preload === 'string') {
+       config.preload = [config.preload]
+     }
+
+     if (!config.preload.includes(pprofCapturePath)) {
+       config.preload.push(pprofCapturePath)
+     }
+   } catch (err) {
+     // Package not installed, skip silently
+   }
+
+   return config
+ }
+
  async function _transformConfig (configManager, args) {
    const config = configManager.current
 
@@ -90,6 +117,41 @@ async function _transformConfig (configManager, args) {
 
    let hasValidEntrypoint = false
 
+   // Validate and coerce workers values early to avoid runtime hangs when invalid
+   function coercePositiveInteger (value) {
+     if (typeof value === 'number') {
+       if (!Number.isInteger(value) || value < 1) return null
+       return value
+     }
+     if (typeof value === 'string') {
+       // Trim to handle accidental spaces
+       const trimmed = value.trim()
+       if (trimmed.length === 0) return null
+       const num = Number(trimmed)
+       if (!Number.isFinite(num) || !Number.isInteger(num) || num < 1) return null
+       return num
+     }
+     return null
+   }
+
+   function raiseInvalidWorkersError (location, received, hint) {
+     const extra = hint ? ` (${hint})` : ''
+     throw new errors.InvalidArgumentError(
+       `${location} workers must be a positive integer; received "${received}"${extra}`
+     )
+   }
+
+   // Root-level workers
+   if (typeof config.workers !== 'undefined') {
+     const coerced = coercePositiveInteger(config.workers)
+     if (coerced === null) {
+       const raw = configManager.currentRaw?.workers
+       const hint = typeof raw === 'string' && /\{.*\}/.test(raw) ? 'check your environment variable' : ''
+       raiseInvalidWorkersError('Runtime', config.workers, hint)
+     }
+     config.workers = coerced
+   }
+
    for (let i = 0; i < services.length; ++i) {
      const service = services[i]
 
@@ -156,6 +218,17 @@ async function _transformConfig (configManager, args) {
      }
    }
 
+   // Validate and coerce per-service workers
+   if (typeof service.workers !== 'undefined') {
+     const coerced = coercePositiveInteger(service.workers)
+     if (coerced === null) {
+       const raw = configManager.currentRaw?.services?.[i]?.workers
+       const hint = typeof raw === 'string' && /\{.*\}/.test(raw) ? 'check your environment variable' : ''
+       raiseInvalidWorkersError(`Service "${service.id}"`, service.workers, hint)
+     }
+     service.workers = coerced
+   }
+
    service.entrypoint = service.id === config.entrypoint
    service.dependencies = []
    service.localServiceEnvVars = new Map()
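
For illustration only: how the coercion introduced above treats a few representative `workers` values. The helper below mirrors `coercePositiveInteger` and is not part of the package; the sample inputs (including the unresolved `{PLT_WORKERS}` placeholder) are assumptions.

```js
// Mirror of the coercion rules above, for illustration only (not the package's export).
function coercePositiveInteger (value) {
  if (typeof value === 'number') return Number.isInteger(value) && value >= 1 ? value : null
  if (typeof value === 'string') {
    const trimmed = value.trim()
    if (trimmed.length === 0) return null
    const num = Number(trimmed)
    return Number.isInteger(num) && num >= 1 ? num : null
  }
  return null
}

console.log(coercePositiveInteger(2))                // 2
console.log(coercePositiveInteger(' 3 '))            // 3 (accidental spaces are trimmed)
console.log(coercePositiveInteger(0))                // null -> InvalidArgumentError in _transformConfig
console.log(coercePositiveInteger(1.5))              // null
console.log(coercePositiveInteger('{PLT_WORKERS}'))  // null; the error hint suggests checking the env variable
```
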
@@ -226,6 +299,9 @@ async function _transformConfig (configManager, args) {
        configManager.current.restartOnError = 0
      }
    }
+
+   // Auto-detect and add pprof capture if available
+   autoDetectPprofCapture(configManager.current)
  }
 
  async function platformaticRuntime () {
@@ -358,5 +434,6 @@ function parseInspectorOptions (configManager) {
  module.exports = {
    parseInspectorOptions,
    platformaticRuntime,
-   wrapConfigInRuntimeConfig
+   wrapConfigInRuntimeConfig,
+   autoDetectPprofCapture
  }
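
A minimal sketch of what the newly exported `autoDetectPprofCapture` does to a runtime config. The deep require path and the sample config are assumptions; the resolved module path in the comment is illustrative.

```js
// Hypothetical usage of the helper exported above; the require path is an assumption.
const { autoDetectPprofCapture } = require('@platformatic/runtime/lib/config')

const config = { preload: './instrument.js' }
autoDetectPprofCapture(config)

// If @platformatic/watt-pprof-capture is resolvable, preload is normalized to an array
// and the capture module is appended, e.g.:
//   { preload: ['./instrument.js', '/path/to/@platformatic/watt-pprof-capture/index.js'] }
// If the package is not installed, the config is returned unchanged.
```
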
@@ -1,11 +1,11 @@
  # Platformatic Runtime API
 
- This is a generated [Platformatic Runtime](https://docs.platformatic.dev/docs/runtime/overview) application.
+ This is a generated [Platformatic Runtime](https://docs.platformatic.dev/docs/reference/runtime/overview) application.
 
  ## Requirements
 
- Platformatic supports macOS, Linux and Windows ([WSL](https://docs.microsoft.com/windows/wsl/) recommended).
- You'll need to have [Node.js](https://nodejs.org/) >= v18.8.0 or >= v20.6.0
+ Platformatic supports macOS, Linux and Windows ([WSL](https://learn.microsoft.com/en-us/windows/wsl/) recommended).
+ You'll need to have [Node.js](https://nodejs.org/) (v20.16.0+ or v22.3.0+)
 
  ## Setup
 
@@ -27,7 +27,7 @@ const wrappableProperties = {
  }
 
  const engines = {
-   node: '^18.8.0 || >=20.6.0'
+   node: '^22.14.0 || ^20.6.0'
  }
 
  function getRuntimeBaseEnvVars (config) {
@@ -90,6 +90,23 @@ async function managementApiPlugin (app, opts) {
    await runtime.stopService(id)
  })
 
+ app.post('/services/:id/pprof/start', async (request, reply) => {
+   const { id } = request.params
+   app.log.debug('start profiling', { id })
+
+   const options = request.body || {}
+   await runtime.startServiceProfiling(id, options)
+   reply.code(200).send({})
+ })
+
+ app.post('/services/:id/pprof/stop', async (request, reply) => {
+   const { id } = request.params
+   app.log.debug('stop profiling', { id })
+
+   const profileData = await runtime.stopServiceProfiling(id)
+   reply.type('application/octet-stream').code(200).send(profileData)
+ })
+
  app.all('/services/:id/proxy/*', async (request, reply) => {
    const { id, '*': requestUrl } = request.params
    app.log.debug('proxy request', { id, requestUrl })
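
A minimal sketch of a client for the new profiling routes, assuming the management API is reachable over HTTP at some base URL; `MANAGEMENT_API_URL` and the `main` service id are placeholders, and the accepted start options depend on the profiling module loaded in the worker.

```js
// Hypothetical client for the routes added above; base URL and service id are placeholders.
const { writeFile } = require('node:fs/promises')

const base = process.env.MANAGEMENT_API_URL
const serviceId = 'main'

async function captureProfile () {
  // Begin profiling the service's worker(s); the body is forwarded to startServiceProfiling.
  await fetch(`${base}/services/${serviceId}/pprof/start`, {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({})
  })

  await new Promise(resolve => setTimeout(resolve, 10_000))

  // Stop profiling; the response body is the raw profile (application/octet-stream).
  const res = await fetch(`${base}/services/${serviceId}/pprof/stop`, { method: 'POST' })
  await writeFile('profile.pb', Buffer.from(await res.arrayBuffer()))
}

captureProfile()
```
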
@@ -19,13 +19,21 @@ const DEFAULT_LIVENESS_FAIL_BODY = 'ERR'
  async function checkReadiness (runtime) {
    const workers = await runtime.getWorkers()
 
-   // check if all workers are started
+   // Make sure there is at least one started worker
+   const services = new Set()
+   const started = new Set()
    for (const worker of Object.values(workers)) {
-     if (worker.status !== 'started') {
-       return { status: false }
+     services.add(worker.service)
+
+     if (worker.status === 'started') {
+       started.add(worker.service)
      }
    }
 
+   if (started.size !== services.size) {
+     return { status: false }
+   }
+
    // perform custom readiness checks, get custom response content if any
    const checks = await runtime.getCustomReadinessChecks()
 
@@ -89,7 +97,7 @@ async function startPrometheusServer (runtime, opts) {
          return reply.code(401).send({ message: 'Unauthorized' })
        }
        return done()
-     },
+     }
    })
    onRequestHook = promServer.basicAuth
  }
@@ -129,7 +137,7 @@ async function startPrometheusServer (runtime, opts) {
        reply.type('text/plain')
      }
      return (await runtime.getMetrics(reqType)).metrics
-   },
+   }
  })
 
  if (opts.readiness !== false) {
@@ -167,7 +175,7 @@ async function startPrometheusServer (runtime, opts) {
          reply.status(failStatusCode).send(failBody)
        }
      }
-   },
+   }
    })
  }
 
@@ -206,7 +214,7 @@ async function startPrometheusServer (runtime, opts) {
          reply.status(failStatusCode).send(readiness?.body || failBody)
        }
      }
-   },
+   }
    })
  }
 
@@ -215,5 +223,5 @@
  }
 
  module.exports = {
-   startPrometheusServer,
+   startPrometheusServer
  }
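
For illustration only: the `checkReadiness` change above relaxes readiness from "every worker is started" to "every service has at least one started worker". A small sketch with assumed sample data shaped like the `runtime.getWorkers()` output used above.

```js
// Assumed shape of runtime.getWorkers(), based only on the fields read above.
const workers = {
  'api:0': { service: 'api', status: 'started' },
  'api:1': { service: 'api', status: 'starting' }, // still booting
  'frontend:0': { service: 'frontend', status: 'started' }
}

const services = new Set()
const started = new Set()
for (const worker of Object.values(workers)) {
  services.add(worker.service)
  if (worker.status === 'started') started.add(worker.service)
}

// Old behavior: not ready, because 'api:1' is not started.
// New behavior: ready, because every service has at least one started worker.
console.log(started.size === services.size) // true
```
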
package/lib/runtime.js CHANGED
@@ -1,7 +1,14 @@
  'use strict'
 
  const { ITC } = require('@platformatic/itc')
- const { features, ensureLoggableError, executeWithTimeout, deepmerge, parseMemorySize, kTimeout } = require('@platformatic/utils')
+ const {
+   features,
+   ensureLoggableError,
+   executeWithTimeout,
+   deepmerge,
+   parseMemorySize,
+   kTimeout
+ } = require('@platformatic/utils')
  const { once, EventEmitter } = require('node:events')
  const { createReadStream, watch, existsSync } = require('node:fs')
  const { readdir, readFile, stat, access } = require('node:fs/promises')
@@ -718,7 +725,9 @@ class Runtime extends EventEmitter {
        const label = `${service}:${i}`
        const worker = this.#workers.get(label)
 
-       status[label] = await sendViaITC(worker, 'getCustomHealthCheck')
+       if (worker) {
+         status[label] = await sendViaITC(worker, 'getCustomHealthCheck')
+       }
      }
    }
 
@@ -733,7 +742,9 @@ class Runtime extends EventEmitter {
        const label = `${service}:${i}`
        const worker = this.#workers.get(label)
 
-       status[label] = await sendViaITC(worker, 'getCustomReadinessCheck')
+       if (worker) {
+         status[label] = await sendViaITC(worker, 'getCustomReadinessCheck')
+       }
      }
    }
 
@@ -795,6 +806,18 @@ class Runtime extends EventEmitter {
    return sendViaITC(service, 'getServiceEnv')
  }
 
+ async startServiceProfiling (id, options = {}, ensureStarted = true) {
+   const service = await this.#getServiceById(id, ensureStarted)
+
+   return sendViaITC(service, 'startProfiling', options)
+ }
+
+ async stopServiceProfiling (id, ensureStarted = true) {
+   const service = await this.#getServiceById(id, ensureStarted)
+
+   return sendViaITC(service, 'stopProfiling')
+ }
+
  async getServiceOpenapiSchema (id) {
    const service = await this.#getServiceById(id, true)
 
@@ -1095,9 +1118,7 @@ class Runtime extends EventEmitter {
 
    const promises = []
    for (const worker of this.#workers.values()) {
-     promises.push(
-       sendViaITC(worker, 'setSharedContext', sharedContext)
-     )
+     promises.push(sendViaITC(worker, 'setSharedContext', sharedContext))
    }
 
    const results = await Promise.allSettled(promises)
@@ -1202,8 +1223,12 @@ class Runtime extends EventEmitter {
      workerEnv['NODE_OPTIONS'] = `${originalNodeOptions} ${serviceConfig.nodeOptions}`.trim()
    }
 
-   const maxHeapTotal = typeof health.maxHeapTotal === 'string' ? parseMemorySize(health.maxHeapTotal) : health.maxHeapTotal
-   const maxYoungGeneration = typeof health.maxYoungGeneration === 'string' ? parseMemorySize(health.maxYoungGeneration) : health.maxYoungGeneration
+   const maxHeapTotal =
+     typeof health.maxHeapTotal === 'string' ? parseMemorySize(health.maxHeapTotal) : health.maxHeapTotal
+   const maxYoungGeneration =
+     typeof health.maxYoungGeneration === 'string'
+       ? parseMemorySize(health.maxYoungGeneration)
+       : health.maxYoungGeneration
 
    const maxOldGenerationSizeMb = Math.floor(
      (maxYoungGeneration > 0 ? maxHeapTotal - maxYoungGeneration : maxHeapTotal) / (1024 * 1024)
@@ -1379,9 +1404,7 @@ class Runtime extends EventEmitter {
    if (features.node.worker.getHeapStatistics) {
      const { used_heap_size: heapUsed, total_heap_size: heapTotal } = await worker.getHeapStatistics()
      const currentELU = worker.performance.eventLoopUtilization()
-     const elu = worker[kLastELU]
-       ? worker.performance.eventLoopUtilization(currentELU, worker[kLastELU])
-       : currentELU
+     const elu = worker[kLastELU] ? worker.performance.eventLoopUtilization(currentELU, worker[kLastELU]) : currentELU
      worker[kLastELU] = currentELU
      return { elu: elu.utilization, heapUsed, heapTotal }
    }
@@ -1531,7 +1554,16 @@ class Runtime extends EventEmitter {
    if (enabled && config.restartOnError > 0) {
      // if gracePeriod is 0, it will be set to 1 to start health checks immediately
      // however, the health event will start when the worker is started
-     this.#setupHealthCheck(config, serviceConfig, workersCount, id, index, worker, label, gracePeriod > 0 ? gracePeriod : 1)
+     this.#setupHealthCheck(
+       config,
+       serviceConfig,
+       workersCount,
+       id,
+       index,
+       worker,
+       label,
+       gracePeriod > 0 ? gracePeriod : 1
+     )
    }
  } catch (error) {
    // TODO: handle port allocation error here
@@ -1601,7 +1633,7 @@ class Runtime extends EventEmitter {
      this.logger?.info(`Stopping the ${label}...`)
    }
 
-   const exitTimeout = this.#configManager.current.gracefulShutdown.runtime
+   const exitTimeout = this.#configManager.current.gracefulShutdown.service
    const exitPromise = once(worker, 'exit')
 
    // Always send the stop message, it will shut down workers that only had ITC and interceptors setup
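
The fix above makes per-worker shutdown honor the service-level timeout instead of the runtime-level one. For context, a hedged sketch of the two knobs as they appear in the runtime config; the shape matches the `gracefulShutdown` typing in config.d.ts above, while the values and millisecond units are assumptions.

```js
// Example runtime config fragment, shown as a JS object; values are illustrative.
const runtimeConfig = {
  gracefulShutdown: {
    service: 10000, // per-worker stop timeout, now used as exitTimeout when stopping a worker
    runtime: 30000  // overall shutdown budget, also passed as the closeWithGrace delay in lib/start.js
  }
}
```
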
@@ -2100,14 +2132,29 @@ class Runtime extends EventEmitter {
    const { serviceId, config: serviceConfig, workers, health, currentWorkers, currentHealth } = update
 
    if (workers && health) {
-     const r = await this.#updateServiceWorkersAndHealth(serviceId, config, serviceConfig, workers, health, currentWorkers, currentHealth)
+     const r = await this.#updateServiceWorkersAndHealth(
+       serviceId,
+       config,
+       serviceConfig,
+       workers,
+       health,
+       currentWorkers,
+       currentHealth
+     )
      report.push({
        service: serviceId,
        workers: r.workers,
        health: r.health
      })
    } else if (health) {
-     const r = await this.#updateServiceHealth(serviceId, config, serviceConfig, currentWorkers, currentHealth, health)
+     const r = await this.#updateServiceHealth(
+       serviceId,
+       config,
+       serviceConfig,
+       currentWorkers,
+       currentHealth,
+       health
+     )
      report.push({
        service: serviceId,
        health: r.health
@@ -2181,7 +2228,10 @@ class Runtime extends EventEmitter {
        throw new errors.InvalidArgumentError('maxHeapTotal', 'must be greater than 0')
      }
    } else {
-     throw new errors.InvalidArgumentError('maxHeapTotal', 'must be a number or a string representing a memory size')
+     throw new errors.InvalidArgumentError(
+       'maxHeapTotal',
+       'must be a number or a string representing a memory size'
+     )
    }
 
    if (currentHealth.maxHeapTotal === maxHeapTotal) {
@@ -2203,7 +2253,10 @@ class Runtime extends EventEmitter {
        throw new errors.InvalidArgumentError('maxYoungGeneration', 'must be greater than 0')
      }
    } else {
-     throw new errors.InvalidArgumentError('maxYoungGeneration', 'must be a number or a string representing a memory size')
+     throw new errors.InvalidArgumentError(
+       'maxYoungGeneration',
+       'must be a number or a string representing a memory size'
+     )
    }
 
    if (currentHealth.maxYoungGeneration && currentHealth.maxYoungGeneration === maxYoungGeneration) {
@@ -2216,7 +2269,7 @@ class Runtime extends EventEmitter {
    if (workers || maxHeapTotal || maxYoungGeneration) {
      let health
      if (maxHeapTotal || maxYoungGeneration) {
-       health = { }
+       health = {}
        if (maxHeapTotal) {
          health.maxHeapTotal = maxHeapTotal
        }
@@ -2231,12 +2284,27 @@ class Runtime extends EventEmitter {
    return validatedUpdates
  }
 
- async #updateServiceWorkersAndHealth (serviceId, config, serviceConfig, workers, health, currentWorkers, currentHealth) {
+ async #updateServiceWorkersAndHealth (
+   serviceId,
+   config,
+   serviceConfig,
+   workers,
+   health,
+   currentWorkers,
+   currentHealth
+ ) {
    if (currentWorkers > workers) {
      // stop workers
      const reportWorkers = await this.#updateServiceWorkers(serviceId, config, serviceConfig, workers, currentWorkers)
      // update heap for current workers
-     const reportHealth = await this.#updateServiceHealth(serviceId, config, serviceConfig, workers, currentHealth, health)
+     const reportHealth = await this.#updateServiceHealth(
+       serviceId,
+       config,
+       serviceConfig,
+       workers,
+       currentHealth,
+       health
+     )
 
      return { workers: reportWorkers, health: reportHealth }
    } else {
@@ -2245,13 +2313,29 @@ class Runtime extends EventEmitter {
      // start new workers with new heap
      const reportWorkers = await this.#updateServiceWorkers(serviceId, config, serviceConfig, workers, currentWorkers)
      // update heap for current workers
-     const reportHealth = await this.#updateServiceHealth(serviceId, config, serviceConfig, currentWorkers, currentHealth, health, false)
+     const reportHealth = await this.#updateServiceHealth(
+       serviceId,
+       config,
+       serviceConfig,
+       currentWorkers,
+       currentHealth,
+       health,
+       false
+     )
 
      return { workers: reportWorkers, health: reportHealth }
    }
  }
 
- async #updateServiceHealth (serviceId, config, serviceConfig, currentWorkers, currentHealth, health, updateConfig = true) {
+ async #updateServiceHealth (
+   serviceId,
+   config,
+   serviceConfig,
+   currentWorkers,
+   currentHealth,
+   health,
+   updateConfig = true
+ ) {
    const report = {
      current: currentHealth,
      new: health,
@@ -2263,15 +2347,25 @@ class Runtime extends EventEmitter {
    }
 
    for (let i = 0; i < currentWorkers; i++) {
-     this.logger.info({ health: { current: currentHealth, new: health } }, `Restarting service "${serviceId}" worker ${i} to update config health heap...`)
+     this.logger.info(
+       { health: { current: currentHealth, new: health } },
+       `Restarting service "${serviceId}" worker ${i} to update config health heap...`
+     )
 
      const worker = await this.#getWorkerById(serviceId, i)
-     if (health.maxHeapTotal) { worker[kConfig].health.maxHeapTotal = health.maxHeapTotal }
-     if (health.maxYoungGeneration) { worker[kConfig].health.maxYoungGeneration = health.maxYoungGeneration }
+     if (health.maxHeapTotal) {
+       worker[kConfig].health.maxHeapTotal = health.maxHeapTotal
+     }
+     if (health.maxYoungGeneration) {
+       worker[kConfig].health.maxYoungGeneration = health.maxYoungGeneration
+     }
 
      await this.#replaceWorker(config, serviceConfig, currentWorkers, serviceId, i, worker)
      report.updated.push(i)
-     this.logger.info({ health: { current: currentHealth, new: health } }, `Restarted service "${serviceId}" worker ${i}`)
+     this.logger.info(
+       { health: { current: currentHealth, new: health } },
+       `Restarted service "${serviceId}" worker ${i}`
+     )
    }
    report.success = true
  } catch (err) {
@@ -2279,7 +2373,10 @@ class Runtime extends EventEmitter {
      this.logger.error({ err }, 'Cannot update service health heap, no worker updated')
      await this.#updateServiceConfigHealth(serviceId, currentHealth)
    } else {
-     this.logger.error({ err }, `Cannot update service health heap, updated workers: ${report.updated.length} out of ${currentWorkers}`)
+     this.logger.error(
+       { err },
+       `Cannot update service health heap, updated workers: ${report.updated.length} out of ${currentWorkers}`
+     )
    }
    report.success = false
  }
@@ -2306,7 +2403,10 @@ class Runtime extends EventEmitter {
      this.logger.error({ err }, 'Cannot start service workers, no worker started')
      await this.#updateServiceConfigWorkers(serviceId, currentWorkers)
    } else {
-     this.logger.error({ err }, `Cannot start service workers, started workers: ${report.started.length} out of ${workers}`)
+     this.logger.error(
+       { err },
+       `Cannot start service workers, started workers: ${report.started.length} out of ${workers}`
+     )
      await this.#updateServiceConfigWorkers(serviceId, currentWorkers + report.started.length)
    }
    report.success = false
@@ -2327,7 +2427,10 @@ class Runtime extends EventEmitter {
    if (report.stopped.length < 1) {
      this.logger.error({ err }, 'Cannot stop service workers, no worker stopped')
    } else {
-     this.logger.error({ err }, `Cannot stop service workers, stopped workers: ${report.stopped.length} out of ${workers}`)
+     this.logger.error(
+       { err },
+       `Cannot stop service workers, stopped workers: ${report.stopped.length} out of ${workers}`
+     )
      await this.#updateServiceConfigWorkers(serviceId, currentWorkers - report.stopped)
    }
    report.success = false
package/lib/start.js CHANGED
@@ -150,7 +150,7 @@ async function startCommand (args, throwAllErrors = false, returnRuntime = false
    const runtime = startResult.runtime
    const res = startResult.address
 
-   closeWithGrace(async event => {
+   closeWithGrace({ delay: config.configManager.current.gracefulShutdown?.runtime ?? 10000 }, async event => {
      if (event.err instanceof Error) {
        console.error(event.err)
      }
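
For context: `close-with-grace` accepts a `delay` option that bounds how long the shutdown callback may run before the process is terminated anyway, so the change above ties that bound to `gracefulShutdown.runtime` (falling back to 10000 ms). A standalone sketch of the pattern, not the package's code:

```js
// Standalone sketch of the close-with-grace pattern used above.
const closeWithGrace = require('close-with-grace')

closeWithGrace({ delay: 10000 }, async ({ signal, err }) => {
  if (err instanceof Error) {
    console.error(err)
  }
  // Stop the runtime here; if this takes longer than `delay`, the process exits regardless.
  console.log(`shutting down after ${signal ?? 'error'}`)
})
```
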
package/lib/worker/app.js CHANGED
@@ -1,5 +1,6 @@
  'use strict'
 
+ const { getActiveResourcesInfo } = require('node:process')
  const { existsSync } = require('node:fs')
  const { EventEmitter } = require('node:events')
  const { resolve } = require('node:path')
@@ -219,6 +220,7 @@ class PlatformaticApp extends EventEmitter {
        globalThis.platformatic.onHttpStatsSize(url, size || 0)
      }
    }
+   globalThis.platformatic.onActiveResourcesEventLoop(getActiveResourcesInfo().length)
    return this.stackable.getMetrics({ format })
  }
 
@@ -178,8 +178,18 @@ async function main () {
    !!config.watch
  )
 
- process.on('uncaughtException', handleUnhandled.bind(null, app, 'uncaught exception'))
- process.on('unhandledRejection', handleUnhandled.bind(null, app, 'unhandled rejection'))
+ if (config.exitOnUnhandledErrors) {
+   process.on('uncaughtException', handleUnhandled.bind(null, app, 'uncaught exception'))
+   process.on('unhandledRejection', handleUnhandled.bind(null, app, 'unhandled rejection'))
+
+   process.on('newListener', event => {
+     if (event === 'uncaughtException' || event === 'unhandledRejection') {
+       globalThis.platformatic.logger.warn(
+         `A listener has been added for the "process.${event}" event. This listener will be never triggered as Watt default behavior will kill the process before.\n To disable this behavior, set "exitOnUnhandledErrors" to false in the runtime config.`
+       )
+     }
+   })
+ }
 
  await app.init()
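
The new `exitOnUnhandledErrors` flag (default `true`, per schema.json below) keeps the previous behavior of terminating the worker on uncaught exceptions and unhandled rejections; setting it to `false` lets an application register its own process-level handlers without the warning above. A hedged sketch of the config fragment, shown as a JS object rather than the JSON file:

```js
// Example runtime config fragment; only the flag itself comes from this diff.
const runtimeConfig = {
  exitOnUnhandledErrors: false // opt out of the default exit-on-unhandled behavior
}

// With the flag disabled, a service can install its own handler without triggering
// the "listener will be never triggered" warning logged above.
process.on('unhandledRejection', err => {
  console.error('handled by the application', err)
})
```
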
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@platformatic/runtime",
-   "version": "2.74.3",
+   "version": "2.75.0-alpha.1",
    "description": "",
    "main": "index.js",
    "bin": {
@@ -33,16 +33,16 @@
    "neostandard": "^0.12.0",
    "pino-abstract-transport": "^2.0.0",
    "split2": "^4.2.0",
-   "tsd": "^0.32.0",
+   "tsd": "^0.33.0",
    "typescript": "^5.5.4",
    "undici-oidc-interceptor": "^0.5.0",
    "why-is-node-running": "^2.2.2",
-   "@platformatic/composer": "2.74.3",
-   "@platformatic/db": "2.74.3",
-   "@platformatic/node": "2.74.3",
-   "@platformatic/service": "2.74.3",
-   "@platformatic/sql-graphql": "2.74.3",
-   "@platformatic/sql-mapper": "2.74.3"
+   "@platformatic/composer": "2.75.0-alpha.1",
+   "@platformatic/db": "2.75.0-alpha.1",
+   "@platformatic/node": "2.75.0-alpha.1",
+   "@platformatic/service": "2.75.0-alpha.1",
+   "@platformatic/sql-graphql": "2.75.0-alpha.1",
+   "@platformatic/sql-mapper": "2.75.0-alpha.1"
  },
  "dependencies": {
    "@fastify/accepts": "^5.0.0",
@@ -76,14 +76,14 @@
    "undici": "^7.0.0",
    "undici-thread-interceptor": "^0.14.0",
    "ws": "^8.16.0",
-   "@platformatic/basic": "2.74.3",
-   "@platformatic/config": "2.74.3",
-   "@platformatic/generators": "2.74.3",
-   "@platformatic/metrics": "2.74.3",
-   "@platformatic/itc": "2.74.3",
-   "@platformatic/telemetry": "2.74.3",
-   "@platformatic/ts-compiler": "2.74.3",
-   "@platformatic/utils": "2.74.3"
+   "@platformatic/basic": "2.75.0-alpha.1",
+   "@platformatic/config": "2.75.0-alpha.1",
+   "@platformatic/generators": "2.75.0-alpha.1",
+   "@platformatic/metrics": "2.75.0-alpha.1",
+   "@platformatic/itc": "2.75.0-alpha.1",
+   "@platformatic/telemetry": "2.75.0-alpha.1",
+   "@platformatic/ts-compiler": "2.75.0-alpha.1",
+   "@platformatic/utils": "2.75.0-alpha.1"
  },
  "scripts": {
    "test": "pnpm run lint && borp --concurrency=1 --timeout=1200000 && tsd",
package/schema.json CHANGED
@@ -1,5 +1,5 @@
  {
-   "$id": "https://schemas.platformatic.dev/@platformatic/runtime/2.74.3.json",
+   "$id": "https://schemas.platformatic.dev/@platformatic/runtime/2.75.0-alpha.1.json",
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
@@ -960,6 +960,10 @@
      }
    ]
  },
+ "exitOnUnhandledErrors": {
+   "default": true,
+   "type": "boolean"
+ },
  "gracefulShutdown": {
    "type": "object",
    "properties": {