@platformatic/runtime 2.37.1 → 2.38.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/config.d.ts CHANGED
@@ -5,7 +5,7 @@
5
5
  * and run json-schema-to-typescript to regenerate this file.
6
6
  */
7
7
 
8
- export type HttpsSchemasPlatformaticDevPlatformaticRuntime2371Json = {
8
+ export type HttpsSchemasPlatformaticDevPlatformaticRuntime2380Json = {
9
9
  [k: string]: unknown;
10
10
  } & {
11
11
  $schema?: string;
package/lib/logger.js CHANGED
@@ -1,5 +1,6 @@
1
1
  'use strict'
2
2
 
3
+ const { once } = require('node:events')
3
4
  const { join } = require('node:path')
4
5
  const { isatty } = require('node:tty')
5
6
 
@@ -17,7 +18,7 @@ const customPrettifiers = {
17
18
  }
18
19
  }
19
20
 
20
- function createLogger (config, runtimeLogsDir) {
21
+ async function createLogger (config, runtimeLogsDir) {
21
22
  const loggerConfig = { ...config.logger }
22
23
 
23
24
  // PLT_RUNTIME_LOGGER_STDOUT is used in test to reduce verbosity
@@ -62,6 +63,9 @@ function createLogger (config, runtimeLogsDir) {
62
63
  })
63
64
 
64
65
  multiStream.add({ level: 'trace', stream: pinoRoll })
66
+
67
+ // Make sure there is a file before continuing otherwise the management API log endpoint might bail out
68
+ await once(pinoRoll, 'ready')
65
69
  }
66
70
 
67
71
  return [pino({ level: 'trace' }, multiStream), multiStream]
package/lib/runtime.js CHANGED
@@ -1,16 +1,18 @@
1
1
  'use strict'
2
2
 
3
+ const { ITC } = require('@platformatic/itc')
4
+ const { ensureLoggableError, executeWithTimeout, deepmerge } = require('@platformatic/utils')
3
5
  const { once, EventEmitter } = require('node:events')
4
6
  const { createReadStream, watch, existsSync } = require('node:fs')
5
7
  const { readdir, readFile, stat, access } = require('node:fs/promises')
6
8
  const { STATUS_CODES } = require('node:http')
9
+ const { hostname } = require('node:os')
7
10
  const { join } = require('node:path')
8
- const { setTimeout: sleep } = require('node:timers/promises')
11
+ const { setTimeout: sleep, setImmediate: sleepUntilNextTick } = require('node:timers/promises')
9
12
  const { Worker } = require('node:worker_threads')
10
- const { ITC } = require('@platformatic/itc')
11
- const { Agent, interceptors: undiciInterceptors, request } = require('undici')
12
- const { ensureLoggableError, executeWithTimeout, deepmerge } = require('@platformatic/utils')
13
+ const split2 = require('split2')
13
14
  const ts = require('tail-file-stream')
15
+ const { Agent, interceptors: undiciInterceptors, request } = require('undici')
14
16
  const { createThreadInterceptor } = require('undici-thread-interceptor')
15
17
 
16
18
  const { checkDependencies, topologicalSort } = require('./dependencies')
@@ -29,9 +31,8 @@ const {
29
31
  kITC,
30
32
  kHealthCheckTimer,
31
33
  kConfig,
32
- kLoggerDestination,
33
- kLoggingPort,
34
- kWorkerStatus
34
+ kWorkerStatus,
35
+ kStderrMarker
35
36
  } = require('./worker/symbols')
36
37
 
37
38
  const fastify = require('fastify')
@@ -110,7 +111,7 @@ class Runtime extends EventEmitter {
110
111
  }
111
112
 
112
113
  // Create the logger
113
- const [logger, destination] = createLogger(config, this.#runtimeLogsDir)
114
+ const [logger, destination] = await createLogger(config, this.#runtimeLogsDir)
114
115
  this.logger = logger
115
116
  this.#loggerDestination = destination
116
117
 
@@ -461,6 +462,7 @@ class Runtime extends EventEmitter {
461
462
  runtimePID = runtimePID ?? process.pid
462
463
 
463
464
  const runtimeLogFiles = await this.#getRuntimeLogFiles(runtimePID)
465
+
464
466
  if (runtimeLogFiles.length === 0) {
465
467
  writableStream.end()
466
468
  return
@@ -958,9 +960,6 @@ class Runtime extends EventEmitter {
958
960
  const { restartOnError } = config
959
961
  const workerId = `${serviceId}:${index}`
960
962
 
961
- const { port1: loggerDestination, port2: loggingPort } = new MessageChannel()
962
- loggerDestination.on('message', this.#forwardThreadLog.bind(this))
963
-
964
963
  // Handle inspector
965
964
  let inspectorOptions
966
965
 
@@ -1019,27 +1018,20 @@ class Runtime extends EventEmitter {
1019
1018
  },
1020
1019
  inspectorOptions,
1021
1020
  dirname: this.#configManager.dirname,
1022
- runtimeLogsDir: this.#runtimeLogsDir,
1023
- loggingPort
1021
+ runtimeLogsDir: this.#runtimeLogsDir
1024
1022
  },
1025
1023
  argv: serviceConfig.arguments,
1026
1024
  execArgv,
1027
1025
  env: workerEnv,
1028
- transferList: [loggingPort],
1029
1026
  resourceLimits: {
1030
1027
  maxOldGenerationSizeMb: health.maxHeapTotal
1031
1028
  },
1032
- /*
1033
- Important: always set stdout and stderr to true, so that worker's output is not automatically
1034
- piped to the parent thread. We actually never output the thread output since we replace it
1035
- with PinoWritable, and disabling the piping avoids us to redeclare some internal Node.js methods.
1036
-
1037
- The author of this (Paolo and Matteo) are not proud of the solution. Forgive us.
1038
- */
1039
1029
  stdout: true,
1040
1030
  stderr: true
1041
1031
  })
1042
1032
 
1033
+ this.#handleWorkerStandardStreams(worker, serviceId, workersCount > 1 ? index : undefined)
1034
+
1043
1035
  // Make sure the listener can handle a lot of API requests at once before raising a warning
1044
1036
  worker.setMaxListeners(1e3)
1045
1037
 
@@ -1081,8 +1073,6 @@ class Runtime extends EventEmitter {
1081
1073
  worker[kId] = workersCount > 1 ? workerId : serviceId
1082
1074
  worker[kServiceId] = serviceId
1083
1075
  worker[kWorkerId] = workersCount > 1 ? index : undefined
1084
- worker[kLoggerDestination] = loggerDestination
1085
- worker[kLoggingPort] = loggingPort
1086
1076
 
1087
1077
  if (inspectorOptions) {
1088
1078
  worker[kInspectorOptions] = {
@@ -1226,6 +1216,8 @@ class Runtime extends EventEmitter {
1226
1216
  workerUrl = await sendViaITC(worker, 'start')
1227
1217
  }
1228
1218
 
1219
+ await this.#avoidOutOfOrderThreadLogs()
1220
+
1229
1221
  if (workerUrl) {
1230
1222
  this.#url = workerUrl
1231
1223
  }
@@ -1319,6 +1311,8 @@ class Runtime extends EventEmitter {
1319
1311
  await worker.terminate()
1320
1312
  }
1321
1313
 
1314
+ await this.#avoidOutOfOrderThreadLogs()
1315
+
1322
1316
  worker[kWorkerStatus] = 'stopped'
1323
1317
  }
1324
1318
 
@@ -1326,8 +1320,7 @@ class Runtime extends EventEmitter {
1326
1320
  this.#workers.delete(workerId)
1327
1321
 
1328
1322
  worker[kITC].close()
1329
- worker[kLoggerDestination].close()
1330
- worker[kLoggingPort].close()
1323
+
1331
1324
  clearTimeout(worker[kHealthCheckTimer])
1332
1325
  }
1333
1326
 
@@ -1466,46 +1459,75 @@ class Runtime extends EventEmitter {
1466
1459
  return runtimesLogFiles.sort((runtime1, runtime2) => runtime1.lastModified - runtime2.lastModified)
1467
1460
  }
1468
1461
 
1469
- #forwardThreadLog (message) {
1470
- if (!this.#loggerDestination) {
1471
- return
1462
+ #handleWorkerStandardStreams (worker, serviceId, workerId) {
1463
+ const pinoOptions = { level: 'trace', name: serviceId }
1464
+
1465
+ if (typeof workerId !== 'undefined') {
1466
+ pinoOptions.base = { pid: process.pid, hostname: hostname(), worker: workerId }
1472
1467
  }
1473
1468
 
1474
- for (const log of message.logs) {
1475
- // In order to being able to forward messages serialized in the
1476
- // worker threads by directly writing to the destinations using multistream
1477
- // we unfortunately need to reparse the message to set some internal flags
1478
- // of the destination which are never set since we bypass pino.
1479
- let message = JSON.parse(log)
1480
- let { level, time, msg, raw } = message
1469
+ const logger = this.logger.child(pinoOptions)
1481
1470
 
1482
- try {
1483
- const parsed = JSON.parse(raw.trimEnd())
1471
+ const selectors = {
1472
+ stdout: { level: 'info', caller: 'STDOUT' },
1473
+ stderr: { level: 'error', caller: 'STDERR' }
1474
+ }
1484
1475
 
1485
- if (typeof parsed.level === 'number' && typeof parsed.time === 'number') {
1486
- level = parsed.level
1487
- time = parsed.time
1488
- message = parsed
1489
- } else {
1490
- message.raw = undefined
1491
- message.payload = parsed
1492
- }
1493
- } catch {
1494
- if (typeof message.raw === 'string') {
1495
- message.msg = message.raw.replace(/\n$/, '')
1496
- }
1476
+ worker.stdout.pipe(split2()).on('data', raw => {
1477
+ let selector = selectors.stdout
1497
1478
 
1498
- message.raw = undefined
1479
+ if (raw.includes(kStderrMarker)) {
1480
+ selector = selectors.stderr
1481
+ raw = raw.replaceAll(kStderrMarker, '')
1499
1482
  }
1500
1483
 
1501
- this.#loggerDestination.lastLevel = level
1502
- this.#loggerDestination.lastTime = time
1503
- this.#loggerDestination.lastMsg = msg
1484
+ this.#forwardThreadLog(logger, selector, raw)
1485
+ })
1486
+
1487
 + // Whatever is outputted here, it comes from a direct process.stderr.write in the thread.
1488
 + // There's nothing we can do about it with regard to out-of-order logs due to a Node bug.
1489
+ worker.stderr.pipe(split2()).on('data', raw => {
1490
+ this.#forwardThreadLog(logger, selectors.stderr, raw)
1491
+ })
1492
+ }
1493
+
1494
+ #forwardThreadLog (logger, { level, caller }, raw) {
1495
+ if (!this.#loggerDestination) {
1496
+ return
1497
+ }
1498
+
1499
+ // Attempt to check if the message is already in pino format. If so, we directly write it to the destination
1500
+ let message
1501
+ try {
1502
+ message = JSON.parse(raw)
1503
+ } catch (e) {
1504
+ // No-op, we assume the message is raw
1505
+ }
1506
+
1507
+ // Not a pino message, output it
1508
+ if (!message) {
1509
+ // Log the message
1510
+ logger[level]({ caller }, raw.replace(/\n$/, ''))
1511
+ } else if (typeof message?.level === 'number' && typeof message?.time === 'number') {
1512
+ this.#loggerDestination.lastLevel = message.level
1513
+ this.#loggerDestination.lastTime = message.time
1514
+ this.#loggerDestination.lastMsg = message.msg
1504
1515
  this.#loggerDestination.lastObj = message
1505
- this.#loggerDestination.lastLogger = this.logger
1516
+ this.#loggerDestination.lastLogger = logger
1517
+
1518
+ // Remember to add '\n' back as split2 removed it
1519
+ this.#loggerDestination.write(raw + '\n')
1520
+ } else {
1521
+ logger[level]({ payload: message })
1522
+ }
1523
+ }
1506
1524
 
1507
- // Never drop the `\n` as the worker thread trimmed the message
1508
- this.#loggerDestination.write(JSON.stringify(message) + '\n')
1525
+ // Due to Worker Threads implementation via MessagePort, it might happen that if two messages are printed almost
1526
+ // at the same time from a worker and the main thread, the latter always arrives first.
1527
 + // Let's wait a few more ticks to ensure the right order.
1528
+ async #avoidOutOfOrderThreadLogs () {
1529
+ for (let i = 0; i < 2; i++) {
1530
+ await sleepUntilNextTick()
1509
1531
  }
1510
1532
  }
1511
1533
  }
package/lib/worker/itc.js CHANGED
@@ -65,6 +65,9 @@ function setupITC (app, service, dispatcher) {
65
65
  if (status === 'starting') {
66
66
  await once(app, 'start')
67
67
  } else {
68
+ // This gives a chance to a stackable to perform custom logic
69
+ globalThis.platformatic.events.emit('start')
70
+
68
71
  await app.start()
69
72
  }
70
73
 
@@ -84,6 +87,9 @@ function setupITC (app, service, dispatcher) {
84
87
  }
85
88
 
86
89
  if (status !== 'stopped') {
90
+ // This gives a chance to a stackable to perform custom logic
91
+ globalThis.platformatic.events.emit('stop')
92
+
87
93
  await app.stop()
88
94
  }
89
95
 
@@ -1,7 +1,6 @@
1
1
  'use strict'
2
2
 
3
3
  const { EventEmitter } = require('node:events')
4
- const { createRequire } = require('@platformatic/utils')
5
4
  const { hostname } = require('node:os')
6
5
  const { join, resolve } = require('node:path')
7
6
  const { parentPort, workerData, threadId } = require('node:worker_threads')
@@ -10,6 +9,15 @@ const inspector = require('node:inspector')
10
9
  const diagnosticChannel = require('node:diagnostics_channel')
11
10
  const { ServerResponse } = require('node:http')
12
11
 
12
+ const { createTelemetryThreadInterceptorHooks } = require('@platformatic/telemetry')
13
+ const {
14
+ createRequire,
15
+ disablePinoDirectWrite,
16
+ ensureFlushedWorkerStdio,
17
+ executeWithTimeout,
18
+ ensureLoggableError,
19
+ getPrivateSymbol
20
+ } = require('@platformatic/utils')
13
21
  const dotenv = require('dotenv')
14
22
  const pino = require('pino')
15
23
  const { fetch, setGlobalDispatcher, getGlobalDispatcher, Agent } = require('undici')
@@ -20,31 +28,9 @@ const { RemoteCacheStore, httpCacheInterceptor } = require('./http-cache')
20
28
  const { PlatformaticApp } = require('./app')
21
29
  const { setupITC } = require('./itc')
22
30
  const { loadInterceptors } = require('./interceptors')
23
- const { createTelemetryThreadInterceptorHooks } = require('@platformatic/telemetry')
31
+ const { kId, kITC, kStderrMarker } = require('./symbols')
24
32
 
25
- const {
26
- MessagePortWritable,
27
- createPinoWritable,
28
- executeWithTimeout,
29
- ensureLoggableError
30
- } = require('@platformatic/utils')
31
- const { kId, kITC } = require('./symbols')
32
-
33
- process.on('uncaughtException', handleUnhandled.bind(null, 'uncaught exception'))
34
- process.on('unhandledRejection', handleUnhandled.bind(null, 'unhandled rejection'))
35
-
36
- globalThis.fetch = fetch
37
- globalThis[kId] = threadId
38
-
39
- let app
40
-
41
- const config = workerData.config
42
- globalThis.platformatic = Object.assign(globalThis.platformatic ?? {}, {
43
- logger: createLogger(),
44
- events: new EventEmitter()
45
- })
46
-
47
- function handleUnhandled (type, err) {
33
+ function handleUnhandled (app, type, err) {
48
34
  const label =
49
35
  workerData.worker.count > 1
50
36
  ? `worker ${workerData.worker.index} of the service "${workerData.serviceConfig.id}"`
@@ -59,20 +45,37 @@ function handleUnhandled (type, err) {
59
45
  })
60
46
  }
61
47
 
48
+ function patchLogging () {
49
+ disablePinoDirectWrite()
50
+ ensureFlushedWorkerStdio()
51
+
52
+ const kFormatForStderr = getPrivateSymbol(console, 'kFormatForStderr')
53
+
54
+ // To avoid out of order printing on the main thread, instruct console to only print to the stdout.
55
+ console._stderr = console._stdout
56
+ console._stderrErrorHandler = console._stdoutErrorHandler
57
+
58
+ // To recognize stderr in the main thread, each line is prepended with a special private Unicode character.
59
+ const originalFormatter = console[kFormatForStderr]
60
+ console[kFormatForStderr] = function (args) {
61
+ let string = kStderrMarker + originalFormatter(args).replaceAll('\n', '\n' + kStderrMarker)
62
+
63
+ if (string.endsWith(kStderrMarker)) {
64
+ string = string.slice(0, -1)
65
+ }
66
+
67
+ return string
68
+ }
69
+ }
70
+
62
71
  function createLogger () {
63
- const destination = new MessagePortWritable({ port: workerData.loggingPort })
64
72
  const pinoOptions = { level: 'trace', name: workerData.serviceConfig.id }
65
73
 
66
- if (typeof workerData.worker?.index !== 'undefined') {
74
+ if (workerData.worker?.count > 1) {
67
75
  pinoOptions.base = { pid: process.pid, hostname: hostname(), worker: workerData.worker.index }
68
76
  }
69
77
 
70
- const loggerInstance = pino(pinoOptions, destination)
71
-
72
- Reflect.defineProperty(process, 'stdout', { value: createPinoWritable(loggerInstance, 'info', false, 'STDOUT') })
73
- Reflect.defineProperty(process, 'stderr', { value: createPinoWritable(loggerInstance, 'error', false, 'STDERR') })
74
-
75
- return loggerInstance
78
+ return pino(pinoOptions)
76
79
  }
77
80
 
78
81
  async function performPreloading (...sources) {
@@ -88,6 +91,15 @@ async function performPreloading (...sources) {
88
91
  }
89
92
 
90
93
  async function main () {
94
+ globalThis.fetch = fetch
95
+ globalThis[kId] = threadId
96
+ globalThis.platformatic = Object.assign(globalThis.platformatic ?? {}, {
97
+ logger: createLogger(),
98
+ events: new EventEmitter()
99
+ })
100
+
101
+ const config = workerData.config
102
+
91
103
  await performPreloading(config, workerData.serviceConfig)
92
104
 
93
105
  const service = workerData.serviceConfig
@@ -101,6 +113,7 @@ async function main () {
101
113
  path: envfile
102
114
  })
103
115
  }
116
+
104
117
  if (config.env) {
105
118
  Object.assign(process.env, config.env)
106
119
  }
@@ -219,7 +232,7 @@ async function main () {
219
232
  }
220
233
 
221
234
  // Create the application
222
- app = new PlatformaticApp(
235
+ const app = new PlatformaticApp(
223
236
  service,
224
237
  workerData.worker.count > 1 ? workerData.worker.index : undefined,
225
238
  service.telemetry,
@@ -230,6 +243,9 @@ async function main () {
230
243
  !!config.watch
231
244
  )
232
245
 
246
+ process.on('uncaughtException', handleUnhandled.bind(null, app, 'uncaught exception'))
247
+ process.on('unhandledRejection', handleUnhandled.bind(null, app, 'unhandled rejection'))
248
+
233
249
  await app.init()
234
250
 
235
251
  if (service.entrypoint && config.basePath) {
@@ -295,5 +311,7 @@ function stripBasePath (basePath) {
295
311
  }
296
312
  }
297
313
 
314
+ patchLogging()
315
+
298
316
  // No need to catch this because there is the unhandledRejection handler on top.
299
317
  main()
@@ -6,10 +6,11 @@ const kServiceId = Symbol.for('plt.runtime.service.id')
6
6
  const kWorkerId = Symbol.for('plt.runtime.worker.id')
7
7
  const kITC = Symbol.for('plt.runtime.itc')
8
8
  const kHealthCheckTimer = Symbol.for('plt.runtime.worker.healthCheckTimer')
9
- const kLoggerDestination = Symbol.for('plt.runtime.loggerDestination')
10
- const kLoggingPort = Symbol.for('plt.runtime.logginPort')
11
9
  const kWorkerStatus = Symbol('plt.runtime.worker.status')
12
10
 
11
+ // This string marker should be safe to use since it belongs to Unicode private area
12
+ const kStderrMarker = '\ue002'
13
+
13
14
  module.exports = {
14
15
  kConfig,
15
16
  kId,
@@ -17,7 +18,6 @@ module.exports = {
17
18
  kWorkerId,
18
19
  kITC,
19
20
  kHealthCheckTimer,
20
- kLoggerDestination,
21
- kLoggingPort,
22
- kWorkerStatus
21
+ kWorkerStatus,
22
+ kStderrMarker
23
23
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@platformatic/runtime",
3
- "version": "2.37.1",
3
+ "version": "2.38.0",
4
4
  "description": "",
5
5
  "main": "index.js",
6
6
  "bin": {
@@ -36,12 +36,12 @@
36
36
  "typescript": "^5.5.4",
37
37
  "undici-oidc-interceptor": "^0.5.0",
38
38
  "why-is-node-running": "^2.2.2",
39
- "@platformatic/composer": "2.37.1",
40
- "@platformatic/db": "2.37.1",
41
- "@platformatic/service": "2.37.1",
42
- "@platformatic/node": "2.37.1",
43
- "@platformatic/sql-graphql": "2.37.1",
44
- "@platformatic/sql-mapper": "2.37.1"
39
+ "@platformatic/composer": "2.38.0",
40
+ "@platformatic/db": "2.38.0",
41
+ "@platformatic/node": "2.38.0",
42
+ "@platformatic/service": "2.38.0",
43
+ "@platformatic/sql-mapper": "2.38.0",
44
+ "@platformatic/sql-graphql": "2.38.0"
45
45
  },
46
46
  "dependencies": {
47
47
  "@fastify/accepts": "^5.0.0",
@@ -75,13 +75,13 @@
75
75
  "undici": "^7.0.0",
76
76
  "undici-thread-interceptor": "^0.11.0",
77
77
  "ws": "^8.16.0",
78
- "@platformatic/basic": "2.37.1",
79
- "@platformatic/config": "2.37.1",
80
- "@platformatic/generators": "2.37.1",
81
- "@platformatic/itc": "2.37.1",
82
- "@platformatic/telemetry": "2.37.1",
83
- "@platformatic/utils": "2.37.1",
84
- "@platformatic/ts-compiler": "2.37.1"
78
+ "@platformatic/basic": "2.38.0",
79
+ "@platformatic/config": "2.38.0",
80
+ "@platformatic/generators": "2.38.0",
81
+ "@platformatic/telemetry": "2.38.0",
82
+ "@platformatic/itc": "2.38.0",
83
+ "@platformatic/ts-compiler": "2.38.0",
84
+ "@platformatic/utils": "2.38.0"
85
85
  },
86
86
  "scripts": {
87
87
  "test": "npm run lint && borp --concurrency=1 --timeout=300000 && tsd",
package/schema.json CHANGED
@@ -1,5 +1,5 @@
1
1
  {
2
- "$id": "https://schemas.platformatic.dev/@platformatic/runtime/2.37.1.json",
2
+ "$id": "https://schemas.platformatic.dev/@platformatic/runtime/2.38.0.json",
3
3
  "$schema": "http://json-schema.org/draft-07/schema#",
4
4
  "type": "object",
5
5
  "properties": {