@platformatic/runtime 3.0.0-alpha.4 → 3.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config.d.ts +10 -7
- package/eslint.config.js +2 -4
- package/index.d.ts +11 -11
- package/index.js +35 -46
- package/lib/config.js +80 -102
- package/lib/dependencies.js +27 -29
- package/lib/errors.js +65 -99
- package/lib/generator.js +160 -164
- package/lib/logger.js +6 -8
- package/lib/management-api.js +36 -39
- package/lib/prom-server.js +10 -14
- package/lib/runtime.js +752 -715
- package/lib/scheduler.js +13 -15
- package/lib/schema.js +11 -8
- package/lib/shared-http-cache.js +5 -9
- package/lib/upgrade.js +5 -9
- package/lib/utils.js +6 -14
- package/lib/version.js +7 -0
- package/lib/versions/v1.36.0.js +2 -4
- package/lib/versions/v1.5.0.js +2 -4
- package/lib/versions/v2.0.0.js +3 -5
- package/lib/versions/v3.0.0.js +16 -0
- package/lib/worker/{app.js → controller.js} +46 -56
- package/lib/worker/http-cache.js +11 -14
- package/lib/worker/interceptors.js +14 -18
- package/lib/worker/itc.js +74 -74
- package/lib/worker/main.js +45 -49
- package/lib/worker/messaging.js +23 -27
- package/lib/worker/round-robin-map.js +23 -19
- package/lib/worker/shared-context.js +2 -6
- package/lib/worker/symbols.js +12 -29
- package/package.json +21 -21
- package/schema.json +254 -20
package/lib/worker/itc.js
CHANGED
@@ -1,16 +1,22 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
+import { ensureLoggableError } from '@platformatic/foundation'
+import { ITC } from '@platformatic/itc'
+import { Unpromise } from '@watchable/unpromise'
+import { once } from 'node:events'
+import { parentPort, workerData } from 'node:worker_threads'
+import {
+  ApplicationExitedError,
+  FailedToPerformCustomHealthCheckError,
+  FailedToPerformCustomReadinessCheckError,
+  FailedToRetrieveGraphQLSchemaError,
+  FailedToRetrieveHealthError,
+  FailedToRetrieveMetaError,
+  FailedToRetrieveMetricsError,
+  FailedToRetrieveOpenAPISchemaError,
+  WorkerExitedError
+} from '../errors.js'
+import { updateUndiciInterceptors } from './interceptors.js'
+import { MessagingITC } from './messaging.js'
+import { kApplicationId, kITC, kId, kWorkerId } from './symbols.js'
 
 async function safeHandleInITC (worker, fn) {
   try {
@@ -27,9 +33,9 @@ async function safeHandleInITC (worker, fn) {
 
     if (typeof exitCode === 'number') {
       if (typeof worker[kWorkerId] !== 'undefined') {
-        throw new
+        throw new WorkerExitedError(worker[kWorkerId], worker[kApplicationId], exitCode)
       } else {
-        throw new
+        throw new ApplicationExitedError(worker[kId], exitCode)
       }
     } else {
       ac.abort()
@@ -49,22 +55,22 @@ async function safeHandleInITC (worker, fn) {
   }
 }
 
-async function sendViaITC (worker, name, message, transferList) {
-  return safeHandleInITC(worker, () => worker[kITC].send(name, message, { transferList }))
-}
-
-async function waitEventFromITC (worker, event) {
-  return safeHandleInITC(worker, () => once(worker[kITC], event))
-}
-
 async function closeITC (dispatcher, itc, messaging) {
   await dispatcher.interceptor.close()
   itc.close()
   messaging.close()
 }
 
-function
-
+export async function sendViaITC (worker, name, message, transferList) {
+  return safeHandleInITC(worker, () => worker[kITC].send(name, message, { transferList }))
+}
+
+export async function waitEventFromITC (worker, event) {
+  return safeHandleInITC(worker, () => once(worker[kITC], event))
+}
+
+export function setupITC (instance, application, dispatcher, sharedContext) {
+  const messaging = new MessagingITC(instance.appConfig.id, workerData.config)
 
   Object.assign(globalThis.platformatic ?? {}, {
     messaging: {
@@ -74,55 +80,55 @@ function setupITC (app, service, dispatcher, sharedContext) {
   })
 
   const itc = new ITC({
-    name:
+    name: instance.appConfig.id + '-worker',
     port: parentPort,
     handlers: {
       async start () {
-        const status =
+        const status = instance.getStatus()
 
         if (status === 'starting') {
-          await once(
+          await once(instance, 'start')
         } else {
-          // This gives a chance to a
+          // This gives a chance to a capability to perform custom logic
           globalThis.platformatic.events.emit('start')
 
           try {
-            await
+            await instance.start()
          } catch (e) {
-            await
+            await instance.stop(true)
            await closeITC(dispatcher, itc, messaging)
 
            throw ensureLoggableError(e)
          }
        }
 
-        if (
-          await
+        if (application.entrypoint) {
+          await instance.listen()
        }
 
-        dispatcher.replaceServer(await
-        return
+        dispatcher.replaceServer(await instance.capability.getDispatchTarget())
+        return application.entrypoint ? instance.capability.getUrl() : null
      },
 
      async stop () {
-        const status =
+        const status = instance.getStatus()
 
        if (status === 'starting') {
-          await once(
+          await once(instance, 'start')
        }
 
        if (status.startsWith('start')) {
-          // This gives a chance to a
+          // This gives a chance to a capability to perform custom logic
          globalThis.platformatic.events.emit('stop')
 
-          await
+          await instance.stop()
        }
 
        await closeITC(dispatcher, itc, messaging)
      },
 
      async build () {
-        return
+        return instance.capability.build()
      },
 
      async removeFromMesh () {
@@ -130,7 +136,7 @@ function setupITC (app, service, dispatcher, sharedContext) {
      },
 
      inject (injectParams) {
-        return
+        return instance.capability.inject(injectParams)
      },
 
      async updateUndiciInterceptors (undiciConfig) {
@@ -138,87 +144,83 @@ function setupITC (app, service, dispatcher, sharedContext) {
      },
 
      async updateWorkersCount (data) {
-        const {
-
-        if (worker) {
-          worker.workers = workers
-        }
-        workerData.serviceConfig.workers = workers
+        const { workers } = data
+        workerData.applicationConfig.workers = workers
        workerData.worker.count = workers
      },
 
      getStatus () {
-        return
+        return instance.getStatus()
      },
 
-
-        return
+      getApplicationInfo () {
+        return instance.capability.getInfo()
      },
 
-      async
-        const current = await
+      async getApplicationConfig () {
+        const current = await instance.capability.getConfig()
        // Remove all undefined keys from the config
        return JSON.parse(JSON.stringify(current))
      },
 
-      async
+      async getApplicationEnv () {
        // Remove all undefined keys from the config
-        return JSON.parse(JSON.stringify({ ...process.env, ...(await
+        return JSON.parse(JSON.stringify({ ...process.env, ...(await instance.capability.getEnv()) }))
      },
 
-      async
+      async getApplicationOpenAPISchema () {
        try {
-          return await
+          return await instance.capability.getOpenapiSchema()
        } catch (err) {
-          throw new
+          throw new FailedToRetrieveOpenAPISchemaError(application.id, err.message)
        }
      },
 
-      async
+      async getApplicationGraphQLSchema () {
        try {
-          return await
+          return await instance.capability.getGraphqlSchema()
        } catch (err) {
-          throw new
+          throw new FailedToRetrieveGraphQLSchemaError(application.id, err.message)
        }
      },
 
-      async
+      async getApplicationMeta () {
        try {
-          return await
+          return await instance.capability.getMeta()
        } catch (err) {
-          throw new
+          throw new FailedToRetrieveMetaError(application.id, err.message)
        }
      },
 
      async getMetrics (format) {
        try {
-          return await
+          return await instance.getMetrics({ format })
        } catch (err) {
-          throw new
+          throw new FailedToRetrieveMetricsError(application.id, err.message)
        }
      },
 
      async getHealth () {
        try {
-          return await
+          return await instance.getHealth()
        } catch (err) {
-          throw new
+          throw new FailedToRetrieveHealthError(application.id, err.message)
        }
      },
 
      async getCustomHealthCheck () {
        try {
-          return await
+          return await instance.capability.getCustomHealthCheck()
        } catch (err) {
-          throw new
+          throw new FailedToPerformCustomHealthCheckError(application.id, err.message)
        }
      },
 
      async getCustomReadinessCheck () {
        try {
-          return await
+          return await instance.capability.getCustomReadinessCheck()
        } catch (err) {
-          throw new
+          throw new FailedToPerformCustomReadinessCheckError(application.id, err.message)
        }
      },
 
@@ -232,12 +234,10 @@ function setupITC (app, service, dispatcher, sharedContext) {
    }
  })
 
-
+  instance.on('changed', () => {
    itc.notify('changed')
  })
 
  itc.listen()
  return itc
 }
-
-module.exports = { sendViaITC, setupITC, waitEventFromITC }
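Note: itc.js now exposes sendViaITC, waitEventFromITC and setupITC as named ESM exports instead of a module.exports object. The sketch below is illustrative only and is not part of the diff: it assumes a caller on the runtime side holding a worker handle that carries the kITC-keyed ITC instance referenced by safeHandleInITC above, and uses the 'getHealth' handler and 'changed' notification that the module registers.

import { sendViaITC, waitEventFromITC } from './itc.js'

async function queryWorkerHealth (worker) {
  // Rejects with WorkerExitedError / ApplicationExitedError if the thread exits first.
  return sendViaITC(worker, 'getHealth')
}

async function waitForConfigChange (worker) {
  // Resolves when the worker emits the 'changed' notification set up in setupITC.
  return waitEventFromITC(worker, 'changed')
}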
package/lib/worker/main.js
CHANGED
@@ -1,37 +1,33 @@
-
-
-
-const { hostname } = require('node:os')
-const { resolve } = require('node:path')
-const { workerData, threadId } = require('node:worker_threads')
-const { pathToFileURL } = require('node:url')
-const inspector = require('node:inspector')
-const diagnosticChannel = require('node:diagnostics_channel')
-const { ServerResponse } = require('node:http')
-
-const {
+import {
+  buildPinoFormatters,
+  buildPinoTimestamp,
   disablePinoDirectWrite,
-  executeWithTimeout,
   ensureLoggableError,
-
-
-
-
-
-
-
-
-
-
-
-
-
+  executeWithTimeout,
+  getPrivateSymbol
+} from '@platformatic/foundation'
+import dotenv from 'dotenv'
+import { subscribe } from 'node:diagnostics_channel'
+import { EventEmitter } from 'node:events'
+import { ServerResponse } from 'node:http'
+import inspector from 'node:inspector'
+import { hostname } from 'node:os'
+import { resolve } from 'node:path'
+import { pathToFileURL } from 'node:url'
+import { threadId, workerData } from 'node:worker_threads'
+import pino from 'pino'
+import { fetch } from 'undici'
+import { Controller } from './controller.js'
+import { setDispatcher } from './interceptors.js'
+import { setupITC } from './itc.js'
+import { SharedContext } from './shared-context.js'
+import { kId, kITC, kStderrMarker } from './symbols.js'
 
 function handleUnhandled (app, type, err) {
   const label =
     workerData.worker.count > 1
-      ? `worker ${workerData.worker.index} of the
-      : `
+      ? `worker ${workerData.worker.index} of the application "${workerData.applicationConfig.id}"`
+      : `application "${workerData.applicationConfig.id}"`
 
   globalThis.platformatic.logger.error({ err: ensureLoggableError(err) }, `The ${label} threw an ${type}.`)
 
@@ -72,7 +68,7 @@ function createLogger () {
 
   const pinoOptions = {
     level: 'trace',
-    name: workerData.
+    name: workerData.applicationConfig.id,
     ...workerData.config.logger
   }
 
@@ -112,16 +108,16 @@ async function main () {
 
   const config = workerData.config
 
-  await performPreloading(config, workerData.
+  await performPreloading(config, workerData.applicationConfig)
 
-  const
+  const application = workerData.applicationConfig
 
-  // Load env file and mixin env vars from
+  // Load env file and mixin env vars from application config
   let envfile
-  if (
-    envfile = resolve(workerData.dirname,
+  if (application.envfile) {
+    envfile = resolve(workerData.dirname, application.envfile)
   } else {
-    envfile = resolve(workerData.
+    envfile = resolve(workerData.applicationConfig.path, '.env')
   }
 
   globalThis.platformatic.logger.debug({ envfile }, 'Loading envfile...')
@@ -133,17 +129,17 @@ async function main () {
   if (config.env) {
     Object.assign(process.env, config.env)
   }
-  if (
-    Object.assign(process.env,
+  if (application.env) {
+    Object.assign(process.env, application.env)
   }
 
   const { threadDispatcher } = await setDispatcher(config)
 
-  // If the
+  // If the application is an entrypoint and runtime server config is defined, use it.
   let serverConfig = null
-  if (config.server &&
+  if (config.server && application.entrypoint) {
     serverConfig = config.server
-  } else if (
+  } else if (application.useHttp) {
     serverConfig = {
       port: 0,
       hostname: '127.0.0.1',
@@ -166,14 +162,14 @@ async function main () {
     const res = await fetch(url)
     const [{ devtoolsFrontendUrl }] = await res.json()
 
-    console.log(`For ${
+    console.log(`For ${application.id} debugger open the following in chrome: "${devtoolsFrontendUrl}"`)
   }
 
   // Create the application
-  const app = new
-
+  const app = new Controller(
+    application,
     workerData.worker.count > 1 ? workerData.worker.index : undefined,
-
+    application.telemetry,
     config.logger,
     serverConfig,
     config.metrics,
@@ -186,9 +182,9 @@ async function main () {
 
   await app.init()
 
-  if (
-    const meta = await app.
-    if (!meta.
+  if (application.entrypoint && config.basePath) {
+    const meta = await app.capability.getMeta()
+    if (!meta.gateway.wantsAbsoluteUrls) {
       stripBasePath(config.basePath)
     }
   }
@@ -201,7 +197,7 @@ async function main () {
   }
 
   // Setup interaction with parent port
-  const itc = setupITC(app,
+  const itc = setupITC(app, application, threadDispatcher, sharedContext)
   globalThis[kITC] = itc
 
   // Get the dependencies
@@ -212,7 +208,7 @@ async function main () {
 function stripBasePath (basePath) {
   const kBasePath = Symbol('kBasePath')
 
-
+  subscribe('http.server.request.start', ({ request, response }) => {
     if (request.url.startsWith(basePath)) {
       request.url = request.url.slice(basePath.length)
 
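Note: the worker bootstrap now reads workerData.applicationConfig where it previously read serviceConfig. The snippet below is a rough, non-authoritative sketch of the workerData shape implied by the fields referenced in the hunks above; field values are placeholders and any field not visible in the diff is omitted.

// Illustrative only: minimal workerData shape implied by the references above.
const exampleWorkerData = {
  dirname: '/srv/runtime',                        // base directory used to resolve an explicit envfile
  config: {                                       // runtime-level configuration
    logger: { level: 'info' },
    server: { port: 3042, hostname: '0.0.0.0' },  // only applied when the application is the entrypoint
    metrics: false,
    env: {},
    basePath: undefined
  },
  applicationConfig: {                            // renamed from serviceConfig
    id: 'main',
    path: '/srv/runtime/applications/main',
    entrypoint: true,
    envfile: undefined,                           // falls back to <path>/.env
    env: {},
    telemetry: undefined,
    useHttp: false,
    workers: 1
  },
  worker: { count: 1, index: 0 }
}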
package/lib/worker/messaging.js
CHANGED
@@ -1,14 +1,12 @@
-
-
-
-
-
-const { RoundRobinMap } = require('./round-robin-map')
-const { kWorkersBroadcast, kITC } = require('./symbols')
+import { executeWithTimeout, kTimeout } from '@platformatic/foundation'
+import { ITC, generateResponse, sanitize } from '@platformatic/itc'
+import { MessagingError } from '../errors.js'
+import { RoundRobinMap } from './round-robin-map.js'
+import { kITC, kWorkersBroadcast } from './symbols.js'
 
 const kPendingResponses = Symbol('plt.messaging.pendingResponses')
 
-class MessagingITC extends ITC {
+export class MessagingITC extends ITC {
   #timeout
   #listener
   #closeResolvers
@@ -26,7 +24,7 @@ class MessagingITC extends ITC {
     this.#workers = new RoundRobinMap()
     this.#sources = new Set()
 
-    // Start listening on the BroadcastChannel for the list of
+    // Start listening on the BroadcastChannel for the list of applications
     this.#broadcastChannel = new BroadcastChannel(kWorkersBroadcast)
     this.#broadcastChannel.onmessage = this.#updateWorkers.bind(this)
 
@@ -47,25 +45,25 @@ class MessagingITC extends ITC {
     }
   }
 
-  async send (
-    // Get the next worker for the
-    const worker = this.#workers.next(
+  async send (application, name, message, options) {
+    // Get the next worker for the application
+    const worker = this.#workers.next(application)
 
     if (!worker) {
-      throw new
+      throw new MessagingError(application, 'No workers available')
     }
 
     if (!worker.channel) {
       // Use twice the value here as a fallback measure. The target handler in the main thread is forwarding
       // the request to the worker, using executeWithTimeout with the user set timeout value.
       const channel = await executeWithTimeout(
-        globalThis[kITC].send('getWorkerMessagingChannel', {
+        globalThis[kITC].send('getWorkerMessagingChannel', { application: worker.application, worker: worker.worker }),
         this.#timeout * 2
       )
 
       /* c8 ignore next 3 - Hard to test */
       if (channel === kTimeout) {
-        throw new
+        throw new MessagingError(application, 'Timeout while waiting for a communication channel.')
      }
 
      worker.channel = channel
@@ -77,13 +75,13 @@ class MessagingITC extends ITC {
 
    const context = { ...options }
    context.channel = worker.channel
-    context.
+    context.application = worker.application
    context.trackResponse = true
 
    const response = await executeWithTimeout(super.send(name, message, context), this.#timeout)
 
    if (response === kTimeout) {
-      throw new
+      throw new MessagingError(application, 'Timeout while waiting for a response.')
    }
 
    return response
@@ -104,8 +102,8 @@ class MessagingITC extends ITC {
    const { channel, transferList } = context
 
    if (context.trackResponse) {
-      const
-      channel[kPendingResponses].set(request.reqId, {
+      const application = context.application
+      channel[kPendingResponses].set(request.reqId, { application, request })
    }
 
    channel.postMessage(sanitize(request, transferList), { transferList })
@@ -150,18 +148,18 @@ class MessagingITC extends ITC {
    this.#workers = new RoundRobinMap()
 
    const instances = []
-    for (const [
+    for (const [application, workers] of event.data) {
      const count = workers.length
      const next = Math.floor(Math.random() * count)
 
-      instances.push({ id:
+      instances.push({ id: application, next, workers: count })
 
      for (let i = 0; i < count; i++) {
        const worker = workers[i]
        const channel = existingChannels.get(worker.thread)
 
-        // Note i is not the worker index as in runtime, but the index in the list of current alive workers for the
-        this.#workers.set(`${
+        // Note i is not the worker index as in runtime, but the index in the list of current alive workers for the application
+        this.#workers.set(`${application}:${i}`, { ...worker, channel })
      }
    }
 
@@ -169,11 +167,11 @@ class MessagingITC extends ITC {
  }
 
  #handlePendingResponse (channel) {
-    for (const {
+    for (const { application, request } of channel[kPendingResponses].values()) {
      this._emitResponse(
        generateResponse(
          request,
-          new
+          new MessagingError(application, 'The communication channel was closed before receiving a response.'),
          null
        )
      )
@@ -182,5 +180,3 @@ class MessagingITC extends ITC {
      channel[kPendingResponses].clear()
    }
  }
-
-module.exports = { MessagingITC }
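Note: MessagingITC.send() now takes the target application id as its first argument, picks a worker for that application via the RoundRobinMap, and lazily requests a MessageChannel from the main thread on first use. The sketch below is hypothetical usage from inside a worker thread, assuming "messaging" is the MessagingITC instance created by setupITC() in itc.js; the application id, handler name and payload are made up for illustration.

import { MessagingError } from '../errors.js'

try {
  const users = await messaging.send('user-service', 'getUsers', { page: 1 })
  console.log(users)
} catch (err) {
  if (err instanceof MessagingError) {
    // No worker available, a timeout, or the channel closed before a response arrived.
    console.error(err.message)
  } else {
    throw err
  }
}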
package/lib/worker/round-robin-map.js
CHANGED
@@ -1,9 +1,7 @@
-
-
-class RoundRobinMap extends Map {
+export class RoundRobinMap extends Map {
   #instances
 
-  constructor (iterable, instances) {
+  constructor (iterable, instances = {}) {
     super(iterable)
     this.#instances = instances
   }
@@ -12,29 +10,37 @@ class RoundRobinMap extends Map {
     return { ...this.#instances }
   }
 
-  configure (
+  configure (applications) {
     this.#instances = {}
 
-    for (const
-      this.#instances[
+    for (const application of applications) {
+      this.#instances[application.id] = { next: application.next ?? 0, count: application.workers }
     }
   }
 
-  getCount (
-
+  getCount (application) {
+    if (!this.#instances[application]) {
+      return null
+    }
+
+    return this.#instances[application].count
   }
 
-  setCount (
-    this.#instances[
+  setCount (application, count) {
+    if (!this.#instances[application]) {
+      throw new Error(`Application ${application} is not configured.`)
+    }
+
+    this.#instances[application].count = count
  }
 
-  next (
-    if (!this.#instances[
-      return
+  next (application) {
+    if (!this.#instances[application]) {
+      return null
    }
 
    let worker
-    let { next, count } = this.#instances[
+    let { next, count } = this.#instances[application]
 
    // Try count times to get the next worker. This is to handle the case where a worker is being restarted.
    for (let i = 0; i < count; i++) {
@@ -43,16 +49,14 @@ class RoundRobinMap extends Map {
        next = 0
      }
 
-      worker = this.get(`${
+      worker = this.get(`${application}:${current}`)
 
      if (worker) {
        break
      }
    }
 
-    this.#instances[
+    this.#instances[application].next = next
    return worker
  }
 }
-
-module.exports = { RoundRobinMap }