@platformatic/runtime 3.0.0-alpha.4 → 3.0.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/worker/itc.js CHANGED
@@ -1,16 +1,22 @@
- 'use strict'
-
- const { once } = require('node:events')
- const { parentPort, workerData } = require('node:worker_threads')
-
- const { ITC } = require('@platformatic/itc')
- const { ensureLoggableError } = require('@platformatic/foundation')
- const { Unpromise } = require('@watchable/unpromise')
-
- const errors = require('../errors')
- const { updateUndiciInterceptors } = require('./interceptors')
- const { kITC, kId, kServiceId, kWorkerId } = require('./symbols')
- const { MessagingITC } = require('./messaging')
+ import { ensureLoggableError } from '@platformatic/foundation'
+ import { ITC } from '@platformatic/itc'
+ import { Unpromise } from '@watchable/unpromise'
+ import { once } from 'node:events'
+ import { parentPort, workerData } from 'node:worker_threads'
+ import {
+   ApplicationExitedError,
+   FailedToPerformCustomHealthCheckError,
+   FailedToPerformCustomReadinessCheckError,
+   FailedToRetrieveGraphQLSchemaError,
+   FailedToRetrieveHealthError,
+   FailedToRetrieveMetaError,
+   FailedToRetrieveMetricsError,
+   FailedToRetrieveOpenAPISchemaError,
+   WorkerExitedError
+ } from '../errors.js'
+ import { updateUndiciInterceptors } from './interceptors.js'
+ import { MessagingITC } from './messaging.js'
+ import { kApplicationId, kITC, kId, kWorkerId } from './symbols.js'
 
  async function safeHandleInITC (worker, fn) {
  try {
@@ -27,9 +33,9 @@ async function safeHandleInITC (worker, fn) {
 
  if (typeof exitCode === 'number') {
  if (typeof worker[kWorkerId] !== 'undefined') {
- throw new errors.WorkerExitedError(worker[kWorkerId], worker[kServiceId], exitCode)
+ throw new WorkerExitedError(worker[kWorkerId], worker[kApplicationId], exitCode)
  } else {
- throw new errors.ServiceExitedError(worker[kId], exitCode)
+ throw new ApplicationExitedError(worker[kId], exitCode)
  }
  } else {
  ac.abort()
@@ -49,22 +55,22 @@ async function safeHandleInITC (worker, fn) {
  }
  }
 
- async function sendViaITC (worker, name, message, transferList) {
- return safeHandleInITC(worker, () => worker[kITC].send(name, message, { transferList }))
- }
-
- async function waitEventFromITC (worker, event) {
- return safeHandleInITC(worker, () => once(worker[kITC], event))
- }
-
  async function closeITC (dispatcher, itc, messaging) {
  await dispatcher.interceptor.close()
  itc.close()
  messaging.close()
  }
 
- function setupITC (app, service, dispatcher, sharedContext) {
- const messaging = new MessagingITC(app.appConfig.id, workerData.config)
+ export async function sendViaITC (worker, name, message, transferList) {
+ return safeHandleInITC(worker, () => worker[kITC].send(name, message, { transferList }))
+ }
+
+ export async function waitEventFromITC (worker, event) {
+ return safeHandleInITC(worker, () => once(worker[kITC], event))
+ }
+
+ export function setupITC (instance, application, dispatcher, sharedContext) {
+ const messaging = new MessagingITC(instance.appConfig.id, workerData.config)
 
  Object.assign(globalThis.platformatic ?? {}, {
  messaging: {
@@ -74,55 +80,55 @@ function setupITC (app, service, dispatcher, sharedContext) {
  })
 
  const itc = new ITC({
- name: app.appConfig.id + '-worker',
+ name: instance.appConfig.id + '-worker',
  port: parentPort,
  handlers: {
  async start () {
- const status = app.getStatus()
+ const status = instance.getStatus()
 
  if (status === 'starting') {
- await once(app, 'start')
+ await once(instance, 'start')
  } else {
- // This gives a chance to a stackable to perform custom logic
+ // This gives a chance to a capability to perform custom logic
  globalThis.platformatic.events.emit('start')
 
  try {
- await app.start()
+ await instance.start()
  } catch (e) {
- await app.stop(true)
+ await instance.stop(true)
  await closeITC(dispatcher, itc, messaging)
 
  throw ensureLoggableError(e)
  }
  }
 
- if (service.entrypoint) {
- await app.listen()
+ if (application.entrypoint) {
+ await instance.listen()
  }
 
- dispatcher.replaceServer(await app.stackable.getDispatchTarget())
- return service.entrypoint ? app.stackable.getUrl() : null
+ dispatcher.replaceServer(await instance.capability.getDispatchTarget())
+ return application.entrypoint ? instance.capability.getUrl() : null
  },
 
  async stop () {
- const status = app.getStatus()
+ const status = instance.getStatus()
 
  if (status === 'starting') {
- await once(app, 'start')
+ await once(instance, 'start')
  }
 
  if (status.startsWith('start')) {
- // This gives a chance to a stackable to perform custom logic
+ // This gives a chance to a capability to perform custom logic
  globalThis.platformatic.events.emit('stop')
 
- await app.stop()
+ await instance.stop()
  }
 
  await closeITC(dispatcher, itc, messaging)
  },
 
  async build () {
- return app.stackable.build()
+ return instance.capability.build()
  },
 
  async removeFromMesh () {
@@ -130,7 +136,7 @@ function setupITC (app, service, dispatcher, sharedContext) {
  },
 
  inject (injectParams) {
- return app.stackable.inject(injectParams)
+ return instance.capability.inject(injectParams)
  },
 
  async updateUndiciInterceptors (undiciConfig) {
@@ -138,87 +144,83 @@ function setupITC (app, service, dispatcher, sharedContext) {
  },
 
  async updateWorkersCount (data) {
- const { serviceId, workers } = data
- const worker = workerData.config.serviceMap.get(serviceId)
- if (worker) {
- worker.workers = workers
- }
- workerData.serviceConfig.workers = workers
+ const { workers } = data
+ workerData.applicationConfig.workers = workers
  workerData.worker.count = workers
  },
 
  getStatus () {
- return app.getStatus()
+ return instance.getStatus()
  },
 
- getServiceInfo () {
- return app.stackable.getInfo()
+ getApplicationInfo () {
+ return instance.capability.getInfo()
  },
 
- async getServiceConfig () {
- const current = await app.stackable.getConfig()
+ async getApplicationConfig () {
+ const current = await instance.capability.getConfig()
  // Remove all undefined keys from the config
  return JSON.parse(JSON.stringify(current))
  },
 
- async getServiceEnv () {
+ async getApplicationEnv () {
  // Remove all undefined keys from the config
- return JSON.parse(JSON.stringify({ ...process.env, ...(await app.stackable.getEnv()) }))
+ return JSON.parse(JSON.stringify({ ...process.env, ...(await instance.capability.getEnv()) }))
  },
 
- async getServiceOpenAPISchema () {
+ async getApplicationOpenAPISchema () {
  try {
- return await app.stackable.getOpenapiSchema()
+ return await instance.capability.getOpenapiSchema()
  } catch (err) {
- throw new errors.FailedToRetrieveOpenAPISchemaError(service.id, err.message)
+ throw new FailedToRetrieveOpenAPISchemaError(application.id, err.message)
  }
  },
 
- async getServiceGraphQLSchema () {
+ async getApplicationGraphQLSchema () {
  try {
- return await app.stackable.getGraphqlSchema()
+ return await instance.capability.getGraphqlSchema()
  } catch (err) {
- throw new errors.FailedToRetrieveGraphQLSchemaError(service.id, err.message)
+ throw new FailedToRetrieveGraphQLSchemaError(application.id, err.message)
  }
  },
 
- async getServiceMeta () {
+ async getApplicationMeta () {
  try {
- return await app.stackable.getMeta()
+ return await instance.capability.getMeta()
  } catch (err) {
- throw new errors.FailedToRetrieveMetaError(service.id, err.message)
+ throw new FailedToRetrieveMetaError(application.id, err.message)
  }
  },
 
  async getMetrics (format) {
  try {
- return await app.getMetrics({ format })
+ return await instance.getMetrics({ format })
  } catch (err) {
- throw new errors.FailedToRetrieveMetricsError(service.id, err.message)
+ throw new FailedToRetrieveMetricsError(application.id, err.message)
  }
  },
 
  async getHealth () {
  try {
- return await app.getHealth()
+ return await instance.getHealth()
  } catch (err) {
- throw new errors.FailedToRetrieveHealthError(service.id, err.message)
+ throw new FailedToRetrieveHealthError(application.id, err.message)
  }
  },
 
  async getCustomHealthCheck () {
  try {
- return await app.stackable.getCustomHealthCheck()
+ return await instance.capability.getCustomHealthCheck()
  } catch (err) {
- throw new errors.FailedToPerformCustomHealthCheckError(service.id, err.message)
+ throw new FailedToPerformCustomHealthCheckError(application.id, err.message)
  }
  },
 
  async getCustomReadinessCheck () {
  try {
- return await app.stackable.getCustomReadinessCheck()
+ return await instance.capability.getCustomReadinessCheck()
  } catch (err) {
- throw new errors.FailedToPerformCustomReadinessCheckError(service.id, err.message)
+ throw new FailedToPerformCustomReadinessCheckError(application.id, err.message)
  }
  },
 
@@ -232,12 +234,10 @@ function setupITC (app, service, dispatcher, sharedContext) {
  }
  })
 
- app.on('changed', () => {
+ instance.on('changed', () => {
  itc.notify('changed')
  })
 
  itc.listen()
  return itc
  }
-
- module.exports = { sendViaITC, setupITC, waitEventFromITC }
package/lib/worker/main.js CHANGED
@@ -1,37 +1,33 @@
- 'use strict'
-
- const { EventEmitter } = require('node:events')
- const { hostname } = require('node:os')
- const { resolve } = require('node:path')
- const { workerData, threadId } = require('node:worker_threads')
- const { pathToFileURL } = require('node:url')
- const inspector = require('node:inspector')
- const diagnosticChannel = require('node:diagnostics_channel')
- const { ServerResponse } = require('node:http')
-
- const {
+ import {
+ buildPinoFormatters,
+ buildPinoTimestamp,
  disablePinoDirectWrite,
- executeWithTimeout,
  ensureLoggableError,
- getPrivateSymbol,
- buildPinoFormatters,
- buildPinoTimestamp
- } = require('@platformatic/foundation')
- const dotenv = require('dotenv')
- const pino = require('pino')
- const { fetch } = require('undici')
-
- const { PlatformaticApp } = require('./app')
- const { SharedContext } = require('./shared-context')
- const { setupITC } = require('./itc')
- const { setDispatcher } = require('./interceptors')
- const { kId, kITC, kStderrMarker } = require('./symbols')
+ executeWithTimeout,
+ getPrivateSymbol
+ } from '@platformatic/foundation'
+ import dotenv from 'dotenv'
+ import { subscribe } from 'node:diagnostics_channel'
+ import { EventEmitter } from 'node:events'
+ import { ServerResponse } from 'node:http'
+ import inspector from 'node:inspector'
+ import { hostname } from 'node:os'
+ import { resolve } from 'node:path'
+ import { pathToFileURL } from 'node:url'
+ import { threadId, workerData } from 'node:worker_threads'
+ import pino from 'pino'
+ import { fetch } from 'undici'
+ import { Controller } from './controller.js'
+ import { setDispatcher } from './interceptors.js'
+ import { setupITC } from './itc.js'
+ import { SharedContext } from './shared-context.js'
+ import { kId, kITC, kStderrMarker } from './symbols.js'
 
  function handleUnhandled (app, type, err) {
  const label =
  workerData.worker.count > 1
- ? `worker ${workerData.worker.index} of the service "${workerData.serviceConfig.id}"`
- : `service "${workerData.serviceConfig.id}"`
+ ? `worker ${workerData.worker.index} of the application "${workerData.applicationConfig.id}"`
+ : `application "${workerData.applicationConfig.id}"`
 
  globalThis.platformatic.logger.error({ err: ensureLoggableError(err) }, `The ${label} threw an ${type}.`)
 
@@ -72,7 +68,7 @@ function createLogger () {
 
  const pinoOptions = {
  level: 'trace',
- name: workerData.serviceConfig.id,
+ name: workerData.applicationConfig.id,
  ...workerData.config.logger
  }
 
@@ -112,16 +108,16 @@ async function main () {
 
  const config = workerData.config
 
- await performPreloading(config, workerData.serviceConfig)
+ await performPreloading(config, workerData.applicationConfig)
 
- const service = workerData.serviceConfig
+ const application = workerData.applicationConfig
 
- // Load env file and mixin env vars from service config
+ // Load env file and mixin env vars from application config
  let envfile
- if (service.envfile) {
- envfile = resolve(workerData.dirname, service.envfile)
+ if (application.envfile) {
+ envfile = resolve(workerData.dirname, application.envfile)
  } else {
- envfile = resolve(workerData.serviceConfig.path, '.env')
+ envfile = resolve(workerData.applicationConfig.path, '.env')
  }
 
  globalThis.platformatic.logger.debug({ envfile }, 'Loading envfile...')
@@ -133,17 +129,17 @@ async function main () {
  if (config.env) {
  Object.assign(process.env, config.env)
  }
- if (service.env) {
- Object.assign(process.env, service.env)
+ if (application.env) {
+ Object.assign(process.env, application.env)
  }
 
  const { threadDispatcher } = await setDispatcher(config)
 
- // If the service is an entrypoint and runtime server config is defined, use it.
+ // If the application is an entrypoint and runtime server config is defined, use it.
  let serverConfig = null
- if (config.server && service.entrypoint) {
+ if (config.server && application.entrypoint) {
  serverConfig = config.server
- } else if (service.useHttp) {
+ } else if (application.useHttp) {
  serverConfig = {
  port: 0,
  hostname: '127.0.0.1',
@@ -166,14 +162,14 @@ async function main () {
  const res = await fetch(url)
  const [{ devtoolsFrontendUrl }] = await res.json()
 
- console.log(`For ${service.id} debugger open the following in chrome: "${devtoolsFrontendUrl}"`)
+ console.log(`For ${application.id} debugger open the following in chrome: "${devtoolsFrontendUrl}"`)
  }
 
  // Create the application
- const app = new PlatformaticApp(
- service,
+ const app = new Controller(
+ application,
  workerData.worker.count > 1 ? workerData.worker.index : undefined,
- service.telemetry,
+ application.telemetry,
  config.logger,
  serverConfig,
  config.metrics,
@@ -186,9 +182,9 @@ async function main () {
 
  await app.init()
 
- if (service.entrypoint && config.basePath) {
- const meta = await app.stackable.getMeta()
- if (!meta.composer.wantsAbsoluteUrls) {
+ if (application.entrypoint && config.basePath) {
+ const meta = await app.capability.getMeta()
+ if (!meta.gateway.wantsAbsoluteUrls) {
  stripBasePath(config.basePath)
  }
  }
@@ -201,7 +197,7 @@ async function main () {
  }
 
  // Setup interaction with parent port
- const itc = setupITC(app, service, threadDispatcher, sharedContext)
+ const itc = setupITC(app, application, threadDispatcher, sharedContext)
  globalThis[kITC] = itc
 
  // Get the dependencies
@@ -212,7 +208,7 @@ async function main () {
  function stripBasePath (basePath) {
  const kBasePath = Symbol('kBasePath')
 
- diagnosticChannel.subscribe('http.server.request.start', ({ request, response }) => {
+ subscribe('http.server.request.start', ({ request, response }) => {
  if (request.url.startsWith(basePath)) {
  request.url = request.url.slice(basePath.length)
 
package/lib/worker/messaging.js CHANGED
@@ -1,14 +1,12 @@
- 'use strict'
-
- const { executeWithTimeout, kTimeout } = require('@platformatic/foundation')
- const { ITC, generateResponse, sanitize } = require('@platformatic/itc')
- const errors = require('../errors')
- const { RoundRobinMap } = require('./round-robin-map')
- const { kWorkersBroadcast, kITC } = require('./symbols')
+ import { executeWithTimeout, kTimeout } from '@platformatic/foundation'
+ import { ITC, generateResponse, sanitize } from '@platformatic/itc'
+ import { MessagingError } from '../errors.js'
+ import { RoundRobinMap } from './round-robin-map.js'
+ import { kITC, kWorkersBroadcast } from './symbols.js'
 
  const kPendingResponses = Symbol('plt.messaging.pendingResponses')
 
- class MessagingITC extends ITC {
+ export class MessagingITC extends ITC {
  #timeout
  #listener
  #closeResolvers
@@ -26,7 +24,7 @@ class MessagingITC extends ITC {
  this.#workers = new RoundRobinMap()
  this.#sources = new Set()
 
- // Start listening on the BroadcastChannel for the list of services
+ // Start listening on the BroadcastChannel for the list of applications
  this.#broadcastChannel = new BroadcastChannel(kWorkersBroadcast)
  this.#broadcastChannel.onmessage = this.#updateWorkers.bind(this)
 
@@ -47,25 +45,25 @@ class MessagingITC extends ITC {
  }
  }
 
- async send (service, name, message, options) {
- // Get the next worker for the service
- const worker = this.#workers.next(service)
+ async send (application, name, message, options) {
+ // Get the next worker for the application
+ const worker = this.#workers.next(application)
 
  if (!worker) {
- throw new errors.MessagingError(service, 'No workers available')
+ throw new MessagingError(application, 'No workers available')
  }
 
  if (!worker.channel) {
  // Use twice the value here as a fallback measure. The target handler in the main thread is forwarding
  // the request to the worker, using executeWithTimeout with the user set timeout value.
  const channel = await executeWithTimeout(
- globalThis[kITC].send('getWorkerMessagingChannel', { service: worker.service, worker: worker.worker }),
+ globalThis[kITC].send('getWorkerMessagingChannel', { application: worker.application, worker: worker.worker }),
  this.#timeout * 2
  )
 
  /* c8 ignore next 3 - Hard to test */
  if (channel === kTimeout) {
- throw new errors.MessagingError(service, 'Timeout while waiting for a communication channel.')
+ throw new MessagingError(application, 'Timeout while waiting for a communication channel.')
  }
 
  worker.channel = channel
@@ -77,13 +75,13 @@ class MessagingITC extends ITC {
 
  const context = { ...options }
  context.channel = worker.channel
- context.service = worker.service
+ context.application = worker.application
  context.trackResponse = true
 
  const response = await executeWithTimeout(super.send(name, message, context), this.#timeout)
 
  if (response === kTimeout) {
- throw new errors.MessagingError(service, 'Timeout while waiting for a response.')
+ throw new MessagingError(application, 'Timeout while waiting for a response.')
  }
 
  return response
@@ -104,8 +102,8 @@ class MessagingITC extends ITC {
  const { channel, transferList } = context
 
  if (context.trackResponse) {
- const service = context.service
- channel[kPendingResponses].set(request.reqId, { service, request })
+ const application = context.application
+ channel[kPendingResponses].set(request.reqId, { application, request })
  }
 
  channel.postMessage(sanitize(request, transferList), { transferList })
@@ -150,18 +148,18 @@ class MessagingITC extends ITC {
  this.#workers = new RoundRobinMap()
 
  const instances = []
- for (const [service, workers] of event.data) {
+ for (const [application, workers] of event.data) {
  const count = workers.length
  const next = Math.floor(Math.random() * count)
 
- instances.push({ id: service, next, workers: count })
+ instances.push({ id: application, next, workers: count })
 
  for (let i = 0; i < count; i++) {
  const worker = workers[i]
  const channel = existingChannels.get(worker.thread)
 
- // Note i is not the worker index as in runtime, but the index in the list of current alive workers for the service
- this.#workers.set(`${service}:${i}`, { ...worker, channel })
+ // Note i is not the worker index as in runtime, but the index in the list of current alive workers for the application
+ this.#workers.set(`${application}:${i}`, { ...worker, channel })
  }
  }
 
@@ -169,11 +167,11 @@ class MessagingITC extends ITC {
  }
 
  #handlePendingResponse (channel) {
- for (const { service, request } of channel[kPendingResponses].values()) {
+ for (const { application, request } of channel[kPendingResponses].values()) {
  this._emitResponse(
  generateResponse(
  request,
- new errors.MessagingError(service, 'The communication channel was closed before receiving a response.'),
+ new MessagingError(application, 'The communication channel was closed before receiving a response.'),
  null
  )
  )
@@ -182,5 +180,3 @@ class MessagingITC extends ITC {
  channel[kPendingResponses].clear()
  }
  }
-
- module.exports = { MessagingITC }
package/lib/worker/round-robin-map.js CHANGED
@@ -1,9 +1,7 @@
- 'use strict'
-
- class RoundRobinMap extends Map {
+ export class RoundRobinMap extends Map {
  #instances
 
- constructor (iterable, instances) {
+ constructor (iterable, instances = {}) {
  super(iterable)
  this.#instances = instances
  }
@@ -12,29 +10,37 @@ class RoundRobinMap extends Map {
  return { ...this.#instances }
  }
 
- configure (services) {
+ configure (applications) {
  this.#instances = {}
 
- for (const service of services) {
- this.#instances[service.id] = { next: service.next ?? 0, count: service.workers }
+ for (const application of applications) {
+ this.#instances[application.id] = { next: application.next ?? 0, count: application.workers }
  }
  }
 
- getCount (service) {
- return this.#instances[service].count
+ getCount (application) {
+ if (!this.#instances[application]) {
+ return null
+ }
+
+ return this.#instances[application].count
  }
 
- setCount (service, count) {
- this.#instances[service].count = count
+ setCount (application, count) {
+ if (!this.#instances[application]) {
+ throw new Error(`Application ${application} is not configured.`)
+ }
+
+ this.#instances[application].count = count
  }
 
- next (service) {
- if (!this.#instances[service]) {
- return undefined
+ next (application) {
+ if (!this.#instances[application]) {
+ return null
  }
 
  let worker
- let { next, count } = this.#instances[service]
+ let { next, count } = this.#instances[application]
 
  // Try count times to get the next worker. This is to handle the case where a worker is being restarted.
  for (let i = 0; i < count; i++) {
@@ -43,16 +49,14 @@ class RoundRobinMap extends Map {
  next = 0
  }
 
- worker = this.get(`${service}:${current}`)
+ worker = this.get(`${application}:${current}`)
 
  if (worker) {
  break
  }
  }
 
- this.#instances[service].next = next
+ this.#instances[application].next = next
  return worker
  }
  }
-
- module.exports = { RoundRobinMap }