@nocobase/server 1.9.0-beta.1 → 1.9.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,6 +37,8 @@ import { Environment } from './environment';
  import { ServiceContainer } from './service-container';
  import { EventQueue, EventQueueOptions } from './event-queue';
  import { BackgroundJobManager, BackgroundJobManagerOptions } from './background-job-manager';
+ import { RedisConfig, RedisConnectionManager } from './redis-connection-manager';
+ import { WorkerIdAllocator } from './worker-id-allocator';
  export type PluginType = string | typeof Plugin;
  export type PluginConfiguration = PluginType | [PluginType, any];
  export interface ResourceManagerOptions {
@@ -57,8 +59,9 @@ export interface AppTelemetryOptions extends TelemetryOptions {
  enabled?: boolean;
  }
  export interface ApplicationOptions {
- instanceId?: string;
+ instanceId?: number;
  database?: IDatabaseOptions | Database;
+ redisConfig?: RedisConfig;
  cacheManager?: CacheManagerOptions;
  /**
  * this property is deprecated and should not be used.
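
Illustration (not part of the published diff): with the updated ApplicationOptions, an embedding application could pass the numeric instanceId and the new redisConfig. The RedisConfig shape is not visible in this diff, so the fields below are assumptions.

// Hedged sketch: RedisConfig fields (host/port) and the database options are illustrative only.
import { Application } from '@nocobase/server';

const app = new Application({
  instanceId: 1, // was a string in 1.9.0-beta.1, now a number (the snowflake worker id)
  database: { dialect: 'sqlite', storage: ':memory:' },
  redisConfig: { host: '127.0.0.1', port: 6379 }, // assumed shape, not confirmed by this diff
});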
@@ -131,9 +134,12 @@ export type MaintainingCommandStatus = {
  status: MaintainingStatus;
  error?: Error;
  };
+ interface SnowflakeIdGenerator {
+ generate(): number | BigInt;
+ }
  export declare class Application<StateT = DefaultState, ContextT = DefaultContext> extends Koa implements AsyncEmitter {
  options: ApplicationOptions;
- readonly instanceId: string;
+ private _instanceId;
  /**
  * @internal
  */
@@ -168,6 +174,9 @@ export declare class Application<StateT = DefaultState, ContextT = DefaultContex
  /**
  * @internal
  */
+ redisConnectionManager: RedisConnectionManager;
+ workerIdAllocator: WorkerIdAllocator;
+ snowflakeIdGenerator: SnowflakeIdGenerator;
  pubSubManager: PubSubManager;
  syncMessageManager: SyncMessageManager;
  requestLogger: Logger;
@@ -186,6 +195,7 @@ export declare class Application<StateT = DefaultState, ContextT = DefaultContex
  private static staticCommands;
  static addCommand(callback: (app: Application) => void): void;
  private _sqlLogger;
+ get instanceId(): number;
  get sqlLogger(): Logger;
  protected _logger: SystemLogger;
  get logger(): SystemLogger;
@@ -309,6 +319,7 @@ export declare class Application<StateT = DefaultState, ContextT = DefaultContex
  actions(handlers: any, options?: ActionsOptions): void;
  command(name: string, desc?: string, opts?: CommandOptions): AppCommand;
  findCommand(name: string): Command;
+ private disposeServices;
  /**
  * @internal
  */
@@ -51,12 +51,12 @@ var import_logger = require("@nocobase/logger");
  var import_telemetry = require("@nocobase/telemetry");
  var import_lock_manager = require("@nocobase/lock-manager");
  var import_utils = require("@nocobase/utils");
+ var import_snowflake_id = require("@nocobase/snowflake-id");
  var import_crypto = require("crypto");
  var import_glob = __toESM(require("glob"));
  var import_koa = __toESM(require("koa"));
  var import_koa_compose = __toESM(require("koa-compose"));
  var import_lodash = __toESM(require("lodash"));
- var import_nanoid = require("nanoid");
  var import_path = __toESM(require("path"));
  var import_semver = __toESM(require("semver"));
  var import_acl = require("./acl");
@@ -84,11 +84,13 @@ var import_environment = require("./environment");
  var import_service_container = require("./service-container");
  var import_event_queue = require("./event-queue");
  var import_background_job_manager = require("./background-job-manager");
+ var import_redis_connection_manager = require("./redis-connection-manager");
+ var import_worker_id_allocator = require("./worker-id-allocator");
+ var import_snowflake_id_field = require("./snowflake-id-field");
  const _Application = class _Application extends import_koa.default {
  constructor(options) {
  super();
  this.options = options;
- this.instanceId = options.instanceId || (0, import_nanoid.nanoid)();
  this.context.reqId = (0, import_crypto.randomUUID)();
  this.rawOptions = this.name == "main" ? import_lodash.default.cloneDeep(options) : {};
  this.init();
@@ -96,7 +98,7 @@ const _Application = class _Application extends import_koa.default {
  this._appSupervisor.addApp(this);
  }
  }
- instanceId;
+ _instanceId;
  /**
  * @internal
  */
@@ -124,6 +126,9 @@ const _Application = class _Application extends import_koa.default {
  /**
  * @internal
  */
+ redisConnectionManager;
+ workerIdAllocator;
+ snowflakeIdGenerator;
  pubSubManager;
  syncMessageManager;
  requestLogger;
@@ -142,6 +147,9 @@ const _Application = class _Application extends import_koa.default {
  this.staticCommands.push(callback);
  }
  _sqlLogger;
+ get instanceId() {
+ return this._instanceId;
+ }
  get sqlLogger() {
  return this._sqlLogger;
  }
@@ -279,6 +287,9 @@ const _Application = class _Application extends import_koa.default {
  if (!WORKER_MODE) {
  return true;
  }
+ if (WORKER_MODE === "-") {
+ return false;
+ }
  const topics = WORKER_MODE.trim().split(",");
  if (key) {
  if (WORKER_MODE === "*") {
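
Reading of the new branch (commentary, not part of the diff): an unset WORKER_MODE keeps the old behaviour of serving everything, while the literal value "-" now makes the serving check return false. A rough sketch of the visible semantics, with the unshown tail of the function marked as assumptions:

// Hedged sketch of the serving() check as far as this hunk shows it.
function serving(workerMode: string | undefined, key?: string): boolean {
  if (!workerMode) return true;          // unset: instance serves everything (unchanged)
  if (workerMode === '-') return false;  // new in beta.10: "-" opts the instance out entirely
  const topics = workerMode.trim().split(',');
  if (key) {
    if (workerMode === '*') return true; // per the visible branch
    return topics.includes(key);         // assumption: the rest of the function is not shown
  }
  return true;                           // assumption
}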
@@ -389,16 +400,10 @@ const _Application = class _Application extends import_koa.default {
  findCommand(name) {
  return this.cli._findCommand(name);
  }
- /**
- * @internal
- */
- async reInit() {
- if (!this._loaded) {
- return;
+ async disposeServices() {
+ if (this.redisConnectionManager) {
+ await this.redisConnectionManager.close();
  }
- this.log.info("app reinitializing");
- await this.emitAsync("beforeStop");
- await this.emitAsync("afterStop");
  if (this.cacheManager) {
  await this.cacheManager.close();
  }
@@ -408,6 +413,19 @@ const _Application = class _Application extends import_koa.default {
  if (this.telemetry.started) {
  await this.telemetry.shutdown();
  }
+ await this.workerIdAllocator.release();
+ }
+ /**
+ * @internal
+ */
+ async reInit() {
+ if (!this._loaded) {
+ return;
+ }
+ this.log.info("app reinitializing");
+ await this.emitAsync("beforeStop");
+ await this.emitAsync("afterStop");
+ await this.disposeServices();
  this.closeLogger();
  const oldDb = this.db;
  this.init();
@@ -431,12 +449,7 @@ const _Application = class _Application extends import_koa.default {
  if (options == null ? void 0 : options.reload) {
  this.setMaintainingMessage("app reload");
  this.log.info(`app.reload()`, { method: "load" });
- if (this.cacheManager) {
- await this.cacheManager.close();
- }
- if (this.telemetry.started) {
- await this.telemetry.shutdown();
- }
+ await this.disposeServices();
  const oldDb = this.db;
  this.init();
  if (!oldDb.closed()) {
@@ -457,6 +470,15 @@ const _Application = class _Application extends import_koa.default {
  if ((options == null ? void 0 : options.hooks) !== false) {
  await this.emitAsync("beforeLoad", this, options);
  }
+ if (!this._instanceId) {
+ this._instanceId = await this.workerIdAllocator.getWorkerId();
+ this.log.info(`allocate worker id: ${this._instanceId}`, { method: "load" });
+ }
+ if (!this.snowflakeIdGenerator) {
+ this.snowflakeIdGenerator = new import_snowflake_id.Snowflake({
+ workerId: this._instanceId
+ });
+ }
  if (!this.telemetry.started) {
  this.telemetry.init();
  if ((_a = this.options.telemetry) == null ? void 0 : _a.enabled) {
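
Not part of the diff: once load() has run, the allocated worker id and the snowflake generator are available on the application. A minimal sketch of consuming them:

// Sketch only: assumes `app` is an Application instance and load() has completed.
await app.load();
app.log.info(`worker id: ${app.instanceId}`);        // number allocated by WorkerIdAllocator
const nextId = app.snowflakeIdGenerator.generate();  // number | BigInt, per the new declaration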
@@ -704,12 +726,7 @@ const _Application = class _Application extends import_koa.default {
  } catch (e) {
  log.error(e.message, { method: "stop", err: e.stack });
  }
- if (this.cacheManager) {
- await this.cacheManager.close();
- }
- if (this.telemetry.started) {
- await this.telemetry.shutdown();
- }
+ await this.disposeServices();
  await this.emitAsync("afterStop", this, options);
  this.emit("__stopped", this, options);
  this.stopped = true;
@@ -873,6 +890,7 @@ const _Application = class _Application extends import_koa.default {
  }
  init() {
  const options = this.options;
+ this._instanceId = options.instanceId;
  this.initLogger(options.logger);
  this.reInitEvents();
  this.middleware = new import_utils.Toposort();
@@ -881,6 +899,11 @@ const _Application = class _Application extends import_koa.default {
  this.db.removeAllListeners();
  }
  this.createMainDataSource(options);
+ this.redisConnectionManager = new import_redis_connection_manager.RedisConnectionManager({
+ redisConfig: options.redisConfig,
+ logger: this._logger.child({ module: "redis-connection-manager" })
+ });
+ this.workerIdAllocator = new import_worker_id_allocator.WorkerIdAllocator();
  this._cronJobManager = new import_cron_job_manager.CronJobManager(this);
  this._env = new import_environment.Environment();
  this._cli = this.createCLI();
@@ -959,6 +982,7 @@ const _Application = class _Application extends import_koa.default {
  app: this
  });
  this.dataSourceManager.dataSources.set("main", mainDataSourceInstance);
+ (0, import_snowflake_id_field.setupSnowflakeIdField)(this);
  }
  createDatabase(options) {
  const logging = /* @__PURE__ */ __name((...args) => {
@@ -252,6 +252,9 @@ const _AuditManager = class _AuditManager {
  async output(ctx, reqId, metadata) {
  var _a;
  try {
+ if (!ctx.action) {
+ return;
+ }
  const { resourceName, actionName } = ctx.action;
  const action = this.getAction(actionName, resourceName);
  if (!action) {
@@ -7,6 +7,7 @@
  * For more information, please refer to: https://www.nocobase.com/agreement.
  */
  import Application from './application';
+ import { SystemLogger } from '@nocobase/logger';
  export declare const QUEUE_DEFAULT_INTERVAL = 250;
  export declare const QUEUE_DEFAULT_CONCURRENCY = 1;
  export declare const QUEUE_DEFAULT_ACK_TIMEOUT = 15000;
@@ -15,7 +16,7 @@ export type QueueCallbackOptions = {
  retried?: number;
  signal?: AbortSignal;
  };
- export type QueueCallback = (message: any, options: QueueCallbackOptions) => Promise<void> | void;
+ export type QueueCallback = (message: any, options: QueueCallbackOptions) => Promise<void>;
  export type QueueEventOptions = {
  /**
  * @experimental
@@ -53,11 +54,12 @@ export declare class MemoryEventQueueAdapter implements IEventQueueAdapter {
  content: any;
  options?: QueueMessageOptions;
  }[]>;
- get processing(): Promise<void[]>;
+ get processing(): Promise<Promise<void>[][]>;
  private get storagePath();
- listen: (channel: string) => Promise<void>;
+ listen: (channel: string) => void;
  constructor(options: {
  appName: string;
+ logger: SystemLogger;
  });
  isConnected(): boolean;
  setConnected(connected: boolean): void;
@@ -69,7 +71,7 @@ export declare class MemoryEventQueueAdapter implements IEventQueueAdapter {
  unsubscribe(channel: string): void;
  publish(channel: string, content: any, options?: QueueMessageOptions): void;
  consume(channel: string, once?: boolean): Promise<void>;
- read(channel: string): Promise<void>;
+ read(channel: string, n: number): Promise<void>[];
  process(channel: string, { id, message }: {
  id: any;
  message: any;
@@ -73,26 +73,35 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  get storagePath() {
  return import_path.default.resolve(process.cwd(), "storage", "apps", this.options.appName, "event-queue.json");
  }
- listen = /* @__PURE__ */ __name(async (channel) => {
+ listen = /* @__PURE__ */ __name((channel) => {
  if (!this.connected) {
  return;
  }
- if (this.reading.has(channel)) {
- console.debug(`memory queue (${channel}) is already reading, waiting last reading to end...`);
- await this.reading.get(channel);
- }
+ const { logger } = this.options;
  const event = this.events.get(channel);
  if (!event) {
- console.warn(`memory queue (${channel}) not found, skipping...`);
+ logger.warn(`memory queue (${channel}) not found, skipping...`);
  return;
  }
  if (!event.idle()) {
- console.debug(`memory queue (${channel}) is not idle, skipping...`);
  return;
  }
- const reading = this.read(channel);
+ const reading = this.reading.get(channel) || [];
+ const count = (event.concurrency || QUEUE_DEFAULT_CONCURRENCY) - reading.length;
+ if (count <= 0) {
+ return;
+ }
+ logger.debug(`reading more from queue (${channel}), count: ${count}`);
+ this.read(channel, count).forEach((promise) => {
+ reading.push(promise);
+ promise.finally(() => {
+ const index = reading.indexOf(promise);
+ if (index > -1) {
+ reading.splice(index, 1);
+ }
+ });
+ });
  this.reading.set(channel, reading);
- await reading;
  }, "listen");
  isConnected() {
  return this.connected;
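
The rewritten listen above keeps a per-channel array of in-flight promises and only tops it up to the configured concurrency, rather than awaiting one batch at a time. The same top-up pattern in isolation (names here are illustrative, not the package's API):

// Illustrative sketch of the bounded-concurrency pattern used by listen()/read().
function topUp(inFlight: Promise<void>[], concurrency: number, start: () => Promise<void>): void {
  const count = concurrency - inFlight.length;
  for (let i = 0; i < count; i++) {
    const p = start();
    inFlight.push(p);
    // Remove the promise from the in-flight list once it settles, freeing a slot.
    p.finally(() => {
      const index = inFlight.indexOf(p);
      if (index > -1) inFlight.splice(index, 1);
    });
  }
}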
@@ -103,20 +112,21 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  async loadFromStorage() {
  let queues = {};
  let exists = false;
+ const { logger } = this.options;
  try {
  await import_promises.default.stat(this.storagePath);
  exists = true;
  } catch (ex) {
- console.info(`memory queue storage file not found, skip`);
+ logger.info(`memory queue storage file not found, skip`);
  }
  if (exists) {
  try {
  const queueJson = await import_promises.default.readFile(this.storagePath);
  queues = JSON.parse(queueJson.toString());
- console.debug("memory queue loaded from storage", queues);
+ logger.debug("memory queue loaded from storage", queues);
  await import_promises.default.unlink(this.storagePath);
  } catch (ex) {
- console.error("failed to load queue from storage", ex);
+ logger.error("failed to load queue from storage", ex);
  }
  }
  this.queues = new Map(Object.entries(queues));
@@ -128,12 +138,13 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  }
  return acc;
  }, {});
+ const { logger } = this.options;
  if (Object.keys(queues).length) {
  await import_promises.default.mkdir(import_path.default.dirname(this.storagePath), { recursive: true });
  await import_promises.default.writeFile(this.storagePath, JSON.stringify(queues));
- console.debug("memory queue saved to storage", queues);
+ logger.debug("memory queue saved to storage", queues);
  } else {
- console.debug("memory queue empty, no need to save to storage");
+ logger.debug("memory queue empty, no need to save to storage");
  }
  }
  async connect() {
@@ -149,13 +160,17 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  });
  }
  async close() {
+ if (!this.connected) {
+ return;
+ }
+ const { logger } = this.options;
  this.connected = false;
  if (this.processing) {
- console.info("memory queue waiting for processing job...");
+ logger.info("memory queue waiting for processing job...");
  await this.processing;
- console.info("memory queue job cleaned");
+ logger.info("memory queue job cleaned");
  }
- console.log("memory queue gracefully shutting down...");
+ logger.info("memory queue gracefully shutting down...");
  await this.saveToStorage();
  }
  subscribe(channel, options) {
@@ -181,6 +196,7 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  publish(channel, content, options = { timestamp: Date.now() }) {
  const event = this.events.get(channel);
  if (!event) {
+ console.debug(`memory queue (${channel}) not subscribed, skip`);
  return;
  }
  if (!this.queues.get(channel)) {
@@ -189,7 +205,8 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  const queue = this.queues.get(channel);
  const message = { id: (0, import_crypto.randomUUID)(), content, options };
  queue.push(message);
- console.debug(`memory queue (${channel}) published message`, content);
+ const { logger } = this.options;
+ logger.debug(`memory queue (${channel}) published message`, content);
  setImmediate(() => {
  this.emitter.emit(channel, channel);
  });
@@ -200,7 +217,7 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  const interval = event.interval || QUEUE_DEFAULT_INTERVAL;
  const queue = this.queues.get(channel);
  if (event.idle() && (queue == null ? void 0 : queue.length)) {
- await this.listen(channel);
+ this.listen(channel);
  }
  if (once) {
  break;
@@ -208,47 +225,43 @@ const _MemoryEventQueueAdapter = class _MemoryEventQueueAdapter {
  await (0, import_utils.sleep)(interval);
  }
  }
- async read(channel) {
- const event = this.events.get(channel);
- if (!event) {
- this.reading.delete(channel);
- return;
- }
+ read(channel, n) {
  const queue = this.queues.get(channel);
- if (queue == null ? void 0 : queue.length) {
- const messages = queue.slice(0, event.concurrency || QUEUE_DEFAULT_CONCURRENCY);
- console.debug(`memory queue (${channel}) read ${messages.length} messages`, messages);
- queue.splice(0, messages.length);
- const batch = messages.map(({ id, ...message }) => this.process(channel, { id, message }));
- await Promise.all(batch);
- }
- this.reading.delete(channel);
+ if (!(queue == null ? void 0 : queue.length)) {
+ return [];
+ }
+ const { logger } = this.options;
+ const messages = queue.slice(0, n);
+ logger.debug(`memory queue (${channel}) read ${messages.length} messages`, messages);
+ queue.splice(0, messages.length);
+ const batch = messages.map(({ id, ...message }) => this.process(channel, { id, message }));
+ return batch;
  }
  async process(channel, { id, message }) {
  const event = this.events.get(channel);
  const { content, options: { timeout = QUEUE_DEFAULT_ACK_TIMEOUT, maxRetries = 0, retried = 0 } = {} } = message;
- try {
- console.debug(`memory queue (${channel}) processing message (${id})...`, content);
- await event.process(content, {
- id,
- retried,
- signal: AbortSignal.timeout(timeout)
- });
- console.debug(`memory queue (${channel}) consumed message (${id})`);
- } catch (ex) {
+ const { logger } = this.options;
+ logger.debug(`memory queue (${channel}) processing message (${id})...`, content);
+ return (async () => event.process(content, {
+ id,
+ retried,
+ signal: AbortSignal.timeout(timeout)
+ }))().then(() => {
+ logger.debug(`memory queue (${channel}) consumed message (${id})`);
+ }).catch((ex) => {
  if (maxRetries > 0 && retried < maxRetries) {
  const currentRetry = retried + 1;
- console.warn(
+ logger.warn(
  `memory queue (${channel}) consum message (${id}) failed, retrying (${currentRetry} / ${maxRetries})...`,
  ex
  );
- setImmediate(() => {
+ setTimeout(() => {
  this.publish(channel, content, { timeout, maxRetries, retried: currentRetry, timestamp: Date.now() });
- });
+ }, 500);
  } else {
- console.error(ex);
+ logger.error(ex);
  }
- }
+ });
  }
  };
  __name(_MemoryEventQueueAdapter, "MemoryEventQueueAdapter");
@@ -257,14 +270,16 @@ const _EventQueue = class _EventQueue {
  constructor(app, options = {}) {
  this.app = app;
  this.options = options;
- this.setAdapter(new MemoryEventQueueAdapter({ appName: this.app.name }));
- app.on("afterStart", async () => {
- await this.connect();
- });
- app.on("beforeStop", async () => {
- app.logger.info("[queue] gracefully shutting down...");
- await this.close();
- });
+ if (app.serving()) {
+ this.setAdapter(new MemoryEventQueueAdapter({ appName: this.app.name, logger: this.app.logger }));
+ app.on("afterStart", async () => {
+ await this.connect();
+ });
+ app.on("beforeStop", async () => {
+ app.logger.info("[queue] gracefully shutting down...");
+ await this.close();
+ });
+ }
  }
  adapter;
  events = /* @__PURE__ */ new Map();
@@ -288,7 +303,12 @@ const _EventQueue = class _EventQueue {
  if (!this.adapter) {
  throw new Error("no adapter set, cannot connect");
  }
+ if (!this.app.serving()) {
+ this.app.logger.warn("app is not serving, will not connect to event queue");
+ return;
+ }
  await this.adapter.connect();
+ this.app.logger.debug(`connected to adapter, using memory? ${this.adapter instanceof MemoryEventQueueAdapter}`);
  for (const [channel, event] of this.events.entries()) {
  this.adapter.subscribe(this.getFullChannel(channel), event);
  }
@@ -329,7 +349,7 @@ const _EventQueue = class _EventQueue {
  throw new Error("event queue not connected, cannot publish");
  }
  const c = this.getFullChannel(channel);
- this.app.logger.debug("event queue publishing:", { channel: c, message });
+ this.app.logger.debug(`event queue publishing to channel(${c})`, { message });
  await this.adapter.publish(c, message, {
  timeout: QUEUE_DEFAULT_ACK_TIMEOUT,
  ...options,
@@ -47,6 +47,8 @@ export declare class Gateway extends EventEmitter {
  private port;
  private host;
  private socketPath;
+ private terminating;
+ private onTerminate;
  private constructor();
  static getInstance(options?: any): Gateway;
  static getIPCSocketClient(): Promise<false | IPCSocketClient>;
@@ -86,10 +86,37 @@ const _Gateway = class _Gateway extends import_events.EventEmitter {
  port = import_node_process.default.env.APP_PORT ? parseInt(import_node_process.default.env.APP_PORT) : null;
  host = "0.0.0.0";
  socketPath = (0, import_path.resolve)(import_node_process.default.cwd(), "storage", "gateway.sock");
+ terminating = false;
+ onTerminate = /* @__PURE__ */ __name(async (signal) => {
+ var _a;
+ if (this.terminating) {
+ return;
+ }
+ this.terminating = true;
+ const supervisor = import_app_supervisor.AppSupervisor.getInstance();
+ const apps = Object.values(supervisor.apps || {});
+ try {
+ for (const app of apps) {
+ try {
+ await app.destroy({ signal });
+ } catch (error) {
+ const logger = (app == null ? void 0 : app.log) ?? console;
+ (_a = logger.error) == null ? void 0 : _a.call(logger, error);
+ }
+ }
+ await supervisor.destroy();
+ } catch (error) {
+ console.error("Failed to shutdown applications gracefully", error);
+ } finally {
+ this.destroy();
+ }
+ }, "onTerminate");
  constructor() {
  super();
  this.reset();
  this.socketPath = getSocketPath();
+ import_node_process.default.once("SIGTERM", this.onTerminate);
+ import_node_process.default.once("SIGINT", this.onTerminate);
  }
  static getInstance(options = {}) {
  if (!_Gateway.instance) {
@@ -106,6 +133,8 @@ const _Gateway = class _Gateway extends import_events.EventEmitter {
  }
  }
  destroy() {
+ import_node_process.default.off("SIGTERM", this.onTerminate);
+ import_node_process.default.off("SIGINT", this.onTerminate);
  this.reset();
  _Gateway.instance = null;
  }
@@ -137,6 +166,10 @@ const _Gateway = class _Gateway extends import_events.EventEmitter {
  this.ipcSocketServer.close();
  this.ipcSocketServer = null;
  }
+ if (this.wsServer) {
+ this.wsServer.close();
+ this.wsServer = null;
+ }
  }
  addAppSelectorMiddleware(middleware, options) {
  if (this.selectorMiddlewares.nodes.some((existingFunc) => existingFunc.toString() === middleware.toString())) {
@@ -172,6 +172,20 @@ const _WSServer = class _WSServer extends import_events.default {
  message
  );
  });
+ app.on("ws:sendToUser", ({ userId, message }) => {
+ this.sendToAppUser(app.name, userId, message);
+ app.logger.trace(`[broadcasting message] ws:sendToUser for user ${userId}`, { message });
+ app.pubSubManager.publish(
+ "ws:sendToUser",
+ {
+ userId,
+ message
+ },
+ {
+ skipSelf: true
+ }
+ );
+ });
  app.on("ws:sendToClient", ({ clientId, message }) => {
  this.sendToClient(clientId, message);
  });
@@ -184,6 +198,12 @@ const _WSServer = class _WSServer extends import_events.default {
  app.on("ws:authorized", ({ clientId, userId }) => {
  this.sendToClient(clientId, { type: "authorized" });
  });
+ app.on("afterLoad", () => {
+ app.pubSubManager.subscribe("ws:sendToUser", ({ userId, message }) => {
+ app.logger.debug(`[receive broadcasting message] ws:sendToUser for user ${userId}`, { message });
+ this.sendToAppUser(app.name, userId, message);
+ });
+ });
  }
  addNewConnection(ws, request) {
  const id = (0, import_nanoid.nanoid)();
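
Not part of the diff: given the new ws:sendToUser handler and the matching pubSubManager subscription, a plugin can address all sockets of a single user by emitting the app-level event; the payload shape below is illustrative.

// Sketch only: userId and message values are illustrative.
app.emit('ws:sendToUser', {
  userId: 1,
  message: { type: 'notification', payload: { title: 'hello' } },
});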
package/lib/helper.d.ts CHANGED
@@ -16,3 +16,4 @@ export declare const createAppProxy: (app: Application) => Application<import(".
  export declare const getCommandFullName: (command: Command) => string;
  export declare const tsxRerunning: () => Promise<void>;
  export declare const enablePerfHooks: (app: Application) => void;
+ export declare function getBodyLimit(): string;
package/lib/helper.js CHANGED
@@ -41,6 +41,7 @@ __export(helper_exports, {
  createI18n: () => createI18n,
  createResourcer: () => createResourcer,
  enablePerfHooks: () => enablePerfHooks,
+ getBodyLimit: () => getBodyLimit,
  getCommandFullName: () => getCommandFullName,
  registerMiddlewares: () => registerMiddlewares,
  tsxRerunning: () => tsxRerunning
@@ -99,7 +100,7 @@ function registerMiddlewares(app, options) {
  }
  );
  if (options.bodyParser !== false) {
- const bodyLimit = "10mb";
+ const bodyLimit = getBodyLimit();
  app.use(
  (0, import_koa_bodyparser.default)({
  jsonLimit: bodyLimit,
@@ -188,12 +189,17 @@ const enablePerfHooks = /* @__PURE__ */ __name((app) => {
  });
  app.acl.allow("perf", "*", "public");
  }, "enablePerfHooks");
+ function getBodyLimit() {
+ return process.env.REQUEST_BODY_LIMIT || "10mb";
+ }
+ __name(getBodyLimit, "getBodyLimit");
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  createAppProxy,
  createI18n,
  createResourcer,
  enablePerfHooks,
+ getBodyLimit,
  getCommandFullName,
  registerMiddlewares,
  tsxRerunning
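
Not part of the diff: since getBodyLimit() falls back to "10mb" only when REQUEST_BODY_LIMIT is unset, the body-parser limit can now be raised through the environment, e.g.:

// Sketch: any size string accepted by koa-bodyparser (jsonLimit/formLimit) works here.
process.env.REQUEST_BODY_LIMIT = '50mb'; // must be set before registerMiddlewares() runs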