bunsane 0.2.9 → 0.2.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/core/App.ts CHANGED
@@ -26,6 +26,12 @@ import studioEndpoint from "../endpoints";
26
26
  import { type Middleware, composeMiddleware } from "./Middleware";
27
27
  import { deepHealthCheck, readinessCheck } from "./health";
28
28
  import { validateEnv } from "./validateEnv";
29
+ import {
30
+ RemoteManager,
31
+ registerRemoteHandlers,
32
+ setRemoteManager,
33
+ } from "./remote";
34
+ import type { RemoteManagerConfig } from "./remote";
29
35
 
30
36
  export type CorsConfig = {
31
37
  origin?: string | string[] | ((origin: string) => boolean);
@@ -70,6 +76,8 @@ export default class App {
70
76
  private composedHandler: ((req: Request) => Promise<Response>) | null = null;
71
77
 
72
78
  private studioEnabled: boolean = false;
79
+ private remote: RemoteManager | null = null;
80
+ private remoteConfig: Partial<RemoteManagerConfig> | null = null;
73
81
  private server: ReturnType<typeof Bun.serve> | null = null;
74
82
  private isShuttingDown = false;
75
83
  private isReady = false;
@@ -254,6 +262,40 @@ export default class App {
254
262
  `Registered scheduled tasks for ${services.length} services`
255
263
  );
256
264
 
265
+ // Initialize RemoteManager (opt-in via enableRemote())
266
+ if (this.remoteConfig) {
267
+ try {
268
+ const rmConfig: RemoteManagerConfig = {
269
+ appName:
270
+ this.remoteConfig.appName ||
271
+ this.name,
272
+ ...this.remoteConfig,
273
+ };
274
+ this.remote = new RemoteManager(rmConfig);
275
+ setRemoteManager(this.remote);
276
+ await this.remote.start();
277
+
278
+ for (const service of services) {
279
+ try {
280
+ registerRemoteHandlers(service);
281
+ } catch (error) {
282
+ logger.warn(
283
+ `Failed to register remote handlers for service ${service.constructor.name}`
284
+ );
285
+ logger.warn(error);
286
+ }
287
+ }
288
+ logger.info(
289
+ `RemoteManager initialized for app "${rmConfig.appName}"`
290
+ );
291
+ } catch (error) {
292
+ logger.error(
293
+ "Failed to start RemoteManager:"
294
+ );
295
+ logger.error(error);
296
+ }
297
+ }
298
+
257
299
  // Collect REST endpoints from all services
258
300
  for (const service of services) {
259
301
  const endpoints = (service.constructor as any)
@@ -561,6 +603,31 @@ export default class App {
561
603
  ), req);
562
604
  }
563
605
 
606
+ // Remote health check
607
+ if (url.pathname === "/health/remote") {
608
+ clearTimeout(timeoutId);
609
+ if (!this.remote) {
610
+ return this.addCorsHeaders(new Response(
611
+ JSON.stringify({
612
+ healthy: false,
613
+ error: "Remote subsystem not enabled",
614
+ }),
615
+ {
616
+ status: 503,
617
+ headers: { "Content-Type": "application/json" },
618
+ }
619
+ ), req);
620
+ }
621
+ const health = await this.remote.health();
622
+ return this.addCorsHeaders(new Response(
623
+ JSON.stringify(health),
624
+ {
625
+ status: health.healthy ? 200 : 503,
626
+ headers: { "Content-Type": "application/json" },
627
+ }
628
+ ), req);
629
+ }
630
+
564
631
  // Readiness probe
565
632
  if (url.pathname === "/health/ready") {
566
633
  clearTimeout(timeoutId);
@@ -911,10 +978,27 @@ export default class App {
911
978
  this.name = name;
912
979
  }
913
980
 
981
  /** Returns the app name (set via `setName`); used as the default remote `appName`. */
  public getName(): string {
    return this.name;
  }
984
+
914
985
  public setVersion(version: string) {
915
986
  this.version = version;
916
987
  }
917
988
 
989
  /**
   * Enable remote cross-app communication over Redis Streams.
   *
   * Must be called before `init()`: the stored config is only consumed when
   * the RemoteManager is constructed during startup (SYSTEM_READY), so
   * calling this later has no effect until the next start.
   * `appName` defaults to the app name (see `getName`).
   *
   * @param config partial RemoteManager configuration; omitted fields use defaults
   */
  public enableRemote(config: Partial<RemoteManagerConfig> = {}) {
    this.remoteConfig = config;
  }
997
+
998
  /**
   * Returns the active RemoteManager, or null when remote communication was
   * never enabled, has not started yet, or has already been shut down.
   */
  public getRemote(): RemoteManager | null {
    return this.remote;
  }
1001
+
918
1002
  public subscribeAppReady(callback: () => void) {
919
1003
  this.appReadyCallbacks.push(callback);
920
1004
  }
@@ -1023,6 +1107,7 @@ export default class App {
1023
1107
  cache: cacheStats,
1024
1108
  scheduler: SchedulerManager.getInstance().getMetrics(),
1025
1109
  preparedStatements: preparedStatementCache.getStats(),
1110
+ remote: this.remote ? this.remote.getMetrics() : null,
1026
1111
  };
1027
1112
  }
1028
1113
 
@@ -1127,6 +1212,18 @@ export default class App {
1127
1212
  logger.warn({ scope: 'app', component: 'App', msg: 'Scheduler stop error', error });
1128
1213
  }
1129
1214
 
1215
+ // Shutdown RemoteManager (after scheduler, before cache — DB still available)
1216
+ if (this.remote) {
1217
+ try {
1218
+ await this.remote.shutdown();
1219
+ setRemoteManager(null);
1220
+ this.remote = null;
1221
+ logger.info({ scope: 'app', component: 'App', msg: 'RemoteManager shutdown' });
1222
+ } catch (error) {
1223
+ logger.warn({ scope: 'app', component: 'App', msg: 'RemoteManager shutdown error', error });
1224
+ }
1225
+ }
1226
+
1130
1227
  // Shutdown cache
1131
1228
  try {
1132
1229
  const { CacheManager } = await import('./cache/CacheManager');
@@ -0,0 +1,115 @@
1
+ /**
2
+ * Remote Communication: CircuitBreaker
3
+ *
4
+ * Three-state breaker: closed -> open -> half-open -> closed.
5
+ *
6
+ * closed: pass through; increment failure count on error; trip to open at N.
7
+ * open: reject immediately (fail-fast) until reset timeout elapses.
8
+ * half: one trial call allowed; success -> closed, failure -> open again.
9
+ *
10
+ * Wraps Redis publish operations so a sustained Redis outage does not stall
11
+ * callers waiting for command timeouts on every request.
12
+ */
13
+
14
+ export type CircuitState = "closed" | "open" | "half-open";
15
+
16
+ export interface CircuitBreakerConfig {
17
+ /** Consecutive failures before opening (default 5) */
18
+ threshold?: number;
19
+ /** ms after opening before a half-open trial is allowed (default 30000) */
20
+ resetTimeoutMs?: number;
21
+ }
22
+
23
+ export class CircuitOpenError extends Error {
24
+ public readonly code = "CIRCUIT_OPEN";
25
+ constructor(message = "Circuit breaker is open") {
26
+ super(message);
27
+ this.name = "CircuitOpenError";
28
+ }
29
+ }
30
+
31
+ export class CircuitBreaker {
32
+ private state: CircuitState = "closed";
33
+ private failures = 0;
34
+ private openedAt = 0;
35
+ private threshold: number;
36
+ private resetTimeoutMs: number;
37
+
38
+ /** Hooks for metrics. */
39
+ public onTrip?: () => void;
40
+ public onReject?: () => void;
41
+
42
+ constructor(config: CircuitBreakerConfig = {}) {
43
+ this.threshold = config.threshold ?? 5;
44
+ this.resetTimeoutMs = config.resetTimeoutMs ?? 30_000;
45
+ }
46
+
47
+ getState(): CircuitState {
48
+ // Lazy transition from open -> half-open when reset window elapses.
49
+ if (
50
+ this.state === "open" &&
51
+ Date.now() - this.openedAt >= this.resetTimeoutMs
52
+ ) {
53
+ this.state = "half-open";
54
+ }
55
+ return this.state;
56
+ }
57
+
58
+ async exec<T>(fn: () => Promise<T>): Promise<T> {
59
+ const state = this.getState();
60
+ if (state === "open") {
61
+ this.onReject?.();
62
+ throw new CircuitOpenError();
63
+ }
64
+
65
+ try {
66
+ const result = await fn();
67
+ this.recordSuccess();
68
+ return result;
69
+ } catch (err) {
70
+ this.recordFailure();
71
+ throw err;
72
+ }
73
+ }
74
+
75
+ recordSuccess(): void {
76
+ const current = this.getState();
77
+ if (current === "half-open") {
78
+ this.state = "closed";
79
+ }
80
+ this.failures = 0;
81
+ }
82
+
83
+ recordFailure(): void {
84
+ // Force lazy open->half-open transition before deciding what to do.
85
+ const current = this.getState();
86
+ this.failures++;
87
+ if (current === "half-open") {
88
+ // Trial failed — back to open.
89
+ this.state = "open";
90
+ this.openedAt = Date.now();
91
+ this.onTrip?.();
92
+ return;
93
+ }
94
+ if (current === "closed" && this.failures >= this.threshold) {
95
+ this.state = "open";
96
+ this.openedAt = Date.now();
97
+ this.onTrip?.();
98
+ }
99
+ }
100
+
101
+ /** Force reset (useful for tests or manual recovery). */
102
+ reset(): void {
103
+ this.state = "closed";
104
+ this.failures = 0;
105
+ this.openedAt = 0;
106
+ }
107
+
108
+ getStats() {
109
+ return {
110
+ state: this.getState(),
111
+ failures: this.failures,
112
+ openedAt: this.openedAt,
113
+ };
114
+ }
115
+ }
@@ -0,0 +1,176 @@
1
+ /**
2
+ * Remote Communication: OutboxWorker
3
+ *
4
+ * Polls `remote_outbox` for unpublished rows, publishes each to Redis, and
5
+ * marks the row published. Uses `FOR UPDATE SKIP LOCKED` so multiple
6
+ * instances can run workers concurrently without double-publishing:
7
+ * each row is claimed by exactly one worker per batch.
8
+ *
9
+ * At-least-once semantics: if the worker crashes after XADD but before the
10
+ * UPDATE commits, the row stays pending and will be republished. Consumers
11
+ * must be idempotent — enforce this at the handler level (e.g., dedup on
12
+ * `ctx.messageId` or domain-level idempotency keys).
13
+ */
14
+
15
+ import type Redis from "ioredis";
16
+ import type { SQL } from "bun";
17
+ import { logger } from "../Logger";
18
+ import type { RemoteMetrics } from "./metrics";
19
+
20
+ const loggerInstance = logger.child({ scope: "OutboxWorker" });
21
+
22
+ export interface OutboxWorkerConfig {
23
+ sourceApp: string;
24
+ streamPrefix: string;
25
+ pollIntervalMs: number;
26
+ batchSize: number;
27
+ enableLogging: boolean;
28
+ }
29
+
30
+ interface OutboxRow {
31
+ id: string;
32
+ target: string;
33
+ event: string;
34
+ data: unknown;
35
+ created_at: Date;
36
+ }
37
+
38
+ export class OutboxWorker {
39
+ private db: SQL;
40
+ private publisher: Redis;
41
+ private config: OutboxWorkerConfig;
42
+ private running = false;
43
+ private timer: ReturnType<typeof setTimeout> | null = null;
44
+ private currentTick: Promise<void> | null = null;
45
+ private metrics?: RemoteMetrics;
46
+
47
+ constructor(
48
+ db: SQL,
49
+ publisher: Redis,
50
+ config: OutboxWorkerConfig,
51
+ metrics?: RemoteMetrics
52
+ ) {
53
+ this.db = db;
54
+ this.publisher = publisher;
55
+ this.config = config;
56
+ this.metrics = metrics;
57
+ }
58
+
59
+ async start(): Promise<void> {
60
+ if (this.running) return;
61
+ this.running = true;
62
+ this.scheduleNext(0);
63
+ loggerInstance.info(
64
+ `OutboxWorker started pollMs=${this.config.pollIntervalMs} batch=${this.config.batchSize}`
65
+ );
66
+ }
67
+
68
+ async stop(): Promise<void> {
69
+ if (!this.running) return;
70
+ this.running = false;
71
+ if (this.timer) {
72
+ clearTimeout(this.timer);
73
+ this.timer = null;
74
+ }
75
+ if (this.currentTick) {
76
+ await this.currentTick.catch(() => {});
77
+ }
78
+ loggerInstance.info("OutboxWorker stopped");
79
+ }
80
+
81
+ /**
82
+ * Force an immediate tick. Used during shutdown to flush any
83
+ * committed-but-unpublished rows before the process exits.
84
+ */
85
+ async flush(): Promise<void> {
86
+ await this.tick();
87
+ }
88
+
89
+ private scheduleNext(delayMs: number): void {
90
+ if (!this.running) return;
91
+ this.timer = setTimeout(() => {
92
+ this.currentTick = this.tick().finally(() => {
93
+ this.currentTick = null;
94
+ this.scheduleNext(this.config.pollIntervalMs);
95
+ });
96
+ }, delayMs);
97
+ }
98
+
99
+ private async tick(): Promise<void> {
100
+ if (!this.running) return;
101
+ try {
102
+ await this.processBatch();
103
+ } catch (error: any) {
104
+ loggerInstance.error(
105
+ { err: error, msg: "OutboxWorker tick error" }
106
+ );
107
+ }
108
+ }
109
+
110
+ private async processBatch(): Promise<void> {
111
+ const db = this.db as any;
112
+ await db.begin(async (trx: any) => {
113
+ const rows: OutboxRow[] = await trx`
114
+ SELECT id, target, event, data, created_at
115
+ FROM remote_outbox
116
+ WHERE published_at IS NULL
117
+ ORDER BY created_at
118
+ LIMIT ${this.config.batchSize}
119
+ FOR UPDATE SKIP LOCKED
120
+ `;
121
+
122
+ if (rows.length === 0) return;
123
+
124
+ this.metrics?.outboxClaimed(rows.length);
125
+ if (this.config.enableLogging) {
126
+ loggerInstance.debug(`Claimed ${rows.length} outbox rows`);
127
+ }
128
+
129
+ const successIds: string[] = [];
130
+
131
+ for (const row of rows) {
132
+ const stream = `${this.config.streamPrefix}${row.target}`;
133
+ const envelope = JSON.stringify({
134
+ kind: "event",
135
+ sourceApp: this.config.sourceApp,
136
+ event: row.event,
137
+ data: row.data,
138
+ emittedAt: row.created_at.getTime(),
139
+ });
140
+ try {
141
+ await this.publisher.xadd(
142
+ stream,
143
+ "*",
144
+ "data",
145
+ envelope
146
+ );
147
+ successIds.push(row.id);
148
+ } catch (err: any) {
149
+ this.metrics?.outboxPublishFailed();
150
+ loggerInstance.error(
151
+ {
152
+ err,
153
+ outboxId: row.id,
154
+ target: row.target,
155
+ event: row.event,
156
+ msg: "Outbox XADD failed — row will retry next tick",
157
+ }
158
+ );
159
+ // Leave row unpublished; SKIP LOCKED releases on tx end
160
+ // so next tick (or another instance) picks it up.
161
+ }
162
+ }
163
+
164
+ if (successIds.length > 0) {
165
+ for (const id of successIds) {
166
+ await trx`
167
+ UPDATE remote_outbox
168
+ SET published_at = NOW()
169
+ WHERE id = ${id}::uuid
170
+ `;
171
+ }
172
+ this.metrics?.outboxPublished(successIds.length);
173
+ }
174
+ });
175
+ }
176
+ }