@bitspacerlabs/rabbit-relay 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/deploy.yml +66 -0
- package/.vscode/settings.json +3 -0
- package/LICENSE +21 -0
- package/README.md +90 -0
- package/assets/logo.svg +154 -0
- package/dist/cjs/config.d.ts +5 -0
- package/dist/cjs/config.js +120 -0
- package/dist/cjs/eventFactories.d.ts +31 -0
- package/dist/cjs/eventFactories.js +54 -0
- package/dist/cjs/index.d.ts +4 -0
- package/dist/cjs/index.js +20 -0
- package/dist/cjs/pluginManager.d.ts +13 -0
- package/dist/cjs/pluginManager.js +27 -0
- package/dist/cjs/rabbitmqBroker.d.ts +62 -0
- package/dist/cjs/rabbitmqBroker.js +403 -0
- package/dist/cjs/utils/dedupe.d.ts +12 -0
- package/dist/cjs/utils/dedupe.js +56 -0
- package/dist/esm/config.d.ts +5 -0
- package/dist/esm/config.js +120 -0
- package/dist/esm/eventFactories.d.ts +31 -0
- package/dist/esm/eventFactories.js +54 -0
- package/dist/esm/index.d.ts +4 -0
- package/dist/esm/index.js +20 -0
- package/dist/esm/pluginManager.d.ts +13 -0
- package/dist/esm/pluginManager.js +27 -0
- package/dist/esm/rabbitmqBroker.d.ts +62 -0
- package/dist/esm/rabbitmqBroker.js +403 -0
- package/dist/esm/utils/dedupe.d.ts +12 -0
- package/dist/esm/utils/dedupe.js +56 -0
- package/package.json +63 -0
- package/release.sh +106 -0
@@ -0,0 +1,31 @@
/** Optional metadata carried alongside the event payload. */
export interface EventMeta {
    corrId?: string;
    causationId?: string;
    /** Optional application headers. */
    headers?: Record<string, string>;
    expectsReply?: boolean;
    timeoutMs?: number;
}
export interface EventEnvelope<T = unknown> {
    id: string;
    name: string;
    v: string;
    time: number;
    data: T;
    meta?: EventMeta;
}
export type EnvelopeFactory<T> = (data: T, meta?: EventMeta) => EventEnvelope<T>;
export declare function expectReply(meta?: EventMeta, timeoutMs?: number): EventMeta;
export declare function event(name: string, v?: string): {
    of: <T = unknown>() => EnvelopeFactory<T>;
};
export declare function eventWithReply(name: string, v?: string): {
    of: <T = unknown, R = unknown>() => EnvelopeFactory<T>;
};
/**
 * Augment an events map so calling a factory publishes via broker.produce().
 */
export declare function augmentEvents<T extends object>(events: Record<string, (...args: any[]) => EventEnvelope>, broker: {
    produce: (...evts: EventEnvelope[]) => Promise<unknown>;
}): T & Record<string, (...args: any[]) => Promise<unknown>>;
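For orientation, a minimal usage sketch against the declarations above. The payload type, event names, and the top-level import path are illustrative assumptions (the dist/*/index files re-export these modules), not part of the diff:

import { event, eventWithReply, expectReply, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

// Hypothetical payload shape and event names, for illustration only.
interface OrderPlaced { orderId: string; total: number; }

const orderPlaced = event("order.placed", "1.0.0").of<OrderPlaced>();
const priceQuote = eventWithReply("price.quote").of<{ sku: string }, { price: number }>();

// Fire-and-forget envelope.
const placed: EventEnvelope<OrderPlaced> = orderPlaced({ orderId: "o-1", total: 42 });

// Envelope whose meta marks it as expecting a reply with a 3s timeout,
// so broker.produce() can take the RPC path.
const quoteRequest = priceQuote({ sku: "abc" }, expectReply(undefined, 3000));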
@@ -0,0 +1,54 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.expectReply = expectReply;
exports.event = event;
exports.eventWithReply = eventWithReply;
exports.augmentEvents = augmentEvents;
const crypto_1 = require("crypto");
function expectReply(meta, timeoutMs) {
    return { ...(meta !== null && meta !== void 0 ? meta : {}), expectsReply: true, ...(timeoutMs != null ? { timeoutMs } : {}) };
}
function randomId() {
    var _a;
    try {
        return (_a = crypto_1.randomUUID === null || crypto_1.randomUUID === void 0 ? void 0 : (0, crypto_1.randomUUID)()) !== null && _a !== void 0 ? _a : `evt_${Date.now()}_${Math.random().toString(36).slice(2)}`;
    }
    catch {
        return `evt_${Date.now()}_${Math.random().toString(36).slice(2)}`;
    }
}
function event(name, v = "1.0.0") {
    return {
        of: () => (data, meta) => ({
            id: randomId(),
            name,
            v,
            time: Date.now(),
            data,
            meta,
        }),
    };
}
function eventWithReply(name, v = "1.0.0") {
    return {
        of: () => (data, meta) => ({
            id: randomId(),
            name,
            v,
            time: Date.now(),
            data,
            meta: { expectsReply: false, ...(meta !== null && meta !== void 0 ? meta : {}) },
        }),
    };
}
/**
 * Augment an events map so calling a factory publishes via broker.produce().
 */
function augmentEvents(events, broker) {
    const augmented = { ...events, ...broker };
    for (const key of Object.keys(events)) {
        const factory = events[key];
        augmented[key] = async (...args) => broker.produce(factory(...args));
    }
    return augmented;
}
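A minimal sketch of what augmentEvents() does at the call site; the factory map and the broker stub below are illustrative assumptions standing in for a real BrokerInterface:

import { augmentEvents, event, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

// Hypothetical factory map, for illustration only.
const factories = {
    orderPlaced: event("order.placed").of<{ orderId: string }>(),
};
// Stub providing the one method augmentEvents() needs.
const brokerStub = {
    produce: async (...evts: EventEnvelope[]) => evts.length,
};

// Each key now builds its envelope and hands it to produce() in a single call.
const events = augmentEvents<typeof factories>(factories, brokerStub);
void events.orderPlaced({ orderId: "o-1" });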
@@ -0,0 +1,20 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./rabbitmqBroker"), exports);
__exportStar(require("./eventFactories"), exports);
__exportStar(require("./pluginManager"), exports);
__exportStar(require("./utils/dedupe"), exports);
@@ -0,0 +1,13 @@
import { EventEnvelope } from "./eventFactories";
export interface Plugin {
    beforeProduce?(event: EventEnvelope): Promise<void>;
    afterProduce?(event: EventEnvelope, result: unknown): Promise<void>;
    beforeProcess?(id: string | number, event: EventEnvelope): Promise<void>;
    afterProcess?(id: string | number, event: EventEnvelope, result: unknown): Promise<void>;
}
export declare class PluginManager {
    private plugins;
    register(plugin: Plugin): void;
    executeHook<K extends keyof Plugin>(hookName: K, ...args: Parameters<NonNullable<Plugin[K]>>): Promise<void>;
}
export declare const pluginManager: PluginManager;
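A short sketch of registering a plugin against this interface. The logging plugin itself is illustrative; hooks are optional and, per the implementation in the next hunk, run sequentially with errors caught and logged:

import { pluginManager, Plugin, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

// Illustrative observability plugin: log every produce and every processed delivery.
const loggingPlugin: Plugin = {
    async beforeProduce(event: EventEnvelope) {
        console.log("producing", event.name, event.id);
    },
    async afterProcess(id, event, result) {
        console.log("processed", event.name, "deliveryTag:", id, "result:", result);
    },
};

pluginManager.register(loggingPlugin);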
@@ -0,0 +1,27 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.pluginManager = exports.PluginManager = void 0;
class PluginManager {
    constructor() {
        this.plugins = [];
    }
    register(plugin) {
        this.plugins.push(plugin);
    }
    async executeHook(hookName, ...args) {
        for (const plugin of this.plugins) {
            const fn = plugin[hookName];
            if (typeof fn === "function") {
                try {
                    await fn
                        .call(plugin, ...args);
                }
                catch (err) {
                    console.error(`Error executing hook ${String(hookName)}:`, err);
                }
            }
        }
    }
}
exports.PluginManager = PluginManager;
exports.pluginManager = new PluginManager();
@@ -0,0 +1,62 @@
import { Options } from "amqplib";
import { EventEnvelope } from "./eventFactories";
export interface ExchangeConfig {
    exchangeType?: "topic" | "direct" | "fanout";
    routingKey?: string;
    durable?: boolean;
    publisherConfirms?: boolean;
    queueArgs?: Options.AssertQueue["arguments"];
    /**
     * If true, do NOT declare the queue; only check it exists.
     * Use this when a separate setup step has already created the queue with specific args.
     */
    passiveQueue?: boolean;
}
export interface ConsumeOptions {
    /** Max unacked messages this consumer can hold. Also default concurrency. */
    prefetch?: number;
    /** Parallel handler executions. Defaults to prefetch (or 1). */
    concurrency?: number;
    /** If true, nack+requeue on handler error; else ack even on error. (back-compat) */
    requeueOnError?: boolean;
    /** What to do when the handler throws. Default "ack". */
    onError?: "ack" | "requeue" | "dead-letter";
}
/**
 * Generic Broker Interface:
 * TEvents maps event name keys -> EventEnvelope types.
 */
export interface BrokerInterface<TEvents extends Record<string, EventEnvelope>> {
    handle<K extends keyof TEvents>(eventName: K | "*", handler: (id: string | number, event: TEvents[K]) => Promise<unknown>): BrokerInterface<TEvents>;
    consume(opts?: ConsumeOptions): Promise<{
        stop(): Promise<void>;
    }>;
    produce<K extends keyof TEvents>(...events: TEvents[K][]): Promise<void | unknown>;
    produceMany<K extends keyof TEvents>(...events: TEvents[K][]): Promise<void>;
    with<U extends Record<string, (...args: any[]) => EventEnvelope>>(events: U): BrokerInterface<{
        [K in keyof U]: ReturnType<U[K]>;
    }> & {
        [K in keyof U]: (...args: Parameters<U[K]>) => ReturnType<U[K]>;
    };
}
export declare class RabbitMQBroker {
    private peerName;
    private defaultCfg;
    /** The current live channel promise (replaced after reconnect). */
    private channelPromise;
    /** Reconnect state */
    private reconnecting;
    private backoffMs;
    private readonly maxBackoffMs;
    /** Callbacks to run after a successful reconnect (like re-assert topology, resume consume). */
    private onReconnectCbs;
    constructor(peerName: string, config?: ExchangeConfig);
    private initChannel;
    private scheduleReconnect;
    private getChannel;
    private onReconnect;
    queue(queueName: string): {
        exchange: <TEvents extends Record<string, EventEnvelope>>(exchangeName: string, exchangeConfig?: ExchangeConfig) => Promise<BrokerInterface<TEvents>>;
    };
    private exchange;
}
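For orientation, a minimal wiring sketch against these declarations. Queue, exchange, and event names are illustrative; the AMQP connection itself is obtained by the package's config module (config.js in the file list, not shown in these hunks):

import { RabbitMQBroker, event, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

interface OrderPlaced { orderId: string; }

async function main() {
    const broker = new RabbitMQBroker("order-service", { exchangeType: "topic", durable: true });

    // Assert (or passively check) the queue, bind it to the exchange, and get a typed interface.
    const bus = await broker
        .queue("order-service.orders")
        .exchange<{ "order.placed": EventEnvelope<OrderPlaced> }>("orders");

    // Register a handler and start consuming with bounded prefetch.
    bus.handle("order.placed", async (id, evt) => {
        console.log("received", evt.data.orderId, "deliveryTag", id);
        return "ok";
    });
    const subscription = await bus.consume({ prefetch: 10, onError: "requeue" });

    // Publish through an event factory.
    const orderPlaced = event("order.placed").of<OrderPlaced>();
    await bus.produce(orderPlaced({ orderId: "o-1" }));

    await subscription.stop();
}

main().catch(console.error);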
@@ -0,0 +1,403 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RabbitMQBroker = void 0;
const config_1 = require("./config");
const pluginManager_1 = require("./pluginManager");
function generateUuid() {
    return Math.random().toString(36).slice(2) + Math.random().toString(36).slice(2);
}
class RabbitMQBroker {
    constructor(peerName, config = {}) {
        var _a, _b, _c, _d, _e;
        /** Reconnect state */
        this.reconnecting = false;
        this.backoffMs = 500;
        this.maxBackoffMs = 20000;
        /** Callbacks to run after a successful reconnect (like re-assert topology, resume consume). */
        this.onReconnectCbs = [];
        this.peerName = peerName;
        this.defaultCfg = {
            exchangeType: (_a = config.exchangeType) !== null && _a !== void 0 ? _a : "topic",
            routingKey: (_b = config.routingKey) !== null && _b !== void 0 ? _b : "#",
            durable: (_c = config.durable) !== null && _c !== void 0 ? _c : true,
            publisherConfirms: (_d = config.publisherConfirms) !== null && _d !== void 0 ? _d : false,
            queueArgs: config.queueArgs,
            passiveQueue: (_e = config.passiveQueue) !== null && _e !== void 0 ? _e : false,
        };
        this.initChannel();
    }
    async initChannel() {
        var _a, _b, _c, _d;
        this.channelPromise = (0, config_1.getRabbitMQChannel)();
        const ch = await this.channelPromise;
        this.backoffMs = 500;
        const onClose = () => this.scheduleReconnect("channel.close");
        const onError = () => this.scheduleReconnect("channel.error");
        (_b = (_a = ch).on) === null || _b === void 0 ? void 0 : _b.call(_a, "close", onClose);
        (_d = (_c = ch).on) === null || _d === void 0 ? void 0 : _d.call(_c, "error", onError);
    }
    async scheduleReconnect(reason) {
        if (this.reconnecting)
            return;
        this.reconnecting = true;
        // eslint-disable-next-line no-constant-condition
        while (true) {
            try {
                const jitter = Math.floor(Math.random() * 250);
                await new Promise((r) => setTimeout(r, this.backoffMs + jitter));
                await this.initChannel();
                const ch = await this.channelPromise;
                this.backoffMs = 500;
                this.reconnecting = false;
                for (const cb of this.onReconnectCbs) {
                    try {
                        await cb(ch);
                    }
                    catch (e) {
                        console.error("[broker] onReconnect callback failed:", e);
                    }
                }
                return;
            }
            catch {
                this.backoffMs = Math.min(this.maxBackoffMs, Math.floor(this.backoffMs * 1.7 + Math.random() * 100));
                console.error(`[broker] reconnect failed (${reason}), retrying in ~${this.backoffMs}ms`);
            }
        }
    }
    async getChannel() {
        return this.channelPromise;
    }
    onReconnect(cb) {
        this.onReconnectCbs.push(cb);
    }
    queue(queueName) {
        return {
            exchange: async (exchangeName, exchangeConfig = {}) => {
                return this.exchange(exchangeName, queueName, exchangeConfig);
            },
        };
    }
    async exchange(exchangeName, queueName, exchangeConfig = {}) {
        const assertTopology = async (channel) => {
            var _a, _b, _c, _d, _e, _f;
            const cfg = {
                exchangeType: (_a = exchangeConfig.exchangeType) !== null && _a !== void 0 ? _a : this.defaultCfg.exchangeType,
                routingKey: (_b = exchangeConfig.routingKey) !== null && _b !== void 0 ? _b : this.defaultCfg.routingKey,
                durable: (_c = exchangeConfig.durable) !== null && _c !== void 0 ? _c : this.defaultCfg.durable,
                publisherConfirms: (_d = exchangeConfig.publisherConfirms) !== null && _d !== void 0 ? _d : this.defaultCfg.publisherConfirms,
                queueArgs: (_e = exchangeConfig.queueArgs) !== null && _e !== void 0 ? _e : this.defaultCfg.queueArgs,
                passiveQueue: (_f = exchangeConfig.passiveQueue) !== null && _f !== void 0 ? _f : this.defaultCfg.passiveQueue,
            };
            await channel.assertExchange(exchangeName, cfg.exchangeType, { durable: cfg.durable });
            if (cfg.passiveQueue) {
                if (cfg.queueArgs) {
                    console.warn(`[broker] passiveQueue=true: ignoring queueArgs for '${queueName}' (not declaring).`);
                }
                try {
                    await channel.checkQueue(queueName);
                }
                catch (err) {
                    const code = err === null || err === void 0 ? void 0 : err.code;
                    if (code === 404) {
                        throw new Error(`[broker] passiveQueue check failed: queue '${queueName}' does not exist. ` +
                            `Either create it in your setup step with the desired arguments, ` +
                            `or call with passiveQueue:false and queueArgs to auto-declare.`);
                    }
                    throw err;
                }
            }
            else {
                try {
                    const qOpts = {
                        durable: cfg.durable,
                        ...(cfg.queueArgs ? { arguments: cfg.queueArgs } : {}),
                    };
                    await channel.assertQueue(queueName, qOpts);
                }
                catch (err) {
                    if ((err === null || err === void 0 ? void 0 : err.code) === 406) {
                        throw new Error(`[broker] QueueDeclare PRECONDITION_FAILED for '${queueName}'. ` +
                            `Existing queue has different arguments. ` +
                            `Fix: delete the queue or switch to { passiveQueue: true } if you're using a setup step.`);
                    }
                    throw err;
                }
            }
            // (Re)bind is idempotent - safe to call even if binding already exists
            await channel.bindQueue(queueName, exchangeName, cfg.routingKey);
        };
        const channel = await this.getChannel();
        await assertTopology(channel);
        const handlers = new Map();
        let consumerTag;
        let isConsuming = false;
        let consumeCh = null;
        let prefetchCount = 1;
        let concurrency = 1;
        let onError = "ack";
        this.onReconnect(async (ch) => {
            await assertTopology(ch);
            if (isConsuming) {
                if (prefetchCount > 0)
                    await ch.prefetch(prefetchCount, false);
                consumeCh = ch; // pin
                const ok = await ch.consume(queueName, onMessage);
                consumerTag = ok.consumerTag;
            }
        });
        const handle = (eventName, handler) => {
            handlers.set(eventName, handler);
            return brokerInterface;
        };
        // Backpressure-aware publish helper
        const waitForDrain = (ch) => new Promise((resolve) => {
            const anyCh = ch;
            if (typeof anyCh.once === "function")
                anyCh.once("drain", resolve);
            else
                resolve(); // if not supported, resolve immediately
        });
        const publishWithBackpressure = async (ch, exchange, routingKey, content, options) => {
            const ok = ch.publish(exchange, routingKey, content, options);
            if (!ok) {
                console.warn(`[amqp] publish backpressure: waiting for 'drain' (exchange=${exchange}, key=${routingKey}, size=${content.length})`);
                const t0 = Date.now();
                await waitForDrain(ch);
                const dt = Date.now() - t0;
                if (dt >= 1) {
                    console.warn(`[amqp] drain resolved after ${dt}ms (exchange=${exchange}, key=${routingKey})`);
                }
            }
        };
        const getPubChannel = async () => {
            var _a;
            if ((_a = exchangeConfig.publisherConfirms) !== null && _a !== void 0 ? _a : this.defaultCfg.publisherConfirms) {
                return (0, config_1.getRabbitMQConfirmChannel)();
            }
            return this.getChannel();
        };
        const maybeWaitForConfirms = async (ch) => {
            const anyCh = ch;
            if (typeof anyCh.waitForConfirms === "function") {
                await anyCh.waitForConfirms();
            }
        };
        const onMessage = async (msg) => {
            if (!msg)
                return;
            const ch = consumeCh;
            if (!ch)
                return;
            const id = msg.fields.deliveryTag;
            const payload = JSON.parse(msg.content.toString());
            const handler = handlers.get(payload.name) || handlers.get("*");
            let result = null;
            let errored = false;
            try {
                await pluginManager_1.pluginManager.executeHook("beforeProcess", id, payload);
                if (handler) {
                    // concurrency is enforced by prefetch limiting in-flight
                    result = await handler(id, payload);
                }
                await pluginManager_1.pluginManager.executeHook("afterProcess", id, payload, result);
            }
            catch (err) {
                errored = true;
                console.error("Handler error:", err);
            }
            // RPC reply path (even if handler errored, you might still want a reply)
            if (msg.properties.replyTo) {
                try {
                    await publishWithBackpressure(ch, "", msg.properties.replyTo, Buffer.from(JSON.stringify({ reply: errored ? null : result })), { correlationId: msg.properties.correlationId });
                }
                catch (e) {
                    console.error("Reply publish failed:", e);
                }
            }
            // Ack/Nack decision
            try {
                if (errored) {
                    // derive behavior from onError (Backward compatibility: requeueOnError -> "requeue" handled in consume())
                    if (onError === "requeue") {
                        ch.nack(msg, false, true); // requeue back to SAME queue
                    }
                    else if (onError === "dead-letter") {
                        ch.nack(msg, false, false); // route to DLX (if queue is DLX-configured)
                    }
                    else {
                        ch.ack(msg); // swallow the error
                    }
                }
                else {
                    ch.ack(msg);
                }
            }
            catch (e) {
                console.error("Ack/Nack failed:", e);
            }
        };
        const consume = async (opts) => {
            var _a, _b, _c, _d;
            prefetchCount = (_b = (_a = opts === null || opts === void 0 ? void 0 : opts.prefetch) !== null && _a !== void 0 ? _a : opts === null || opts === void 0 ? void 0 : opts.concurrency) !== null && _b !== void 0 ? _b : 1;
            concurrency = (_c = opts === null || opts === void 0 ? void 0 : opts.concurrency) !== null && _c !== void 0 ? _c : prefetchCount;
            // Back-compat: if requeueOnError is set and onError not explicitly provided, use "requeue"
            onError = (_d = opts === null || opts === void 0 ? void 0 : opts.onError) !== null && _d !== void 0 ? _d : ((opts === null || opts === void 0 ? void 0 : opts.requeueOnError) ? "requeue" : "ack");
            const ch = await this.getChannel();
            consumeCh = ch;
            if (prefetchCount > 0)
                await ch.prefetch(prefetchCount, false);
            const ok = await ch.consume(queueName, onMessage);
            consumerTag = ok.consumerTag;
            isConsuming = true;
            return {
                stop: async () => {
                    isConsuming = false;
                    try {
                        const c = consumeCh;
                        if (consumerTag && c)
                            await c.cancel(consumerTag);
                    }
                    catch {
                        // channel may be closed; ignore
                    }
                },
            };
        };
        const safePublish = async (publish) => {
            try {
                const ch = await getPubChannel();
                await publish(ch);
                await maybeWaitForConfirms(ch);
            }
            catch {
                // Broker is likely reconnecting. Briefly wait, then retry once.
                const delay = Math.min(this.backoffMs * 2, 2000);
                await new Promise(r => setTimeout(r, delay));
                // try once more after reconnect
                const ch2 = await getPubChannel();
                await publish(ch2);
                await maybeWaitForConfirms(ch2);
            }
        };
        const produceMany = async (...events) => {
            for (const evt of events) {
                await pluginManager_1.pluginManager.executeHook("beforeProduce", evt);
                await safePublish((ch) => {
                    var _a, _b, _c;
                    const e = evt;
                    const props = {
                        messageId: e.id, // idempotency key
                        type: e.name, // event name
                        timestamp: Math.floor(((_a = e.time) !== null && _a !== void 0 ? _a : Date.now()) / 1000),
                        correlationId: (_b = e.meta) === null || _b === void 0 ? void 0 : _b.corrId,
                        headers: (_c = e.meta) === null || _c === void 0 ? void 0 : _c.headers,
                    };
                    return publishWithBackpressure(ch, exchangeName, e.name, Buffer.from(JSON.stringify(e)), props);
                });
                await pluginManager_1.pluginManager.executeHook("afterProduce", evt, null);
            }
        };
        const produce = async (...events) => {
            var _a, _b, _c, _d, _e;
            // Back-compat: upgrade legacy `wait` (if present) to meta fields
            if (events.length === 1 && ((_a = events[0]) === null || _a === void 0 ? void 0 : _a.wait)) {
                const first = events[0];
                const w = first.wait;
                first.meta = first.meta || {};
                if (first.meta.expectsReply !== true)
                    first.meta.expectsReply = true;
                if ((w === null || w === void 0 ? void 0 : w.timeout) != null && first.meta.timeoutMs == null)
                    first.meta.timeoutMs = w.timeout;
                if (w === null || w === void 0 ? void 0 : w.source) {
                    first.meta.headers = { ...(first.meta.headers || {}), source: w.source };
                }
            }
            // RPC request path
            if (events.length === 1 && ((_c = (_b = events[0]) === null || _b === void 0 ? void 0 : _b.meta) === null || _c === void 0 ? void 0 : _c.expectsReply) === true) {
                const evt = events[0];
                const correlationId = generateUuid();
                const rpcCh = await this.getChannel(); // pin for reply consumer/ack
                const temp = await rpcCh.assertQueue("", { exclusive: true, autoDelete: true });
                await pluginManager_1.pluginManager.executeHook("beforeProduce", evt);
                await safePublish(async () => {
                    var _a, _b;
                    // use (confirm) pub channel for the request publish
                    const pubCh = await getPubChannel();
                    const props = {
                        messageId: evt.id,
                        type: evt.name,
                        timestamp: Math.floor(((_a = evt.time) !== null && _a !== void 0 ? _a : Date.now()) / 1000),
                        correlationId,
                        headers: (_b = evt.meta) === null || _b === void 0 ? void 0 : _b.headers,
                        replyTo: temp.queue,
                    };
                    await publishWithBackpressure(pubCh, exchangeName, evt.name, Buffer.from(JSON.stringify(evt)), props);
                });
                const timeoutMs = (_e = (_d = evt.meta) === null || _d === void 0 ? void 0 : _d.timeoutMs) !== null && _e !== void 0 ? _e : 5000;
                return await new Promise((resolve, reject) => {
                    let ctag;
                    const timer = setTimeout(async () => {
                        try {
                            if (ctag)
                                await rpcCh.cancel(ctag);
                        }
                        catch { }
                        try {
                            await rpcCh.deleteQueue(temp.queue);
                        }
                        catch { }
                        reject(new Error("Timeout waiting for reply"));
                    }, timeoutMs);
                    rpcCh
                        .consume(temp.queue, (msg) => {
                            if (!msg)
                                return;
                            if (msg.properties.correlationId !== correlationId)
                                return;
                            clearTimeout(timer);
                            try {
                                const reply = JSON.parse(msg.content.toString()).reply;
                                pluginManager_1.pluginManager.executeHook("afterProduce", evt, reply);
                                resolve(reply);
                            }
                            finally {
                                Promise.resolve()
                                    .then(async () => {
                                        try {
                                            if (ctag)
                                                await rpcCh.cancel(ctag);
                                        }
                                        catch { }
                                        try {
                                            await rpcCh.deleteQueue(temp.queue);
                                        }
                                        catch { }
                                    })
                                    .catch(() => undefined);
                            }
                        }, { noAck: true })
                        .then((ok) => { ctag = ok.consumerTag; })
                        .catch((err) => {
                            clearTimeout(timer);
                            reject(err);
                        });
                });
            }
            return produceMany(...events);
        };
        const brokerInterface = {
            handle,
            consume,
            produce,
            produceMany,
            with: (events) => {
                const { augmentEvents } = require("./eventFactories");
                const augmented = augmentEvents(events, brokerInterface);
                return augmented;
            },
        };
        return brokerInterface;
    }
}
exports.RabbitMQBroker = RabbitMQBroker;
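A minimal request/reply sketch against the RPC path above; the event name and payload shapes are illustrative, and `bus` stands for a BrokerInterface obtained as in the earlier wiring sketch:

import { event, expectReply, BrokerInterface, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

declare const bus: BrokerInterface<{ "price.quote": EventEnvelope<{ sku: string }> }>;

async function rpcDemo() {
    // Responder: the handler's return value is published back to the replyTo queue as { reply: ... }.
    bus.handle("price.quote", async (_id, evt) => ({ sku: evt.data.sku, price: 42 }));
    await bus.consume({ prefetch: 5 });

    // Requester: a single event whose meta has expectsReply === true takes the RPC path:
    // a temporary exclusive reply queue is created and produce() resolves with the reply,
    // or rejects after meta.timeoutMs (default 5000 ms).
    const priceQuote = event("price.quote").of<{ sku: string }>();
    const reply = await bus.produce(priceQuote({ sku: "abc" }, expectReply(undefined, 3000)));
    console.log(reply);
}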
@@ -0,0 +1,12 @@
export type KeyOf = (e: any) => string | undefined;
export interface DedupeOpts {
    ttlMs?: number;
    maxKeys?: number;
    keyOf?: KeyOf;
}
export interface Dedupe {
    seen(id: string): boolean;
    checkAndRemember(e: any): boolean;
    size(): number;
}
export declare function makeMemoryDedupe(opts?: DedupeOpts): Dedupe;
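A construction sketch based only on this declaration; the option values are arbitrary, and because utils/dedupe.js is not shown in these hunks, the exact semantics of checkAndRemember() are an assumption:

import { makeMemoryDedupe, EventEnvelope } from "@bitspacerlabs/rabbit-relay";

// In-memory dedupe keyed by envelope id (assumption: keyOf selects the dedupe key).
const dedupe = makeMemoryDedupe({ ttlMs: 60_000, maxKeys: 10_000, keyOf: (e) => e?.id });

declare const incoming: EventEnvelope;
// Assumption: checkAndRemember() reports whether this envelope was already recorded
// within the TTL window and remembers it otherwise.
if (!dedupe.checkAndRemember(incoming)) {
    // first sighting: process the event
}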