@onlineapps/conn-infra-mq 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +223 -0
- package/package.json +96 -0
- package/src/BaseClient.js +219 -0
- package/src/ConnectorMQClient.js +446 -0
- package/src/config/configSchema.js +70 -0
- package/src/config/defaultConfig.js +48 -0
- package/src/index.js +65 -0
- package/src/layers/ForkJoinHandler.js +312 -0
- package/src/layers/QueueManager.js +263 -0
- package/src/layers/RPCHandler.js +324 -0
- package/src/layers/RetryHandler.js +370 -0
- package/src/layers/WorkflowRouter.js +136 -0
- package/src/transports/rabbitmqClient.js +216 -0
- package/src/transports/transportFactory.js +33 -0
- package/src/utils/errorHandler.js +120 -0
- package/src/utils/logger.js +38 -0
- package/src/utils/serializer.js +44 -0
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
'use strict';

/**
 * RPCHandler - Manages RPC (Remote Procedure Call) patterns
 * Handles request-response communication with correlation IDs and timeouts
 */
class RPCHandler {
  /**
   * @param {Object} mqClient - Transport client exposing publish/consume/ack/nack
   * @param {Object} queueManager - Manager exposing createTemporaryQueue/deleteQueue
   * @param {Object} [config] - Handler configuration
   * @param {number} [config.defaultTimeout=5000] - Default RPC timeout in ms
   * @param {string} [config.replyQueuePrefix='rpc.reply'] - Reply queue name prefix
   */
  constructor(mqClient, queueManager, config = {}) {
    this.client = mqClient;
    this.queueManager = queueManager;
    this.config = {
      defaultTimeout: config.defaultTimeout || 5000,
      replyQueuePrefix: config.replyQueuePrefix || 'rpc.reply',
      ...config
    };
    // correlationId -> { resolve, reject, timeoutHandle, startTime, targetQueue }
    this.pendingRequests = new Map();
    this.replyQueue = null;
    this.replyConsumer = null;
    // In-flight initialization promise; guards against duplicate reply queues
    this.initPromise = null;
  }

  /**
   * Initialize RPC handler (create reply queue and start consuming replies).
   * Safe under concurrent callers: only one reply queue is ever created.
   * @returns {Promise<string>} Reply queue name
   */
  async initialize() {
    if (this.replyQueue) {
      return this.replyQueue;
    }

    // BUGFIX: without this guard, two concurrent call()s each created their
    // own reply queue and replies routed to the orphaned one were lost.
    if (!this.initPromise) {
      this.initPromise = (async () => {
        // Create exclusive reply queue
        const queue = await this.queueManager.createTemporaryQueue(
          this.config.replyQueuePrefix,
          {
            exclusive: true,
            autoDelete: true,
            expires: 3600000 // 1 hour
          }
        );

        // Start consuming replies (noAck: reply delivery is fire-and-forget)
        this.replyConsumer = await this.client.consume(
          (message, rawMsg) => this.handleReply(message, rawMsg),
          { queue, noAck: true }
        );

        this.replyQueue = queue;
        return queue;
      })().catch((error) => {
        // Allow a later initialize() to retry after a failed attempt
        this.initPromise = null;
        throw error;
      });
    }

    return this.initPromise;
  }

  /**
   * Send RPC request and wait for response
   * @param {string} targetQueue - Target service queue
   * @param {Object} request - Request message
   * @param {Object} options - RPC options (timeout, extra publish options)
   * @returns {Promise<*>} Peer response; rejects on timeout, publish failure,
   *   or a structured error reply
   */
  async call(targetQueue, request, options = {}) {
    // Ensure reply queue exists
    await this.initialize();

    const correlationId = this.generateCorrelationId();
    const timeout = options.timeout || this.config.defaultTimeout;

    // Promise settled by handleReply() (resolve/reject) or by the timeout
    const responsePromise = new Promise((resolve, reject) => {
      const timeoutHandle = setTimeout(() => {
        this.pendingRequests.delete(correlationId);
        reject(new Error(`RPC request timeout after ${timeout}ms`));
      }, timeout);

      this.pendingRequests.set(correlationId, {
        resolve,
        reject,
        timeoutHandle,
        startTime: Date.now(),
        targetQueue
      });
    });

    try {
      // BUGFIX: options are spread FIRST so callers cannot accidentally
      // clobber queue/correlationId/replyTo (the old code spread them last).
      await this.client.publish(request, {
        ...options,
        queue: targetQueue,
        correlationId,
        replyTo: this.replyQueue,
        expiration: timeout.toString()
      });
    } catch (error) {
      // BUGFIX: on publish failure, clear the timer and fail fast instead of
      // leaving the pending entry to dangle until the timeout fires.
      const pending = this.pendingRequests.get(correlationId);
      if (pending) {
        clearTimeout(pending.timeoutHandle);
        this.pendingRequests.delete(correlationId);
        pending.reject(error);
      }
    }

    return responsePromise;
  }

  /**
   * Setup service as RPC server
   * @param {string} queue - Queue to listen on
   * @param {Function} handler - Request handler; its return value is sent back
   * @param {Object} options - Server options (allowNonRPC, rejectOnError, ...)
   */
  async serve(queue, handler, options = {}) {
    return this.client.consume(async (request, rawMsg) => {
      const { correlationId, replyTo } = rawMsg.properties;

      if (!replyTo) {
        // Not an RPC request: optionally process it, then always settle it.
        // BUGFIX: the handler call is now inside try/catch — previously a
        // throwing non-RPC handler left the message unacknowledged forever.
        try {
          if (options.allowNonRPC) {
            await handler(request, rawMsg);
          }
          await this.client.ack(rawMsg);
        } catch {
          if (options.rejectOnError) {
            await this.client.nack(rawMsg, false);
          } else {
            await this.client.ack(rawMsg);
          }
        }
        return;
      }

      try {
        // Process request
        const response = await handler(request, rawMsg);

        // Send response back to the caller's reply queue
        if (replyTo && correlationId) {
          await this.client.publish(response, {
            queue: replyTo,
            correlationId
          });
        }

        await this.client.ack(rawMsg);
      } catch (error) {
        // Send structured error response so the caller's promise rejects
        if (replyTo && correlationId) {
          await this.client.publish(
            {
              error: {
                message: error.message,
                code: error.code || 'RPC_ERROR',
                timestamp: Date.now()
              }
            },
            {
              queue: replyTo,
              correlationId
            }
          );
        }

        // Reject or acknowledge based on configuration
        if (options.rejectOnError) {
          await this.client.nack(rawMsg, false);
        } else {
          await this.client.ack(rawMsg);
        }
      }
    }, {
      queue,
      ...options
    });
  }

  /**
   * Handle RPC reply: correlate, clear the timeout, and settle the promise.
   * Unknown/expired correlation IDs are silently dropped (late replies).
   * @private
   */
  handleReply(message, rawMsg) {
    const { correlationId } = rawMsg.properties;

    if (!correlationId || !this.pendingRequests.has(correlationId)) {
      // Unknown or expired request
      return;
    }

    const pending = this.pendingRequests.get(correlationId);
    this.pendingRequests.delete(correlationId);

    // Clear timeout
    clearTimeout(pending.timeoutHandle);

    // Calculate round-trip time
    const rtt = Date.now() - pending.startTime;

    // A reply of shape { error: {...} } is the server-side failure contract
    if (message && message.error) {
      const error = new Error(message.error.message || 'RPC error');
      error.code = message.error.code;
      error.rtt = rtt;
      pending.reject(error);
    } else {
      // Attach diagnostics without disturbing primitive responses
      if (typeof message === 'object' && message !== null) {
        message._rpcMetadata = {
          rtt,
          correlationId,
          targetQueue: pending.targetQueue
        };
      }
      pending.resolve(message);
    }
  }

  /**
   * Call multiple RPC requests in parallel; never rejects — each entry
   * reports its own success/failure.
   * @param {Array} requests - Array of { queue, request, options } objects
   * @returns {Promise<Array<{success: boolean, queue: string}>>}
   */
  async callMany(requests) {
    const promises = requests.map(req =>
      this.call(req.queue, req.request, req.options)
        .then(response => ({ success: true, response, queue: req.queue }))
        .catch(error => ({ success: false, error, queue: req.queue }))
    );

    return Promise.all(promises);
  }

  /**
   * Call with retry logic (exponential backoff between attempts).
   * @param {string} targetQueue - Target service queue
   * @param {Object} request - Request message
   * @param {Object} options - RPC options with maxRetries/retryDelay
   * @throws The last error when every attempt fails
   */
  async callWithRetry(targetQueue, request, options = {}) {
    const maxRetries = options.maxRetries || 3;
    const retryDelay = options.retryDelay || 1000;
    let lastError;

    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        return await this.call(targetQueue, request, options);
      } catch (error) {
        lastError = error;

        if (attempt < maxRetries) {
          // Wait before retry with exponential backoff
          const delay = retryDelay * Math.pow(2, attempt);
          await new Promise(resolve => setTimeout(resolve, delay));
        }
      }
    }

    throw lastError;
  }

  /**
   * Broadcast request to multiple queues and collect responses.
   * With waitForAll (default) resolves when every call settles; otherwise
   * resolves at the first successful response (or when all calls settle).
   * @param {Array} queues - Target queues
   * @param {Object} request - Request message
   * @param {Object} options - Broadcast options (timeout, waitForAll)
   */
  async broadcast(queues, request, options = {}) {
    const timeout = options.timeout || this.config.defaultTimeout;
    const waitForAll = options.waitForAll !== false;

    const results = [];
    const promises = queues.map((queue) =>
      this.call(queue, request, { timeout })
        .then((response) => {
          const result = { queue, response, success: true };
          results.push(result);
          return result;
        })
        .catch((error) => {
          const result = { queue, error: error.message, success: false };
          if (!waitForAll) {
            results.push(result);
          }
          return result;
        })
    );

    if (waitForAll) {
      return Promise.all(promises);
    }

    // BUGFIX: the old code did promises.filter(p => p.then(...)) — a Promise
    // is always truthy, so the filter kept everything and leaked floating
    // promises. Wait for the first SUCCESSFUL result (or for all to settle).
    await new Promise((resolve) => {
      if (promises.length === 0) {
        resolve();
        return;
      }
      let settledCount = 0;
      for (const p of promises) {
        p.then((result) => {
          settledCount++;
          if (result.success || settledCount === promises.length) {
            resolve();
          }
        });
      }
    });
    return results;
  }

  /**
   * Generate unique correlation ID (timestamp + 9 random base36 chars).
   * @private
   */
  generateCorrelationId() {
    // slice() replaces the deprecated substr(); same 9-char random suffix
    return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  }

  /**
   * Get pending requests count
   */
  getPendingCount() {
    return this.pendingRequests.size;
  }

  /**
   * Clear all pending requests, rejecting each with a shutdown error.
   */
  clearPending() {
    for (const pending of this.pendingRequests.values()) {
      clearTimeout(pending.timeoutHandle);
      pending.reject(new Error('RPC handler shutdown'));
    }
    this.pendingRequests.clear();
  }

  /**
   * Cleanup RPC handler: reject pending calls and delete the reply queue.
   */
  async cleanup() {
    this.clearPending();

    if (this.replyQueue) {
      try {
        await this.queueManager.deleteQueue(this.replyQueue);
      } catch (error) {
        // Queue might already be deleted (autoDelete/expires) — best effort
      }
      this.replyQueue = null;
    }

    this.replyConsumer = null;
    this.initPromise = null;
  }
}

module.exports = RPCHandler;
|
|
@@ -0,0 +1,370 @@
|
|
|
1
|
+
'use strict';

/**
 * RetryHandler - Manages message retry logic and dead letter queue handling
 * Implements exponential backoff, max retries, and error recovery patterns
 */
class RetryHandler {
  /**
   * @param {Object} mqClient - Transport client exposing publish/consume/ack/nack
   * @param {Object} [config] - Retry configuration
   * @param {number} [config.maxRetries=3] - Attempts before a message goes to the DLQ
   * @param {number} [config.initialDelay=1000] - First retry delay (ms)
   * @param {number} [config.maxDelay=30000] - Upper bound for the backoff delay (ms)
   * @param {number} [config.backoffMultiplier=2] - Exponential backoff factor
   * @param {string} [config.dlqSuffix='.dlq'] - Suffix appended to build DLQ names
   */
  constructor(mqClient, config = {}) {
    this.client = mqClient;
    this.config = {
      maxRetries: config.maxRetries || 3,
      initialDelay: config.initialDelay || 1000,
      maxDelay: config.maxDelay || 30000,
      backoffMultiplier: config.backoffMultiplier || 2,
      dlqSuffix: config.dlqSuffix || '.dlq',
      ...config
    };
    this.retryStats = {
      totalRetries: 0,
      successfulRetries: 0,
      failedRetries: 0,
      dlqMessages: 0
    };
  }

  /**
   * Process message with retry logic
   * @param {Function} handler - Message processing function
   * @param {Object} message - Message to process
   * @param {Object} rawMsg - Raw message from AMQP
   * @param {Object} options - Retry options (maxRetries, queue, ...)
   * @returns {Promise<Object>} Outcome summary ({ success, retryCount, ... })
   */
  async processWithRetry(handler, message, rawMsg, options = {}) {
    const maxRetries = options.maxRetries || this.config.maxRetries;
    const retryCount = this.getRetryCount(message, rawMsg);

    try {
      // Attempt to process message
      await handler(message, rawMsg);

      // Success - acknowledge
      await this.client.ack(rawMsg);

      // Update stats if this was a retry
      if (retryCount > 0) {
        this.retryStats.successfulRetries++;
      }

      return { success: true, retryCount };
    } catch (error) {
      // Processing failed - re-publish for retry or dead-letter it
      return this.handleFailure(error, message, rawMsg, retryCount, maxRetries, options);
    }
  }

  /**
   * Handle processing failure: either schedule a retry or route to the DLQ.
   * The original delivery is acked in both cases (the message lives on as a
   * re-published copy).
   * @private
   */
  async handleFailure(error, message, rawMsg, retryCount, maxRetries, options = {}) {
    // Guard fields access — rawMsg shape varies by transport
    const queueName = rawMsg.fields?.routingKey || rawMsg.fields?.queue || options.queue;

    // Add error context to message
    const enrichedMessage = {
      ...message,
      _error: {
        message: error.message,
        code: error.code,
        stack: error.stack,
        timestamp: Date.now(),
        queue: queueName
      },
      _retryCount: retryCount + 1
    };

    if (retryCount < maxRetries) {
      // Retry the message
      await this.retryMessage(enrichedMessage, queueName, retryCount + 1, options);

      // Acknowledge original to remove from queue
      await this.client.ack(rawMsg);

      this.retryStats.totalRetries++;

      return { success: false, retried: true, retryCount: retryCount + 1 };
    } else {
      // Max retries exceeded, send to DLQ
      await this.sendToDLQ(enrichedMessage, queueName, error, options);

      // Acknowledge to remove from main queue
      await this.client.ack(rawMsg);

      this.retryStats.failedRetries++;
      this.retryStats.dlqMessages++;

      return { success: false, retried: false, dlq: true, retryCount };
    }
  }

  /**
   * Retry a message with delay
   * @param {Object} message - Message to retry
   * @param {string} queue - Original queue name
   * @param {number} retryCount - Current retry attempt
   * @param {Object} options - Retry options (useDelayedPlugin)
   */
  async retryMessage(message, queue, retryCount, options = {}) {
    const delay = this.calculateDelay(retryCount);

    // Add retry metadata
    const retryMessage = {
      ...message,
      _retry: {
        count: retryCount,
        delay,
        timestamp: Date.now(),
        originalQueue: queue
      }
    };

    if (options.useDelayedPlugin) {
      // RabbitMQ delayed message plugin handles the delay broker-side
      await this.client.publish(retryMessage, {
        queue,
        headers: {
          'x-delay': delay,
          'x-retry-count': retryCount
        }
      });
    } else {
      // Manual in-process delay. NOTE(review): this is fire-and-forget — the
      // retry is lost if the process exits before the timer fires.
      setTimeout(async () => {
        try {
          await this.client.publish(retryMessage, {
            queue,
            headers: {
              'x-retry-count': retryCount,
              'x-retry-timestamp': Date.now()
            }
          });
        } catch (error) {
          console.error(`Failed to retry message: ${error.message}`);
        }
      }, delay);
    }
  }

  /**
   * Send message to Dead Letter Queue
   * @param {Object} message - Failed message
   * @param {string} originalQueue - Original queue name
   * @param {Error} error - The error that caused the failure
   * @param {Object} options - DLQ options (dlqName override)
   */
  async sendToDLQ(message, originalQueue, error, options = {}) {
    const dlqName = options.dlqName || `${originalQueue}${this.config.dlqSuffix}`;

    const dlqMessage = {
      ...message,
      _dlq: {
        originalQueue,
        sentAt: Date.now(),
        reason: error.message,
        errorCode: error.code,
        maxRetriesReached: true,
        retryCount: message._retryCount || 0
      }
    };

    await this.client.publish(dlqMessage, {
      queue: dlqName,
      persistent: true // DLQ messages should be persistent
    });
  }

  /**
   * Process messages from DLQ (for manual intervention or reprocessing).
   * The handler returns 'retry' | 'discard' | 'keep' to decide the outcome.
   * @param {string} dlqName - Dead Letter Queue name
   * @param {Function} handler - Handler for DLQ messages
   * @param {Object} options - Processing options (defaultQueue, ...)
   */
  async processDLQ(dlqName, handler, options = {}) {
    return this.client.consume(async (message, rawMsg) => {
      try {
        const action = await handler(message, rawMsg);

        switch (action) {
          case 'retry': {
            // Requeue to original queue (block scope fixes the bare
            // case-level const declaration of the old code)
            const originalQueue = message?._dlq?.originalQueue || options.defaultQueue;
            if (originalQueue) {
              // Reset retry bookkeeping so the message starts fresh
              const retriedMessage = { ...message };
              delete retriedMessage._dlq;
              delete retriedMessage._retryCount;
              delete retriedMessage._error;

              await this.client.publish(retriedMessage, { queue: originalQueue });
              await this.client.ack(rawMsg);
            } else {
              // BUGFIX: previously the message was left unacknowledged
              // forever when no target queue could be determined.
              await this.client.nack(rawMsg, false);
            }
            break;
          }

          case 'discard':
            // Remove from DLQ
            await this.client.ack(rawMsg);
            break;

          case 'keep':
          default:
            // Leave in DLQ (nack without requeue)
            await this.client.nack(rawMsg, false);
            break;
        }
      } catch (error) {
        console.error(`DLQ processing error: ${error.message}`);
        await this.client.nack(rawMsg, false);
      }
    }, {
      queue: dlqName,
      prefetch: 1, // Process DLQ messages one at a time
      ...options
    });
  }

  /**
   * Calculate retry delay with exponential backoff, capped at maxDelay,
   * plus up to 10% jitter to prevent thundering herd.
   * @private
   */
  calculateDelay(retryCount) {
    const delay = Math.min(
      this.config.initialDelay * Math.pow(this.config.backoffMultiplier, retryCount - 1),
      this.config.maxDelay
    );

    const jitter = Math.random() * 0.1 * delay;

    return Math.floor(delay + jitter);
  }

  /**
   * Get retry count from message or headers. Precedence: message field,
   * then 'x-retry-count' header, then RabbitMQ 'x-death' bookkeeping.
   * @private
   */
  getRetryCount(message, rawMsg) {
    // BUGFIX: guard against null/non-object messages (old code crashed)
    if (message && message._retryCount !== undefined) {
      return message._retryCount;
    }

    const headers = rawMsg.properties?.headers;

    if (headers?.['x-retry-count'] !== undefined) {
      return Number.parseInt(headers['x-retry-count'], 10);
    }

    // RabbitMQ dead-letter bookkeeping header
    const deaths = headers?.['x-death'];
    if (Array.isArray(deaths) && deaths.length > 0) {
      return deaths[0].count || 0;
    }

    return 0;
  }

  /**
   * Create a retry-enabled consumer
   * @param {string} queue - Queue to consume from
   * @param {Function} handler - Message handler
   * @param {Object} options - Consumer options
   */
  async consumeWithRetry(queue, handler, options = {}) {
    return this.client.consume(async (message, rawMsg) => {
      await this.processWithRetry(handler, message, rawMsg, {
        ...options,
        queue
      });
    }, {
      queue,
      ...options
    });
  }

  /**
   * Get retry statistics (with derived success rate).
   */
  getStats() {
    return {
      ...this.retryStats,
      successRate: this.retryStats.totalRetries > 0
        ? (this.retryStats.successfulRetries / this.retryStats.totalRetries * 100).toFixed(2) + '%'
        : 'N/A'
    };
  }

  /**
   * Reset retry statistics
   */
  resetStats() {
    this.retryStats = {
      totalRetries: 0,
      successfulRetries: 0,
      failedRetries: 0,
      dlqMessages: 0
    };
  }

  /**
   * Bulk retry messages from DLQ.
   * NOTE(review): consumption is asynchronous — the returned counters only
   * reflect messages processed before this method returns; verify against
   * the transport's delivery timing before relying on them.
   * @param {string} dlqName - DLQ name
   * @param {string} targetQueue - Target queue for retry
   * @param {number} limit - Maximum messages to retry
   */
  async bulkRetryFromDLQ(dlqName, targetQueue, limit = 100) {
    let count = 0;
    const results = {
      processed: 0,
      retried: 0,
      failed: 0
    };

    // 'let' (not const) so the callback can reference it without a TDZ risk
    let consumer = null;

    consumer = await this.client.consume(async (message, rawMsg) => {
      if (count >= limit) {
        await this.client.nack(rawMsg, true); // Requeue untouched
        return;
      }

      count++;
      results.processed++;

      try {
        // Strip retry/DLQ bookkeeping before republishing
        const cleanMessage = { ...message };
        delete cleanMessage._dlq;
        delete cleanMessage._error;
        delete cleanMessage._retryCount;

        await this.client.publish(cleanMessage, {
          queue: targetQueue,
          headers: {
            'x-dlq-retry': true,
            'x-retry-timestamp': Date.now()
          }
        });

        await this.client.ack(rawMsg);
        results.retried++;
      } catch (error) {
        console.error(`Failed to retry DLQ message: ${error.message}`);
        await this.client.nack(rawMsg, false);
        results.failed++;
      }

      // Stop consuming once the limit is reached (resolves the old TODO;
      // best-effort because not every transport exposes cancel()).
      if (count >= limit && consumer && typeof this.client.cancel === 'function') {
        try {
          await this.client.cancel(consumer);
        } catch (error) {
          // Consumer may already be gone — best effort
        }
      }
    }, {
      queue: dlqName,
      prefetch: 10
    });

    return results;
  }
}

module.exports = RetryHandler;
|