@drarzter/kafka-client 0.2.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -12
- package/dist/chunk-UDOHIMAZ.mjs +545 -0
- package/dist/chunk-UDOHIMAZ.mjs.map +1 -0
- package/dist/core.d.mts +338 -0
- package/dist/core.d.ts +338 -0
- package/dist/core.js +561 -0
- package/dist/core.js.map +1 -0
- package/dist/core.mjs +15 -0
- package/dist/core.mjs.map +1 -0
- package/dist/index.d.mts +3 -302
- package/dist/index.d.ts +3 -302
- package/dist/index.js +292 -331
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +27 -591
- package/dist/index.mjs.map +1 -1
- package/package.json +12 -1
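
For orientation before the diff: 0.3.0 moves the runtime (KafkaClient, the error classes, and the topic() helper) out of index.mjs into a shared chunk, which the index and the new core entry re-export. A minimal usage sketch of the API visible in the bundled code below — the broker address, group id, topic name, and schema are illustrative, and it assumes the package root re-exports these symbols and a zod-compatible schema library (the code only calls schema.parse()):

import { z } from "zod";
import { KafkaClient, topic } from "@drarzter/kafka-client";

// topic() builds a descriptor; .schema() attaches a zod-style schema that
// validateMessage() runs on every send for that topic (see the bundle below).
const UserCreated = topic("user.created").schema(
  z.object({ id: z.string(), email: z.string() })
);

// Constructor shape per the bundled class: (clientId, groupId, brokers, options).
const client = new KafkaClient("my-service", "my-group", ["localhost:9092"], {
  autoCreateTopics: true
});
await client.connectProducer();
await client.sendMessage(UserCreated, { id: "1", email: "user@example.com" });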
package/dist/index.mjs
CHANGED
@@ -1,582 +1,26 @@
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __decorateClass = (decorators, target, key, kind) => {
-  var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
-  for (var i = decorators.length - 1, decorator; i >= 0; i--)
-    if (decorator = decorators[i])
-      result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-  if (kind && result) __defProp(target, key, result);
-  return result;
-};
-var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index);
+import {
+  KafkaClient,
+  KafkaProcessingError,
+  KafkaRetryExhaustedError,
+  KafkaValidationError,
+  __decorateClass,
+  __decorateParam,
+  topic
+} from "./chunk-UDOHIMAZ.mjs";
 
-// src/
-import { Module } from "@nestjs/common";
+// src/nest/kafka.module.ts
+import { Module, Logger as Logger2 } from "@nestjs/common";
 import { DiscoveryModule } from "@nestjs/core";
 
-// src/
-import { Kafka, Partitioners } from "kafkajs";
-import { Logger } from "@nestjs/common";
-
-// src/client/errors.ts
-var KafkaProcessingError = class extends Error {
-  constructor(message, topic2, originalMessage, options) {
-    super(message, options);
-    this.topic = topic2;
-    this.originalMessage = originalMessage;
-    this.name = "KafkaProcessingError";
-    if (options?.cause) this.cause = options.cause;
-  }
-};
-var KafkaValidationError = class extends Error {
-  constructor(topic2, originalMessage, options) {
-    super(`Schema validation failed for topic "${topic2}"`, options);
-    this.topic = topic2;
-    this.originalMessage = originalMessage;
-    this.name = "KafkaValidationError";
-    if (options?.cause) this.cause = options.cause;
-  }
-};
-var KafkaRetryExhaustedError = class extends KafkaProcessingError {
-  constructor(topic2, originalMessage, attempts, options) {
-    super(
-      `Message processing failed after ${attempts} attempts on topic "${topic2}"`,
-      topic2,
-      originalMessage,
-      options
-    );
-    this.attempts = attempts;
-    this.name = "KafkaRetryExhaustedError";
-  }
-};
-
-// src/client/kafka.client.ts
-var KafkaClient = class {
-  kafka;
-  producer;
-  consumers = /* @__PURE__ */ new Map();
-  admin;
-  logger;
-  autoCreateTopicsEnabled;
-  strictSchemasEnabled;
-  ensuredTopics = /* @__PURE__ */ new Set();
-  defaultGroupId;
-  schemaRegistry = /* @__PURE__ */ new Map();
-  runningConsumers = /* @__PURE__ */ new Map();
-  isAdminConnected = false;
-  clientId;
-  constructor(clientId, groupId, brokers, options) {
-    this.clientId = clientId;
-    this.defaultGroupId = groupId;
-    this.logger = new Logger(`KafkaClient:${clientId}`);
-    this.autoCreateTopicsEnabled = options?.autoCreateTopics ?? false;
-    this.strictSchemasEnabled = options?.strictSchemas ?? true;
-    this.kafka = new Kafka({
-      clientId: this.clientId,
-      brokers
-    });
-    this.producer = this.kafka.producer({
-      createPartitioner: Partitioners.DefaultPartitioner,
-      idempotent: true,
-      transactionalId: `${clientId}-tx`,
-      maxInFlightRequests: 1
-    });
-    this.admin = this.kafka.admin();
-  }
-  getOrCreateConsumer(groupId) {
-    const gid = groupId || this.defaultGroupId;
-    if (!this.consumers.has(gid)) {
-      this.consumers.set(gid, this.kafka.consumer({ groupId: gid }));
-    }
-    return this.consumers.get(gid);
-  }
-  resolveTopicName(topicOrDescriptor) {
-    if (typeof topicOrDescriptor === "string") return topicOrDescriptor;
-    if (topicOrDescriptor && typeof topicOrDescriptor === "object" && "__topic" in topicOrDescriptor) {
-      return topicOrDescriptor.__topic;
-    }
-    return String(topicOrDescriptor);
-  }
-  async ensureTopic(topic2) {
-    if (!this.autoCreateTopicsEnabled || this.ensuredTopics.has(topic2)) return;
-    if (!this.isAdminConnected) {
-      await this.admin.connect();
-      this.isAdminConnected = true;
-    }
-    await this.admin.createTopics({
-      topics: [{ topic: topic2, numPartitions: 1 }]
-    });
-    this.ensuredTopics.add(topic2);
-  }
-  validateMessage(topicOrDesc, message) {
-    if (topicOrDesc?.__schema) {
-      const topic2 = this.resolveTopicName(topicOrDesc);
-      this.schemaRegistry.set(topic2, topicOrDesc.__schema);
-      return topicOrDesc.__schema.parse(message);
-    }
-    if (this.strictSchemasEnabled && typeof topicOrDesc === "string") {
-      const schema = this.schemaRegistry.get(topicOrDesc);
-      if (schema) return schema.parse(message);
-    }
-    return message;
-  }
-  async sendMessage(topicOrDesc, message, options = {}) {
-    const validated = this.validateMessage(topicOrDesc, message);
-    const topic2 = this.resolveTopicName(topicOrDesc);
-    await this.ensureTopic(topic2);
-    await this.producer.send({
-      topic: topic2,
-      messages: [
-        {
-          value: JSON.stringify(validated),
-          key: options.key ?? null,
-          headers: options.headers
-        }
-      ],
-      acks: -1
-    });
-  }
-  async sendBatch(topicOrDesc, messages) {
-    const topic2 = this.resolveTopicName(topicOrDesc);
-    await this.ensureTopic(topic2);
-    await this.producer.send({
-      topic: topic2,
-      messages: messages.map((m) => ({
-        value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
-        key: m.key ?? null,
-        headers: m.headers
-      })),
-      acks: -1
-    });
-  }
-  /** Execute multiple sends atomically. Commits on success, aborts on error. */
-  async transaction(fn) {
-    const tx = await this.producer.transaction();
-    try {
-      const ctx = {
-        send: async (topicOrDesc, message, options = {}) => {
-          const validated = this.validateMessage(topicOrDesc, message);
-          const topic2 = this.resolveTopicName(topicOrDesc);
-          await this.ensureTopic(topic2);
-          await tx.send({
-            topic: topic2,
-            messages: [
-              {
-                value: JSON.stringify(validated),
-                key: options.key ?? null,
-                headers: options.headers
-              }
-            ],
-            acks: -1
-          });
-        },
-        sendBatch: async (topicOrDesc, messages) => {
-          const topic2 = this.resolveTopicName(topicOrDesc);
-          await this.ensureTopic(topic2);
-          await tx.send({
-            topic: topic2,
-            messages: messages.map((m) => ({
-              value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
-              key: m.key ?? null,
-              headers: m.headers
-            })),
-            acks: -1
-          });
-        }
-      };
-      await fn(ctx);
-      await tx.commit();
-    } catch (error) {
-      await tx.abort();
-      throw error;
-    }
-  }
-  /** Connect the idempotent producer. Called automatically by `KafkaModule.register()`. */
-  async connectProducer() {
-    await this.producer.connect();
-    this.logger.log("Producer connected");
-  }
-  async disconnectProducer() {
-    await this.producer.disconnect();
-    this.logger.log("Producer disconnected");
-  }
-  async startConsumer(topics, handleMessage, options = {}) {
-    const {
-      groupId: optGroupId,
-      fromBeginning = false,
-      autoCommit = true,
-      retry,
-      dlq = false,
-      interceptors = [],
-      schemas: optionSchemas
-    } = options;
-    const gid = optGroupId || this.defaultGroupId;
-    const existingMode = this.runningConsumers.get(gid);
-    if (existingMode === "eachBatch") {
-      throw new Error(
-        `Cannot use eachMessage on consumer group "${gid}" \u2014 it is already running with eachBatch. Use a different groupId for this consumer.`
-      );
-    }
-    const consumer = this.getOrCreateConsumer(optGroupId);
-    const schemaMap = this.buildSchemaMap(topics, optionSchemas);
-    const topicNames = topics.map(
-      (t) => this.resolveTopicName(t)
-    );
-    await consumer.connect();
-    await this.subscribeWithRetry(consumer, topicNames, fromBeginning, options.subscribeRetry);
-    this.logger.log(`Consumer subscribed to topics: ${topicNames.join(", ")}`);
-    await consumer.run({
-      autoCommit,
-      eachMessage: async ({ topic: topic2, message }) => {
-        if (!message.value) {
-          this.logger.warn(`Received empty message from topic ${topic2}`);
-          return;
-        }
-        const raw = message.value.toString();
-        let parsedMessage;
-        try {
-          parsedMessage = JSON.parse(raw);
-        } catch (error) {
-          this.logger.error(
-            `Failed to parse message from topic ${topic2}:`,
-            error instanceof Error ? error.stack : String(error)
-          );
-          return;
-        }
-        const schema = schemaMap.get(topic2);
-        if (schema) {
-          try {
-            parsedMessage = schema.parse(parsedMessage);
-          } catch (error) {
-            const err = error instanceof Error ? error : new Error(String(error));
-            const validationError = new KafkaValidationError(
-              topic2,
-              parsedMessage,
-              { cause: err }
-            );
-            this.logger.error(
-              `Schema validation failed for topic ${topic2}:`,
-              err.message
-            );
-            if (dlq) await this.sendToDlq(topic2, raw);
-            for (const interceptor of interceptors) {
-              await interceptor.onError?.(
-                parsedMessage,
-                topic2,
-                validationError
-              );
-            }
-            return;
-          }
-        }
-        await this.processMessage(parsedMessage, raw, topic2, handleMessage, {
-          retry,
-          dlq,
-          interceptors
-        });
-      }
-    });
-    this.runningConsumers.set(gid, "eachMessage");
-  }
-  async startBatchConsumer(topics, handleBatch, options = {}) {
-    const {
-      groupId: optGroupId,
-      fromBeginning = false,
-      autoCommit = true,
-      retry,
-      dlq = false,
-      interceptors = [],
-      schemas: optionSchemas
-    } = options;
-    const gid = optGroupId || this.defaultGroupId;
-    const existingMode = this.runningConsumers.get(gid);
-    if (existingMode === "eachMessage") {
-      throw new Error(
-        `Cannot use eachBatch on consumer group "${gid}" \u2014 it is already running with eachMessage. Use a different groupId for this consumer.`
-      );
-    }
-    const consumer = this.getOrCreateConsumer(optGroupId);
-    const schemaMap = this.buildSchemaMap(topics, optionSchemas);
-    const topicNames = topics.map(
-      (t) => this.resolveTopicName(t)
-    );
-    await consumer.connect();
-    await this.subscribeWithRetry(consumer, topicNames, fromBeginning, options.subscribeRetry);
-    this.logger.log(
-      `Batch consumer subscribed to topics: ${topicNames.join(", ")}`
-    );
-    await consumer.run({
-      autoCommit,
-      eachBatch: async ({
-        batch,
-        heartbeat,
-        resolveOffset,
-        commitOffsetsIfNecessary
-      }) => {
-        const validMessages = [];
-        for (const message of batch.messages) {
-          if (!message.value) {
-            this.logger.warn(
-              `Received empty message from topic ${batch.topic}`
-            );
-            continue;
-          }
-          const raw = message.value.toString();
-          let parsedMessage;
-          try {
-            parsedMessage = JSON.parse(raw);
-          } catch (error) {
-            this.logger.error(
-              `Failed to parse message from topic ${batch.topic}:`,
-              error instanceof Error ? error.stack : String(error)
-            );
-            continue;
-          }
-          const schema = schemaMap.get(batch.topic);
-          if (schema) {
-            try {
-              parsedMessage = schema.parse(parsedMessage);
-            } catch (error) {
-              const err = error instanceof Error ? error : new Error(String(error));
-              const validationError = new KafkaValidationError(
-                batch.topic,
-                parsedMessage,
-                { cause: err }
-              );
-              this.logger.error(
-                `Schema validation failed for topic ${batch.topic}:`,
-                err.message
-              );
-              if (dlq) await this.sendToDlq(batch.topic, raw);
-              for (const interceptor of interceptors) {
-                await interceptor.onError?.(
-                  parsedMessage,
-                  batch.topic,
-                  validationError
-                );
-              }
-              continue;
-            }
-          }
-          validMessages.push(parsedMessage);
-        }
-        if (validMessages.length === 0) return;
-        const meta = {
-          partition: batch.partition,
-          highWatermark: batch.highWatermark,
-          heartbeat,
-          resolveOffset,
-          commitOffsetsIfNecessary
-        };
-        const maxAttempts = retry ? retry.maxRetries + 1 : 1;
-        const backoffMs = retry?.backoffMs ?? 1e3;
-        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-          try {
-            for (const interceptor of interceptors) {
-              for (const msg of validMessages) {
-                await interceptor.before?.(msg, batch.topic);
-              }
-            }
-            await handleBatch(validMessages, batch.topic, meta);
-            for (const interceptor of interceptors) {
-              for (const msg of validMessages) {
-                await interceptor.after?.(msg, batch.topic);
-              }
-            }
-            return;
-          } catch (error) {
-            const err = error instanceof Error ? error : new Error(String(error));
-            const isLastAttempt = attempt === maxAttempts;
-            if (isLastAttempt && maxAttempts > 1) {
-              const exhaustedError = new KafkaRetryExhaustedError(
-                batch.topic,
-                validMessages,
-                maxAttempts,
-                { cause: err }
-              );
-              for (const interceptor of interceptors) {
-                await interceptor.onError?.(
-                  validMessages,
-                  batch.topic,
-                  exhaustedError
-                );
-              }
-            } else {
-              for (const interceptor of interceptors) {
-                await interceptor.onError?.(
-                  validMessages,
-                  batch.topic,
-                  err
-                );
-              }
-            }
-            this.logger.error(
-              `Error processing batch from topic ${batch.topic} (attempt ${attempt}/${maxAttempts}):`,
-              err.stack
-            );
-            if (isLastAttempt) {
-              if (dlq) {
-                for (const msg of batch.messages) {
-                  if (msg.value) {
-                    await this.sendToDlq(batch.topic, msg.value.toString());
-                  }
-                }
-              }
-            } else {
-              await this.sleep(backoffMs * attempt);
-            }
-          }
-        }
-      }
-    });
-    this.runningConsumers.set(gid, "eachBatch");
-  }
-  async stopConsumer() {
-    const tasks = [];
-    for (const consumer of this.consumers.values()) {
-      tasks.push(consumer.disconnect());
-    }
-    await Promise.allSettled(tasks);
-    this.consumers.clear();
-    this.runningConsumers.clear();
-    this.logger.log("All consumers disconnected");
-  }
-  /** Check broker connectivity and return available topics. */
-  async checkStatus() {
-    if (!this.isAdminConnected) {
-      await this.admin.connect();
-      this.isAdminConnected = true;
-    }
-    const topics = await this.admin.listTopics();
-    return { topics };
-  }
-  getClientId() {
-    return this.clientId;
-  }
-  /** Gracefully disconnect producer, all consumers, and admin. */
-  async disconnect() {
-    const tasks = [this.producer.disconnect()];
-    for (const consumer of this.consumers.values()) {
-      tasks.push(consumer.disconnect());
-    }
-    if (this.isAdminConnected) {
-      tasks.push(this.admin.disconnect());
-      this.isAdminConnected = false;
-    }
-    await Promise.allSettled(tasks);
-    this.consumers.clear();
-    this.runningConsumers.clear();
-    this.logger.log("All connections closed");
-  }
-  // --- Private helpers ---
-  buildSchemaMap(topics, optionSchemas) {
-    const schemaMap = /* @__PURE__ */ new Map();
-    for (const t of topics) {
-      if (t?.__schema) {
-        const name = this.resolveTopicName(t);
-        schemaMap.set(name, t.__schema);
-        this.schemaRegistry.set(name, t.__schema);
-      }
-    }
-    if (optionSchemas) {
-      for (const [k, v] of optionSchemas) {
-        schemaMap.set(k, v);
-        this.schemaRegistry.set(k, v);
-      }
-    }
-    return schemaMap;
-  }
-  async processMessage(parsedMessage, raw, topic2, handleMessage, opts) {
-    const { retry, dlq = false, interceptors = [] } = opts;
-    const maxAttempts = retry ? retry.maxRetries + 1 : 1;
-    const backoffMs = retry?.backoffMs ?? 1e3;
-    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-      try {
-        for (const interceptor of interceptors) {
-          await interceptor.before?.(parsedMessage, topic2);
-        }
-        await handleMessage(parsedMessage, topic2);
-        for (const interceptor of interceptors) {
-          await interceptor.after?.(parsedMessage, topic2);
-        }
-        return;
-      } catch (error) {
-        const err = error instanceof Error ? error : new Error(String(error));
-        const isLastAttempt = attempt === maxAttempts;
-        if (isLastAttempt && maxAttempts > 1) {
-          const exhaustedError = new KafkaRetryExhaustedError(
-            topic2,
-            parsedMessage,
-            maxAttempts,
-            { cause: err }
-          );
-          for (const interceptor of interceptors) {
-            await interceptor.onError?.(parsedMessage, topic2, exhaustedError);
-          }
-        } else {
-          for (const interceptor of interceptors) {
-            await interceptor.onError?.(parsedMessage, topic2, err);
-          }
-        }
-        this.logger.error(
-          `Error processing message from topic ${topic2} (attempt ${attempt}/${maxAttempts}):`,
-          err.stack
-        );
-        if (isLastAttempt) {
-          if (dlq) await this.sendToDlq(topic2, raw);
-        } else {
-          await this.sleep(backoffMs * attempt);
-        }
-      }
-    }
-  }
-  async sendToDlq(topic2, rawMessage) {
-    const dlqTopic = `${topic2}.dlq`;
-    try {
-      await this.producer.send({
-        topic: dlqTopic,
-        messages: [{ value: rawMessage }],
-        acks: -1
-      });
-      this.logger.warn(`Message sent to DLQ: ${dlqTopic}`);
-    } catch (error) {
-      this.logger.error(
-        `Failed to send message to DLQ ${dlqTopic}:`,
-        error instanceof Error ? error.stack : String(error)
-      );
-    }
-  }
-  async subscribeWithRetry(consumer, topics, fromBeginning, retryOpts) {
-    const maxAttempts = retryOpts?.retries ?? 5;
-    const backoffMs = retryOpts?.backoffMs ?? 5e3;
-    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-      try {
-        await consumer.subscribe({ topics, fromBeginning });
-        return;
-      } catch (error) {
-        if (attempt === maxAttempts) throw error;
-        const msg = error instanceof Error ? error.message : String(error);
-        this.logger.warn(
-          `Failed to subscribe to [${topics.join(", ")}] (attempt ${attempt}/${maxAttempts}): ${msg}. Retrying in ${backoffMs}ms...`
-        );
-        await this.sleep(backoffMs);
-      }
-    }
-  }
-  sleep(ms) {
-    return new Promise((resolve) => setTimeout(resolve, ms));
-  }
-};
-
-// src/module/kafka.constants.ts
+// src/nest/kafka.constants.ts
 var KAFKA_CLIENT = "KAFKA_CLIENT";
 var getKafkaClientToken = (name) => name ? `KAFKA_CLIENT_${name}` : KAFKA_CLIENT;
 
-// src/
-import { Inject as Inject2, Injectable, Logger
+// src/nest/kafka.explorer.ts
+import { Inject as Inject2, Injectable, Logger } from "@nestjs/common";
 import { DiscoveryService, ModuleRef } from "@nestjs/core";
 
-// src/
+// src/nest/kafka.decorator.ts
 import { Inject } from "@nestjs/common";
 var KAFKA_SUBSCRIBER_METADATA = "KAFKA_SUBSCRIBER_METADATA";
 var InjectKafkaClient = (name) => Inject(getKafkaClientToken(name));
@@ -610,13 +54,13 @@ var SubscribeTo = (topics, options) => {
   };
 };
 
-// src/
+// src/nest/kafka.explorer.ts
 var KafkaExplorer = class {
   constructor(discoveryService, moduleRef) {
     this.discoveryService = discoveryService;
     this.moduleRef = moduleRef;
   }
-  logger = new
+  logger = new Logger(KafkaExplorer.name);
   async onModuleInit() {
     const providers = this.discoveryService.getProviders();
     for (const wrapper of providers) {
@@ -673,7 +117,7 @@ KafkaExplorer = __decorateClass([
   __decorateParam(1, Inject2(ModuleRef))
], KafkaExplorer);
 
-// src/
+// src/nest/kafka.module.ts
var KafkaModule = class {
   /** Register a Kafka client with static options. */
   static register(options) {
@@ -685,7 +129,10 @@ var KafkaModule = class {
          options.clientId,
          options.groupId,
          options.brokers,
-          {
+          {
+            autoCreateTopics: options.autoCreateTopics,
+            logger: new Logger2(`KafkaClient:${options.clientId}`)
+          }
        );
        await client.connectProducer();
        return client;
@@ -717,7 +164,10 @@ var KafkaModule = class {
          options.clientId,
          options.groupId,
          options.brokers,
-          {
+          {
+            autoCreateTopics: options.autoCreateTopics,
+            logger: new Logger2(`KafkaClient:${options.clientId}`)
+          }
        );
        await client.connectProducer();
        return client;
@@ -744,21 +194,7 @@ KafkaModule = __decorateClass([
   Module({})
], KafkaModule);
 
-// src/
-function topic(name) {
-  const fn = () => ({
-    __topic: name,
-    __type: void 0
-  });
-  fn.schema = (schema) => ({
-    __topic: name,
-    __type: void 0,
-    __schema: schema
-  });
-  return fn;
-}
-
-// src/health/kafka.health.ts
+// src/nest/kafka.health.ts
 import { Injectable as Injectable2 } from "@nestjs/common";
 var KafkaHealthIndicator = class {
   async check(client) {