@drarzter/kafka-client 0.1.8 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +193 -7
- package/dist/index.d.mts +75 -6
- package/dist/index.d.ts +75 -6
- package/dist/index.js +282 -28
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +281 -28
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
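
Note on the functional surface of this release, before the dist diff below: 0.2.0 adds per-topic schema validation (a new KafkaValidationError, a validateMessage step on every produce path, and topic(name).schema(...) descriptors), multiple consumers keyed by group id (a consumers Map plus a per-subscription groupId option), and a batch consumption path (startBatchConsumer, plus batch: true on @SubscribeTo). A rough TypeScript usage sketch inferred from the diff; the zod import, orderSchema, and broker address are illustrative, and connection setup is elided:

    import { z } from "zod";
    import { KafkaClient, topic } from "@drarzter/kafka-client";

    // Hypothetical schema: the client only requires a zod-style object
    // exposing .parse(value), since it calls __schema.parse(message).
    const orderSchema = z.object({ id: z.string(), amount: z.number() });
    const ordersTopic = topic("orders").schema(orderSchema);

    async function main() {
      const client = new KafkaClient("billing", "billing-group", ["localhost:9092"]);

      // Producing: the new validateMessage() runs the schema before JSON.stringify.
      await client.sendMessage(ordersTopic, { id: "o-1", amount: 42 });

      // Consuming: payloads that fail the schema raise KafkaValidationError
      // internally, get logged (and DLQ'd if enabled), and never reach the handler.
      await client.startConsumer(
        [ordersTopic],
        async (message, t) => console.log(t, message),
        { groupId: "billing-workers", dlq: true }
      );
    }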
package/dist/index.mjs
CHANGED
@@ -25,6 +25,16 @@ var KafkaProcessingError = class extends Error {
     this.topic = topic2;
     this.originalMessage = originalMessage;
     this.name = "KafkaProcessingError";
+    if (options?.cause) this.cause = options.cause;
+  }
+};
+var KafkaValidationError = class extends Error {
+  constructor(topic2, originalMessage, options) {
+    super(`Schema validation failed for topic "${topic2}"`, options);
+    this.topic = topic2;
+    this.originalMessage = originalMessage;
+    this.name = "KafkaValidationError";
+    if (options?.cause) this.cause = options.cause;
   }
 };
 var KafkaRetryExhaustedError = class extends KafkaProcessingError {
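
The new KafkaValidationError mirrors KafkaProcessingError (topic, originalMessage, ES2022 cause) but extends Error directly. The consumer loop logs it and optionally DLQs the raw payload rather than rethrowing, so the practical way to observe it is an interceptor's onError hook. A minimal sketch; the interceptor shape is inferred from the before/after/onError calls later in this diff:

    import {
      KafkaValidationError,
      KafkaRetryExhaustedError,
    } from "@drarzter/kafka-client";

    // Interceptor shape inferred from the before/after/onError calls in this diff.
    const errorTaggingInterceptor = {
      async onError(message: unknown, topic: string, error: Error) {
        if (error instanceof KafkaValidationError) {
          // JSON parsed fine but failed the topic schema; whatever
          // .parse threw (e.g. a ZodError) rides along as error.cause.
          console.warn(`invalid payload on ${topic}`, error.cause);
        } else if (error instanceof KafkaRetryExhaustedError) {
          console.error(`handler kept failing on ${topic}`);
        }
      },
    };

It would be passed as interceptors: [errorTaggingInterceptor] in the consumer options.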
@@ -44,15 +54,17 @@ var KafkaRetryExhaustedError = class extends KafkaProcessingError {
 var KafkaClient = class {
   kafka;
   producer;
-  consumer;
+  consumers = /* @__PURE__ */ new Map();
   admin;
   logger;
   autoCreateTopicsEnabled;
   ensuredTopics = /* @__PURE__ */ new Set();
+  defaultGroupId;
   isAdminConnected = false;
   clientId;
   constructor(clientId, groupId, brokers, options) {
     this.clientId = clientId;
+    this.defaultGroupId = groupId;
     this.logger = new Logger(`KafkaClient:${clientId}`);
     this.autoCreateTopicsEnabled = options?.autoCreateTopics ?? false;
     this.kafka = new Kafka({
@@ -65,9 +77,15 @@ var KafkaClient = class {
       transactionalId: `${clientId}-tx`,
       maxInFlightRequests: 1
     });
-    this.consumer = this.kafka.consumer({ groupId });
     this.admin = this.kafka.admin();
   }
+  getOrCreateConsumer(groupId) {
+    const gid = groupId || this.defaultGroupId;
+    if (!this.consumers.has(gid)) {
+      this.consumers.set(gid, this.kafka.consumer({ groupId: gid }));
+    }
+    return this.consumers.get(gid);
+  }
   resolveTopicName(topicOrDescriptor) {
     if (typeof topicOrDescriptor === "string") return topicOrDescriptor;
     if (topicOrDescriptor && typeof topicOrDescriptor === "object" && "__topic" in topicOrDescriptor) {
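
The single consumer field is replaced by a Map keyed by group id, created lazily in getOrCreateConsumer, so one KafkaClient can run several subscriptions under different consumer groups; a subscription without groupId falls back to the constructor's default. A sketch, assuming a client constructed as in the first example (topic names and handlers are placeholders):

    import { KafkaClient } from "@drarzter/kafka-client";

    declare const client: KafkaClient; // constructed as in the first sketch

    const handleOrders = async (message: unknown, t: string) => console.log("orders", t, message);
    const handleAudit = async (message: unknown, t: string) => console.log("audit", t, message);

    // Each distinct group id gets its own kafkajs consumer via getOrCreateConsumer.
    await client.startConsumer(["orders"], handleOrders); // defaultGroupId from the constructor
    await client.startConsumer(["audit-log"], handleAudit, { groupId: "audit-readers" });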
@@ -86,14 +104,21 @@ var KafkaClient = class {
     });
     this.ensuredTopics.add(topic2);
   }
+  validateMessage(topicOrDesc, message) {
+    if (topicOrDesc?.__schema) {
+      return topicOrDesc.__schema.parse(message);
+    }
+    return message;
+  }
   async sendMessage(topicOrDesc, message, options = {}) {
+    const validated = this.validateMessage(topicOrDesc, message);
     const topic2 = this.resolveTopicName(topicOrDesc);
     await this.ensureTopic(topic2);
     await this.producer.send({
       topic: topic2,
       messages: [
         {
-          value: JSON.stringify(message),
+          value: JSON.stringify(validated),
           key: options.key ?? null,
           headers: options.headers
         }
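
validateMessage runs before ensureTopic and producer.send on every produce path (single, batch, and the transactional sends below), so a producer-side schema failure throws to the caller instead of being swallowed. A sketch, reusing client and ordersTopic from the first example:

    import { KafkaClient } from "@drarzter/kafka-client";

    declare const client: KafkaClient; // as in the first sketch
    declare const ordersTopic: any;    // descriptor from topic("orders").schema(...)

    try {
      // Deliberately wrong shape; __schema.parse rejects it.
      await client.sendMessage(ordersTopic, { id: 123 } as any);
    } catch (err) {
      // Whatever .parse threw (e.g. a ZodError) surfaces here unchanged:
      // nothing was sent and the topic was not auto-created.
      console.error("rejected before send", err);
    }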
@@ -107,7 +132,7 @@ var KafkaClient = class {
     await this.producer.send({
       topic: topic2,
       messages: messages.map((m) => ({
-        value: JSON.stringify(m.value),
+        value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
         key: m.key ?? null,
         headers: m.headers
       })),
@@ -120,13 +145,14 @@ var KafkaClient = class {
     try {
       const ctx = {
         send: async (topicOrDesc, message, options = {}) => {
+          const validated = this.validateMessage(topicOrDesc, message);
           const topic2 = this.resolveTopicName(topicOrDesc);
           await this.ensureTopic(topic2);
           await tx.send({
             topic: topic2,
             messages: [
               {
-                value: JSON.stringify(message),
+                value: JSON.stringify(validated),
                 key: options.key ?? null,
                 headers: options.headers
               }
@@ -140,7 +166,7 @@ var KafkaClient = class {
           await tx.send({
             topic: topic2,
             messages: messages.map((m) => ({
-              value: JSON.stringify(m.value),
+              value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
               key: m.key ?? null,
               headers: m.headers
             })),
@@ -164,25 +190,28 @@ var KafkaClient = class {
     await this.producer.disconnect();
     this.logger.log("Producer disconnected");
   }
-  /** Subscribe to topics and start consuming messages with the given handler. */
   async startConsumer(topics, handleMessage, options = {}) {
     const {
+      groupId: optGroupId,
       fromBeginning = false,
       autoCommit = true,
       retry,
      dlq = false,
-      interceptors = []
+      interceptors = [],
+      schemas: optionSchemas
     } = options;
+    const consumer = this.getOrCreateConsumer(optGroupId);
+    const schemaMap = this.buildSchemaMap(topics, optionSchemas);
     const topicNames = topics.map(
       (t) => this.resolveTopicName(t)
     );
-    await this.consumer.connect();
+    await consumer.connect();
     for (const t of topicNames) {
       await this.ensureTopic(t);
     }
-    await this.consumer.subscribe({ topics: topicNames, fromBeginning });
+    await consumer.subscribe({ topics: topicNames, fromBeginning });
     this.logger.log(`Consumer subscribed to topics: ${topicNames.join(", ")}`);
-    await this.consumer.run({
+    await consumer.run({
       autoCommit,
       eachMessage: async ({ topic: topic2, message }) => {
         if (!message.value) {
@@ -200,6 +229,32 @@ var KafkaClient = class {
           );
           return;
         }
+        const schema = schemaMap.get(topic2);
+        if (schema) {
+          try {
+            parsedMessage = schema.parse(parsedMessage);
+          } catch (error) {
+            const err = error instanceof Error ? error : new Error(String(error));
+            const validationError = new KafkaValidationError(
+              topic2,
+              parsedMessage,
+              { cause: err }
+            );
+            this.logger.error(
+              `Schema validation failed for topic ${topic2}:`,
+              err.message
+            );
+            if (dlq) await this.sendToDlq(topic2, raw);
+            for (const interceptor of interceptors) {
+              await interceptor.onError?.(
+                parsedMessage,
+                topic2,
+                validationError
+              );
+            }
+            return;
+          }
+        }
         await this.processMessage(parsedMessage, raw, topic2, handleMessage, {
           retry,
           dlq,
@@ -208,9 +263,162 @@ var KafkaClient = class {
       }
     });
   }
+  async startBatchConsumer(topics, handleBatch, options = {}) {
+    const {
+      groupId: optGroupId,
+      fromBeginning = false,
+      autoCommit = true,
+      retry,
+      dlq = false,
+      interceptors = [],
+      schemas: optionSchemas
+    } = options;
+    const consumer = this.getOrCreateConsumer(optGroupId);
+    const schemaMap = this.buildSchemaMap(topics, optionSchemas);
+    const topicNames = topics.map(
+      (t) => this.resolveTopicName(t)
+    );
+    await consumer.connect();
+    for (const t of topicNames) {
+      await this.ensureTopic(t);
+    }
+    await consumer.subscribe({ topics: topicNames, fromBeginning });
+    this.logger.log(
+      `Batch consumer subscribed to topics: ${topicNames.join(", ")}`
+    );
+    await consumer.run({
+      autoCommit,
+      eachBatch: async ({
+        batch,
+        heartbeat,
+        resolveOffset,
+        commitOffsetsIfNecessary
+      }) => {
+        const validMessages = [];
+        for (const message of batch.messages) {
+          if (!message.value) {
+            this.logger.warn(
+              `Received empty message from topic ${batch.topic}`
+            );
+            continue;
+          }
+          const raw = message.value.toString();
+          let parsedMessage;
+          try {
+            parsedMessage = JSON.parse(raw);
+          } catch (error) {
+            this.logger.error(
+              `Failed to parse message from topic ${batch.topic}:`,
+              error instanceof Error ? error.stack : String(error)
+            );
+            continue;
+          }
+          const schema = schemaMap.get(batch.topic);
+          if (schema) {
+            try {
+              parsedMessage = schema.parse(parsedMessage);
+            } catch (error) {
+              const err = error instanceof Error ? error : new Error(String(error));
+              const validationError = new KafkaValidationError(
+                batch.topic,
+                parsedMessage,
+                { cause: err }
+              );
+              this.logger.error(
+                `Schema validation failed for topic ${batch.topic}:`,
+                err.message
+              );
+              if (dlq) await this.sendToDlq(batch.topic, raw);
+              for (const interceptor of interceptors) {
+                await interceptor.onError?.(
+                  parsedMessage,
+                  batch.topic,
+                  validationError
+                );
+              }
+              continue;
+            }
+          }
+          validMessages.push(parsedMessage);
+        }
+        if (validMessages.length === 0) return;
+        const meta = {
+          partition: batch.partition,
+          highWatermark: batch.highWatermark,
+          heartbeat,
+          resolveOffset,
+          commitOffsetsIfNecessary
+        };
+        const maxAttempts = retry ? retry.maxRetries + 1 : 1;
+        const backoffMs = retry?.backoffMs ?? 1e3;
+        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+          try {
+            for (const interceptor of interceptors) {
+              for (const msg of validMessages) {
+                await interceptor.before?.(msg, batch.topic);
+              }
+            }
+            await handleBatch(validMessages, batch.topic, meta);
+            for (const interceptor of interceptors) {
+              for (const msg of validMessages) {
+                await interceptor.after?.(msg, batch.topic);
+              }
+            }
+            return;
+          } catch (error) {
+            const err = error instanceof Error ? error : new Error(String(error));
+            const isLastAttempt = attempt === maxAttempts;
+            if (isLastAttempt && maxAttempts > 1) {
+              const exhaustedError = new KafkaRetryExhaustedError(
+                batch.topic,
+                validMessages,
+                maxAttempts,
+                { cause: err }
+              );
+              for (const interceptor of interceptors) {
+                await interceptor.onError?.(
+                  validMessages,
+                  batch.topic,
+                  exhaustedError
+                );
+              }
+            } else {
+              for (const interceptor of interceptors) {
+                await interceptor.onError?.(
+                  validMessages,
+                  batch.topic,
+                  err
+                );
+              }
+            }
+            this.logger.error(
+              `Error processing batch from topic ${batch.topic} (attempt ${attempt}/${maxAttempts}):`,
+              err.stack
+            );
+            if (isLastAttempt) {
+              if (dlq) {
+                for (const msg of batch.messages) {
+                  if (msg.value) {
+                    await this.sendToDlq(batch.topic, msg.value.toString());
+                  }
+                }
+              }
+            } else {
+              await this.sleep(backoffMs * attempt);
+            }
+          }
+        }
+      }
+    });
+  }
   async stopConsumer() {
-    await this.consumer.disconnect();
-    this.logger.log("Consumer disconnected");
+    const tasks = [];
+    for (const consumer of this.consumers.values()) {
+      tasks.push(consumer.disconnect());
+    }
+    await Promise.allSettled(tasks);
+    this.consumers.clear();
+    this.logger.log("All consumers disconnected");
   }
   /** Check broker connectivity and return available topics. */
   async checkStatus() {
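
startBatchConsumer hands the handler only messages that parsed as JSON and passed the topic schema, plus a meta object exposing the kafkajs batch controls (partition, highWatermark, heartbeat, resolveOffset, commitOffsetsIfNecessary). Retries wrap the whole batch: maxRetries + 1 attempts with linear backoff (backoffMs * attempt, default 1000 ms), and on the final failure every raw message of the batch is sent to the DLQ when dlq is enabled. A sketch of the handler contract, with client and ordersTopic as in the first example and bulkInsert as a hypothetical sink:

    import { KafkaClient } from "@drarzter/kafka-client";

    declare const client: KafkaClient; // as in the first sketch
    declare const ordersTopic: any;    // topic("orders").schema(...) descriptor
    declare function bulkInsert(rows: unknown[]): Promise<void>; // hypothetical sink

    await client.startBatchConsumer(
      [ordersTopic],
      async (messages, t, meta) => {
        // messages: only payloads that parsed as JSON and passed the schema.
        await bulkInsert(messages);
        await meta.heartbeat(); // keep the group alive during long batches
      },
      { groupId: "orders-batch", retry: { maxRetries: 3, backoffMs: 500 }, dlq: true }
    );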
@@ -224,17 +432,35 @@ var KafkaClient = class {
   getClientId() {
     return this.clientId;
   }
-  /** Gracefully disconnect producer, consumer, and admin. */
+  /** Gracefully disconnect producer, all consumers, and admin. */
   async disconnect() {
-    const tasks = [this.producer.disconnect(), this.consumer.disconnect()];
+    const tasks = [this.producer.disconnect()];
+    for (const consumer of this.consumers.values()) {
+      tasks.push(consumer.disconnect());
+    }
     if (this.isAdminConnected) {
       tasks.push(this.admin.disconnect());
       this.isAdminConnected = false;
     }
     await Promise.allSettled(tasks);
+    this.consumers.clear();
     this.logger.log("All connections closed");
   }
   // --- Private helpers ---
+  buildSchemaMap(topics, optionSchemas) {
+    const schemaMap = /* @__PURE__ */ new Map();
+    for (const t of topics) {
+      if (t?.__schema) {
+        schemaMap.set(this.resolveTopicName(t), t.__schema);
+      }
+    }
+    if (optionSchemas) {
+      for (const [k, v] of optionSchemas) {
+        schemaMap.set(k, v);
+      }
+    }
+    return schemaMap;
+  }
   async processMessage(parsedMessage, raw, topic2, handleMessage, opts) {
     const { retry, dlq = false, interceptors = [] } = opts;
     const maxAttempts = retry ? retry.maxRetries + 1 : 1;
@@ -314,10 +540,14 @@ var KAFKA_SUBSCRIBER_METADATA = "KAFKA_SUBSCRIBER_METADATA";
 var InjectKafkaClient = (name) => Inject(getKafkaClientToken(name));
 var SubscribeTo = (topics, options) => {
   const arr = Array.isArray(topics) ? topics : [topics];
-  const topicsArray = arr.map(
-    (t) => typeof t === "string" ? t : t.__topic
-  );
-  const { clientName, ...consumerOptions } = options || {};
+  const topicsArray = arr.map((t) => typeof t === "string" ? t : t.__topic);
+  const schemas = /* @__PURE__ */ new Map();
+  for (const t of arr) {
+    if (typeof t !== "string" && t.__schema) {
+      schemas.set(t.__topic, t.__schema);
+    }
+  }
+  const { clientName, batch, ...consumerOptions } = options || {};
   return (target, propertyKey, _descriptor) => {
     const existing = Reflect.getMetadata(KAFKA_SUBSCRIBER_METADATA, target.constructor) || [];
     Reflect.defineMetadata(
@@ -326,8 +556,10 @@ var SubscribeTo = (topics, options) => {
       ...existing,
       {
         topics: topicsArray,
+        schemas: schemas.size > 0 ? schemas : void 0,
         options: Object.keys(consumerOptions).length ? consumerOptions : void 0,
         clientName,
+        batch,
         methodName: propertyKey
       }
     ],
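
On the NestJS side, @SubscribeTo now records per-descriptor schemas and a batch flag in its metadata; the explorer hunk below routes batch: true subscribers to startBatchConsumer. A sketch of decorated handlers; class, method, and orderSchema names are illustrative:

    import { Injectable } from "@nestjs/common";
    import { SubscribeTo, topic } from "@drarzter/kafka-client";

    declare const orderSchema: any; // zod-style schema as in the first sketch
    const ordersTopic = topic("orders").schema(orderSchema);

    @Injectable()
    export class OrdersSubscriber {
      // Per-message: schema comes from the descriptor; dlq/retry pass through.
      @SubscribeTo(ordersTopic, { dlq: true })
      async onOrder(message: unknown, t: string) { /* ... */ }

      // Per-batch: handler receives (messages, topic, meta) via startBatchConsumer.
      @SubscribeTo(ordersTopic, { batch: true, groupId: "orders-batch" })
      async onOrders(messages: unknown[], t: string, meta: unknown) { /* ... */ }
    }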
@@ -365,15 +597,29 @@ var KafkaExplorer = class {
         continue;
       }
       const handler = instance[entry.methodName].bind(instance);
-      await client.startConsumer(
-        entry.topics,
-        async (message, topic2) => {
-          await handler(message, topic2);
-        },
-        entry.options
-      );
+      const consumerOptions = { ...entry.options };
+      if (entry.schemas) {
+        consumerOptions.schemas = entry.schemas;
+      }
+      if (entry.batch) {
+        await client.startBatchConsumer(
+          entry.topics,
+          async (messages, topic2, meta) => {
+            await handler(messages, topic2, meta);
+          },
+          consumerOptions
+        );
+      } else {
+        await client.startConsumer(
+          entry.topics,
+          async (message, topic2) => {
+            await handler(message, topic2);
+          },
+          consumerOptions
+        );
+      }
       this.logger.log(
-        `Registered @SubscribeTo(${entry.topics.join(", ")}) on ${instance.constructor.name}.${String(entry.methodName)}`
+        `Registered @SubscribeTo(${entry.topics.join(", ")})${entry.batch ? " [batch]" : ""} on ${instance.constructor.name}.${String(entry.methodName)}`
       );
     }
   }
@@ -458,10 +704,16 @@ KafkaModule = __decorateClass([
 
 // src/client/topic.ts
 function topic(name) {
-  return () => ({
+  const fn = () => ({
     __topic: name,
     __type: void 0
   });
+  fn.schema = (schema) => ({
+    __topic: name,
+    __type: void 0,
+    __schema: schema
+  });
+  return fn;
 }
 
 // src/health/kafka.health.ts
@@ -497,6 +749,7 @@ export {
   KafkaModule,
   KafkaProcessingError,
   KafkaRetryExhaustedError,
+  KafkaValidationError,
   SubscribeTo,
   getKafkaClientToken,
   topic