@drarzter/kafka-client 0.3.1 → 0.5.0

package/dist/index.js CHANGED
@@ -29,6 +29,11 @@ var __decorateParam = (index, decorator) => (target, key) => decorator(target, k
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ HEADER_CORRELATION_ID: () => HEADER_CORRELATION_ID,
+ HEADER_EVENT_ID: () => HEADER_EVENT_ID,
+ HEADER_SCHEMA_VERSION: () => HEADER_SCHEMA_VERSION,
+ HEADER_TIMESTAMP: () => HEADER_TIMESTAMP,
+ HEADER_TRACEPARENT: () => HEADER_TRACEPARENT,
  InjectKafkaClient: () => InjectKafkaClient,
  KAFKA_CLIENT: () => KAFKA_CLIENT,
  KAFKA_SUBSCRIBER_METADATA: () => KAFKA_SUBSCRIBER_METADATA,
@@ -40,13 +45,78 @@ __export(index_exports, {
  KafkaRetryExhaustedError: () => KafkaRetryExhaustedError,
  KafkaValidationError: () => KafkaValidationError,
  SubscribeTo: () => SubscribeTo,
+ buildEnvelopeHeaders: () => buildEnvelopeHeaders,
+ decodeHeaders: () => decodeHeaders,
+ extractEnvelope: () => extractEnvelope,
+ getEnvelopeContext: () => getEnvelopeContext,
  getKafkaClientToken: () => getKafkaClientToken,
+ runWithEnvelopeContext: () => runWithEnvelopeContext,
  topic: () => topic
  });
  module.exports = __toCommonJS(index_exports);

  // src/client/kafka.client.ts
- var import_kafkajs = require("kafkajs");
+ var import_kafka_javascript = require("@confluentinc/kafka-javascript");
+
+ // src/client/envelope.ts
+ var import_node_async_hooks = require("async_hooks");
+ var import_node_crypto = require("crypto");
+ var HEADER_EVENT_ID = "x-event-id";
+ var HEADER_CORRELATION_ID = "x-correlation-id";
+ var HEADER_TIMESTAMP = "x-timestamp";
+ var HEADER_SCHEMA_VERSION = "x-schema-version";
+ var HEADER_TRACEPARENT = "traceparent";
+ var envelopeStorage = new import_node_async_hooks.AsyncLocalStorage();
+ function getEnvelopeContext() {
+ return envelopeStorage.getStore();
+ }
+ function runWithEnvelopeContext(ctx, fn) {
+ return envelopeStorage.run(ctx, fn);
+ }
+ function buildEnvelopeHeaders(options = {}) {
+ const ctx = getEnvelopeContext();
+ const correlationId = options.correlationId ?? ctx?.correlationId ?? (0, import_node_crypto.randomUUID)();
+ const eventId = options.eventId ?? (0, import_node_crypto.randomUUID)();
+ const timestamp = (/* @__PURE__ */ new Date()).toISOString();
+ const schemaVersion = String(options.schemaVersion ?? 1);
+ const envelope = {
+ [HEADER_EVENT_ID]: eventId,
+ [HEADER_CORRELATION_ID]: correlationId,
+ [HEADER_TIMESTAMP]: timestamp,
+ [HEADER_SCHEMA_VERSION]: schemaVersion
+ };
+ if (ctx?.traceparent) {
+ envelope[HEADER_TRACEPARENT] = ctx.traceparent;
+ }
+ return { ...envelope, ...options.headers };
+ }
+ function decodeHeaders(raw) {
+ if (!raw) return {};
+ const result = {};
+ for (const [key, value] of Object.entries(raw)) {
+ if (value === void 0) continue;
+ if (Array.isArray(value)) {
+ result[key] = value.map((v) => Buffer.isBuffer(v) ? v.toString() : v).join(",");
+ } else {
+ result[key] = Buffer.isBuffer(value) ? value.toString() : value;
+ }
+ }
+ return result;
+ }
+ function extractEnvelope(payload, headers, topic2, partition, offset) {
+ return {
+ payload,
+ topic: topic2,
+ partition,
+ offset,
+ eventId: headers[HEADER_EVENT_ID] ?? (0, import_node_crypto.randomUUID)(),
+ correlationId: headers[HEADER_CORRELATION_ID] ?? (0, import_node_crypto.randomUUID)(),
+ timestamp: headers[HEADER_TIMESTAMP] ?? (/* @__PURE__ */ new Date()).toISOString(),
+ schemaVersion: Number(headers[HEADER_SCHEMA_VERSION] ?? 1),
+ traceparent: headers[HEADER_TRACEPARENT],
+ headers
+ };
+ }

  // src/client/errors.ts
  var KafkaProcessingError = class extends Error {
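The new `src/client/envelope.ts` module above is the core of this release: every produced message gets `x-event-id`, `x-correlation-id`, `x-timestamp`, and `x-schema-version` headers, and an AsyncLocalStorage context lets a consumer's correlation id flow into any message produced from inside its handler. A minimal usage sketch against the new exports (the exact TypeScript types are assumptions inferred from this compiled output):

```ts
import {
  buildEnvelopeHeaders,
  decodeHeaders,
  extractEnvelope,
  runWithEnvelopeContext,
  HEADER_CORRELATION_ID,
} from "@drarzter/kafka-client";

// Producer side: build the envelope headers. Outside any context this
// generates fresh UUIDs for the event and correlation ids.
const headers = buildEnvelopeHeaders({ schemaVersion: 2 });
console.log(headers[HEADER_CORRELATION_ID]);

// Consumer side: decode raw (possibly Buffer-valued) Kafka headers, then
// wrap the parsed payload with topic/partition/offset metadata.
const decoded = decodeHeaders({ "x-event-id": Buffer.from("evt-1") });
const envelope = extractEnvelope({ userId: 1 }, decoded, "user.created", 0, "42");

// Run inside the envelope context: buildEnvelopeHeaders now reuses the
// consumer's correlation id instead of minting a new one.
runWithEnvelopeContext(
  { correlationId: envelope.correlationId, traceparent: envelope.traceparent },
  () => buildEnvelopeHeaders(),
);
```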
@@ -80,14 +150,159 @@ var KafkaRetryExhaustedError = class extends KafkaProcessingError {
  }
  };

- // src/client/kafka.client.ts
- var ACKS_ALL = -1;
+ // src/client/consumer-pipeline.ts
  function toError(error) {
  return error instanceof Error ? error : new Error(String(error));
  }
+ function sleep(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ function parseJsonMessage(raw, topic2, logger) {
+ try {
+ return JSON.parse(raw);
+ } catch (error) {
+ logger.error(
+ `Failed to parse message from topic ${topic2}:`,
+ toError(error).stack
+ );
+ return null;
+ }
+ }
+ async function validateWithSchema(message, raw, topic2, schemaMap, interceptors, dlq, deps) {
+ const schema = schemaMap.get(topic2);
+ if (!schema) return message;
+ try {
+ return schema.parse(message);
+ } catch (error) {
+ const err = toError(error);
+ const validationError = new KafkaValidationError(topic2, message, {
+ cause: err
+ });
+ deps.logger.error(
+ `Schema validation failed for topic ${topic2}:`,
+ err.message
+ );
+ if (dlq) await sendToDlq(topic2, raw, deps);
+ const errorEnvelope = extractEnvelope(message, {}, topic2, -1, "");
+ for (const interceptor of interceptors) {
+ await interceptor.onError?.(errorEnvelope, validationError);
+ }
+ return null;
+ }
+ }
+ async function sendToDlq(topic2, rawMessage, deps) {
+ const dlqTopic = `${topic2}.dlq`;
+ try {
+ await deps.producer.send({
+ topic: dlqTopic,
+ messages: [{ value: rawMessage }]
+ });
+ deps.logger.warn(`Message sent to DLQ: ${dlqTopic}`);
+ } catch (error) {
+ deps.logger.error(
+ `Failed to send message to DLQ ${dlqTopic}:`,
+ toError(error).stack
+ );
+ }
+ }
+ async function executeWithRetry(fn, ctx, deps) {
+ const { envelope, rawMessages, interceptors, dlq, retry, isBatch } = ctx;
+ const maxAttempts = retry ? retry.maxRetries + 1 : 1;
+ const backoffMs = retry?.backoffMs ?? 1e3;
+ const envelopes = Array.isArray(envelope) ? envelope : [envelope];
+ const topic2 = envelopes[0]?.topic ?? "unknown";
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+ const cleanups = [];
+ try {
+ for (const env of envelopes) {
+ for (const inst of deps.instrumentation) {
+ const cleanup = inst.beforeConsume?.(env);
+ if (typeof cleanup === "function") cleanups.push(cleanup);
+ }
+ }
+ for (const env of envelopes) {
+ for (const interceptor of interceptors) {
+ await interceptor.before?.(env);
+ }
+ }
+ await fn();
+ for (const env of envelopes) {
+ for (const interceptor of interceptors) {
+ await interceptor.after?.(env);
+ }
+ }
+ for (const cleanup of cleanups) cleanup();
+ return;
+ } catch (error) {
+ const err = toError(error);
+ const isLastAttempt = attempt === maxAttempts;
+ for (const env of envelopes) {
+ for (const inst of deps.instrumentation) {
+ inst.onConsumeError?.(env, err);
+ }
+ }
+ for (const cleanup of cleanups) cleanup();
+ if (isLastAttempt && maxAttempts > 1) {
+ const exhaustedError = new KafkaRetryExhaustedError(
+ topic2,
+ envelopes.map((e) => e.payload),
+ maxAttempts,
+ { cause: err }
+ );
+ for (const env of envelopes) {
+ for (const interceptor of interceptors) {
+ await interceptor.onError?.(env, exhaustedError);
+ }
+ }
+ } else {
+ for (const env of envelopes) {
+ for (const interceptor of interceptors) {
+ await interceptor.onError?.(env, err);
+ }
+ }
+ }
+ deps.logger.error(
+ `Error processing ${isBatch ? "batch" : "message"} from topic ${topic2} (attempt ${attempt}/${maxAttempts}):`,
+ err.stack
+ );
+ if (isLastAttempt) {
+ if (dlq) {
+ for (const raw of rawMessages) {
+ await sendToDlq(topic2, raw, deps);
+ }
+ }
+ } else {
+ await sleep(backoffMs * attempt);
+ }
+ }
+ }
+ }
+
+ // src/client/subscribe-retry.ts
+ async function subscribeWithRetry(consumer, topics, logger, retryOpts) {
+ const maxAttempts = retryOpts?.retries ?? 5;
+ const backoffMs = retryOpts?.backoffMs ?? 5e3;
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+ try {
+ await consumer.subscribe({ topics });
+ return;
+ } catch (error) {
+ if (attempt === maxAttempts) throw error;
+ const msg = toError(error).message;
+ logger.warn(
+ `Failed to subscribe to [${topics.join(", ")}] (attempt ${attempt}/${maxAttempts}): ${msg}. Retrying in ${backoffMs}ms...`
+ );
+ await sleep(backoffMs);
+ }
+ }
+ }
+
+ // src/client/kafka.client.ts
+ var { Kafka: KafkaClass, logLevel: KafkaLogLevel } = import_kafka_javascript.KafkaJS;
  var KafkaClient = class {
  kafka;
  producer;
+ txProducer;
  consumers = /* @__PURE__ */ new Map();
  admin;
  logger;
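Note the retry arithmetic in `executeWithRetry`: `maxAttempts` is `maxRetries + 1`, backoff grows linearly (`backoffMs * attempt`), no sleep follows the final attempt, and only after exhaustion do the raw messages go to `${topic}.dlq`. A quick sketch of the resulting worst-case delay, assuming the default `backoffMs` of 1000:

```ts
// Worst-case time spent sleeping before a message is declared exhausted.
// With retry = { maxRetries: 3, backoffMs: 1000 } there are 4 attempts and
// sleeps of 1000, 2000, and 3000 ms between them.
function totalBackoffMs(maxRetries: number, backoffMs = 1_000): number {
  const maxAttempts = maxRetries + 1;
  let total = 0;
  for (let attempt = 1; attempt < maxAttempts; attempt++) {
    total += backoffMs * attempt; // mirrors `await sleep(backoffMs * attempt)`
  }
  return total;
}

console.log(totalBackoffMs(3)); // 6000 — after that, DLQ if still failing
```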
@@ -98,6 +313,7 @@ var KafkaClient = class {
  defaultGroupId;
  schemaRegistry = /* @__PURE__ */ new Map();
  runningConsumers = /* @__PURE__ */ new Map();
+ instrumentation;
  isAdminConnected = false;
  clientId;
  constructor(clientId, groupId, brokers, options) {
@@ -111,52 +327,72 @@ var KafkaClient = class {
  this.autoCreateTopicsEnabled = options?.autoCreateTopics ?? false;
  this.strictSchemasEnabled = options?.strictSchemas ?? true;
  this.numPartitions = options?.numPartitions ?? 1;
- this.kafka = new import_kafkajs.Kafka({
- clientId: this.clientId,
- brokers,
- logLevel: import_kafkajs.logLevel.WARN,
- logCreator: () => ({ level, log }) => {
- const msg = `[kafkajs] ${log.message}`;
- if (level === import_kafkajs.logLevel.ERROR) {
- const text = log.message ?? "";
- const isRetriable = text.includes("TOPIC_ALREADY_EXISTS") || text.includes("GROUP_COORDINATOR_NOT_AVAILABLE") || text.includes("NOT_COORDINATOR") || text.includes("Response GroupCoordinator") || text.includes("Response CreateTopics");
- if (isRetriable) this.logger.warn(msg);
- else this.logger.error(msg);
- } else if (level === import_kafkajs.logLevel.WARN) {
- this.logger.warn(msg);
- } else {
- this.logger.log(msg);
- }
+ this.instrumentation = options?.instrumentation ?? [];
+ this.kafka = new KafkaClass({
+ kafkaJS: {
+ clientId: this.clientId,
+ brokers,
+ logLevel: KafkaLogLevel.ERROR
  }
  });
  this.producer = this.kafka.producer({
- createPartitioner: import_kafkajs.Partitioners.DefaultPartitioner,
- idempotent: true,
- transactionalId: `${clientId}-tx`,
- maxInFlightRequests: 1
+ kafkaJS: {
+ acks: -1
+ }
  });
  this.admin = this.kafka.admin();
  }
  async sendMessage(topicOrDesc, message, options = {}) {
  const payload = this.buildSendPayload(topicOrDesc, [
- { value: message, key: options.key, headers: options.headers }
+ {
+ value: message,
+ key: options.key,
+ headers: options.headers,
+ correlationId: options.correlationId,
+ schemaVersion: options.schemaVersion,
+ eventId: options.eventId
+ }
  ]);
  await this.ensureTopic(payload.topic);
  await this.producer.send(payload);
+ for (const inst of this.instrumentation) {
+ inst.afterSend?.(payload.topic);
+ }
  }
  async sendBatch(topicOrDesc, messages) {
  const payload = this.buildSendPayload(topicOrDesc, messages);
  await this.ensureTopic(payload.topic);
  await this.producer.send(payload);
+ for (const inst of this.instrumentation) {
+ inst.afterSend?.(payload.topic);
+ }
  }
  /** Execute multiple sends atomically. Commits on success, aborts on error. */
  async transaction(fn) {
- const tx = await this.producer.transaction();
+ if (!this.txProducer) {
+ this.txProducer = this.kafka.producer({
+ kafkaJS: {
+ acks: -1,
+ idempotent: true,
+ transactionalId: `${this.clientId}-tx`,
+ maxInFlightRequests: 1
+ }
+ });
+ await this.txProducer.connect();
+ }
+ const tx = await this.txProducer.transaction();
  try {
  const ctx = {
  send: async (topicOrDesc, message, options = {}) => {
  const payload = this.buildSendPayload(topicOrDesc, [
- { value: message, key: options.key, headers: options.headers }
+ {
+ value: message,
+ key: options.key,
+ headers: options.headers,
+ correlationId: options.correlationId,
+ schemaVersion: options.schemaVersion,
+ eventId: options.eventId
+ }
  ]);
  await this.ensureTopic(payload.topic);
  await tx.send(payload);
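Transactions no longer reuse the main producer: the first `transaction()` call lazily creates and connects a dedicated idempotent producer with `transactionalId` `${clientId}-tx`, so plain sends stay cheap. A usage sketch (the `client` declaration here is a hand-written stand-in for `KafkaClient`, typed only as far as this diff shows):

```ts
// Stand-in for the relevant slice of KafkaClient's transaction API.
interface TxContext {
  send(
    topic: string,
    message: unknown,
    options?: { key?: string; correlationId?: string; schemaVersion?: number; eventId?: string },
  ): Promise<void>;
}
declare const client: { transaction(fn: (tx: TxContext) => Promise<void>): Promise<void> };

// Both sends commit together on success; any thrown error aborts both.
await client.transaction(async (tx) => {
  await tx.send("order.created", { orderId: "o-1" }, { key: "o-1" });
  await tx.send("inventory.reserved", { orderId: "o-1" }, { correlationId: "req-7" });
});
```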
@@ -193,28 +429,41 @@ var KafkaClient = class {
  }
  async startConsumer(topics, handleMessage, options = {}) {
  const { consumer, schemaMap, gid, dlq, interceptors, retry } = await this.setupConsumer(topics, "eachMessage", options);
+ const deps = { logger: this.logger, producer: this.producer, instrumentation: this.instrumentation };
  await consumer.run({
- autoCommit: options.autoCommit ?? true,
- eachMessage: async ({ topic: topic2, message }) => {
+ eachMessage: async ({ topic: topic2, partition, message }) => {
  if (!message.value) {
  this.logger.warn(`Received empty message from topic ${topic2}`);
  return;
  }
  const raw = message.value.toString();
- const parsed = this.parseJsonMessage(raw, topic2);
+ const parsed = parseJsonMessage(raw, topic2, this.logger);
  if (parsed === null) return;
- const validated = await this.validateWithSchema(
+ const validated = await validateWithSchema(
  parsed,
  raw,
  topic2,
  schemaMap,
  interceptors,
- dlq
+ dlq,
+ deps
  );
  if (validated === null) return;
- await this.executeWithRetry(
- () => handleMessage(validated, topic2),
- { topic: topic2, messages: validated, rawMessages: [raw], interceptors, dlq, retry }
+ const headers = decodeHeaders(message.headers);
+ const envelope = extractEnvelope(
+ validated,
+ headers,
+ topic2,
+ partition,
+ message.offset
+ );
+ await executeWithRetry(
+ () => runWithEnvelopeContext(
+ { correlationId: envelope.correlationId, traceparent: envelope.traceparent },
+ () => handleMessage(envelope)
+ ),
+ { envelope, rawMessages: [raw], interceptors, dlq, retry },
+ deps
  );
  }
  });
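This is the breaking change for single-message consumers: handlers now receive one envelope argument instead of `(message, topic)`, and they run inside `runWithEnvelopeContext`, so any message produced from the handler inherits the incoming correlation id. A sketch of the new handler shape, with an `Envelope` type reconstructed (as an assumption) from `extractEnvelope`'s return value:

```ts
// Assumed shape, field-for-field from extractEnvelope above.
interface Envelope<T = unknown> {
  payload: T;
  topic: string;
  partition: number;
  offset: string;
  eventId: string;
  correlationId: string;
  timestamp: string;
  schemaVersion: number;
  traceparent?: string;
  headers: Record<string, string>;
}

// Old: (message, topic) => ...   New: one envelope carrying payload + metadata.
async function handleUserCreated(envelope: Envelope<{ userId: number }>): Promise<void> {
  console.log(
    `user ${envelope.payload.userId} from ${envelope.topic}[${envelope.partition}]@${envelope.offset}`,
    `correlation=${envelope.correlationId}`,
  );
}
```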
@@ -222,15 +471,15 @@ var KafkaClient = class {
  }
  async startBatchConsumer(topics, handleBatch, options = {}) {
  const { consumer, schemaMap, gid, dlq, interceptors, retry } = await this.setupConsumer(topics, "eachBatch", options);
+ const deps = { logger: this.logger, producer: this.producer, instrumentation: this.instrumentation };
  await consumer.run({
- autoCommit: options.autoCommit ?? true,
  eachBatch: async ({
  batch,
  heartbeat,
  resolveOffset,
  commitOffsetsIfNecessary
  }) => {
- const validMessages = [];
+ const envelopes = [];
  const rawMessages = [];
  for (const message of batch.messages) {
  if (!message.value) {
@@ -240,21 +489,25 @@ var KafkaClient = class {
  continue;
  }
  const raw = message.value.toString();
- const parsed = this.parseJsonMessage(raw, batch.topic);
+ const parsed = parseJsonMessage(raw, batch.topic, this.logger);
  if (parsed === null) continue;
- const validated = await this.validateWithSchema(
+ const validated = await validateWithSchema(
  parsed,
  raw,
  batch.topic,
  schemaMap,
  interceptors,
- dlq
+ dlq,
+ deps
  );
  if (validated === null) continue;
- validMessages.push(validated);
+ const headers = decodeHeaders(message.headers);
+ envelopes.push(
+ extractEnvelope(validated, headers, batch.topic, batch.partition, message.offset)
+ );
  rawMessages.push(raw);
  }
- if (validMessages.length === 0) return;
+ if (envelopes.length === 0) return;
  const meta = {
  partition: batch.partition,
  highWatermark: batch.highWatermark,
@@ -262,17 +515,17 @@ var KafkaClient = class {
  resolveOffset,
  commitOffsetsIfNecessary
  };
- await this.executeWithRetry(
- () => handleBatch(validMessages, batch.topic, meta),
+ await executeWithRetry(
+ () => handleBatch(envelopes, meta),
  {
- topic: batch.topic,
- messages: validMessages,
+ envelope: envelopes,
  rawMessages: batch.messages.filter((m) => m.value).map((m) => m.value.toString()),
  interceptors,
  dlq,
  retry,
  isBatch: true
- }
+ },
+ deps
  );
  }
  });
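Batch handlers change shape the same way: `(validMessages, topic, meta)` becomes `(envelopes, meta)`, with the topic and offset now carried on each envelope. A sketch, assuming `meta` also exposes the `heartbeat` destructured above (that field sits in the unchanged lines between these two hunks, so it is not directly confirmed here):

```ts
// Minimal structural types for the sketch; partition/highWatermark and the
// two offset callbacks are visible in this diff, heartbeat is an assumption.
interface BatchEnvelope { payload: unknown; topic: string; offset: string }
interface BatchMeta {
  partition: number;
  highWatermark: string;
  heartbeat?: () => Promise<void>;
  resolveOffset: (offset: string) => void;
  commitOffsetsIfNecessary: () => Promise<void>;
}

async function handleBatch(envelopes: BatchEnvelope[], meta: BatchMeta): Promise<void> {
  for (const env of envelopes) {
    console.log(`processing ${env.topic}@${env.offset}`);
    meta.resolveOffset(env.offset); // offsets now live on the envelopes
  }
  await meta.heartbeat?.();
  await meta.commitOffsetsIfNecessary();
}
```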
@@ -304,6 +557,10 @@ var KafkaClient = class {
  /** Gracefully disconnect producer, all consumers, and admin. */
  async disconnect() {
  const tasks = [this.producer.disconnect()];
+ if (this.txProducer) {
+ tasks.push(this.txProducer.disconnect());
+ this.txProducer = void 0;
+ }
  for (const consumer of this.consumers.values()) {
  tasks.push(consumer.disconnect());
  }
@@ -317,12 +574,16 @@ var KafkaClient = class {
  this.logger.log("All connections closed");
  }
  // ── Private helpers ──────────────────────────────────────────────
- getOrCreateConsumer(groupId) {
- const gid = groupId || this.defaultGroupId;
- if (!this.consumers.has(gid)) {
- this.consumers.set(gid, this.kafka.consumer({ groupId: gid }));
+ getOrCreateConsumer(groupId, fromBeginning, autoCommit) {
+ if (!this.consumers.has(groupId)) {
+ this.consumers.set(
+ groupId,
+ this.kafka.consumer({
+ kafkaJS: { groupId, fromBeginning, autoCommit }
+ })
+ );
  }
- return this.consumers.get(gid);
+ return this.consumers.get(groupId);
  }
  resolveTopicName(topicOrDescriptor) {
  if (typeof topicOrDescriptor === "string") return topicOrDescriptor;
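The `getOrCreateConsumer` change follows from the library swap: in `@confluentinc/kafka-javascript`'s KafkaJS-compatibility layer, `fromBeginning` and `autoCommit` belong to the consumer's construction-time config rather than to `subscribe()` and `run()` as in kafkajs, which also means they are fixed for the lifetime of a cached consumer group. A sketch of the two styles side by side:

```ts
import { KafkaJS } from "@confluentinc/kafka-javascript";

const kafka = new KafkaJS.Kafka({
  kafkaJS: { clientId: "svc", brokers: ["localhost:9092"] },
});

// kafkajs style (old, no longer applies):
//   consumer.subscribe({ topics, fromBeginning: true });
//   consumer.run({ autoCommit: false, eachMessage });
// Confluent's KafkaJS layer (new): both flags move into the consumer config.
const consumer = kafka.consumer({
  kafkaJS: { groupId: "svc-group", fromBeginning: true, autoCommit: false },
});
```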
@@ -362,19 +623,30 @@ var KafkaClient = class {
  }
  /**
  * Build a kafkajs-ready send payload.
- * Handles: topic resolution, schema registration, validation, JSON serialization.
+ * Handles: topic resolution, schema registration, validation, JSON serialization,
+ * envelope header generation, and instrumentation hooks.
  */
  buildSendPayload(topicOrDesc, messages) {
  this.registerSchema(topicOrDesc);
  const topic2 = this.resolveTopicName(topicOrDesc);
  return {
  topic: topic2,
- messages: messages.map((m) => ({
- value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
- key: m.key ?? null,
- headers: m.headers
- })),
- acks: ACKS_ALL
+ messages: messages.map((m) => {
+ const envelopeHeaders = buildEnvelopeHeaders({
+ correlationId: m.correlationId,
+ schemaVersion: m.schemaVersion,
+ eventId: m.eventId,
+ headers: m.headers
+ });
+ for (const inst of this.instrumentation) {
+ inst.beforeSend?.(topic2, envelopeHeaders);
+ }
+ return {
+ value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
+ key: m.key ?? null,
+ headers: envelopeHeaders
+ };
+ })
  };
  }
  /** Shared consumer setup: groupId check, schema map, connect, subscribe. */
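`buildSendPayload` is also where the instrumentation hooks fire on the produce path. Across this diff the hooks are `beforeSend(topic, headers)`, `afterSend(topic)`, `beforeConsume(envelope)` (optionally returning a cleanup that runs after a successful handle), and `onConsumeError(envelope, err)`. The interface below is an assumed reconstruction of that contract, shown as a timing instrumentation:

```ts
// Hook names are taken from the call sites in this diff; the interface itself
// is an assumption, not a published type.
interface KafkaInstrumentation {
  beforeSend?(topic: string, headers: Record<string, string>): void;
  afterSend?(topic: string): void;
  beforeConsume?(envelope: { topic: string }): void | (() => void);
  onConsumeError?(envelope: { topic: string }, err: Error): void;
}

const timing: KafkaInstrumentation = {
  beforeConsume(envelope) {
    const start = Date.now();
    // The returned cleanup runs once the handler and interceptors succeed.
    return () => console.log(`${envelope.topic} handled in ${Date.now() - start}ms`);
  },
  onConsumeError(envelope, err) {
    console.error(`consume failed on ${envelope.topic}: ${err.message}`);
  },
  afterSend(topic) {
    console.log(`produced to ${topic}`);
  },
};
```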
@@ -395,13 +667,21 @@ var KafkaClient = class {
  `Cannot use ${mode} on consumer group "${gid}" \u2014 it is already running with ${oppositeMode}. Use a different groupId for this consumer.`
  );
  }
- const consumer = this.getOrCreateConsumer(optGroupId);
+ const consumer = this.getOrCreateConsumer(gid, fromBeginning, options.autoCommit ?? true);
  const schemaMap = this.buildSchemaMap(topics, optionSchemas);
  const topicNames = topics.map(
  (t) => this.resolveTopicName(t)
  );
+ for (const t of topicNames) {
+ await this.ensureTopic(t);
+ }
+ if (dlq) {
+ for (const t of topicNames) {
+ await this.ensureTopic(`${t}.dlq`);
+ }
+ }
  await consumer.connect();
- await this.subscribeWithRetry(consumer, topicNames, fromBeginning, options.subscribeRetry);
+ await subscribeWithRetry(consumer, topicNames, this.logger, options.subscribeRetry);
  this.logger.log(
  `${mode === "eachBatch" ? "Batch consumer" : "Consumer"} subscribed to topics: ${topicNames.join(", ")}`
  );
@@ -424,148 +704,6 @@ var KafkaClient = class {
  }
  return schemaMap;
  }
- /** Parse raw message as JSON. Returns null on failure (logs error). */
- parseJsonMessage(raw, topic2) {
- try {
- return JSON.parse(raw);
- } catch (error) {
- this.logger.error(
- `Failed to parse message from topic ${topic2}:`,
- toError(error).stack
- );
- return null;
- }
- }
- /**
- * Validate a parsed message against the schema map.
- * On failure: logs error, sends to DLQ if enabled, calls interceptor.onError.
- * Returns validated message or null.
- */
- async validateWithSchema(message, raw, topic2, schemaMap, interceptors, dlq) {
- const schema = schemaMap.get(topic2);
- if (!schema) return message;
- try {
- return schema.parse(message);
- } catch (error) {
- const err = toError(error);
- const validationError = new KafkaValidationError(topic2, message, {
- cause: err
- });
- this.logger.error(
- `Schema validation failed for topic ${topic2}:`,
- err.message
- );
- if (dlq) await this.sendToDlq(topic2, raw);
- for (const interceptor of interceptors) {
- await interceptor.onError?.(message, topic2, validationError);
- }
- return null;
- }
- }
- /**
- * Execute a handler with retry, interceptors, and DLQ support.
- * Used by both single-message and batch consumers.
- */
- async executeWithRetry(fn, ctx) {
- const { topic: topic2, messages, rawMessages, interceptors, dlq, retry, isBatch } = ctx;
- const maxAttempts = retry ? retry.maxRetries + 1 : 1;
- const backoffMs = retry?.backoffMs ?? 1e3;
- for (let attempt = 1; attempt <= maxAttempts; attempt++) {
- try {
- if (isBatch) {
- for (const interceptor of interceptors) {
- for (const msg of messages) {
- await interceptor.before?.(msg, topic2);
- }
- }
- } else {
- for (const interceptor of interceptors) {
- await interceptor.before?.(messages, topic2);
- }
- }
- await fn();
- if (isBatch) {
- for (const interceptor of interceptors) {
- for (const msg of messages) {
- await interceptor.after?.(msg, topic2);
- }
- }
- } else {
- for (const interceptor of interceptors) {
- await interceptor.after?.(messages, topic2);
- }
- }
- return;
- } catch (error) {
- const err = toError(error);
- const isLastAttempt = attempt === maxAttempts;
- if (isLastAttempt && maxAttempts > 1) {
- const exhaustedError = new KafkaRetryExhaustedError(
- topic2,
- messages,
- maxAttempts,
- { cause: err }
- );
- for (const interceptor of interceptors) {
- await interceptor.onError?.(messages, topic2, exhaustedError);
- }
- } else {
- for (const interceptor of interceptors) {
- await interceptor.onError?.(messages, topic2, err);
- }
- }
- this.logger.error(
- `Error processing ${isBatch ? "batch" : "message"} from topic ${topic2} (attempt ${attempt}/${maxAttempts}):`,
- err.stack
- );
- if (isLastAttempt) {
- if (dlq) {
- for (const raw of rawMessages) {
- await this.sendToDlq(topic2, raw);
- }
- }
- } else {
- await this.sleep(backoffMs * attempt);
- }
- }
- }
- }
- async sendToDlq(topic2, rawMessage) {
- const dlqTopic = `${topic2}.dlq`;
- try {
- await this.producer.send({
- topic: dlqTopic,
- messages: [{ value: rawMessage }],
- acks: ACKS_ALL
- });
- this.logger.warn(`Message sent to DLQ: ${dlqTopic}`);
- } catch (error) {
- this.logger.error(
- `Failed to send message to DLQ ${dlqTopic}:`,
- toError(error).stack
- );
- }
- }
- async subscribeWithRetry(consumer, topics, fromBeginning, retryOpts) {
- const maxAttempts = retryOpts?.retries ?? 5;
- const backoffMs = retryOpts?.backoffMs ?? 5e3;
- for (let attempt = 1; attempt <= maxAttempts; attempt++) {
- try {
- await consumer.subscribe({ topics, fromBeginning });
- return;
- } catch (error) {
- if (attempt === maxAttempts) throw error;
- const msg = toError(error).message;
- this.logger.warn(
- `Failed to subscribe to [${topics.join(", ")}] (attempt ${attempt}/${maxAttempts}): ${msg}. Retrying in ${backoffMs}ms...`
- );
- await this.sleep(backoffMs);
- }
- }
- }
- sleep(ms) {
- return new Promise((resolve) => setTimeout(resolve, ms));
- }
  };

  // src/client/topic.ts
@@ -664,16 +802,16 @@ var KafkaExplorer = class {
  if (entry.batch) {
  await client.startBatchConsumer(
  entry.topics,
- async (messages, topic2, meta) => {
- await handler(messages, topic2, meta);
+ async (envelopes, meta) => {
+ await handler(envelopes, meta);
  },
  consumerOptions
  );
  } else {
  await client.startConsumer(
  entry.topics,
- async (message, topic2) => {
- await handler(message, topic2);
+ async (envelope) => {
+ await handler(envelope);
  },
  consumerOptions
  );
@@ -705,6 +843,9 @@ var KafkaModule = class {
  options.brokers,
  {
  autoCreateTopics: options.autoCreateTopics,
+ strictSchemas: options.strictSchemas,
+ numPartitions: options.numPartitions,
+ instrumentation: options.instrumentation,
  logger: new import_common3.Logger(`KafkaClient:${options.clientId}`)
  }
  );
@@ -740,6 +881,9 @@ var KafkaModule = class {
  options.brokers,
  {
  autoCreateTopics: options.autoCreateTopics,
+ strictSchemas: options.strictSchemas,
+ numPartitions: options.numPartitions,
+ instrumentation: options.instrumentation,
  logger: new import_common3.Logger(`KafkaClient:${options.clientId}`)
  }
  );
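Both module factories now forward `strictSchemas`, `numPartitions`, and `instrumentation` to the client, where previously only `autoCreateTopics` made it through. A registration sketch (the `forRoot` name and the `groupId` option are assumptions; only the forwarded fields are confirmed by this diff):

```ts
import { Module } from "@nestjs/common";
import { KafkaModule } from "@drarzter/kafka-client";

@Module({
  imports: [
    KafkaModule.forRoot({
      clientId: "orders-service",
      groupId: "orders",
      brokers: ["localhost:9092"],
      autoCreateTopics: true,
      strictSchemas: true, // now forwarded to KafkaClient
      numPartitions: 3,    // now forwarded to KafkaClient
      instrumentation: [{ afterSend: (topic: string) => console.log("produced:", topic) }],
    }),
  ],
})
export class AppModule {}
```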
@@ -793,6 +937,11 @@ KafkaHealthIndicator = __decorateClass([
  ], KafkaHealthIndicator);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ HEADER_CORRELATION_ID,
+ HEADER_EVENT_ID,
+ HEADER_SCHEMA_VERSION,
+ HEADER_TIMESTAMP,
+ HEADER_TRACEPARENT,
  InjectKafkaClient,
  KAFKA_CLIENT,
  KAFKA_SUBSCRIBER_METADATA,
@@ -804,7 +953,12 @@ KafkaHealthIndicator = __decorateClass([
  KafkaRetryExhaustedError,
  KafkaValidationError,
  SubscribeTo,
+ buildEnvelopeHeaders,
+ decodeHeaders,
+ extractEnvelope,
+ getEnvelopeContext,
  getKafkaClientToken,
+ runWithEnvelopeContext,
  topic
  });
  //# sourceMappingURL=index.js.map