@drarzter/kafka-client 0.3.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,65 @@
  // src/client/kafka.client.ts
- import { Kafka, Partitioners, logLevel as KafkaLogLevel } from "kafkajs";
+ import { KafkaJS } from "@confluentinc/kafka-javascript";
+
+ // src/client/envelope.ts
+ import { AsyncLocalStorage } from "async_hooks";
+ import { randomUUID } from "crypto";
+ var HEADER_EVENT_ID = "x-event-id";
+ var HEADER_CORRELATION_ID = "x-correlation-id";
+ var HEADER_TIMESTAMP = "x-timestamp";
+ var HEADER_SCHEMA_VERSION = "x-schema-version";
+ var HEADER_TRACEPARENT = "traceparent";
+ var envelopeStorage = new AsyncLocalStorage();
+ function getEnvelopeContext() {
+   return envelopeStorage.getStore();
+ }
+ function runWithEnvelopeContext(ctx, fn) {
+   return envelopeStorage.run(ctx, fn);
+ }
+ function buildEnvelopeHeaders(options = {}) {
+   const ctx = getEnvelopeContext();
+   const correlationId = options.correlationId ?? ctx?.correlationId ?? randomUUID();
+   const eventId = options.eventId ?? randomUUID();
+   const timestamp = (/* @__PURE__ */ new Date()).toISOString();
+   const schemaVersion = String(options.schemaVersion ?? 1);
+   const envelope = {
+     [HEADER_EVENT_ID]: eventId,
+     [HEADER_CORRELATION_ID]: correlationId,
+     [HEADER_TIMESTAMP]: timestamp,
+     [HEADER_SCHEMA_VERSION]: schemaVersion
+   };
+   if (ctx?.traceparent) {
+     envelope[HEADER_TRACEPARENT] = ctx.traceparent;
+   }
+   return { ...envelope, ...options.headers };
+ }
+ function decodeHeaders(raw) {
+   if (!raw) return {};
+   const result = {};
+   for (const [key, value] of Object.entries(raw)) {
+     if (value === void 0) continue;
+     if (Array.isArray(value)) {
+       result[key] = value.map((v) => Buffer.isBuffer(v) ? v.toString() : v).join(",");
+     } else {
+       result[key] = Buffer.isBuffer(value) ? value.toString() : value;
+     }
+   }
+   return result;
+ }
+ function extractEnvelope(payload, headers, topic2, partition, offset) {
+   return {
+     payload,
+     topic: topic2,
+     partition,
+     offset,
+     eventId: headers[HEADER_EVENT_ID] ?? randomUUID(),
+     correlationId: headers[HEADER_CORRELATION_ID] ?? randomUUID(),
+     timestamp: headers[HEADER_TIMESTAMP] ?? (/* @__PURE__ */ new Date()).toISOString(),
+     schemaVersion: Number(headers[HEADER_SCHEMA_VERSION] ?? 1),
+     traceparent: headers[HEADER_TRACEPARENT],
+     headers
+   };
+ }

  // src/client/errors.ts
  var KafkaProcessingError = class extends Error {
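
The new envelope module stores a per-message context in AsyncLocalStorage, so a message produced from inside a consumer handler inherits the consumed message's correlation id instead of minting a new one. A minimal sketch of that propagation, assuming these helpers are re-exported from the package root (the literal ids below are illustrative):

```ts
import {
  buildEnvelopeHeaders,
  getEnvelopeContext,
  runWithEnvelopeContext,
} from "@drarzter/kafka-client";

// Simulates what startConsumer now does around each handler call.
runWithEnvelopeContext(
  { correlationId: "abc-123", traceparent: undefined },
  () => {
    // Inside the handler the context is visible...
    console.log(getEnvelopeContext()?.correlationId); // "abc-123"
    // ...and headers built here reuse it rather than generating a new UUID.
    const headers = buildEnvelopeHeaders();
    console.log(headers["x-correlation-id"]); // "abc-123"
  }
);
```
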
@@ -33,14 +93,159 @@ var KafkaRetryExhaustedError = class extends KafkaProcessingError {
    }
  };

- // src/client/kafka.client.ts
- var ACKS_ALL = -1;
+ // src/client/consumer-pipeline.ts
  function toError(error) {
    return error instanceof Error ? error : new Error(String(error));
  }
+ function sleep(ms) {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ function parseJsonMessage(raw, topic2, logger) {
+   try {
+     return JSON.parse(raw);
+   } catch (error) {
+     logger.error(
+       `Failed to parse message from topic ${topic2}:`,
+       toError(error).stack
+     );
+     return null;
+   }
+ }
+ async function validateWithSchema(message, raw, topic2, schemaMap, interceptors, dlq, deps) {
+   const schema = schemaMap.get(topic2);
+   if (!schema) return message;
+   try {
+     return schema.parse(message);
+   } catch (error) {
+     const err = toError(error);
+     const validationError = new KafkaValidationError(topic2, message, {
+       cause: err
+     });
+     deps.logger.error(
+       `Schema validation failed for topic ${topic2}:`,
+       err.message
+     );
+     if (dlq) await sendToDlq(topic2, raw, deps);
+     const errorEnvelope = extractEnvelope(message, {}, topic2, -1, "");
+     for (const interceptor of interceptors) {
+       await interceptor.onError?.(errorEnvelope, validationError);
+     }
+     return null;
+   }
+ }
+ async function sendToDlq(topic2, rawMessage, deps) {
+   const dlqTopic = `${topic2}.dlq`;
+   try {
+     await deps.producer.send({
+       topic: dlqTopic,
+       messages: [{ value: rawMessage }]
+     });
+     deps.logger.warn(`Message sent to DLQ: ${dlqTopic}`);
+   } catch (error) {
+     deps.logger.error(
+       `Failed to send message to DLQ ${dlqTopic}:`,
+       toError(error).stack
+     );
+   }
+ }
+ async function executeWithRetry(fn, ctx, deps) {
+   const { envelope, rawMessages, interceptors, dlq, retry, isBatch } = ctx;
+   const maxAttempts = retry ? retry.maxRetries + 1 : 1;
+   const backoffMs = retry?.backoffMs ?? 1e3;
+   const envelopes = Array.isArray(envelope) ? envelope : [envelope];
+   const topic2 = envelopes[0]?.topic ?? "unknown";
+   for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+     const cleanups = [];
+     try {
+       for (const env of envelopes) {
+         for (const inst of deps.instrumentation) {
+           const cleanup = inst.beforeConsume?.(env);
+           if (typeof cleanup === "function") cleanups.push(cleanup);
+         }
+       }
+       for (const env of envelopes) {
+         for (const interceptor of interceptors) {
+           await interceptor.before?.(env);
+         }
+       }
+       await fn();
+       for (const env of envelopes) {
+         for (const interceptor of interceptors) {
+           await interceptor.after?.(env);
+         }
+       }
+       for (const cleanup of cleanups) cleanup();
+       return;
+     } catch (error) {
+       const err = toError(error);
+       const isLastAttempt = attempt === maxAttempts;
+       for (const env of envelopes) {
+         for (const inst of deps.instrumentation) {
+           inst.onConsumeError?.(env, err);
+         }
+       }
+       for (const cleanup of cleanups) cleanup();
+       if (isLastAttempt && maxAttempts > 1) {
+         const exhaustedError = new KafkaRetryExhaustedError(
+           topic2,
+           envelopes.map((e) => e.payload),
+           maxAttempts,
+           { cause: err }
+         );
+         for (const env of envelopes) {
+           for (const interceptor of interceptors) {
+             await interceptor.onError?.(env, exhaustedError);
+           }
+         }
+       } else {
+         for (const env of envelopes) {
+           for (const interceptor of interceptors) {
+             await interceptor.onError?.(env, err);
+           }
+         }
+       }
+       deps.logger.error(
+         `Error processing ${isBatch ? "batch" : "message"} from topic ${topic2} (attempt ${attempt}/${maxAttempts}):`,
+         err.stack
+       );
+       if (isLastAttempt) {
+         if (dlq) {
+           for (const raw of rawMessages) {
+             await sendToDlq(topic2, raw, deps);
+           }
+         }
+       } else {
+         await sleep(backoffMs * attempt);
+       }
+     }
+   }
+ }
+
+ // src/client/subscribe-retry.ts
+ async function subscribeWithRetry(consumer, topics, logger, retryOpts) {
+   const maxAttempts = retryOpts?.retries ?? 5;
+   const backoffMs = retryOpts?.backoffMs ?? 5e3;
+   for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+     try {
+       await consumer.subscribe({ topics });
+       return;
+     } catch (error) {
+       if (attempt === maxAttempts) throw error;
+       const msg = toError(error).message;
+       logger.warn(
+         `Failed to subscribe to [${topics.join(", ")}] (attempt ${attempt}/${maxAttempts}): ${msg}. Retrying in ${backoffMs}ms...`
+       );
+       await sleep(backoffMs);
+     }
+   }
+ }
+
+ // src/client/kafka.client.ts
+ var { Kafka: KafkaClass, logLevel: KafkaLogLevel } = KafkaJS;
  var KafkaClient = class {
    kafka;
    producer;
+   txProducer;
    consumers = /* @__PURE__ */ new Map();
    admin;
    logger;
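
Worth noting about `executeWithRetry` above: the first attempt counts toward the total, so `maxRetries + 1` attempts run in all, the sleep between attempts grows linearly as `backoffMs * attempt`, and dead-lettering happens only after the last attempt. Worked out for a hypothetical retry config:

```ts
// retry = { maxRetries: 3, backoffMs: 1000 } gives 4 attempts in total.
const retry = { maxRetries: 3, backoffMs: 1000 };
const maxAttempts = retry.maxRetries + 1; // 4
// The sleep after failed attempt n (for n < maxAttempts) is backoffMs * n:
const delays = Array.from({ length: maxAttempts - 1 }, (_, n) => retry.backoffMs * (n + 1));
console.log(delays); // [1000, 2000, 3000] -> ~6s of backoff before the DLQ send
```
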
@@ -51,6 +256,7 @@ var KafkaClient = class {
    defaultGroupId;
    schemaRegistry = /* @__PURE__ */ new Map();
    runningConsumers = /* @__PURE__ */ new Map();
+   instrumentation;
    isAdminConnected = false;
    clientId;
    constructor(clientId, groupId, brokers, options) {
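
The new `instrumentation` option is an array of hook objects. The shape below is inferred from the call sites in this chunk (`beforeSend`, `afterSend`, `beforeConsume` optionally returning a cleanup callback, `onConsumeError`); the TypeScript types are assumptions, not the package's published ones:

```ts
const timing = {
  beforeSend(topic: string, headers: Record<string, string | Buffer>) {
    console.log(`producing to ${topic}`, headers["x-event-id"]);
  },
  afterSend(topic: string) {
    console.log(`produced to ${topic}`);
  },
  beforeConsume(envelope: { topic: string }) {
    const start = Date.now();
    // A returned function is collected into `cleanups` and run after the handler.
    return () => console.log(`${envelope.topic} handled in ${Date.now() - start}ms`);
  },
  onConsumeError(envelope: { topic: string }, err: Error) {
    console.error(`handler failed on ${envelope.topic}:`, err.message);
  },
};
// Passed via: new KafkaClient(clientId, groupId, brokers, { instrumentation: [timing] })
```
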
@@ -64,52 +270,72 @@ var KafkaClient = class {
      this.autoCreateTopicsEnabled = options?.autoCreateTopics ?? false;
      this.strictSchemasEnabled = options?.strictSchemas ?? true;
      this.numPartitions = options?.numPartitions ?? 1;
-     this.kafka = new Kafka({
-       clientId: this.clientId,
-       brokers,
-       logLevel: KafkaLogLevel.WARN,
-       logCreator: () => ({ level, log }) => {
-         const msg = `[kafkajs] ${log.message}`;
-         if (level === KafkaLogLevel.ERROR) {
-           const text = log.message ?? "";
-           const isRetriable = text.includes("TOPIC_ALREADY_EXISTS") || text.includes("GROUP_COORDINATOR_NOT_AVAILABLE") || text.includes("NOT_COORDINATOR") || text.includes("Response GroupCoordinator") || text.includes("Response CreateTopics");
-           if (isRetriable) this.logger.warn(msg);
-           else this.logger.error(msg);
-         } else if (level === KafkaLogLevel.WARN) {
-           this.logger.warn(msg);
-         } else {
-           this.logger.log(msg);
-         }
+     this.instrumentation = options?.instrumentation ?? [];
+     this.kafka = new KafkaClass({
+       kafkaJS: {
+         clientId: this.clientId,
+         brokers,
+         logLevel: KafkaLogLevel.ERROR
        }
      });
      this.producer = this.kafka.producer({
-       createPartitioner: Partitioners.DefaultPartitioner,
-       idempotent: true,
-       transactionalId: `${clientId}-tx`,
-       maxInFlightRequests: 1
+       kafkaJS: {
+         acks: -1
+       }
      });
      this.admin = this.kafka.admin();
    }
    async sendMessage(topicOrDesc, message, options = {}) {
      const payload = this.buildSendPayload(topicOrDesc, [
-       { value: message, key: options.key, headers: options.headers }
+       {
+         value: message,
+         key: options.key,
+         headers: options.headers,
+         correlationId: options.correlationId,
+         schemaVersion: options.schemaVersion,
+         eventId: options.eventId
+       }
      ]);
      await this.ensureTopic(payload.topic);
      await this.producer.send(payload);
+     for (const inst of this.instrumentation) {
+       inst.afterSend?.(payload.topic);
+     }
    }
    async sendBatch(topicOrDesc, messages) {
      const payload = this.buildSendPayload(topicOrDesc, messages);
      await this.ensureTopic(payload.topic);
      await this.producer.send(payload);
+     for (const inst of this.instrumentation) {
+       inst.afterSend?.(payload.topic);
+     }
    }
    /** Execute multiple sends atomically. Commits on success, aborts on error. */
    async transaction(fn) {
-     const tx = await this.producer.transaction();
+     if (!this.txProducer) {
+       this.txProducer = this.kafka.producer({
+         kafkaJS: {
+           acks: -1,
+           idempotent: true,
+           transactionalId: `${this.clientId}-tx`,
+           maxInFlightRequests: 1
+         }
+       });
+       await this.txProducer.connect();
+     }
+     const tx = await this.txProducer.transaction();
      try {
        const ctx = {
          send: async (topicOrDesc, message, options = {}) => {
            const payload = this.buildSendPayload(topicOrDesc, [
-             { value: message, key: options.key, headers: options.headers }
+             {
+               value: message,
+               key: options.key,
+               headers: options.headers,
+               correlationId: options.correlationId,
+               schemaVersion: options.schemaVersion,
+               eventId: options.eventId
+             }
            ]);
            await this.ensureTopic(payload.topic);
            await tx.send(payload);
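
Two producer-side shifts in this hunk: per-send `acks: ACKS_ALL` moves into producer config (`kafkaJS: { acks: -1 }`), and the idempotent, transactional producer is now created lazily on the first `transaction()` call instead of making the default producer transactional. The caller-facing API looks unchanged; a usage sketch where `client` is a connected KafkaClient and the topics are hypothetical:

```ts
await client.transaction(async (tx) => {
  await tx.send("orders.created", { orderId: "o-1" });
  await tx.send("audit.events", { type: "order-created", orderId: "o-1" });
}); // commits when the callback resolves, aborts if it throws
```
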
@@ -146,28 +372,41 @@ var KafkaClient = class {
    }
    async startConsumer(topics, handleMessage, options = {}) {
      const { consumer, schemaMap, gid, dlq, interceptors, retry } = await this.setupConsumer(topics, "eachMessage", options);
+     const deps = { logger: this.logger, producer: this.producer, instrumentation: this.instrumentation };
      await consumer.run({
-       autoCommit: options.autoCommit ?? true,
-       eachMessage: async ({ topic: topic2, message }) => {
+       eachMessage: async ({ topic: topic2, partition, message }) => {
          if (!message.value) {
            this.logger.warn(`Received empty message from topic ${topic2}`);
            return;
          }
          const raw = message.value.toString();
-         const parsed = this.parseJsonMessage(raw, topic2);
+         const parsed = parseJsonMessage(raw, topic2, this.logger);
          if (parsed === null) return;
-         const validated = await this.validateWithSchema(
+         const validated = await validateWithSchema(
            parsed,
            raw,
            topic2,
            schemaMap,
            interceptors,
-           dlq
+           dlq,
+           deps
          );
          if (validated === null) return;
-         await this.executeWithRetry(
-           () => handleMessage(validated, topic2),
-           { topic: topic2, messages: validated, rawMessages: [raw], interceptors, dlq, retry }
+         const headers = decodeHeaders(message.headers);
+         const envelope = extractEnvelope(
+           validated,
+           headers,
+           topic2,
+           partition,
+           message.offset
+         );
+         await executeWithRetry(
+           () => runWithEnvelopeContext(
+             { correlationId: envelope.correlationId, traceparent: envelope.traceparent },
+             () => handleMessage(envelope)
+           ),
+           { envelope, rawMessages: [raw], interceptors, dlq, retry },
+           deps
          );
        }
      });
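
This is the breaking change for consumers: the handler signature moves from `(message, topic)` to a single envelope argument carrying the payload plus its metadata, and the handler now runs inside the envelope context shown earlier. A sketch against the new shape (topic name and domain function are hypothetical):

```ts
await client.startConsumer(["orders.created"], async (envelope) => {
  // Metadata that previously was unavailable or passed separately:
  console.log(envelope.topic, envelope.partition, envelope.offset);
  console.log(envelope.eventId, envelope.correlationId, envelope.schemaVersion);
  await handleOrder(envelope.payload); // hypothetical domain handler
});
```
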
@@ -175,15 +414,15 @@ var KafkaClient = class {
    }
    async startBatchConsumer(topics, handleBatch, options = {}) {
      const { consumer, schemaMap, gid, dlq, interceptors, retry } = await this.setupConsumer(topics, "eachBatch", options);
+     const deps = { logger: this.logger, producer: this.producer, instrumentation: this.instrumentation };
      await consumer.run({
-       autoCommit: options.autoCommit ?? true,
        eachBatch: async ({
          batch,
          heartbeat,
          resolveOffset,
          commitOffsetsIfNecessary
        }) => {
-         const validMessages = [];
+         const envelopes = [];
          const rawMessages = [];
          for (const message of batch.messages) {
            if (!message.value) {
@@ -193,21 +432,25 @@ var KafkaClient = class {
              continue;
            }
            const raw = message.value.toString();
-           const parsed = this.parseJsonMessage(raw, batch.topic);
+           const parsed = parseJsonMessage(raw, batch.topic, this.logger);
            if (parsed === null) continue;
-           const validated = await this.validateWithSchema(
+           const validated = await validateWithSchema(
              parsed,
              raw,
              batch.topic,
              schemaMap,
              interceptors,
-             dlq
+             dlq,
+             deps
            );
            if (validated === null) continue;
-           validMessages.push(validated);
+           const headers = decodeHeaders(message.headers);
+           envelopes.push(
+             extractEnvelope(validated, headers, batch.topic, batch.partition, message.offset)
+           );
            rawMessages.push(raw);
          }
-         if (validMessages.length === 0) return;
+         if (envelopes.length === 0) return;
          const meta = {
            partition: batch.partition,
            highWatermark: batch.highWatermark,
@@ -215,17 +458,17 @@ var KafkaClient = class {
            resolveOffset,
            commitOffsetsIfNecessary
          };
-         await this.executeWithRetry(
-           () => handleBatch(validMessages, batch.topic, meta),
+         await executeWithRetry(
+           () => handleBatch(envelopes, meta),
            {
-             topic: batch.topic,
-             messages: validMessages,
+             envelope: envelopes,
              rawMessages: batch.messages.filter((m) => m.value).map((m) => m.value.toString()),
              interceptors,
              dlq,
              retry,
              isBatch: true
-           }
+           },
+           deps
          );
        }
      });
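
Batch handlers change the same way: from `(messages, topic, meta)` to `(envelopes, meta)`, with the topic argument dropped because each envelope carries its own. A sketch (topic and domain function hypothetical, meta fields as built above):

```ts
await client.startBatchConsumer(["orders.created"], async (envelopes, meta) => {
  for (const env of envelopes) {
    await processOrder(env.payload); // hypothetical
    meta.resolveOffset(env.offset);  // mark this offset as processed
  }
  await meta.commitOffsetsIfNecessary();
});
```
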
@@ -257,6 +500,10 @@ var KafkaClient = class {
    /** Gracefully disconnect producer, all consumers, and admin. */
    async disconnect() {
      const tasks = [this.producer.disconnect()];
+     if (this.txProducer) {
+       tasks.push(this.txProducer.disconnect());
+       this.txProducer = void 0;
+     }
      for (const consumer of this.consumers.values()) {
        tasks.push(consumer.disconnect());
      }
@@ -270,12 +517,16 @@ var KafkaClient = class {
      this.logger.log("All connections closed");
    }
    // ── Private helpers ──────────────────────────────────────────────
-   getOrCreateConsumer(groupId) {
-     const gid = groupId || this.defaultGroupId;
-     if (!this.consumers.has(gid)) {
-       this.consumers.set(gid, this.kafka.consumer({ groupId: gid }));
+   getOrCreateConsumer(groupId, fromBeginning, autoCommit) {
+     if (!this.consumers.has(groupId)) {
+       this.consumers.set(
+         groupId,
+         this.kafka.consumer({
+           kafkaJS: { groupId, fromBeginning, autoCommit }
+         })
+       );
      }
-     return this.consumers.get(gid);
+     return this.consumers.get(groupId);
    }
    resolveTopicName(topicOrDescriptor) {
      if (typeof topicOrDescriptor === "string") return topicOrDescriptor;
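
`fromBeginning` and `autoCommit` move from the `subscribe()`/`run()` calls to consumer construction, apparently because the Confluent compatibility layer takes them as consumer-level config. The internal shape now mirrors this sketch, where `kafka` is the underlying KafkaJS-compat instance and the group/topic names are illustrative:

```ts
const consumer = kafka.consumer({
  kafkaJS: { groupId: "g1", fromBeginning: true, autoCommit: true },
});
await consumer.subscribe({ topics: ["orders.created"] }); // no fromBeginning here
```
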
@@ -315,19 +566,30 @@ var KafkaClient = class {
    }
    /**
     * Build a kafkajs-ready send payload.
-    * Handles: topic resolution, schema registration, validation, JSON serialization.
+    * Handles: topic resolution, schema registration, validation, JSON serialization,
+    * envelope header generation, and instrumentation hooks.
     */
    buildSendPayload(topicOrDesc, messages) {
      this.registerSchema(topicOrDesc);
      const topic2 = this.resolveTopicName(topicOrDesc);
      return {
        topic: topic2,
-       messages: messages.map((m) => ({
-         value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
-         key: m.key ?? null,
-         headers: m.headers
-       })),
-       acks: ACKS_ALL
+       messages: messages.map((m) => {
+         const envelopeHeaders = buildEnvelopeHeaders({
+           correlationId: m.correlationId,
+           schemaVersion: m.schemaVersion,
+           eventId: m.eventId,
+           headers: m.headers
+         });
+         for (const inst of this.instrumentation) {
+           inst.beforeSend?.(topic2, envelopeHeaders);
+         }
+         return {
+           value: JSON.stringify(this.validateMessage(topicOrDesc, m.value)),
+           key: m.key ?? null,
+           headers: envelopeHeaders
+         };
+       })
      };
    }
    /** Shared consumer setup: groupId check, schema map, connect, subscribe. */
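
With this change, every produced message carries the envelope headers, and user-supplied headers are merged over them. Roughly what one message looks like on the wire (values illustrative):

```ts
const payload = {
  topic: "orders.created",
  messages: [
    {
      value: '{"orderId":"o-1"}',
      key: null,
      headers: {
        "x-event-id": "<uuid>",       // randomUUID() unless eventId is passed
        "x-correlation-id": "<uuid>", // option, then envelope context, then new UUID
        "x-timestamp": "2025-01-01T00:00:00.000Z",
        "x-schema-version": "1",
        // plus "traceparent" when the context provides one, then user headers
      },
    },
  ],
};
```
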
@@ -348,13 +610,21 @@ var KafkaClient = class {
        `Cannot use ${mode} on consumer group "${gid}" \u2014 it is already running with ${oppositeMode}. Use a different groupId for this consumer.`
      );
    }
-   const consumer = this.getOrCreateConsumer(optGroupId);
+   const consumer = this.getOrCreateConsumer(gid, fromBeginning, options.autoCommit ?? true);
    const schemaMap = this.buildSchemaMap(topics, optionSchemas);
    const topicNames = topics.map(
      (t) => this.resolveTopicName(t)
    );
+   for (const t of topicNames) {
+     await this.ensureTopic(t);
+   }
+   if (dlq) {
+     for (const t of topicNames) {
+       await this.ensureTopic(`${t}.dlq`);
+     }
+   }
    await consumer.connect();
-   await this.subscribeWithRetry(consumer, topicNames, fromBeginning, options.subscribeRetry);
+   await subscribeWithRetry(consumer, topicNames, this.logger, options.subscribeRetry);
    this.logger.log(
      `${mode === "eachBatch" ? "Batch consumer" : "Consumer"} subscribed to topics: ${topicNames.join(", ")}`
    );
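
Consumer setup now pre-creates the subscribed topics, and their `.dlq` counterparts when dead-lettering is enabled, before connecting, so the first failed message does not race topic auto-creation. A sketch of the options involved; the `dlq` flag name is inferred from this chunk, not confirmed against the package's typings:

```ts
await client.startConsumer(["orders.created"], handler, {
  dlq: true,                                  // assumed flag; pre-creates "orders.created.dlq"
  retry: { maxRetries: 3, backoffMs: 1000 },  // fields match executeWithRetry
  subscribeRetry: { retries: 5, backoffMs: 5000 }, // fields match subscribeWithRetry
});
```
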
@@ -377,148 +647,6 @@ var KafkaClient = class {
      }
      return schemaMap;
    }
-   /** Parse raw message as JSON. Returns null on failure (logs error). */
-   parseJsonMessage(raw, topic2) {
-     try {
-       return JSON.parse(raw);
-     } catch (error) {
-       this.logger.error(
-         `Failed to parse message from topic ${topic2}:`,
-         toError(error).stack
-       );
-       return null;
-     }
-   }
-   /**
-    * Validate a parsed message against the schema map.
-    * On failure: logs error, sends to DLQ if enabled, calls interceptor.onError.
-    * Returns validated message or null.
-    */
-   async validateWithSchema(message, raw, topic2, schemaMap, interceptors, dlq) {
-     const schema = schemaMap.get(topic2);
-     if (!schema) return message;
-     try {
-       return schema.parse(message);
-     } catch (error) {
-       const err = toError(error);
-       const validationError = new KafkaValidationError(topic2, message, {
-         cause: err
-       });
-       this.logger.error(
-         `Schema validation failed for topic ${topic2}:`,
-         err.message
-       );
-       if (dlq) await this.sendToDlq(topic2, raw);
-       for (const interceptor of interceptors) {
-         await interceptor.onError?.(message, topic2, validationError);
-       }
-       return null;
-     }
-   }
-   /**
-    * Execute a handler with retry, interceptors, and DLQ support.
-    * Used by both single-message and batch consumers.
-    */
-   async executeWithRetry(fn, ctx) {
-     const { topic: topic2, messages, rawMessages, interceptors, dlq, retry, isBatch } = ctx;
-     const maxAttempts = retry ? retry.maxRetries + 1 : 1;
-     const backoffMs = retry?.backoffMs ?? 1e3;
-     for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-       try {
-         if (isBatch) {
-           for (const interceptor of interceptors) {
-             for (const msg of messages) {
-               await interceptor.before?.(msg, topic2);
-             }
-           }
-         } else {
-           for (const interceptor of interceptors) {
-             await interceptor.before?.(messages, topic2);
-           }
-         }
-         await fn();
-         if (isBatch) {
-           for (const interceptor of interceptors) {
-             for (const msg of messages) {
-               await interceptor.after?.(msg, topic2);
-             }
-           }
-         } else {
-           for (const interceptor of interceptors) {
-             await interceptor.after?.(messages, topic2);
-           }
-         }
-         return;
-       } catch (error) {
-         const err = toError(error);
-         const isLastAttempt = attempt === maxAttempts;
-         if (isLastAttempt && maxAttempts > 1) {
-           const exhaustedError = new KafkaRetryExhaustedError(
-             topic2,
-             messages,
-             maxAttempts,
-             { cause: err }
-           );
-           for (const interceptor of interceptors) {
-             await interceptor.onError?.(messages, topic2, exhaustedError);
-           }
-         } else {
-           for (const interceptor of interceptors) {
-             await interceptor.onError?.(messages, topic2, err);
-           }
-         }
-         this.logger.error(
-           `Error processing ${isBatch ? "batch" : "message"} from topic ${topic2} (attempt ${attempt}/${maxAttempts}):`,
-           err.stack
-         );
-         if (isLastAttempt) {
-           if (dlq) {
-             for (const raw of rawMessages) {
-               await this.sendToDlq(topic2, raw);
-             }
-           }
-         } else {
-           await this.sleep(backoffMs * attempt);
-         }
-       }
-     }
-   }
-   async sendToDlq(topic2, rawMessage) {
-     const dlqTopic = `${topic2}.dlq`;
-     try {
-       await this.producer.send({
-         topic: dlqTopic,
-         messages: [{ value: rawMessage }],
-         acks: ACKS_ALL
-       });
-       this.logger.warn(`Message sent to DLQ: ${dlqTopic}`);
-     } catch (error) {
-       this.logger.error(
-         `Failed to send message to DLQ ${dlqTopic}:`,
-         toError(error).stack
-       );
-     }
-   }
-   async subscribeWithRetry(consumer, topics, fromBeginning, retryOpts) {
-     const maxAttempts = retryOpts?.retries ?? 5;
-     const backoffMs = retryOpts?.backoffMs ?? 5e3;
-     for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-       try {
-         await consumer.subscribe({ topics, fromBeginning });
-         return;
-       } catch (error) {
-         if (attempt === maxAttempts) throw error;
-         const msg = toError(error).message;
-         this.logger.warn(
-           `Failed to subscribe to [${topics.join(", ")}] (attempt ${attempt}/${maxAttempts}): ${msg}. Retrying in ${backoffMs}ms...`
-         );
-         await this.sleep(backoffMs);
-       }
-     }
-   }
-   sleep(ms) {
-     return new Promise((resolve) => setTimeout(resolve, ms));
-   }
  };

  // src/client/topic.ts
@@ -536,10 +664,20 @@ function topic(name) {
  }

  export {
+   HEADER_EVENT_ID,
+   HEADER_CORRELATION_ID,
+   HEADER_TIMESTAMP,
+   HEADER_SCHEMA_VERSION,
+   HEADER_TRACEPARENT,
+   getEnvelopeContext,
+   runWithEnvelopeContext,
+   buildEnvelopeHeaders,
+   decodeHeaders,
+   extractEnvelope,
    KafkaProcessingError,
    KafkaValidationError,
    KafkaRetryExhaustedError,
    KafkaClient,
    topic
  };
- //# sourceMappingURL=chunk-A56D7HXR.mjs.map
+ //# sourceMappingURL=chunk-YCKN2YEC.mjs.map