@drarzter/kafka-client 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -13,6 +13,7 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
13
13
  ## Why?
14
14
 
15
15
  - **Typed topics** — you define a map of topic -> message shape, and the compiler won't let you send wrong data to wrong topic
16
+ - **Topic descriptors** — the `topic()` helper lets you define topics as standalone typed objects instead of string keys
16
17
  - **NestJS-native** — `register()` / `registerAsync()`, DI injection, lifecycle hooks out of the box
17
18
  - **Idempotent producer** — `acks: -1`, `idempotent: true` by default
18
19
  - **Retry + DLQ** — configurable retries with backoff, dead letter queue for failed messages
@@ -21,6 +22,8 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
21
22
  - **Custom headers** — attach metadata headers to messages
22
23
  - **Transactions** — exactly-once semantics with `producer.transaction()`
23
24
  - **Consumer interceptors** — before/after/onError hooks for message processing
25
+ - **Auto-create topics** — `autoCreateTopics: true` for dev mode — no need to pre-create topics
26
+ - **Error classes** — `KafkaProcessingError` and `KafkaRetryExhaustedError` with topic, message, and attempt metadata
24
27
  - **Health check** — built-in health indicator for monitoring
25
28
  - **Multiple consumer groups** — named clients for different bounded contexts
26
29
  - **Declarative & imperative** — use `@SubscribeTo()` decorator or `startConsumer()` directly
@@ -126,6 +129,48 @@ export type OrdersTopicMap = {
126
129
  };
127
130
  ```
128
131
 
132
+ #### Alternative: `topic()` descriptors
133
+
134
+ Instead of a centralized topic map, define each topic as a standalone typed object:
135
+
136
+ ```typescript
137
+ import { topic, TopicsFrom } from '@drarzter/kafka-client';
138
+
139
+ export const OrderCreated = topic('order.created')<{
140
+ orderId: string;
141
+ userId: string;
142
+ amount: number;
143
+ }>();
144
+
145
+ export const OrderCompleted = topic('order.completed')<{
146
+ orderId: string;
147
+ completedAt: string;
148
+ }>();
149
+
150
+ // Combine into a topic map for KafkaModule generics
151
+ export type OrdersTopicMap = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
152
+ ```
153
+
154
+ Topic descriptors work everywhere strings work — `sendMessage`, `sendBatch`, `transaction`, `startConsumer`, and `@SubscribeTo()`:
155
+
156
+ ```typescript
157
+ // Sending
158
+ await kafka.sendMessage(OrderCreated, { orderId: '123', userId: '456', amount: 100 });
159
+ await kafka.sendBatch(OrderCreated, [{ value: { orderId: '1', userId: '10', amount: 50 } }]);
160
+
161
+ // Transactions
162
+ await kafka.transaction(async (tx) => {
163
+ await tx.send(OrderCreated, { orderId: '123', userId: '456', amount: 100 });
164
+ });
165
+
166
+ // Consuming (decorator)
167
+ @SubscribeTo(OrderCreated)
168
+ async handleOrder(message: OrdersTopicMap['order.created']) { ... }
169
+
170
+ // Consuming (imperative)
171
+ await kafka.startConsumer([OrderCreated], handler);
172
+ ```
173
+
129
174
  ### 2. Register the module
130
175
 
131
176
  ```typescript
@@ -138,12 +183,15 @@ import { OrdersTopicMap } from './orders.types';
138
183
  clientId: 'my-service',
139
184
  groupId: 'my-consumer-group',
140
185
  brokers: ['localhost:9092'],
186
+ autoCreateTopics: true, // auto-create topics on first use (dev mode)
141
187
  }),
142
188
  ],
143
189
  })
144
190
  export class OrdersModule {}
145
191
  ```
146
192
 
193
+ `autoCreateTopics` calls `admin.createTopics()` (idempotent — no-op if topic already exists) before the first send/consume for each topic. Useful in development; not recommended for production.
194
+
147
195
  Or with `ConfigService`:
148
196
 
149
197
  ```typescript
@@ -416,6 +464,50 @@ Multiple interceptors run in order. All hooks are optional.
416
464
  | `dlq` | `false` | Send to `{topic}.dlq` after all retries exhausted |
417
465
  | `interceptors` | `[]` | Array of before/after/onError hooks |
418
466
 
467
+ ### Module options
468
+
469
+ | Option | Default | Description |
470
+ |--------|---------|-------------|
471
+ | `clientId` | — | Kafka client identifier |
472
+ | `groupId` | — | Consumer group ID |
473
+ | `brokers` | — | Array of broker addresses |
474
+ | `name` | — | Named client for multi-group setups |
475
+ | `isGlobal` | `false` | Make the client available in all modules |
476
+ | `autoCreateTopics` | `false` | Auto-create topics on first send/consume |
477
+
478
+ ## Error classes
479
+
480
+ When a consumer message handler fails after all retries, the library wraps the failure in typed error objects:
481
+
482
+ ```typescript
483
+ import { KafkaProcessingError, KafkaRetryExhaustedError } from '@drarzter/kafka-client';
484
+ ```
485
+
486
+ **`KafkaProcessingError`** — base class for processing failures. Has `topic`, `originalMessage`, and supports `cause`:
487
+
488
+ ```typescript
489
+ const err = new KafkaProcessingError('handler failed', 'order.created', rawMessage, { cause: originalError });
490
+ err.topic; // 'order.created'
491
+ err.originalMessage; // the parsed message object
492
+ err.cause; // the original error
493
+ ```
494
+
495
+ **`KafkaRetryExhaustedError`** — thrown after all retries are exhausted. Extends `KafkaProcessingError` and adds `attempts`:
496
+
497
+ ```typescript
498
+ // In an onError interceptor:
499
+ const interceptor: ConsumerInterceptor<MyTopics> = {
500
+ onError: (message, topic, error) => {
501
+ if (error instanceof KafkaRetryExhaustedError) {
502
+ console.log(`Failed after ${error.attempts} attempts on ${error.topic}`);
503
+ console.log('Last error:', error.cause);
504
+ }
505
+ },
506
+ };
507
+ ```
508
+
509
+ When `retry.maxRetries` is set and all attempts fail, `KafkaRetryExhaustedError` is passed to `onError` interceptors automatically.
510
+
419
511
  ## Health check
420
512
 
421
513
  Monitor Kafka connectivity with the built-in health indicator:
@@ -442,11 +534,29 @@ export class HealthService {
442
534
  }
443
535
  ```
444
536
 
537
+ ## Testing
538
+
539
+ Unit tests (mocked kafkajs):
540
+
541
+ ```bash
542
+ npm test
543
+ ```
544
+
545
+ Integration tests with a real Kafka broker via [testcontainers](https://node.testcontainers.org/) (requires Docker):
546
+
547
+ ```bash
548
+ npm run test:integration
549
+ ```
550
+
551
+ The integration suite spins up a single-node KRaft Kafka container and tests sending, consuming, batching, transactions, retry + DLQ, interceptors, health checks, and `fromBeginning` — no mocks.
552
+
553
+ Both suites run in CI on every push to `main`.
554
+
445
555
  ## Project structure
446
556
 
447
557
  ```
448
558
  src/
449
- ├── client/ # KafkaClient, types, interfaces
559
+ ├── client/ # KafkaClient, types, topic(), error classes
450
560
  ├── module/ # KafkaModule, KafkaExplorer, DI constants
451
561
  ├── decorators/ # @InjectKafkaClient(), @SubscribeTo()
452
562
  ├── health/ # KafkaHealthIndicator
package/dist/index.d.mts CHANGED
@@ -1,6 +1,50 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * A typed topic descriptor that pairs a topic name with its message type.
6
+ * Created via the `topic()` factory function.
7
+ *
8
+ * @typeParam N - The literal topic name string.
9
+ * @typeParam M - The message payload type for this topic.
10
+ */
11
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
12
+ readonly __topic: N;
13
+ /** @internal Phantom type — never has a real value at runtime. */
14
+ readonly __type: M;
15
+ }
16
+ /**
17
+ * Define a typed topic descriptor.
18
+ *
19
+ * @example
20
+ * ```ts
21
+ * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
+ *
23
+ * // Use with KafkaClient:
24
+ * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
+ *
26
+ * // Use with @SubscribeTo:
27
+ * @SubscribeTo(OrderCreated)
28
+ * async handleOrder(msg) { ... }
29
+ * ```
30
+ */
31
+ declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
32
+ /**
33
+ * Build a topic-message map type from a union of TopicDescriptors.
34
+ *
35
+ * @example
36
+ * ```ts
37
+ * const OrderCreated = topic('order.created')<{ orderId: string }>();
38
+ * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
39
+ *
40
+ * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
41
+ * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
42
+ * ```
43
+ */
44
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
45
+ [K in D as K["__topic"]]: K["__type"];
46
+ };
47
+
4
48
  /**
5
49
  * Mapping of topic names to their message types.
6
50
  * Define this interface to get type-safe publish/subscribe across your app.
@@ -74,11 +118,17 @@ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap
74
118
  /** Context passed to the `transaction()` callback with type-safe send methods. */
75
119
  interface TransactionContext<T extends TopicMapConstraint<T>> {
76
120
  send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
121
+ send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
77
122
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
78
123
  value: T[K];
79
124
  key?: string;
80
125
  headers?: MessageHeaders;
81
126
  }>): Promise<void>;
127
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
128
+ value: D["__type"];
129
+ key?: string;
130
+ headers?: MessageHeaders;
131
+ }>): Promise<void>;
82
132
  }
83
133
  /** Interface describing all public methods of the Kafka client. */
84
134
  interface IKafkaClient<T extends TopicMapConstraint<T>> {
@@ -97,6 +147,12 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
97
147
  getClientId: () => ClientId;
98
148
  disconnect(): Promise<void>;
99
149
  }
150
+ /** Options for `KafkaClient` constructor. */
151
+ interface KafkaClientOptions {
152
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
153
+ autoCreateTopics?: boolean;
154
+ }
155
+
100
156
  /**
101
157
  * Type-safe Kafka client for NestJS.
102
158
  * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
@@ -109,13 +165,22 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
109
165
  private readonly consumer;
110
166
  private readonly admin;
111
167
  private readonly logger;
112
- private isConsumerRunning;
168
+ private readonly autoCreateTopicsEnabled;
169
+ private readonly ensuredTopics;
113
170
  private isAdminConnected;
114
171
  readonly clientId: ClientId;
115
- constructor(clientId: ClientId, groupId: GroupId, brokers: string[]);
116
- /** Send a single typed message to a topic. */
172
+ constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
173
+ private resolveTopicName;
174
+ private ensureTopic;
175
+ /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
176
+ sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
117
177
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
118
- /** Send multiple typed messages to a topic in one call. */
178
+ /** Send multiple typed messages in one call. Accepts a topic key or a TopicDescriptor. */
179
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
180
+ value: D["__type"];
181
+ key?: string;
182
+ headers?: MessageHeaders;
183
+ }>): Promise<void>;
119
184
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
120
185
  value: T[K];
121
186
  key?: string;
@@ -127,7 +192,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
127
192
  connectProducer(): Promise<void>;
128
193
  disconnectProducer(): Promise<void>;
129
194
  /** Subscribe to topics and start consuming messages with the given handler. */
130
- startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
195
+ startConsumer<K extends Array<keyof T>>(topics: K | TopicDescriptor[], handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
131
196
  stopConsumer(): Promise<void>;
132
197
  /** Check broker connectivity and return available topics. */
133
198
  checkStatus(): Promise<{
@@ -136,6 +201,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
136
201
  getClientId(): ClientId;
137
202
  /** Gracefully disconnect producer, consumer, and admin. */
138
203
  disconnect(): Promise<void>;
204
+ private processMessage;
139
205
  private sendToDlq;
140
206
  private sleep;
141
207
  }
@@ -152,12 +218,16 @@ interface KafkaModuleOptions {
152
218
  brokers: string[];
153
219
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
154
220
  isGlobal?: boolean;
221
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
222
+ autoCreateTopics?: boolean;
155
223
  }
156
224
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
157
225
  interface KafkaModuleAsyncOptions {
158
226
  name?: string;
159
227
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
160
228
  isGlobal?: boolean;
229
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
230
+ autoCreateTopics?: boolean;
161
231
  imports?: any[];
162
232
  useFactory: (...args: any[]) => KafkaModuleOptions | Promise<KafkaModuleOptions>;
163
233
  inject?: any[];
@@ -173,6 +243,22 @@ declare class KafkaModule {
173
243
  static registerAsync<T extends TopicMapConstraint<T>>(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
174
244
  }
175
245
 
246
+ /** Error thrown when a consumer message handler fails. */
247
+ declare class KafkaProcessingError extends Error {
248
+ readonly topic: string;
249
+ readonly originalMessage: unknown;
250
+ constructor(message: string, topic: string, originalMessage: unknown, options?: {
251
+ cause?: Error;
252
+ });
253
+ }
254
+ /** Error thrown when all retry attempts are exhausted for a message. */
255
+ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
256
+ readonly attempts: number;
257
+ constructor(topic: string, originalMessage: unknown, attempts: number, options?: {
258
+ cause?: Error;
259
+ });
260
+ }
261
+
176
262
  /** Default DI token for the Kafka client. */
177
263
  declare const KAFKA_CLIENT = "KAFKA_CLIENT";
178
264
  /** Returns the DI token for a named (or default) Kafka client instance. */
@@ -190,7 +276,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
190
276
  * Decorator that auto-subscribes a method to Kafka topics on module init.
191
277
  * The decorated method receives `(message, topic)` for each consumed message.
192
278
  */
193
- declare const SubscribeTo: (topics: string | string[], options?: ConsumerOptions & {
279
+ declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
194
280
  clientName?: string;
195
281
  }) => MethodDecorator;
196
282
 
@@ -215,4 +301,4 @@ declare class KafkaHealthIndicator {
215
301
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
216
302
  }
217
303
 
218
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicMapConstraint, type TransactionContext, getKafkaClientToken };
304
+ export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
package/dist/index.d.ts CHANGED
@@ -1,6 +1,50 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * A typed topic descriptor that pairs a topic name with its message type.
6
+ * Created via the `topic()` factory function.
7
+ *
8
+ * @typeParam N - The literal topic name string.
9
+ * @typeParam M - The message payload type for this topic.
10
+ */
11
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
12
+ readonly __topic: N;
13
+ /** @internal Phantom type — never has a real value at runtime. */
14
+ readonly __type: M;
15
+ }
16
+ /**
17
+ * Define a typed topic descriptor.
18
+ *
19
+ * @example
20
+ * ```ts
21
+ * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
+ *
23
+ * // Use with KafkaClient:
24
+ * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
+ *
26
+ * // Use with @SubscribeTo:
27
+ * @SubscribeTo(OrderCreated)
28
+ * async handleOrder(msg) { ... }
29
+ * ```
30
+ */
31
+ declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
32
+ /**
33
+ * Build a topic-message map type from a union of TopicDescriptors.
34
+ *
35
+ * @example
36
+ * ```ts
37
+ * const OrderCreated = topic('order.created')<{ orderId: string }>();
38
+ * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
39
+ *
40
+ * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
41
+ * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
42
+ * ```
43
+ */
44
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
45
+ [K in D as K["__topic"]]: K["__type"];
46
+ };
47
+
4
48
  /**
5
49
  * Mapping of topic names to their message types.
6
50
  * Define this interface to get type-safe publish/subscribe across your app.
@@ -74,11 +118,17 @@ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap
74
118
  /** Context passed to the `transaction()` callback with type-safe send methods. */
75
119
  interface TransactionContext<T extends TopicMapConstraint<T>> {
76
120
  send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
121
+ send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
77
122
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
78
123
  value: T[K];
79
124
  key?: string;
80
125
  headers?: MessageHeaders;
81
126
  }>): Promise<void>;
127
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
128
+ value: D["__type"];
129
+ key?: string;
130
+ headers?: MessageHeaders;
131
+ }>): Promise<void>;
82
132
  }
83
133
  /** Interface describing all public methods of the Kafka client. */
84
134
  interface IKafkaClient<T extends TopicMapConstraint<T>> {
@@ -97,6 +147,12 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
97
147
  getClientId: () => ClientId;
98
148
  disconnect(): Promise<void>;
99
149
  }
150
+ /** Options for `KafkaClient` constructor. */
151
+ interface KafkaClientOptions {
152
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
153
+ autoCreateTopics?: boolean;
154
+ }
155
+
100
156
  /**
101
157
  * Type-safe Kafka client for NestJS.
102
158
  * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
@@ -109,13 +165,22 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
109
165
  private readonly consumer;
110
166
  private readonly admin;
111
167
  private readonly logger;
112
- private isConsumerRunning;
168
+ private readonly autoCreateTopicsEnabled;
169
+ private readonly ensuredTopics;
113
170
  private isAdminConnected;
114
171
  readonly clientId: ClientId;
115
- constructor(clientId: ClientId, groupId: GroupId, brokers: string[]);
116
- /** Send a single typed message to a topic. */
172
+ constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
173
+ private resolveTopicName;
174
+ private ensureTopic;
175
+ /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
176
+ sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
117
177
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
118
- /** Send multiple typed messages to a topic in one call. */
178
+ /** Send multiple typed messages in one call. Accepts a topic key or a TopicDescriptor. */
179
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
180
+ value: D["__type"];
181
+ key?: string;
182
+ headers?: MessageHeaders;
183
+ }>): Promise<void>;
119
184
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
120
185
  value: T[K];
121
186
  key?: string;
@@ -127,7 +192,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
127
192
  connectProducer(): Promise<void>;
128
193
  disconnectProducer(): Promise<void>;
129
194
  /** Subscribe to topics and start consuming messages with the given handler. */
130
- startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
195
+ startConsumer<K extends Array<keyof T>>(topics: K | TopicDescriptor[], handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
131
196
  stopConsumer(): Promise<void>;
132
197
  /** Check broker connectivity and return available topics. */
133
198
  checkStatus(): Promise<{
@@ -136,6 +201,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
136
201
  getClientId(): ClientId;
137
202
  /** Gracefully disconnect producer, consumer, and admin. */
138
203
  disconnect(): Promise<void>;
204
+ private processMessage;
139
205
  private sendToDlq;
140
206
  private sleep;
141
207
  }
@@ -152,12 +218,16 @@ interface KafkaModuleOptions {
152
218
  brokers: string[];
153
219
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
154
220
  isGlobal?: boolean;
221
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
222
+ autoCreateTopics?: boolean;
155
223
  }
156
224
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
157
225
  interface KafkaModuleAsyncOptions {
158
226
  name?: string;
159
227
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
160
228
  isGlobal?: boolean;
229
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
230
+ autoCreateTopics?: boolean;
161
231
  imports?: any[];
162
232
  useFactory: (...args: any[]) => KafkaModuleOptions | Promise<KafkaModuleOptions>;
163
233
  inject?: any[];
@@ -173,6 +243,22 @@ declare class KafkaModule {
173
243
  static registerAsync<T extends TopicMapConstraint<T>>(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
174
244
  }
175
245
 
246
+ /** Error thrown when a consumer message handler fails. */
247
+ declare class KafkaProcessingError extends Error {
248
+ readonly topic: string;
249
+ readonly originalMessage: unknown;
250
+ constructor(message: string, topic: string, originalMessage: unknown, options?: {
251
+ cause?: Error;
252
+ });
253
+ }
254
+ /** Error thrown when all retry attempts are exhausted for a message. */
255
+ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
256
+ readonly attempts: number;
257
+ constructor(topic: string, originalMessage: unknown, attempts: number, options?: {
258
+ cause?: Error;
259
+ });
260
+ }
261
+
176
262
  /** Default DI token for the Kafka client. */
177
263
  declare const KAFKA_CLIENT = "KAFKA_CLIENT";
178
264
  /** Returns the DI token for a named (or default) Kafka client instance. */
@@ -190,7 +276,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
190
276
  * Decorator that auto-subscribes a method to Kafka topics on module init.
191
277
  * The decorated method receives `(message, topic)` for each consumed message.
192
278
  */
193
- declare const SubscribeTo: (topics: string | string[], options?: ConsumerOptions & {
279
+ declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
194
280
  clientName?: string;
195
281
  }) => MethodDecorator;
196
282
 
@@ -215,4 +301,4 @@ declare class KafkaHealthIndicator {
215
301
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
216
302
  }
217
303
 
218
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicMapConstraint, type TransactionContext, getKafkaClientToken };
304
+ export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };