@drarzter/kafka-client 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -13,6 +13,7 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
13
13
  ## Why?
14
14
 
15
15
  - **Typed topics** — you define a map of topic -> message shape, and the compiler won't let you send wrong data to wrong topic
16
+ - **Topic descriptors** — `topic()` DX sugar lets you define topics as standalone typed objects instead of string keys
16
17
  - **NestJS-native** — `register()` / `registerAsync()`, DI injection, lifecycle hooks out of the box
17
18
  - **Idempotent producer** — `acks: -1`, `idempotent: true` by default
18
19
  - **Retry + DLQ** — configurable retries with backoff, dead letter queue for failed messages
@@ -21,6 +22,8 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
21
22
  - **Custom headers** — attach metadata headers to messages
22
23
  - **Transactions** — exactly-once semantics with `producer.transaction()`
23
24
  - **Consumer interceptors** — before/after/onError hooks for message processing
25
+ - **Auto-create topics** — `autoCreateTopics: true` for dev mode — no need to pre-create topics
26
+ - **Error classes** — `KafkaProcessingError` and `KafkaRetryExhaustedError` with topic, message, and attempt metadata
24
27
  - **Health check** — built-in health indicator for monitoring
25
28
  - **Multiple consumer groups** — named clients for different bounded contexts
26
29
  - **Declarative & imperative** — use `@SubscribeTo()` decorator or `startConsumer()` directly
@@ -126,6 +129,48 @@ export type OrdersTopicMap = {
126
129
  };
127
130
  ```
128
131
 
132
+ #### Alternative: `topic()` descriptors
133
+
134
+ Instead of a centralized topic map, define each topic as a standalone typed object:
135
+
136
+ ```typescript
137
+ import { topic, TopicsFrom } from '@drarzter/kafka-client';
138
+
139
+ export const OrderCreated = topic('order.created')<{
140
+ orderId: string;
141
+ userId: string;
142
+ amount: number;
143
+ }>();
144
+
145
+ export const OrderCompleted = topic('order.completed')<{
146
+ orderId: string;
147
+ completedAt: string;
148
+ }>();
149
+
150
+ // Combine into a topic map for KafkaModule generics
151
+ export type OrdersTopicMap = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
152
+ ```
153
+
154
+ Topic descriptors work everywhere strings work — `sendMessage`, `sendBatch`, `transaction`, `startConsumer`, and `@SubscribeTo()`:
155
+
156
+ ```typescript
157
+ // Sending
158
+ await kafka.sendMessage(OrderCreated, { orderId: '123', userId: '456', amount: 100 });
159
+ await kafka.sendBatch(OrderCreated, [{ value: { orderId: '1', userId: '10', amount: 50 } }]);
160
+
161
+ // Transactions
162
+ await kafka.transaction(async (tx) => {
163
+ await tx.send(OrderCreated, { orderId: '123', userId: '456', amount: 100 });
164
+ });
165
+
166
+ // Consuming (decorator)
167
+ @SubscribeTo(OrderCreated)
168
+ async handleOrder(message: OrdersTopicMap['order.created']) { ... }
169
+
170
+ // Consuming (imperative)
171
+ await kafka.startConsumer([OrderCreated], handler);
172
+ ```
173
+
129
174
  ### 2. Register the module
130
175
 
131
176
  ```typescript
@@ -138,12 +183,15 @@ import { OrdersTopicMap } from './orders.types';
138
183
  clientId: 'my-service',
139
184
  groupId: 'my-consumer-group',
140
185
  brokers: ['localhost:9092'],
186
+ autoCreateTopics: true, // auto-create topics on first use (dev mode)
141
187
  }),
142
188
  ],
143
189
  })
144
190
  export class OrdersModule {}
145
191
  ```
146
192
 
193
+ `autoCreateTopics` calls `admin.createTopics()` (idempotent — no-op if topic already exists) before the first send/consume for each topic. Useful in development, not recommended for production.
194
+
147
195
  Or with `ConfigService`:
148
196
 
149
197
  ```typescript
@@ -416,6 +464,50 @@ Multiple interceptors run in order. All hooks are optional.
416
464
  | `dlq` | `false` | Send to `{topic}.dlq` after all retries exhausted |
417
465
  | `interceptors` | `[]` | Array of before/after/onError hooks |
418
466
 
467
+ ### Module options
468
+
469
+ | Option | Default | Description |
470
+ |--------|---------|-------------|
471
+ | `clientId` | — | Kafka client identifier |
472
+ | `groupId` | — | Consumer group ID |
473
+ | `brokers` | — | Array of broker addresses |
474
+ | `name` | — | Named client for multi-group setups |
475
+ | `isGlobal` | `false` | Make the client available in all modules |
476
+ | `autoCreateTopics` | `false` | Auto-create topics on first send/consume |
477
+
478
+ ## Error classes
479
+
480
+ When a consumer message handler fails after all retries, the library produces typed error objects (passed to `onError` interceptors):
481
+
482
+ ```typescript
483
+ import { KafkaProcessingError, KafkaRetryExhaustedError } from '@drarzter/kafka-client';
484
+ ```
485
+
486
+ **`KafkaProcessingError`** — base class for processing failures. It exposes `topic` and `originalMessage` properties and supports the standard `cause` option:
487
+
488
+ ```typescript
489
+ const err = new KafkaProcessingError('handler failed', 'order.created', rawMessage, { cause: originalError });
490
+ err.topic; // 'order.created'
491
+ err.originalMessage; // the parsed message object
492
+ err.cause; // the original error
493
+ ```
494
+
495
+ **`KafkaRetryExhaustedError`** — thrown after all retries are exhausted. Extends `KafkaProcessingError` and adds `attempts`:
496
+
497
+ ```typescript
498
+ // In an onError interceptor:
499
+ const interceptor: ConsumerInterceptor<MyTopics> = {
500
+ onError: (message, topic, error) => {
501
+ if (error instanceof KafkaRetryExhaustedError) {
502
+ console.log(`Failed after ${error.attempts} attempts on ${error.topic}`);
503
+ console.log('Last error:', error.cause);
504
+ }
505
+ },
506
+ };
507
+ ```
508
+
509
+ When `retry.maxRetries` is set and all attempts fail, `KafkaRetryExhaustedError` is passed to `onError` interceptors automatically.
510
+
419
511
  ## Health check
420
512
 
421
513
  Monitor Kafka connectivity with the built-in health indicator:
@@ -464,7 +556,7 @@ Both suites run in CI on every push to `main`.
464
556
 
465
557
  ```
466
558
  src/
467
- ├── client/ # KafkaClient, types, interfaces
559
+ ├── client/ # KafkaClient, types, topic(), error classes
468
560
  ├── module/ # KafkaModule, KafkaExplorer, DI constants
469
561
  ├── decorators/ # @InjectKafkaClient(), @SubscribeTo()
470
562
  ├── health/ # KafkaHealthIndicator
package/dist/index.d.mts CHANGED
@@ -1,6 +1,50 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * A typed topic descriptor that pairs a topic name with its message type.
6
+ * Created via the `topic()` factory function.
7
+ *
8
+ * @typeParam N - The literal topic name string.
9
+ * @typeParam M - The message payload type for this topic.
10
+ */
11
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
12
+ readonly __topic: N;
13
+ /** @internal Phantom type — never has a real value at runtime. */
14
+ readonly __type: M;
15
+ }
16
+ /**
17
+ * Define a typed topic descriptor.
18
+ *
19
+ * @example
20
+ * ```ts
21
+ * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
+ *
23
+ * // Use with KafkaClient:
24
+ * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
+ *
26
+ * // Use with @SubscribeTo:
27
+ * @SubscribeTo(OrderCreated)
28
+ * async handleOrder(msg) { ... }
29
+ * ```
30
+ */
31
+ declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
32
+ /**
33
+ * Build a topic-message map type from a union of TopicDescriptors.
34
+ *
35
+ * @example
36
+ * ```ts
37
+ * const OrderCreated = topic('order.created')<{ orderId: string }>();
38
+ * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
39
+ *
40
+ * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
41
+ * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
42
+ * ```
43
+ */
44
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
45
+ [K in D as K["__topic"]]: K["__type"];
46
+ };
47
+
4
48
  /**
5
49
  * Mapping of topic names to their message types.
6
50
  * Define this interface to get type-safe publish/subscribe across your app.
@@ -74,11 +118,17 @@ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap
74
118
  /** Context passed to the `transaction()` callback with type-safe send methods. */
75
119
  interface TransactionContext<T extends TopicMapConstraint<T>> {
76
120
  send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
121
+ send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
77
122
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
78
123
  value: T[K];
79
124
  key?: string;
80
125
  headers?: MessageHeaders;
81
126
  }>): Promise<void>;
127
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
128
+ value: D["__type"];
129
+ key?: string;
130
+ headers?: MessageHeaders;
131
+ }>): Promise<void>;
82
132
  }
83
133
  /** Interface describing all public methods of the Kafka client. */
84
134
  interface IKafkaClient<T extends TopicMapConstraint<T>> {
@@ -86,6 +136,7 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
86
136
  topics: string[];
87
137
  }>;
88
138
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
139
+ startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
89
140
  stopConsumer(): Promise<void>;
90
141
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
91
142
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
@@ -97,6 +148,12 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
97
148
  getClientId: () => ClientId;
98
149
  disconnect(): Promise<void>;
99
150
  }
151
+ /** Options for `KafkaClient` constructor. */
152
+ interface KafkaClientOptions {
153
+ /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, `transaction`, or `startConsumer` for each topic. Useful for development — not recommended in production. */
154
+ autoCreateTopics?: boolean;
155
+ }
156
+
100
157
  /**
101
158
  * Type-safe Kafka client for NestJS.
102
159
  * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
@@ -109,13 +166,22 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
109
166
  private readonly consumer;
110
167
  private readonly admin;
111
168
  private readonly logger;
112
- private isConsumerRunning;
169
+ private readonly autoCreateTopicsEnabled;
170
+ private readonly ensuredTopics;
113
171
  private isAdminConnected;
114
172
  readonly clientId: ClientId;
115
- constructor(clientId: ClientId, groupId: GroupId, brokers: string[]);
116
- /** Send a single typed message to a topic. */
173
+ constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
174
+ private resolveTopicName;
175
+ private ensureTopic;
176
+ /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
177
+ sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
117
178
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
118
- /** Send multiple typed messages to a topic in one call. */
179
+ /** Send multiple typed messages in one call. Accepts a topic key or a TopicDescriptor. */
180
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
181
+ value: D["__type"];
182
+ key?: string;
183
+ headers?: MessageHeaders;
184
+ }>): Promise<void>;
119
185
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
120
186
  value: T[K];
121
187
  key?: string;
@@ -128,6 +194,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
128
194
  disconnectProducer(): Promise<void>;
129
195
  /** Subscribe to topics and start consuming messages with the given handler. */
130
196
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
197
+ startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
131
198
  stopConsumer(): Promise<void>;
132
199
  /** Check broker connectivity and return available topics. */
133
200
  checkStatus(): Promise<{
@@ -136,6 +203,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
136
203
  getClientId(): ClientId;
137
204
  /** Gracefully disconnect producer, consumer, and admin. */
138
205
  disconnect(): Promise<void>;
206
+ private processMessage;
139
207
  private sendToDlq;
140
208
  private sleep;
141
209
  }
@@ -152,12 +220,16 @@ interface KafkaModuleOptions {
152
220
  brokers: string[];
153
221
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
154
222
  isGlobal?: boolean;
223
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
224
+ autoCreateTopics?: boolean;
155
225
  }
156
226
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
157
227
  interface KafkaModuleAsyncOptions {
158
228
  name?: string;
159
229
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
160
230
  isGlobal?: boolean;
231
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
232
+ autoCreateTopics?: boolean;
161
233
  imports?: any[];
162
234
  useFactory: (...args: any[]) => KafkaModuleOptions | Promise<KafkaModuleOptions>;
163
235
  inject?: any[];
@@ -173,6 +245,23 @@ declare class KafkaModule {
173
245
  static registerAsync<T extends TopicMapConstraint<T>>(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
174
246
  }
175
247
 
248
+ /** Error thrown when a consumer message handler fails. */
249
+ declare class KafkaProcessingError extends Error {
250
+ readonly topic: string;
251
+ readonly originalMessage: unknown;
252
+ readonly cause?: Error;
253
+ constructor(message: string, topic: string, originalMessage: unknown, options?: {
254
+ cause?: Error;
255
+ });
256
+ }
257
+ /** Error thrown when all retry attempts are exhausted for a message. */
258
+ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
259
+ readonly attempts: number;
260
+ constructor(topic: string, originalMessage: unknown, attempts: number, options?: {
261
+ cause?: Error;
262
+ });
263
+ }
264
+
176
265
  /** Default DI token for the Kafka client. */
177
266
  declare const KAFKA_CLIENT = "KAFKA_CLIENT";
178
267
  /** Returns the DI token for a named (or default) Kafka client instance. */
@@ -190,7 +279,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
190
279
  * Decorator that auto-subscribes a method to Kafka topics on module init.
191
280
  * The decorated method receives `(message, topic)` for each consumed message.
192
281
  */
193
- declare const SubscribeTo: (topics: string | string[], options?: ConsumerOptions & {
282
+ declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
194
283
  clientName?: string;
195
284
  }) => MethodDecorator;
196
285
 
@@ -215,4 +304,4 @@ declare class KafkaHealthIndicator {
215
304
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
216
305
  }
217
306
 
218
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicMapConstraint, type TransactionContext, getKafkaClientToken };
307
+ export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
package/dist/index.d.ts CHANGED
@@ -1,6 +1,50 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * A typed topic descriptor that pairs a topic name with its message type.
6
+ * Created via the `topic()` factory function.
7
+ *
8
+ * @typeParam N - The literal topic name string.
9
+ * @typeParam M - The message payload type for this topic.
10
+ */
11
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
12
+ readonly __topic: N;
13
+ /** @internal Phantom type — never has a real value at runtime. */
14
+ readonly __type: M;
15
+ }
16
+ /**
17
+ * Define a typed topic descriptor.
18
+ *
19
+ * @example
20
+ * ```ts
21
+ * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
+ *
23
+ * // Use with KafkaClient:
24
+ * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
+ *
26
+ * // Use with @SubscribeTo:
27
+ * @SubscribeTo(OrderCreated)
28
+ * async handleOrder(msg) { ... }
29
+ * ```
30
+ */
31
+ declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
32
+ /**
33
+ * Build a topic-message map type from a union of TopicDescriptors.
34
+ *
35
+ * @example
36
+ * ```ts
37
+ * const OrderCreated = topic('order.created')<{ orderId: string }>();
38
+ * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
39
+ *
40
+ * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
41
+ * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
42
+ * ```
43
+ */
44
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
45
+ [K in D as K["__topic"]]: K["__type"];
46
+ };
47
+
4
48
  /**
5
49
  * Mapping of topic names to their message types.
6
50
  * Define this interface to get type-safe publish/subscribe across your app.
@@ -74,11 +118,17 @@ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap
74
118
  /** Context passed to the `transaction()` callback with type-safe send methods. */
75
119
  interface TransactionContext<T extends TopicMapConstraint<T>> {
76
120
  send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
121
+ send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
77
122
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
78
123
  value: T[K];
79
124
  key?: string;
80
125
  headers?: MessageHeaders;
81
126
  }>): Promise<void>;
127
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
128
+ value: D["__type"];
129
+ key?: string;
130
+ headers?: MessageHeaders;
131
+ }>): Promise<void>;
82
132
  }
83
133
  /** Interface describing all public methods of the Kafka client. */
84
134
  interface IKafkaClient<T extends TopicMapConstraint<T>> {
@@ -86,6 +136,7 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
86
136
  topics: string[];
87
137
  }>;
88
138
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
139
+ startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
89
140
  stopConsumer(): Promise<void>;
90
141
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
91
142
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
@@ -97,6 +148,12 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
97
148
  getClientId: () => ClientId;
98
149
  disconnect(): Promise<void>;
99
150
  }
151
+ /** Options for `KafkaClient` constructor. */
152
+ interface KafkaClientOptions {
153
+ /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, `transaction`, or `startConsumer` for each topic. Useful for development — not recommended in production. */
154
+ autoCreateTopics?: boolean;
155
+ }
156
+
100
157
  /**
101
158
  * Type-safe Kafka client for NestJS.
102
159
  * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
@@ -109,13 +166,22 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
109
166
  private readonly consumer;
110
167
  private readonly admin;
111
168
  private readonly logger;
112
- private isConsumerRunning;
169
+ private readonly autoCreateTopicsEnabled;
170
+ private readonly ensuredTopics;
113
171
  private isAdminConnected;
114
172
  readonly clientId: ClientId;
115
- constructor(clientId: ClientId, groupId: GroupId, brokers: string[]);
116
- /** Send a single typed message to a topic. */
173
+ constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
174
+ private resolveTopicName;
175
+ private ensureTopic;
176
+ /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
177
+ sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
117
178
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
118
- /** Send multiple typed messages to a topic in one call. */
179
+ /** Send multiple typed messages in one call. Accepts a topic key or a TopicDescriptor. */
180
+ sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
181
+ value: D["__type"];
182
+ key?: string;
183
+ headers?: MessageHeaders;
184
+ }>): Promise<void>;
119
185
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
120
186
  value: T[K];
121
187
  key?: string;
@@ -128,6 +194,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
128
194
  disconnectProducer(): Promise<void>;
129
195
  /** Subscribe to topics and start consuming messages with the given handler. */
130
196
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
197
+ startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
131
198
  stopConsumer(): Promise<void>;
132
199
  /** Check broker connectivity and return available topics. */
133
200
  checkStatus(): Promise<{
@@ -136,6 +203,7 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
136
203
  getClientId(): ClientId;
137
204
  /** Gracefully disconnect producer, consumer, and admin. */
138
205
  disconnect(): Promise<void>;
206
+ private processMessage;
139
207
  private sendToDlq;
140
208
  private sleep;
141
209
  }
@@ -152,12 +220,16 @@ interface KafkaModuleOptions {
152
220
  brokers: string[];
153
221
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
154
222
  isGlobal?: boolean;
223
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
224
+ autoCreateTopics?: boolean;
155
225
  }
156
226
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
157
227
  interface KafkaModuleAsyncOptions {
158
228
  name?: string;
159
229
  /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */
160
230
  isGlobal?: boolean;
231
+ /** Auto-create topics via admin on first use (send/consume). Useful for development. */
232
+ autoCreateTopics?: boolean;
161
233
  imports?: any[];
162
234
  useFactory: (...args: any[]) => KafkaModuleOptions | Promise<KafkaModuleOptions>;
163
235
  inject?: any[];
@@ -173,6 +245,23 @@ declare class KafkaModule {
173
245
  static registerAsync<T extends TopicMapConstraint<T>>(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
174
246
  }
175
247
 
248
+ /** Error thrown when a consumer message handler fails. */
249
+ declare class KafkaProcessingError extends Error {
250
+ readonly topic: string;
251
+ readonly originalMessage: unknown;
252
+ readonly cause?: Error;
253
+ constructor(message: string, topic: string, originalMessage: unknown, options?: {
254
+ cause?: Error;
255
+ });
256
+ }
257
+ /** Error thrown when all retry attempts are exhausted for a message. */
258
+ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
259
+ readonly attempts: number;
260
+ constructor(topic: string, originalMessage: unknown, attempts: number, options?: {
261
+ cause?: Error;
262
+ });
263
+ }
264
+
176
265
  /** Default DI token for the Kafka client. */
177
266
  declare const KAFKA_CLIENT = "KAFKA_CLIENT";
178
267
  /** Returns the DI token for a named (or default) Kafka client instance. */
@@ -190,7 +279,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
190
279
  * Decorator that auto-subscribes a method to Kafka topics on module init.
191
280
  * The decorated method receives `(message, topic)` for each consumed message.
192
281
  */
193
- declare const SubscribeTo: (topics: string | string[], options?: ConsumerOptions & {
282
+ declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
194
283
  clientName?: string;
195
284
  }) => MethodDecorator;
196
285
 
@@ -215,4 +304,4 @@ declare class KafkaHealthIndicator {
215
304
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
216
305
  }
217
306
 
218
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicMapConstraint, type TransactionContext, getKafkaClientToken };
307
+ export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };