@drarzter/kafka-client 0.1.1 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Rostislav Chapega
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md CHANGED
@@ -1,5 +1,9 @@
1
1
  # @drarzter/kafka-client
2
2
 
3
+ [![npm version](https://img.shields.io/npm/v/@drarzter/kafka-client)](https://www.npmjs.com/package/@drarzter/kafka-client)
4
+ [![CI](https://github.com/drarzter/kafka-client/actions/workflows/publish.yml/badge.svg)](https://github.com/drarzter/kafka-client/actions/workflows/publish.yml)
5
+ [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
6
+
3
7
  Type-safe Kafka client wrapper for NestJS. Built on top of [kafkajs](https://kafka.js.org/).
4
8
 
5
9
  ## What is this?
@@ -8,10 +12,18 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
8
12
 
9
13
  ## Why?
10
14
 
11
- - **Typed topics** — you define a map of topic message shape, and the compiler won't let you send wrong data to wrong topic
15
+ - **Typed topics** — you define a map of topic -> message shape, and the compiler won't let you send the wrong data to the wrong topic
12
16
  - **NestJS-native** — `register()` / `registerAsync()`, DI injection, lifecycle hooks out of the box
13
17
  - **Idempotent producer** — `acks: -1`, `idempotent: true` by default
14
- - **Minimal** — ~150 lines of code, no magic
18
+ - **Retry + DLQ** — configurable retries with backoff, dead letter queue for failed messages
19
+ - **Batch sending** — send multiple messages in a single request
20
+ - **Partition key support** — route related messages to the same partition
21
+ - **Custom headers** — attach metadata headers to messages
22
+ - **Transactions** — exactly-once semantics with `producer.transaction()`
23
+ - **Consumer interceptors** — before/after/onError hooks for message processing
24
+ - **Health check** — built-in health indicator for monitoring
25
+ - **Multiple consumer groups** — named clients for different bounded contexts
26
+ - **Declarative & imperative** — use `@SubscribeTo()` decorator or `startConsumer()` directly
15
27
 
16
28
  ## Installation
17
29
 
@@ -21,7 +33,63 @@ npm install @drarzter/kafka-client
21
33
  pnpm add @drarzter/kafka-client
22
34
  ```
23
35
 
24
- Peer dependencies: `@nestjs/common`, `reflect-metadata`, `rxjs`
36
+ Peer dependencies: `@nestjs/common`, `@nestjs/core`, `reflect-metadata`, `rxjs`
37
+
38
+ ## Quick start
39
+
40
+ Send and receive a message in 3 files:
41
+
42
+ ```typescript
43
+ // types.ts
44
+ import { TTopicMessageMap } from '@drarzter/kafka-client';
45
+
46
+ export interface MyTopics extends TTopicMessageMap {
47
+ 'hello': { text: string };
48
+ }
49
+ ```
50
+
51
+ ```typescript
52
+ // app.module.ts
53
+ import { Module } from '@nestjs/common';
54
+ import { KafkaModule } from '@drarzter/kafka-client';
55
+ import { MyTopics } from './types';
56
+ import { AppService } from './app.service';
57
+
58
+ @Module({
59
+ imports: [
60
+ KafkaModule.register<MyTopics>({
61
+ clientId: 'my-app',
62
+ groupId: 'my-group',
63
+ brokers: ['localhost:9092'],
64
+ }),
65
+ ],
66
+ providers: [AppService],
67
+ })
68
+ export class AppModule {}
69
+ ```
70
+
71
+ ```typescript
72
+ // app.service.ts
73
+ import { Injectable } from '@nestjs/common';
74
+ import { InjectKafkaClient, KafkaClient, SubscribeTo } from '@drarzter/kafka-client';
75
+ import { MyTopics } from './types';
76
+
77
+ @Injectable()
78
+ export class AppService {
79
+ constructor(
80
+ @InjectKafkaClient() private readonly kafka: KafkaClient<MyTopics>,
81
+ ) {}
82
+
83
+ async send() {
84
+ await this.kafka.sendMessage('hello', { text: 'Hello, Kafka!' });
85
+ }
86
+
87
+ @SubscribeTo('hello')
88
+ async onHello(message: MyTopics['hello']) {
89
+ console.log('Received:', message.text);
90
+ }
91
+ }
92
+ ```
25
93
 
26
94
  ## Usage
27
95
 
@@ -78,53 +146,269 @@ KafkaModule.registerAsync<OrdersTopicMap>({
78
146
  ### 3. Inject and use
79
147
 
80
148
  ```typescript
81
- import { Injectable, Inject } from '@nestjs/common';
82
- import { KafkaClient, KAFKA_CLIENT } from '@drarzter/kafka-client';
149
+ import { Injectable } from '@nestjs/common';
150
+ import { InjectKafkaClient, KafkaClient } from '@drarzter/kafka-client';
83
151
  import { OrdersTopicMap } from './orders.types';
84
152
 
85
153
  @Injectable()
86
154
  export class OrdersService {
87
155
  constructor(
88
- @Inject(KAFKA_CLIENT)
156
+ @InjectKafkaClient()
89
157
  private readonly kafka: KafkaClient<OrdersTopicMap>,
90
158
  ) {}
91
159
 
92
160
  async createOrder() {
93
- // ✅ type-safe — compiler knows the shape
94
161
  await this.kafka.sendMessage('order.created', {
95
162
  orderId: '123',
96
163
  userId: '456',
97
164
  amount: 100,
98
165
  });
166
+ }
167
+ }
168
+ ```
99
169
 
100
- // won't compile — wrong payload for this topic
101
- // await this.kafka.sendMessage('order.created', { wrong: 'data' });
170
+ ## Consuming messages
171
+
172
+ Two ways — choose what fits your style.
173
+
174
+ ### Declarative: @SubscribeTo()
175
+
176
+ ```typescript
177
+ import { Injectable } from '@nestjs/common';
178
+ import { SubscribeTo } from '@drarzter/kafka-client';
179
+
180
+ @Injectable()
181
+ export class OrdersHandler {
182
+ @SubscribeTo('order.created')
183
+ async handleOrderCreated(message: OrdersTopicMap['order.created'], topic: string) {
184
+ console.log('New order:', message.orderId);
185
+ }
186
+
187
+ @SubscribeTo('order.completed', { retry: { maxRetries: 3 }, dlq: true })
188
+ async handleOrderCompleted(message: OrdersTopicMap['order.completed'], topic: string) {
189
+ console.log('Order completed:', message.orderId);
102
190
  }
191
+ }
192
+ ```
193
+
194
+ The module auto-discovers `@SubscribeTo()` methods on startup and subscribes them.
195
+
196
+ ### Imperative: startConsumer()
197
+
198
+ ```typescript
199
+ @Injectable()
200
+ export class OrdersService implements OnModuleInit {
201
+ constructor(
202
+ @InjectKafkaClient()
203
+ private readonly kafka: KafkaClient<OrdersTopicMap>,
204
+ ) {}
103
205
 
104
- async listen() {
206
+ async onModuleInit() {
105
207
  await this.kafka.startConsumer(
106
- ['order.created'],
208
+ ['order.created', 'order.completed'],
107
209
  async (message, topic) => {
108
- console.log(message.orderId); // typed
210
+ console.log(`${topic}:`, message);
211
+ },
212
+ {
213
+ retry: { maxRetries: 3, backoffMs: 1000 },
214
+ dlq: true,
109
215
  },
110
216
  );
111
217
  }
112
218
  }
113
219
  ```
114
220
 
115
- ### Consumer options
221
+ ## Multiple consumer groups
222
+
223
+ Register multiple named clients for different bounded contexts:
116
224
 
117
225
  ```typescript
118
- await this.kafka.startConsumer(
119
- ['order.created'],
120
- handler,
226
+ @Module({
227
+ imports: [
228
+ KafkaModule.register<OrdersTopicMap>({
229
+ name: 'orders',
230
+ clientId: 'orders-service',
231
+ groupId: 'orders-consumer',
232
+ brokers: ['localhost:9092'],
233
+ }),
234
+ KafkaModule.register<PaymentsTopicMap>({
235
+ name: 'payments',
236
+ clientId: 'payments-service',
237
+ groupId: 'payments-consumer',
238
+ brokers: ['localhost:9092'],
239
+ }),
240
+ ],
241
+ })
242
+ export class AppModule {}
243
+ ```
244
+
245
+ Inject by name — the string in `@InjectKafkaClient()` must match the `name` from `register()`:
246
+
247
+ ```typescript
248
+ @Injectable()
249
+ export class OrdersService {
250
+ constructor(
251
+ @InjectKafkaClient('orders') // ← matches name: 'orders' above
252
+ private readonly kafka: KafkaClient<OrdersTopicMap>,
253
+ ) {}
254
+ }
255
+ ```
256
+
257
+ Same with `@SubscribeTo()` — use `clientName` to target a specific named client:
258
+
259
+ ```typescript
260
+ @SubscribeTo('payment.received', { clientName: 'payments' }) // ← matches name: 'payments'
261
+ async handlePayment(message: PaymentsTopicMap['payment.received']) {
262
+ // ...
263
+ }
264
+ ```
265
+
266
+ ## Partition key
267
+
268
+ Route all events for the same order to the same partition:
269
+
270
+ ```typescript
271
+ await this.kafka.sendMessage(
272
+ 'order.created',
273
+ { orderId: '123', userId: '456', amount: 100 },
274
+ { key: '123' },
275
+ );
276
+ ```
277
+
278
+ ## Message headers
279
+
280
+ Attach metadata to messages:
281
+
282
+ ```typescript
283
+ await this.kafka.sendMessage(
284
+ 'order.created',
285
+ { orderId: '123', userId: '456', amount: 100 },
121
286
  {
122
- fromBeginning: false, // default: false
123
- autoCommit: true, // default: true
287
+ key: '123',
288
+ headers: { 'x-correlation-id': 'abc-def', 'x-source': 'api-gateway' },
124
289
  },
125
290
  );
126
291
  ```
127
292
 
293
+ Headers work with batch sending too:
294
+
295
+ ```typescript
296
+ await this.kafka.sendBatch('order.created', [
297
+ {
298
+ value: { orderId: '1', userId: '10', amount: 50 },
299
+ key: '1',
300
+ headers: { 'x-correlation-id': 'req-1' },
301
+ },
302
+ ]);
303
+ ```
304
+
305
+ ## Batch sending
306
+
307
+ ```typescript
308
+ await this.kafka.sendBatch('order.created', [
309
+ { value: { orderId: '1', userId: '10', amount: 50 }, key: '1' },
310
+ { value: { orderId: '2', userId: '20', amount: 75 }, key: '2' },
311
+ { value: { orderId: '3', userId: '30', amount: 100 }, key: '3' },
312
+ ]);
313
+ ```
314
+
315
+ ## Transactions
316
+
317
+ Send multiple messages atomically with exactly-once semantics:
318
+
319
+ ```typescript
320
+ await this.kafka.transaction(async (tx) => {
321
+ await tx.send('order.created', {
322
+ orderId: '123',
323
+ userId: '456',
324
+ amount: 100,
325
+ });
326
+ await tx.send('order.completed', {
327
+ orderId: '123',
328
+ completedAt: new Date().toISOString(),
329
+ });
330
+ // if anything throws, all messages are rolled back
331
+ });
332
+ ```
333
+
334
+ `tx.sendBatch()` is also available inside transactions.
335
+
336
+ ## Consumer interceptors
337
+
338
+ Add before/after/onError hooks to message processing:
339
+
340
+ ```typescript
341
+ import { ConsumerInterceptor } from '@drarzter/kafka-client';
342
+
343
+ const loggingInterceptor: ConsumerInterceptor<OrdersTopicMap> = {
344
+ before: (message, topic) => {
345
+ console.log(`Processing ${topic}`, message);
346
+ },
347
+ after: (message, topic) => {
348
+ console.log(`Done ${topic}`);
349
+ },
350
+ onError: (message, topic, error) => {
351
+ console.error(`Failed ${topic}:`, error.message);
352
+ },
353
+ };
354
+
355
+ await this.kafka.startConsumer(['order.created'], handler, {
356
+ interceptors: [loggingInterceptor],
357
+ });
358
+ ```
359
+
360
+ Multiple interceptors run in order. All hooks are optional.
361
+
362
+ ## Consumer options
363
+
364
+ | Option | Default | Description |
365
+ |--------|---------|-------------|
366
+ | `fromBeginning` | `false` | Read from the beginning of the topic |
367
+ | `autoCommit` | `true` | Auto-commit offsets |
368
+ | `retry.maxRetries` | — | Number of retry attempts |
369
+ | `retry.backoffMs` | `1000` | Base delay between retries (multiplied by attempt number) |
370
+ | `dlq` | `false` | Send to `{topic}.dlq` after all retries are exhausted |
371
+ | `interceptors` | `[]` | Array of before/after/onError hooks |
372
+
373
+ ## Health check
374
+
375
+ Monitor Kafka connectivity with the built-in health indicator:
376
+
377
+ ```typescript
378
+ import { Injectable } from '@nestjs/common';
379
+ import { InjectKafkaClient, KafkaClient, KafkaHealthIndicator } from '@drarzter/kafka-client';
380
+ import { OrdersTopicMap } from './orders.types';
381
+
382
+ @Injectable()
383
+ export class HealthService {
384
+ private readonly health = new KafkaHealthIndicator();
385
+
386
+ constructor(
387
+ @InjectKafkaClient()
388
+ private readonly kafka: KafkaClient<OrdersTopicMap>,
389
+ ) {}
390
+
391
+ async checkKafka() {
392
+ return this.health.check(this.kafka);
393
+ // { status: 'up', clientId: 'my-service', topics: ['order.created', ...] }
394
+ // or { status: 'down', clientId: 'my-service', error: 'Connection refused' }
395
+ }
396
+ }
397
+ ```
398
+
399
+ ## Project structure
400
+
401
+ ```
402
+ src/
403
+ ├── client/ # KafkaClient, types, interfaces
404
+ ├── module/ # KafkaModule, KafkaExplorer, DI constants
405
+ ├── decorators/ # @InjectKafkaClient(), @SubscribeTo()
406
+ ├── health/ # KafkaHealthIndicator
407
+ └── index.ts # Public API re-exports
408
+ ```
409
+
410
+ All exported types and methods have JSDoc comments — your IDE will show inline docs and autocomplete.
411
+
128
412
  ## License
129
413
 
130
- MIT
414
+ [MIT](LICENSE)
package/dist/index.d.mts CHANGED
@@ -1,61 +1,200 @@
1
- import { OnModuleDestroy, DynamicModule } from '@nestjs/common';
1
+ import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
+ import { DiscoveryService, ModuleRef } from '@nestjs/core';
2
3
 
4
+ /**
5
+ * Mapping of topic names to their message types.
6
+ * Define this interface to get type-safe publish/subscribe across your app.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * interface MyTopics extends TTopicMessageMap {
11
+ * "orders.created": { orderId: string; amount: number };
12
+ * "users.updated": { userId: string; name: string };
13
+ * }
14
+ * ```
15
+ */
3
16
  type TTopicMessageMap = {
4
17
  [topic: string]: Record<string, any>;
5
18
  };
6
19
  type ClientId = string;
7
20
  type GroupId = string;
8
- interface ConsumerOptions {
21
+ type MessageHeaders = Record<string, string>;
22
+ /** Options for sending a single message. */
23
+ interface SendOptions {
24
+ /** Partition key for message routing. */
25
+ key?: string;
26
+ /** Custom headers attached to the message. */
27
+ headers?: MessageHeaders;
28
+ }
29
+ /** Options for configuring a Kafka consumer. */
30
+ interface ConsumerOptions<T extends TTopicMessageMap = TTopicMessageMap> {
31
+ /** Start reading from earliest offset. Default: `false`. */
9
32
  fromBeginning?: boolean;
33
+ /** Automatically commit offsets. Default: `true`. */
10
34
  autoCommit?: boolean;
35
+ /** Retry policy for failed message processing. */
36
+ retry?: RetryOptions;
37
+ /** Send failed messages to a Dead Letter Queue (`<topic>.dlq`). */
38
+ dlq?: boolean;
39
+ /** Interceptors called before/after each message. */
40
+ interceptors?: ConsumerInterceptor<T>[];
41
+ }
42
+ /** Configuration for consumer retry behavior. */
43
+ interface RetryOptions {
44
+ /** Maximum number of retry attempts before giving up. */
45
+ maxRetries: number;
46
+ /** Base delay between retries in ms (multiplied by attempt number). Default: `1000`. */
47
+ backoffMs?: number;
48
+ }
49
+ /**
50
+ * Interceptor hooks for consumer message processing.
51
+ * All methods are optional — implement only what you need.
52
+ */
53
+ interface ConsumerInterceptor<T extends TTopicMessageMap = TTopicMessageMap> {
54
+ /** Called before the message handler. */
55
+ before?(message: T[keyof T], topic: string): Promise<void> | void;
56
+ /** Called after the message handler succeeds. */
57
+ after?(message: T[keyof T], topic: string): Promise<void> | void;
58
+ /** Called when the message handler throws. */
59
+ onError?(message: T[keyof T], topic: string, error: Error): Promise<void> | void;
60
+ }
61
+ /** Context passed to the `transaction()` callback with type-safe send methods. */
62
+ interface TransactionContext<T extends TTopicMessageMap> {
63
+ send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
64
+ sendBatch<K extends keyof T>(topic: K, messages: Array<{
65
+ value: T[K];
66
+ key?: string;
67
+ headers?: MessageHeaders;
68
+ }>): Promise<void>;
11
69
  }
70
+ /** Interface describing all public methods of the Kafka client. */
12
71
  interface IKafkaClient<T extends TTopicMessageMap> {
13
72
  checkStatus(): Promise<{
14
73
  topics: string[];
15
74
  }>;
16
- startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions): Promise<void>;
75
+ startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
17
76
  stopConsumer(): Promise<void>;
18
- sendMessage<K extends keyof T>(topic: K, message: T[K]): Promise<void>;
77
+ sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
78
+ sendBatch<K extends keyof T>(topic: K, messages: Array<{
79
+ value: T[K];
80
+ key?: string;
81
+ headers?: MessageHeaders;
82
+ }>): Promise<void>;
83
+ transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
19
84
  getClientId: () => ClientId;
85
+ disconnect(): Promise<void>;
20
86
  }
87
+ /**
88
+ * Type-safe Kafka client for NestJS.
89
+ * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
90
+ *
91
+ * @typeParam T - Topic-to-message type mapping for compile-time safety.
92
+ */
21
93
  declare class KafkaClient<T extends TTopicMessageMap> implements IKafkaClient<T> {
22
94
  private readonly kafka;
23
95
  private readonly producer;
24
96
  private readonly consumer;
25
97
  private readonly admin;
26
98
  private readonly logger;
99
+ private isConsumerRunning;
27
100
  readonly clientId: ClientId;
28
101
  constructor(clientId: ClientId, groupId: GroupId, brokers: string[]);
29
- sendMessage<K extends keyof T>(topic: K, message: T[K]): Promise<void>;
102
+ /** Send a single typed message to a topic. */
103
+ sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
104
+ /** Send multiple typed messages to a topic in one call. */
105
+ sendBatch<K extends keyof T>(topic: K, messages: Array<{
106
+ value: T[K];
107
+ key?: string;
108
+ headers?: MessageHeaders;
109
+ }>): Promise<void>;
110
+ /** Execute multiple sends atomically. Commits on success, aborts on error. */
111
+ transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
112
+ /** Connect the idempotent producer. Called automatically by `KafkaModule.register()`. */
30
113
  connectProducer(): Promise<void>;
31
114
  disconnectProducer(): Promise<void>;
32
- startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions): Promise<void>;
115
+ /** Subscribe to topics and start consuming messages with the given handler. */
116
+ startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
33
117
  stopConsumer(): Promise<void>;
118
+ /** Check broker connectivity and return available topics. */
34
119
  checkStatus(): Promise<{
35
120
  topics: string[];
36
121
  }>;
37
122
  getClientId(): ClientId;
123
+ /** Gracefully disconnect producer and consumer. */
38
124
  disconnect(): Promise<void>;
125
+ private sendToDlq;
126
+ private sleep;
39
127
  }
40
128
 
129
+ /** Synchronous configuration for `KafkaModule.register()`. */
41
130
  interface KafkaModuleOptions {
131
+ /** Optional name for multi-client setups. Must match `@InjectKafkaClient(name)`. */
132
+ name?: string;
133
+ /** Unique Kafka client identifier. */
42
134
  clientId: ClientId;
135
+ /** Consumer group identifier. */
43
136
  groupId: GroupId;
137
+ /** List of Kafka broker addresses. */
44
138
  brokers: string[];
45
139
  }
140
+ /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
46
141
  interface KafkaModuleAsyncOptions {
142
+ name?: string;
47
143
  imports?: any[];
48
144
  useFactory: (...args: any[]) => KafkaModuleOptions | Promise<KafkaModuleOptions>;
49
145
  inject?: any[];
50
146
  }
51
- declare class KafkaModule implements OnModuleDestroy {
52
- private readonly client?;
53
- constructor(client?: KafkaClient<any> | undefined);
54
- onModuleDestroy(): Promise<void>;
147
+ /**
148
+ * NestJS dynamic module for registering type-safe Kafka clients.
149
+ * Use `register()` for static config or `registerAsync()` for DI-based config.
150
+ */
151
+ declare class KafkaModule {
152
+ /** Register a Kafka client with static options. */
55
153
  static register<T extends TTopicMessageMap>(options: KafkaModuleOptions): DynamicModule;
154
+ /** Register a Kafka client with async/factory-based options. */
56
155
  static registerAsync<T extends TTopicMessageMap>(asyncOptions: KafkaModuleAsyncOptions): DynamicModule;
57
156
  }
58
157
 
158
+ /** Default DI token for the Kafka client. */
59
159
  declare const KAFKA_CLIENT = "KAFKA_CLIENT";
160
+ /** Returns the DI token for a named (or default) Kafka client instance. */
161
+ declare const getKafkaClientToken: (name?: string) => string;
162
+
163
+ declare const KAFKA_SUBSCRIBER_METADATA = "KAFKA_SUBSCRIBER_METADATA";
164
+ interface KafkaSubscriberMetadata {
165
+ topics: string[];
166
+ options?: ConsumerOptions;
167
+ clientName?: string;
168
+ }
169
+ /** Inject a `KafkaClient` instance. Pass a name to target a specific named client. */
170
+ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
171
+ /**
172
+ * Decorator that auto-subscribes a method to Kafka topics on module init.
173
+ * The decorated method receives `(message, topic)` for each consumed message.
174
+ */
175
+ declare const SubscribeTo: (topics: string | string[], options?: ConsumerOptions & {
176
+ clientName?: string;
177
+ }) => MethodDecorator;
178
+
179
+ /** Discovers `@SubscribeTo()` decorators and wires them to their Kafka clients on startup. */
180
+ declare class KafkaExplorer implements OnModuleInit {
181
+ private readonly discoveryService;
182
+ private readonly moduleRef;
183
+ private readonly logger;
184
+ constructor(discoveryService: DiscoveryService, moduleRef: ModuleRef);
185
+ onModuleInit(): Promise<void>;
186
+ }
187
+
188
+ /** Result returned by `KafkaHealthIndicator.check()`. */
189
+ interface KafkaHealthResult {
190
+ status: "up" | "down";
191
+ clientId: string;
192
+ topics?: string[];
193
+ error?: string;
194
+ }
195
+ /** Health check service. Call `check(client)` to verify broker connectivity. */
196
+ declare class KafkaHealthIndicator {
197
+ check<T extends TTopicMessageMap>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
198
+ }
60
199
 
61
- export { type ClientId, type ConsumerOptions, type GroupId, type IKafkaClient, KAFKA_CLIENT, KafkaClient, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type TTopicMessageMap };
200
+ export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TransactionContext, getKafkaClientToken };