@drarzter/kafka-client 0.1.9 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -18,6 +18,7 @@ An opinionated wrapper around kafkajs that integrates with NestJS as a DynamicMo
18
18
  - **Idempotent producer** — `acks: -1`, `idempotent: true` by default
19
19
  - **Retry + DLQ** — configurable retries with backoff, dead letter queue for failed messages
20
20
  - **Batch sending** — send multiple messages in a single request
21
+ - **Batch consuming** — `startBatchConsumer()` for high-throughput `eachBatch` processing
21
22
  - **Partition key support** — route related messages to the same partition
22
23
  - **Custom headers** — attach metadata headers to messages
23
24
  - **Transactions** — exactly-once semantics with `producer.transaction()`
@@ -314,6 +315,24 @@ export class OrdersService implements OnModuleInit {
314
315
 
315
316
  ## Multiple consumer groups
316
317
 
318
+ ### Per-consumer groupId
319
+
320
+ Override the default consumer group for specific consumers. Each unique `groupId` creates a separate kafkajs Consumer internally:
321
+
322
+ ```typescript
323
+ // Default group from constructor
324
+ await kafka.startConsumer(['orders'], handler);
325
+
326
+ // Custom group — receives its own copy of messages
327
+ await kafka.startConsumer(['orders'], auditHandler, { groupId: 'orders-audit' });
328
+
329
+ // Works with @SubscribeTo too
330
+ @SubscribeTo('orders', { groupId: 'orders-audit' })
331
+ async auditOrders(message) { ... }
332
+ ```
333
+
334
+ ### Named clients
335
+
317
336
  Register multiple named clients for different bounded contexts:
318
337
 
319
338
  ```typescript
@@ -406,6 +425,38 @@ await this.kafka.sendBatch('order.created', [
406
425
  ]);
407
426
  ```
408
427
 
428
+ ## Batch consuming
429
+
430
+ Process messages in batches for higher throughput. The handler receives an array of parsed messages, the topic name, and a `BatchMeta` object with offset management controls:
431
+
432
+ ```typescript
433
+ await this.kafka.startBatchConsumer(
434
+ ['order.created'],
435
+ async (messages, topic, meta) => {
436
+ // messages: OrdersTopicMap['order.created'][]
437
+ for (const msg of messages) {
438
+ await processOrder(msg);
439
+ meta.resolveOffset(/* ... */);
440
+ }
441
+ await meta.commitOffsetsIfNecessary();
442
+ },
443
+ { retry: { maxRetries: 3 }, dlq: true },
444
+ );
445
+ ```
446
+
447
+ With `@SubscribeTo()`:
448
+
449
+ ```typescript
450
+ @SubscribeTo('order.created', { batch: true })
451
+ async handleOrders(messages: OrdersTopicMap['order.created'][], topic: string) {
452
+ // messages is an array
453
+ }
454
+ ```
455
+
456
+ Schema validation runs per-message — invalid messages are skipped (DLQ'd if enabled), valid ones are passed to the handler. Retry applies to the whole batch.
457
+
458
+ `BatchMeta` exposes: `partition`, `highWatermark`, `heartbeat()`, `resolveOffset(offset)`, `commitOffsetsIfNecessary()`.
459
+
409
460
  ## Transactions
410
461
 
411
462
  Send multiple messages atomically with exactly-once semantics:
@@ -453,27 +504,83 @@ await this.kafka.startConsumer(['order.created'], handler, {
453
504
 
454
505
  Multiple interceptors run in order. All hooks are optional.
455
506
 
456
- ## Consumer options
507
+ ## Options reference
508
+
509
+ ### Send options
510
+
511
+ Options for `sendMessage()` — the third argument:
512
+
513
+ | Option | Default | Description |
514
+ |-----------|---------|--------------------------------------------------|
515
+ | `key` | — | Partition key for message routing |
516
+ | `headers` | — | Custom metadata headers (`Record<string, string>`) |
517
+
518
+ `sendBatch()` accepts `key` and `headers` per message inside the array items.
519
+
520
+ ### Consumer options
457
521
 
458
522
  | Option | Default | Description |
459
523
  |--------|---------|-------------|
524
+ | `groupId` | constructor value | Override consumer group for this subscription |
460
525
  | `fromBeginning` | `false` | Read from the beginning of the topic |
461
526
  | `autoCommit` | `true` | Auto-commit offsets |
462
527
  | `retry.maxRetries` | — | Number of retry attempts |
463
528
  | `retry.backoffMs` | `1000` | Base delay between retries (multiplied by attempt number) |
464
529
  | `dlq` | `false` | Send to `{topic}.dlq` after all retries exhausted |
465
530
  | `interceptors` | `[]` | Array of before/after/onError hooks |
531
+ | `batch` | `false` | (decorator only) Use `startBatchConsumer` instead of `startConsumer` |
466
532
 
467
533
  ### Module options
468
534
 
535
+ Passed to `KafkaModule.register()` or returned from `registerAsync()` factory:
536
+
469
537
  | Option | Default | Description |
470
538
  |--------|---------|-------------|
471
- | `clientId` | — | Kafka client identifier |
472
- | `groupId` | — | Consumer group ID |
473
- | `brokers` | — | Array of broker addresses |
474
- | `name` | — | Named client for multi-group setups |
475
- | `isGlobal` | `false` | Make the client available in all modules |
476
- | `autoCreateTopics` | `false` | Auto-create topics on first send/consume |
539
+ | `clientId` | — | Kafka client identifier (required) |
540
+ | `groupId` | — | Default consumer group ID (required) |
541
+ | `brokers` | — | Array of broker addresses (required) |
542
+ | `name` | — | Named client identifier for multi-client setups |
543
+ | `isGlobal` | `false` | Make the client available in all modules without re-importing |
544
+ | `autoCreateTopics` | `false` | Auto-create topics on first send/consume (dev only) |
545
+
546
+ **Module-scoped** (default) — import `KafkaModule` in each module that needs it:
547
+
548
+ ```typescript
549
+ // orders.module.ts
550
+ @Module({
551
+ imports: [
552
+ KafkaModule.register<OrdersTopicMap>({
553
+ clientId: 'orders',
554
+ groupId: 'orders-group',
555
+ brokers: ['localhost:9092'],
556
+ }),
557
+ ],
558
+ })
559
+ export class OrdersModule {}
560
+ ```
561
+
562
+ **App-wide** — register once in `AppModule` with `isGlobal: true`, inject anywhere:
563
+
564
+ ```typescript
565
+ // app.module.ts
566
+ @Module({
567
+ imports: [
568
+ KafkaModule.register<MyTopics>({
569
+ clientId: 'my-app',
570
+ groupId: 'my-group',
571
+ brokers: ['localhost:9092'],
572
+ isGlobal: true,
573
+ }),
574
+ ],
575
+ })
576
+ export class AppModule {}
577
+
578
+ // any module — no KafkaModule import needed
579
+ @Injectable()
580
+ export class PaymentService {
581
+ constructor(@InjectKafkaClient() private readonly kafka: KafkaClient<MyTopics>) {}
582
+ }
583
+ ```
477
584
 
478
585
  ## Error classes
479
586
 
@@ -508,6 +615,85 @@ const interceptor: ConsumerInterceptor<MyTopics> = {
508
615
 
509
616
  When `retry.maxRetries` is set and all attempts fail, `KafkaRetryExhaustedError` is passed to `onError` interceptors automatically.
510
617
 
618
+ **`KafkaValidationError`** — thrown when schema validation fails on the consumer side. Has `topic`, `originalMessage`, and `cause`:
619
+
620
+ ```typescript
621
+ import { KafkaValidationError } from '@drarzter/kafka-client';
622
+
623
+ const interceptor: ConsumerInterceptor<MyTopics> = {
624
+ onError: (message, topic, error) => {
625
+ if (error instanceof KafkaValidationError) {
626
+ console.log(`Bad message on ${error.topic}:`, error.cause?.message);
627
+ }
628
+ },
629
+ };
630
+ ```
631
+
632
+ ## Schema validation
633
+
634
+ Add runtime message validation using any validator that exposes a `.parse()` method — Zod works out of the box; Valibot, ArkType, or custom validators can be plugged in via a thin `.parse()` wrapper. No extra dependency required.
635
+
636
+ ### Defining topics with schemas
637
+
638
+ ```typescript
639
+ import { topic, TopicsFrom } from '@drarzter/kafka-client';
640
+ import { z } from 'zod'; // or valibot, arktype, etc.
641
+
642
+ // Schema-validated — type inferred from schema, no generic needed
643
+ export const OrderCreated = topic('order.created').schema(z.object({
644
+ orderId: z.string(),
645
+ userId: z.string(),
646
+ amount: z.number().positive(),
647
+ }));
648
+
649
+ // Without schema — explicit generic (still works)
650
+ export const OrderAudit = topic('order.audit')<{ orderId: string; action: string }>();
651
+
652
+ export type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderAudit>;
653
+ ```
654
+
655
+ ### How it works
656
+
657
+ **On send** — `sendMessage`, `sendBatch`, and `transaction` call `schema.parse(message)` before serializing. Invalid messages throw immediately (the schema library's error, e.g. `ZodError`):
658
+
659
+ ```typescript
660
+ // This throws ZodError — amount must be positive
661
+ await kafka.sendMessage(OrderCreated, { orderId: '1', userId: '2', amount: -5 });
662
+ ```
663
+
664
+ **On consume** — after `JSON.parse`, the consumer validates each message against the schema. Invalid messages are:
665
+
666
+ 1. Logged as errors
667
+ 2. Sent to DLQ if `dlq: true`
668
+ 3. Passed to `onError` interceptors as `KafkaValidationError`
669
+ 4. Skipped (handler is NOT called)
670
+
671
+ ```typescript
672
+ @SubscribeTo(OrderCreated, { dlq: true })
673
+ async handleOrder(message) {
674
+ // `message` is guaranteed to match the schema
675
+ console.log(message.orderId); // string — validated at runtime
676
+ }
677
+ ```
678
+
679
+ ### Bring your own validator
680
+
681
+ Any object with `parse(data: unknown): T` works:
682
+
683
+ ```typescript
684
+ import { SchemaLike } from '@drarzter/kafka-client';
685
+
686
+ const customValidator: SchemaLike<{ id: string }> = {
687
+ parse(data: unknown) {
688
+ const d = data as any;
689
+ if (typeof d?.id !== 'string') throw new Error('id must be a string');
690
+ return { id: d.id };
691
+ },
692
+ };
693
+
694
+ const MyTopic = topic('my.topic').schema(customValidator);
695
+ ```
696
+
511
697
  ## Health check
512
698
 
513
699
  Monitor Kafka connectivity with the built-in health indicator:
package/dist/index.d.mts CHANGED
@@ -1,6 +1,21 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * Any validation library with a `.parse()` method.
6
+ * Works with Zod out of the box, or with Valibot/ArkType/custom validators wrapped to expose `.parse()`.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * import { z } from 'zod';
11
+ * const schema: SchemaLike<{ id: string }> = z.object({ id: z.string() });
12
+ * ```
13
+ */
14
+ interface SchemaLike<T = any> {
15
+ parse(data: unknown): T;
16
+ }
17
+ /** Infer the output type from a SchemaLike. */
18
+ type InferSchema<S extends SchemaLike> = S extends SchemaLike<infer T> ? T : never;
4
19
  /**
5
20
  * A typed topic descriptor that pairs a topic name with its message type.
6
21
  * Created via the `topic()` factory function.
@@ -12,14 +27,23 @@ interface TopicDescriptor<N extends string = string, M extends Record<string, an
12
27
  readonly __topic: N;
13
28
  /** @internal Phantom type — never has a real value at runtime. */
14
29
  readonly __type: M;
30
+ /** Runtime schema validator. Present only when created via `topic().schema()`. */
31
+ readonly __schema?: SchemaLike<M>;
15
32
  }
16
33
  /**
17
34
  * Define a typed topic descriptor.
18
35
  *
19
36
  * @example
20
37
  * ```ts
38
+ * // Without schema — type provided explicitly:
21
39
  * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
40
  *
41
+ * // With schema — type inferred from schema:
42
+ * const OrderCreated = topic('order.created').schema(z.object({
43
+ * orderId: z.string(),
44
+ * amount: z.number(),
45
+ * }));
46
+ *
23
47
  * // Use with KafkaClient:
24
48
  * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
49
  *
@@ -28,7 +52,10 @@ interface TopicDescriptor<N extends string = string, M extends Record<string, an
28
52
  * async handleOrder(msg) { ... }
29
53
  * ```
30
54
  */
31
- declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
55
+ declare function topic<N extends string>(name: N): {
56
+ <M extends Record<string, any>>(): TopicDescriptor<N, M>;
57
+ schema<S extends SchemaLike<Record<string, any>>>(schema: S): TopicDescriptor<N, InferSchema<S>>;
58
+ };
32
59
  /**
33
60
  * Build a topic-message map type from a union of TopicDescriptors.
34
61
  *
@@ -83,8 +110,23 @@ interface SendOptions {
83
110
  /** Custom headers attached to the message. */
84
111
  headers?: MessageHeaders;
85
112
  }
113
+ /** Metadata exposed to batch consumer handlers. */
114
+ interface BatchMeta {
115
+ /** Partition number for this batch. */
116
+ partition: number;
117
+ /** Highest offset available on the broker for this partition. */
118
+ highWatermark: string;
119
+ /** Send a heartbeat to the broker to prevent session timeout. */
120
+ heartbeat(): Promise<void>;
121
+ /** Mark an offset as processed (for manual offset management). */
122
+ resolveOffset(offset: string): void;
123
+ /** Commit offsets if the auto-commit threshold has been reached. */
124
+ commitOffsetsIfNecessary(): Promise<void>;
125
+ }
86
126
  /** Options for configuring a Kafka consumer. */
87
127
  interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
128
+ /** Override the default consumer group ID from the constructor. */
129
+ groupId?: string;
88
130
  /** Start reading from earliest offset. Default: `false`. */
89
131
  fromBeginning?: boolean;
90
132
  /** Automatically commit offsets. Default: `true`. */
@@ -95,6 +137,8 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
95
137
  dlq?: boolean;
96
138
  /** Interceptors called before/after each message. */
97
139
  interceptors?: ConsumerInterceptor<T>[];
140
+ /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
141
+ schemas?: Map<string, SchemaLike>;
98
142
  }
99
143
  /** Configuration for consumer retry behavior. */
100
144
  interface RetryOptions {
@@ -137,6 +181,8 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
137
181
  }>;
138
182
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
139
183
  startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
184
+ startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
185
+ startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
140
186
  stopConsumer(): Promise<void>;
141
187
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
142
188
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
@@ -163,16 +209,19 @@ interface KafkaClientOptions {
163
209
  declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClient<T> {
164
210
  private readonly kafka;
165
211
  private readonly producer;
166
- private readonly consumer;
212
+ private readonly consumers;
167
213
  private readonly admin;
168
214
  private readonly logger;
169
215
  private readonly autoCreateTopicsEnabled;
170
216
  private readonly ensuredTopics;
217
+ private readonly defaultGroupId;
171
218
  private isAdminConnected;
172
219
  readonly clientId: ClientId;
173
220
  constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
221
+ private getOrCreateConsumer;
174
222
  private resolveTopicName;
175
223
  private ensureTopic;
224
+ private validateMessage;
176
225
  /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
177
226
  sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
178
227
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
@@ -195,14 +244,18 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
195
244
  /** Subscribe to topics and start consuming messages with the given handler. */
196
245
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
197
246
  startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
247
+ /** Subscribe to topics and consume messages in batches. */
248
+ startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
249
+ startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
198
250
  stopConsumer(): Promise<void>;
199
251
  /** Check broker connectivity and return available topics. */
200
252
  checkStatus(): Promise<{
201
253
  topics: string[];
202
254
  }>;
203
255
  getClientId(): ClientId;
204
- /** Gracefully disconnect producer, consumer, and admin. */
256
+ /** Gracefully disconnect producer, all consumers, and admin. */
205
257
  disconnect(): Promise<void>;
258
+ private buildSchemaMap;
206
259
  private processMessage;
207
260
  private sendToDlq;
208
261
  private sleep;
@@ -254,6 +307,15 @@ declare class KafkaProcessingError extends Error {
254
307
  cause?: Error;
255
308
  });
256
309
  }
310
+ /** Error thrown when schema validation fails on send or consume. */
311
+ declare class KafkaValidationError extends Error {
312
+ readonly topic: string;
313
+ readonly originalMessage: unknown;
314
+ readonly cause?: Error;
315
+ constructor(topic: string, originalMessage: unknown, options?: {
316
+ cause?: Error;
317
+ });
318
+ }
257
319
  /** Error thrown when all retry attempts are exhausted for a message. */
258
320
  declare class KafkaRetryExhaustedError extends KafkaProcessingError {
259
321
  readonly attempts: number;
@@ -270,8 +332,11 @@ declare const getKafkaClientToken: (name?: string) => string;
270
332
  declare const KAFKA_SUBSCRIBER_METADATA = "KAFKA_SUBSCRIBER_METADATA";
271
333
  interface KafkaSubscriberMetadata {
272
334
  topics: string[];
335
+ schemas?: Map<string, SchemaLike>;
273
336
  options?: ConsumerOptions;
274
337
  clientName?: string;
338
+ batch?: boolean;
339
+ methodName?: string | symbol;
275
340
  }
276
341
  /** Inject a `KafkaClient` instance. Pass a name to target a specific named client. */
277
342
  declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
@@ -281,6 +346,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
281
346
  */
282
347
  declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
283
348
  clientName?: string;
349
+ batch?: boolean;
284
350
  }) => MethodDecorator;
285
351
 
286
352
  /** Discovers `@SubscribeTo()` decorators and wires them to their Kafka clients on startup. */
@@ -304,4 +370,4 @@ declare class KafkaHealthIndicator {
304
370
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
305
371
  }
306
372
 
307
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
373
+ export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
package/dist/index.d.ts CHANGED
@@ -1,6 +1,21 @@
1
1
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
2
2
  import { DiscoveryService, ModuleRef } from '@nestjs/core';
3
3
 
4
+ /**
5
+ * Any validation library with a `.parse()` method.
6
+ * Works with Zod out of the box, or with Valibot/ArkType/custom validators wrapped to expose `.parse()`.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * import { z } from 'zod';
11
+ * const schema: SchemaLike<{ id: string }> = z.object({ id: z.string() });
12
+ * ```
13
+ */
14
+ interface SchemaLike<T = any> {
15
+ parse(data: unknown): T;
16
+ }
17
+ /** Infer the output type from a SchemaLike. */
18
+ type InferSchema<S extends SchemaLike> = S extends SchemaLike<infer T> ? T : never;
4
19
  /**
5
20
  * A typed topic descriptor that pairs a topic name with its message type.
6
21
  * Created via the `topic()` factory function.
@@ -12,14 +27,23 @@ interface TopicDescriptor<N extends string = string, M extends Record<string, an
12
27
  readonly __topic: N;
13
28
  /** @internal Phantom type — never has a real value at runtime. */
14
29
  readonly __type: M;
30
+ /** Runtime schema validator. Present only when created via `topic().schema()`. */
31
+ readonly __schema?: SchemaLike<M>;
15
32
  }
16
33
  /**
17
34
  * Define a typed topic descriptor.
18
35
  *
19
36
  * @example
20
37
  * ```ts
38
+ * // Without schema — type provided explicitly:
21
39
  * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
22
40
  *
41
+ * // With schema — type inferred from schema:
42
+ * const OrderCreated = topic('order.created').schema(z.object({
43
+ * orderId: z.string(),
44
+ * amount: z.number(),
45
+ * }));
46
+ *
23
47
  * // Use with KafkaClient:
24
48
  * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
25
49
  *
@@ -28,7 +52,10 @@ interface TopicDescriptor<N extends string = string, M extends Record<string, an
28
52
  * async handleOrder(msg) { ... }
29
53
  * ```
30
54
  */
31
- declare function topic<N extends string>(name: N): <M extends Record<string, any>>() => TopicDescriptor<N, M>;
55
+ declare function topic<N extends string>(name: N): {
56
+ <M extends Record<string, any>>(): TopicDescriptor<N, M>;
57
+ schema<S extends SchemaLike<Record<string, any>>>(schema: S): TopicDescriptor<N, InferSchema<S>>;
58
+ };
32
59
  /**
33
60
  * Build a topic-message map type from a union of TopicDescriptors.
34
61
  *
@@ -83,8 +110,23 @@ interface SendOptions {
83
110
  /** Custom headers attached to the message. */
84
111
  headers?: MessageHeaders;
85
112
  }
113
+ /** Metadata exposed to batch consumer handlers. */
114
+ interface BatchMeta {
115
+ /** Partition number for this batch. */
116
+ partition: number;
117
+ /** Highest offset available on the broker for this partition. */
118
+ highWatermark: string;
119
+ /** Send a heartbeat to the broker to prevent session timeout. */
120
+ heartbeat(): Promise<void>;
121
+ /** Mark an offset as processed (for manual offset management). */
122
+ resolveOffset(offset: string): void;
123
+ /** Commit offsets if the auto-commit threshold has been reached. */
124
+ commitOffsetsIfNecessary(): Promise<void>;
125
+ }
86
126
  /** Options for configuring a Kafka consumer. */
87
127
  interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
128
+ /** Override the default consumer group ID from the constructor. */
129
+ groupId?: string;
88
130
  /** Start reading from earliest offset. Default: `false`. */
89
131
  fromBeginning?: boolean;
90
132
  /** Automatically commit offsets. Default: `true`. */
@@ -95,6 +137,8 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
95
137
  dlq?: boolean;
96
138
  /** Interceptors called before/after each message. */
97
139
  interceptors?: ConsumerInterceptor<T>[];
140
+ /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
141
+ schemas?: Map<string, SchemaLike>;
98
142
  }
99
143
  /** Configuration for consumer retry behavior. */
100
144
  interface RetryOptions {
@@ -137,6 +181,8 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
137
181
  }>;
138
182
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
139
183
  startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
184
+ startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
185
+ startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
140
186
  stopConsumer(): Promise<void>;
141
187
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
142
188
  sendBatch<K extends keyof T>(topic: K, messages: Array<{
@@ -163,16 +209,19 @@ interface KafkaClientOptions {
163
209
  declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClient<T> {
164
210
  private readonly kafka;
165
211
  private readonly producer;
166
- private readonly consumer;
212
+ private readonly consumers;
167
213
  private readonly admin;
168
214
  private readonly logger;
169
215
  private readonly autoCreateTopicsEnabled;
170
216
  private readonly ensuredTopics;
217
+ private readonly defaultGroupId;
171
218
  private isAdminConnected;
172
219
  readonly clientId: ClientId;
173
220
  constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
221
+ private getOrCreateConsumer;
174
222
  private resolveTopicName;
175
223
  private ensureTopic;
224
+ private validateMessage;
176
225
  /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
177
226
  sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
178
227
  sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
@@ -195,14 +244,18 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
195
244
  /** Subscribe to topics and start consuming messages with the given handler. */
196
245
  startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
197
246
  startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
247
+ /** Subscribe to topics and consume messages in batches. */
248
+ startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
249
+ startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
198
250
  stopConsumer(): Promise<void>;
199
251
  /** Check broker connectivity and return available topics. */
200
252
  checkStatus(): Promise<{
201
253
  topics: string[];
202
254
  }>;
203
255
  getClientId(): ClientId;
204
- /** Gracefully disconnect producer, consumer, and admin. */
256
+ /** Gracefully disconnect producer, all consumers, and admin. */
205
257
  disconnect(): Promise<void>;
258
+ private buildSchemaMap;
206
259
  private processMessage;
207
260
  private sendToDlq;
208
261
  private sleep;
@@ -254,6 +307,15 @@ declare class KafkaProcessingError extends Error {
254
307
  cause?: Error;
255
308
  });
256
309
  }
310
+ /** Error thrown when schema validation fails on send or consume. */
311
+ declare class KafkaValidationError extends Error {
312
+ readonly topic: string;
313
+ readonly originalMessage: unknown;
314
+ readonly cause?: Error;
315
+ constructor(topic: string, originalMessage: unknown, options?: {
316
+ cause?: Error;
317
+ });
318
+ }
257
319
  /** Error thrown when all retry attempts are exhausted for a message. */
258
320
  declare class KafkaRetryExhaustedError extends KafkaProcessingError {
259
321
  readonly attempts: number;
@@ -270,8 +332,11 @@ declare const getKafkaClientToken: (name?: string) => string;
270
332
  declare const KAFKA_SUBSCRIBER_METADATA = "KAFKA_SUBSCRIBER_METADATA";
271
333
  interface KafkaSubscriberMetadata {
272
334
  topics: string[];
335
+ schemas?: Map<string, SchemaLike>;
273
336
  options?: ConsumerOptions;
274
337
  clientName?: string;
338
+ batch?: boolean;
339
+ methodName?: string | symbol;
275
340
  }
276
341
  /** Inject a `KafkaClient` instance. Pass a name to target a specific named client. */
277
342
  declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
@@ -281,6 +346,7 @@ declare const InjectKafkaClient: (name?: string) => ParameterDecorator;
281
346
  */
282
347
  declare const SubscribeTo: (topics: string | string[] | TopicDescriptor | TopicDescriptor[] | (string | TopicDescriptor)[], options?: ConsumerOptions & {
283
348
  clientName?: string;
349
+ batch?: boolean;
284
350
  }) => MethodDecorator;
285
351
 
286
352
  /** Discovers `@SubscribeTo()` decorators and wires them to their Kafka clients on startup. */
@@ -304,4 +370,4 @@ declare class KafkaHealthIndicator {
304
370
  check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
305
371
  }
306
372
 
307
- export { type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, type MessageHeaders, type RetryOptions, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
373
+ export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };