@drarzter/kafka-client 0.3.0 → 0.3.1

package/README.md CHANGED
@@ -773,6 +773,70 @@ export class HealthService {

  ## Testing

+ ### Testing utilities
+
+ Import from `@drarzter/kafka-client/testing` — zero runtime deps, only `jest` and `@testcontainers/kafka` as peer dependencies.
+
+ #### `createMockKafkaClient<T>()`
+
+ Fully typed mock with `jest.fn()` on every `IKafkaClient` method. All methods resolve to sensible defaults:
+
+ ```typescript
+ import { createMockKafkaClient } from '@drarzter/kafka-client/testing';
+
+ const kafka = createMockKafkaClient<MyTopics>();
+
+ const service = new OrdersService(kafka);
+ await service.createOrder();
+
+ expect(kafka.sendMessage).toHaveBeenCalledWith(
+   'order.created',
+   expect.objectContaining({ orderId: '123' }),
+ );
+
+ // Override return values
+ kafka.checkStatus.mockResolvedValueOnce({ topics: ['order.created'] });
+
+ // Mock rejections
+ kafka.sendMessage.mockRejectedValueOnce(new Error('broker down'));
+ ```
+
+ #### `KafkaTestContainer`
+
+ Thin wrapper around `@testcontainers/kafka` that handles common setup pain points — transaction coordinator warmup, topic pre-creation:
+
+ ```typescript
+ import { KafkaTestContainer } from '@drarzter/kafka-client/testing';
+ import { KafkaClient } from '@drarzter/kafka-client/core';
+
+ let container: KafkaTestContainer;
+ let brokers: string[];
+
+ beforeAll(async () => {
+   container = new KafkaTestContainer({
+     topics: ['orders', { topic: 'payments', numPartitions: 3 }],
+   });
+   brokers = await container.start();
+ }, 120_000);
+
+ afterAll(() => container.stop());
+
+ it('sends and receives', async () => {
+   const kafka = new KafkaClient('test', 'test-group', brokers);
+   // ...
+ });
+ ```
+
+ Options:
+
+ | Option | Default | Description |
+ |--------|---------|-------------|
+ | `image` | `"confluentinc/cp-kafka:7.7.0"` | Docker image |
+ | `transactionWarmup` | `true` | Warm up transaction coordinator on start |
+ | `topics` | `[]` | Topics to pre-create (string or `{ topic, numPartitions }`) |
+
+ ### Running tests
+
  Unit tests (mocked kafkajs):

  ```bash
@@ -795,7 +859,9 @@ Both suites run in CI on every push to `main`.
  src/
  ├── client/ # Core — KafkaClient, types, topic(), error classes (0 framework deps)
  ├── nest/ # NestJS adapter — Module, Explorer, decorators, health
+ ├── testing/ # Testing utilities — mock client, testcontainer wrapper
  ├── core.ts # Standalone entrypoint (@drarzter/kafka-client/core)
+ ├── testing.ts # Testing entrypoint (@drarzter/kafka-client/testing)
  └── index.ts # Full entrypoint — core + NestJS adapter
  ```

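The options table above pairs with the constructor shown in the integration example. Below is a minimal sketch of a non-default configuration, assuming the options behave as the table describes; the image tag and topic layout are illustrative, not taken from the package:

```typescript
import { KafkaTestContainer } from '@drarzter/kafka-client/testing';

// Illustrative configuration: pin a specific cp-kafka tag, skip the transaction
// coordinator warmup (useful when the suite never opens a transaction), and
// pre-create a multi-partition topic before any consumer subscribes.
const container = new KafkaTestContainer({
  image: 'confluentinc/cp-kafka:7.7.0',
  transactionWarmup: false,
  topics: [{ topic: 'orders', numPartitions: 3 }],
});

const brokers = await container.start(); // resolves to the broker address list, per the README example
// ... run tests against `brokers` ...
await container.stop();
```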
@@ -1,17 +1,5 @@
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __decorateClass = (decorators, target, key, kind) => {
-   var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
-   for (var i = decorators.length - 1, decorator; i >= 0; i--)
-     if (decorator = decorators[i])
-       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
-   if (kind && result) __defProp(target, key, result);
-   return result;
- };
- var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index);
-
  // src/client/kafka.client.ts
- import { Kafka, Partitioners } from "kafkajs";
+ import { Kafka, Partitioners, logLevel as KafkaLogLevel } from "kafkajs";

  // src/client/errors.ts
  var KafkaProcessingError = class extends Error {
@@ -78,7 +66,21 @@ var KafkaClient = class {
      this.numPartitions = options?.numPartitions ?? 1;
      this.kafka = new Kafka({
        clientId: this.clientId,
-       brokers
+       brokers,
+       logLevel: KafkaLogLevel.WARN,
+       logCreator: () => ({ level, log }) => {
+         const msg = `[kafkajs] ${log.message}`;
+         if (level === KafkaLogLevel.ERROR) {
+           const text = log.message ?? "";
+           const isRetriable = text.includes("TOPIC_ALREADY_EXISTS") || text.includes("GROUP_COORDINATOR_NOT_AVAILABLE") || text.includes("NOT_COORDINATOR") || text.includes("Response GroupCoordinator") || text.includes("Response CreateTopics");
+           if (isRetriable) this.logger.warn(msg);
+           else this.logger.error(msg);
+         } else if (level === KafkaLogLevel.WARN) {
+           this.logger.warn(msg);
+         } else {
+           this.logger.log(msg);
+         }
+       }
      });
      this.producer = this.kafka.producer({
        createPartitioner: Partitioners.DefaultPartitioner,
@@ -534,12 +536,10 @@ function topic(name) {
  }

  export {
-   __decorateClass,
-   __decorateParam,
    KafkaProcessingError,
    KafkaValidationError,
    KafkaRetryExhaustedError,
    KafkaClient,
    topic
  };
- //# sourceMappingURL=chunk-UDOHIMAZ.mjs.map
+ //# sourceMappingURL=chunk-A56D7HXR.mjs.map
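The new `logCreator` above routes kafkajs' own log lines through the client's logger, downgrading known-retriable broker errors (such as `TOPIC_ALREADY_EXISTS` from topic auto-creation) from `error` to `warn`. Whatever is passed as `KafkaClientOptions.logger` therefore also receives the kafkajs output. A minimal sketch of wiring in a custom logger; the topic map, client IDs, and broker address are illustrative:

```typescript
import { KafkaClient, type KafkaLogger } from '@drarzter/kafka-client/core';

interface MyTopics {
  'order.created': { orderId: string };
}

// Illustrative logger: with 0.3.1, retriable kafkajs broker errors arrive via
// `warn`, so `error` is left for genuinely unexpected failures.
const logger: KafkaLogger = {
  log: (msg) => console.info(msg),
  warn: (msg, ...args) => console.warn(msg, ...args),
  error: (msg, ...args) => console.error(msg, ...args),
};

const kafka = new KafkaClient<MyTopics>('orders-service', 'orders-group', ['localhost:9092'], {
  logger,
  autoCreateTopics: true,
});
```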
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/client/kafka.client.ts","../src/client/errors.ts","../src/client/topic.ts"], … } (generated source map for the renamed chunk; inlined `sourcesContent` and mappings not reproduced here)
@@ -0,0 +1,17 @@
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __decorateClass = (decorators, target, key, kind) => {
+   var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
+   for (var i = decorators.length - 1, decorator; i >= 0; i--)
+     if (decorator = decorators[i])
+       result = (kind ? decorator(target, key, result) : decorator(result)) || result;
+   if (kind && result) __defProp(target, key, result);
+   return result;
+ };
+ var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index);
+
+ export {
+   __decorateClass,
+   __decorateParam
+ };
+ //# sourceMappingURL=chunk-EQQGB2QZ.mjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/core.d.mts CHANGED
@@ -1,225 +1,5 @@
- /**
-  * Any validation library with a `.parse()` method.
-  * Works with Zod, Valibot, ArkType, or any custom validator.
-  *
-  * @example
-  * ```ts
-  * import { z } from 'zod';
-  * const schema: SchemaLike<{ id: string }> = z.object({ id: z.string() });
-  * ```
-  */
- interface SchemaLike<T = any> {
-     parse(data: unknown): T;
- }
- /** Infer the output type from a SchemaLike. */
- type InferSchema<S extends SchemaLike> = S extends SchemaLike<infer T> ? T : never;
- /**
-  * A typed topic descriptor that pairs a topic name with its message type.
-  * Created via the `topic()` factory function.
-  *
-  * @typeParam N - The literal topic name string.
-  * @typeParam M - The message payload type for this topic.
-  */
- interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
-     readonly __topic: N;
-     /** @internal Phantom type — never has a real value at runtime. */
-     readonly __type: M;
-     /** Runtime schema validator. Present only when created via `topic().schema()`. */
-     readonly __schema?: SchemaLike<M>;
- }
- /**
-  * Define a typed topic descriptor.
-  *
-  * @example
-  * ```ts
-  * // Without schema — type provided explicitly:
-  * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
-  *
-  * // With schema — type inferred from schema:
-  * const OrderCreated = topic('order.created').schema(z.object({
-  *   orderId: z.string(),
-  *   amount: z.number(),
-  * }));
-  *
-  * // Use with KafkaClient:
-  * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
-  *
-  * // Use with @SubscribeTo:
-  * @SubscribeTo(OrderCreated)
-  * async handleOrder(msg) { ... }
-  * ```
-  */
- declare function topic<N extends string>(name: N): {
-     <M extends Record<string, any>>(): TopicDescriptor<N, M>;
-     schema<S extends SchemaLike<Record<string, any>>>(schema: S): TopicDescriptor<N, InferSchema<S>>;
- };
- /**
-  * Build a topic-message map type from a union of TopicDescriptors.
-  *
-  * @example
-  * ```ts
-  * const OrderCreated = topic('order.created')<{ orderId: string }>();
-  * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
-  *
-  * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
-  * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
-  * ```
-  */
- type TopicsFrom<D extends TopicDescriptor<any, any>> = {
-     [K in D as K["__topic"]]: K["__type"];
- };
-
- /**
-  * Mapping of topic names to their message types.
-  * Define this interface to get type-safe publish/subscribe across your app.
-  *
-  * @example
-  * ```ts
-  * // with explicit extends (IDE hints for values)
-  * interface MyTopics extends TTopicMessageMap {
-  *   "orders.created": { orderId: string; amount: number };
-  *   "users.updated": { userId: string; name: string };
-  * }
-  *
-  * // or plain interface / type — works the same
-  * interface MyTopics {
-  *   "orders.created": { orderId: string; amount: number };
-  * }
-  * ```
-  */
- type TTopicMessageMap = {
-     [topic: string]: Record<string, any>;
- };
- /**
-  * Generic constraint for topic-message maps.
-  * Works with both `type` aliases and `interface` declarations.
-  */
- type TopicMapConstraint<T> = {
-     [K in keyof T]: Record<string, any>;
- };
- type ClientId = string;
- type GroupId = string;
- type MessageHeaders = Record<string, string>;
- /** Options for sending a single message. */
- interface SendOptions {
-     /** Partition key for message routing. */
-     key?: string;
-     /** Custom headers attached to the message. */
-     headers?: MessageHeaders;
- }
- /** Metadata exposed to batch consumer handlers. */
- interface BatchMeta {
-     /** Partition number for this batch. */
-     partition: number;
-     /** Highest offset available on the broker for this partition. */
-     highWatermark: string;
-     /** Send a heartbeat to the broker to prevent session timeout. */
-     heartbeat(): Promise<void>;
-     /** Mark an offset as processed (for manual offset management). */
-     resolveOffset(offset: string): void;
-     /** Commit offsets if the auto-commit threshold has been reached. */
-     commitOffsetsIfNecessary(): Promise<void>;
- }
- /** Options for configuring a Kafka consumer. */
- interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
-     /** Override the default consumer group ID from the constructor. */
-     groupId?: string;
-     /** Start reading from earliest offset. Default: `false`. */
-     fromBeginning?: boolean;
-     /** Automatically commit offsets. Default: `true`. */
-     autoCommit?: boolean;
-     /** Retry policy for failed message processing. */
-     retry?: RetryOptions;
-     /** Send failed messages to a Dead Letter Queue (`<topic>.dlq`). */
-     dlq?: boolean;
-     /** Interceptors called before/after each message. */
-     interceptors?: ConsumerInterceptor<T>[];
-     /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
-     schemas?: Map<string, SchemaLike>;
-     /** Retry config for `consumer.subscribe()` when the topic doesn't exist yet. */
-     subscribeRetry?: SubscribeRetryOptions;
- }
- /** Configuration for consumer retry behavior. */
- interface RetryOptions {
-     /** Maximum number of retry attempts before giving up. */
-     maxRetries: number;
-     /** Base delay between retries in ms (multiplied by attempt number). Default: `1000`. */
-     backoffMs?: number;
- }
- /**
-  * Interceptor hooks for consumer message processing.
-  * All methods are optional — implement only what you need.
-  */
- interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap> {
-     /** Called before the message handler. */
-     before?(message: T[keyof T], topic: string): Promise<void> | void;
-     /** Called after the message handler succeeds. */
-     after?(message: T[keyof T], topic: string): Promise<void> | void;
-     /** Called when the message handler throws. */
-     onError?(message: T[keyof T], topic: string, error: Error): Promise<void> | void;
- }
- /** Context passed to the `transaction()` callback with type-safe send methods. */
- interface TransactionContext<T extends TopicMapConstraint<T>> {
-     send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
-     send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
-     sendBatch<K extends keyof T>(topic: K, messages: Array<{
-         value: T[K];
-         key?: string;
-         headers?: MessageHeaders;
-     }>): Promise<void>;
-     sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
-         value: D["__type"];
-         key?: string;
-         headers?: MessageHeaders;
-     }>): Promise<void>;
- }
- /** Interface describing all public methods of the Kafka client. */
- interface IKafkaClient<T extends TopicMapConstraint<T>> {
-     checkStatus(): Promise<{
-         topics: string[];
-     }>;
-     startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
-     startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
-     startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
-     startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
-     stopConsumer(): Promise<void>;
-     sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
-     sendBatch<K extends keyof T>(topic: K, messages: Array<{
-         value: T[K];
-         key?: string;
-         headers?: MessageHeaders;
-     }>): Promise<void>;
-     transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
-     getClientId: () => ClientId;
-     disconnect(): Promise<void>;
- }
- /**
-  * Logger interface for KafkaClient.
-  * Compatible with NestJS Logger, console, winston, pino, or any custom logger.
-  */
- interface KafkaLogger {
-     log(message: string): void;
-     warn(message: string, ...args: any[]): void;
-     error(message: string, ...args: any[]): void;
- }
- /** Options for `KafkaClient` constructor. */
- interface KafkaClientOptions {
-     /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. Useful for development — not recommended in production. */
-     autoCreateTopics?: boolean;
-     /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
-     strictSchemas?: boolean;
-     /** Custom logger. Defaults to console with `[KafkaClient:<clientId>]` prefix. */
-     logger?: KafkaLogger;
-     /** Number of partitions for auto-created topics. Default: `1`. */
-     numPartitions?: number;
- }
- /** Options for consumer subscribe retry when topic doesn't exist yet. */
- interface SubscribeRetryOptions {
-     /** Maximum number of subscribe attempts. Default: `5`. */
-     retries?: number;
-     /** Delay between retries in ms. Default: `5000`. */
-     backoffMs?: number;
- }
+ import { T as TopicMapConstraint, I as IKafkaClient, C as ClientId, G as GroupId, K as KafkaClientOptions, b as TopicDescriptor, f as SendOptions, M as MessageHeaders, j as TransactionContext, a as ConsumerOptions, B as BatchMeta } from './types-CtwJihJ3.mjs';
+ export { c as ConsumerInterceptor, d as InferSchema, e as KafkaLogger, R as RetryOptions, S as SchemaLike, g as SubscribeRetryOptions, h as TTopicMessageMap, i as TopicsFrom, t as topic } from './types-CtwJihJ3.mjs';

  /**
   * Type-safe Kafka client.
@@ -335,4 +115,4 @@ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
      });
  }

- export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, KafkaClient, type KafkaClientOptions, type KafkaLogger, KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, type SubscribeRetryOptions, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, topic };
+ export { BatchMeta, ClientId, ConsumerOptions, GroupId, IKafkaClient, KafkaClient, KafkaClientOptions, KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError, MessageHeaders, SendOptions, TopicDescriptor, TopicMapConstraint, TransactionContext };
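The declaration file now re-exports the public API from the shared `types-CtwJihJ3.mjs` chunk instead of declaring it inline, so the set of exported names is meant to stay the same and existing imports from the `/core` entrypoint should keep compiling. A minimal compile-time sketch; the topic map, client IDs, and broker address are illustrative:

```typescript
import { KafkaClient, topic, KafkaValidationError } from '@drarzter/kafka-client/core';
import type { TopicsFrom, KafkaClientOptions } from '@drarzter/kafka-client/core';

// Same names as before the restructure, now resolved through the shared types chunk.
const OrderCreated = topic('order.created')<{ orderId: string }>();
type MyTopics = TopicsFrom<typeof OrderCreated>;

const options: KafkaClientOptions = { autoCreateTopics: true, numPartitions: 1 };
const kafka = new KafkaClient<MyTopics>('svc', 'svc-group', ['localhost:9092'], options);

// KafkaValidationError still carries the offending topic and message.
const err = new KafkaValidationError('order.created', { orderId: 42 });
console.log(err.name, err.topic);
```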