@drarzter/kafka-client 0.6.7 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -15,7 +15,7 @@ import {
15
15
  getEnvelopeContext,
16
16
  runWithEnvelopeContext,
17
17
  topic
18
- } from "./chunk-ISYOEX4W.mjs";
18
+ } from "./chunk-MJ342P4R.mjs";
19
19
  import {
20
20
  __decorateClass,
21
21
  __decorateParam
package/dist/otel.d.mts CHANGED
@@ -1,4 +1,4 @@
1
- import { K as KafkaInstrumentation } from './types-CqjRm-Cd.mjs';
1
+ import { K as KafkaInstrumentation } from './types-DqQ7IXZr.mjs';
2
2
 
3
3
  /**
4
4
  * Create a `KafkaInstrumentation` that automatically propagates
package/dist/otel.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { K as KafkaInstrumentation } from './types-CqjRm-Cd.js';
1
+ import { K as KafkaInstrumentation } from './types-DqQ7IXZr.js';
2
2
 
3
3
  /**
4
4
  * Create a `KafkaInstrumentation` that automatically propagates
@@ -1,4 +1,4 @@
1
- import { T as TopicMapConstraint, I as IKafkaClient } from './types-CqjRm-Cd.mjs';
1
+ import { T as TopicMapConstraint, I as IKafkaClient } from './types-DqQ7IXZr.mjs';
2
2
 
3
3
  /**
4
4
  * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.
package/dist/testing.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import { T as TopicMapConstraint, I as IKafkaClient } from './types-CqjRm-Cd.js';
1
+ import { T as TopicMapConstraint, I as IKafkaClient } from './types-DqQ7IXZr.js';
2
2
 
3
3
  /**
4
4
  * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.
package/dist/testing.js CHANGED
@@ -76,6 +76,22 @@ function createMockKafkaClient(mockFactory) {
76
76
  stop: mock().mockResolvedValue(void 0)
77
77
  }),
78
78
  stopConsumer: resolved(void 0),
79
+ consume: returning(
80
+ (function* () {
81
+ })()
82
+ ),
83
+ replayDlq: resolved({ replayed: 0, skipped: 0 }),
84
+ resetOffsets: resolved(void 0),
85
+ seekToOffset: resolved(void 0),
86
+ pauseConsumer: mock(),
87
+ resumeConsumer: mock(),
88
+ getMetrics: returning({
89
+ processedCount: 0,
90
+ retryCount: 0,
91
+ dlqCount: 0,
92
+ dedupCount: 0
93
+ }),
94
+ resetMetrics: mock(),
79
95
  disconnect: resolved(void 0),
80
96
  enableGracefulShutdown: mock()
81
97
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for 
`KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? 
[];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. 
Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACzGA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1
B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n consume: returning(\n (function* () {})() as unknown as AsyncIterableIterator<any>,\n ),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n seekToOffset: resolved(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n 
dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... 
run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 
1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,SAAS;AAAA,OACN,aAAa;AAAA,MAAC,GAAG;AAAA,IACpB;AAAA,IACA,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,cAAc,SAAS,MAAS;AAAA,IAChC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACxHA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,G
AAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
package/dist/testing.mjs CHANGED
@@ -51,6 +51,22 @@ function createMockKafkaClient(mockFactory) {
51
51
  stop: mock().mockResolvedValue(void 0)
52
52
  }),
53
53
  stopConsumer: resolved(void 0),
54
+ consume: returning(
55
+ (function* () {
56
+ })()
57
+ ),
58
+ replayDlq: resolved({ replayed: 0, skipped: 0 }),
59
+ resetOffsets: resolved(void 0),
60
+ seekToOffset: resolved(void 0),
61
+ pauseConsumer: mock(),
62
+ resumeConsumer: mock(),
63
+ getMetrics: returning({
64
+ processedCount: 0,
65
+ retryCount: 0,
66
+ dlqCount: 0,
67
+ dedupCount: 0
68
+ }),
69
+ resetMetrics: mock(),
54
70
  disconnect: resolved(void 0),
55
71
  enableGracefulShutdown: mock()
56
72
  };
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for 
`KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? 
[];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. 
Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n }\n}\n"],"mappings":";;;AAcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACzGA;AAAA,EACE;AAAA,OAEK;AACP,SAAS,eAAe;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,eAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,
MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n consume: returning(\n (function* () {})() as unknown as AsyncIterableIterator<any>,\n ),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n seekToOffset: resolved(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n 
dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... 
run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 
1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;AAcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,SAAS;AAAA,OACN,aAAa;AAAA,MAAC,GAAG;AAAA,IACpB;AAAA,IACA,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,cAAc,SAAS,MAAS;AAAA,IAChC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACxHA;AAAA,EACE;AAAA,OAEK;AACP,SAAS,eAAe;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,eAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAA
M,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
@@ -277,6 +277,41 @@ interface DeduplicationOptions {
277
277
  */
278
278
  duplicatesTopic?: string;
279
279
  }
280
+ /**
281
+ * Options for the per-partition circuit breaker.
282
+ *
283
+ * The circuit breaker tracks recent message outcomes in a **sliding window** and
284
+ * opens (pauses the partition) when too many failures accumulate:
285
+ *
286
+ * - **CLOSED** — normal operation. Each DLQ route or successful delivery is recorded
287
+ * in the window. When `failuresInWindow >= threshold` the circuit opens.
288
+ * - **OPEN** — the partition is paused. After `recoveryMs` the circuit moves to HALF-OPEN.
289
+ * - **HALF-OPEN** — the partition is resumed. If the next `halfOpenSuccesses` messages
290
+ * succeed the circuit closes; a new failure immediately re-opens it.
291
+ */
292
+ interface CircuitBreakerOptions {
293
+ /**
294
+ * Number of failures within the sliding window required to open the circuit.
295
+ * A failure is any message that ends up in the DLQ.
296
+ * Default: `5`.
297
+ */
298
+ threshold?: number;
299
+ /**
300
+ * Time (ms) to keep the circuit OPEN before attempting recovery (HALF_OPEN).
301
+ * Default: `30_000` (30 s).
302
+ */
303
+ recoveryMs?: number;
304
+ /**
305
+ * Number of outcomes (successes + failures) to keep in the sliding window.
306
+ * Default: `threshold * 2` (minimum `10`).
307
+ */
308
+ windowSize?: number;
309
+ /**
310
+ * Number of consecutive successes in HALF-OPEN state required to close the
311
+ * circuit. Default: `1`.
312
+ */
313
+ halfOpenSuccesses?: number;
314
+ }
280
315
  /** Options for configuring a Kafka consumer. */
281
316
  interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
282
317
  /** Override the default consumer group ID from the constructor. */
@@ -326,6 +361,21 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
326
361
  * Messages without the header are passed through unchanged.
327
362
  */
328
363
  deduplication?: DeduplicationOptions;
364
+ /**
365
+ * Drop messages older than this threshold, measured in milliseconds from
366
+ * the `x-timestamp` header set by the producer.
367
+ *
368
+ * Expired messages are routed to `{topic}.dlq` when `dlq: true`, otherwise
369
+ * `onMessageLost` is called. The handler is never invoked for an expired message.
370
+ */
371
+ messageTtlMs?: number;
372
+ /**
373
+ * Automatically pause a partition when it accumulates too many consecutive
374
+ * failures, then resume after a recovery window.
375
+ *
376
+ * See `CircuitBreakerOptions` for the sliding-window semantics.
377
+ */
378
+ circuitBreaker?: CircuitBreakerOptions;
329
379
  }
330
380
  /** Configuration for consumer retry behavior. */
331
381
  interface RetryOptions {
@@ -372,8 +422,28 @@ type BeforeConsumeResult = (() => void) | {
372
422
  * - `'validation-error'` — schema validation failed before the handler ran.
373
423
  * - `'lamport-clock-duplicate'` — message was identified as a Lamport-clock duplicate
374
424
  * and `deduplication.strategy` is `'dlq'`.
425
+ * - `'ttl-expired'` — message age exceeded `messageTtlMs` before the handler ran.
375
426
  */
376
- type DlqReason = "handler-error" | "validation-error" | "lamport-clock-duplicate";
427
+ type DlqReason = "handler-error" | "validation-error" | "lamport-clock-duplicate" | "ttl-expired";
428
+ /** Options for `replayDlq`. */
429
+ interface DlqReplayOptions {
430
+ /**
431
+ * Override the target topic to re-publish to.
432
+ * Default: reads the `x-dlq-original-topic` header from each DLQ message.
433
+ */
434
+ targetTopic?: string;
435
+ /**
436
+ * Dry-run mode — log what would be replayed without actually sending.
437
+ * Increments the `replayed` counter so you can see what would happen.
438
+ */
439
+ dryRun?: boolean;
440
+ /**
441
+ * Optional filter — return `false` to skip a message.
442
+ * @param headers All headers on the DLQ message (including `x-dlq-*` metadata).
443
+ * @param value Raw message value (JSON string).
444
+ */
445
+ filter?: (headers: MessageHeaders, value: string) => boolean;
446
+ }
377
447
  /**
378
448
  * Snapshot of internal event counters accumulated since client creation
379
449
  * (or since the last `resetMetrics()` call).
@@ -482,11 +552,97 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
482
552
  getClientId(): ClientId;
483
553
  /**
484
554
  * Return a snapshot of internal event counters (retry / DLQ / dedup).
555
+ * - `getMetrics()` — aggregate across all topics.
556
+ * - `getMetrics(topic)` — counters for a specific topic only; returns all-zero
557
+ * if no events have been observed for that topic yet.
558
+ *
485
559
  * Counters accumulate since client creation or the last `resetMetrics()` call.
486
560
  */
487
- getMetrics(): Readonly<KafkaMetrics>;
488
- /** Reset all internal event counters to zero. */
489
- resetMetrics(): void;
561
+ getMetrics(topic?: string): Readonly<KafkaMetrics>;
562
+ /**
563
+ * Reset internal event counters to zero.
564
+ * - `resetMetrics()` — reset all topics.
565
+ * - `resetMetrics(topic)` — reset a single topic only.
566
+ */
567
+ resetMetrics(topic?: string): void;
568
+ /**
569
+ * Consume all messages currently in `{topic}.dlq`, strip the `x-dlq-*` metadata
570
+ * headers, and re-publish each message to its original topic (or `options.targetTopic`).
571
+ *
572
+ * A temporary consumer group is created and torn down automatically. The DLQ topic
573
+ * itself is not modified — messages remain there after replay.
574
+ *
575
+ * @returns `{ replayed, skipped }` — counts of re-published vs skipped messages.
576
+ */
577
+ replayDlq(topic: string, options?: DlqReplayOptions): Promise<{
578
+ replayed: number;
579
+ skipped: number;
580
+ }>;
581
+ /**
582
+ * Reset committed offsets for a consumer group to the earliest or latest position.
583
+ *
584
+ * The consumer group must be inactive (no running consumers) — Kafka does not
585
+ * allow offset resets while members are actively consuming. Call
586
+ * `stopConsumer(groupId)` first.
587
+ *
588
+ * @param groupId Consumer group to reset. Defaults to the client's default groupId.
589
+ * @param topic Topic to reset.
590
+ * @param position `'earliest'` seeks to the first available offset; `'latest'`
591
+ * seeks past the last message (consumer will only see new messages).
592
+ */
593
+ resetOffsets(groupId: string | undefined, topic: string, position: "earliest" | "latest"): Promise<void>;
594
+ /**
595
+ * Seek specific partitions to explicit offsets.
596
+ * More granular than `resetOffsets` — each partition can target a different offset.
597
+ *
598
+ * The consumer group must be inactive. Assignments for different topics are batched
599
+ * into one admin call per topic.
600
+ *
601
+ * @param groupId Consumer group to seek. Defaults to the client's default groupId.
602
+ * @param assignments Array of `{ topic, partition, offset }` tuples.
603
+ */
604
+ seekToOffset(groupId: string | undefined, assignments: Array<{
605
+ topic: string;
606
+ partition: number;
607
+ offset: string;
608
+ }>): Promise<void>;
609
+ /**
610
+ * Consume messages as an async iterator. Useful for scripts, migrations, and
611
+ * one-off processing where the full `startConsumer` lifecycle is unnecessary.
612
+ *
613
+ * Breaking out of the loop (or calling `return()` on the iterator) stops the
614
+ * underlying consumer automatically.
615
+ *
616
+ * @example
617
+ * ```ts
618
+ * for await (const envelope of kafka.consume('orders')) {
619
+ * await process(envelope);
620
+ * }
621
+ * ```
622
+ */
623
+ consume<K extends keyof T & string>(topic: K, options?: ConsumerOptions<T>): AsyncIterableIterator<EventEnvelope<T[K]>>;
624
+ /**
625
+ * Pause message delivery for specific topic-partitions on a consumer group.
626
+ * The consumer remains connected and its committed offsets are preserved —
627
+ * only polling is suspended. Call `resumeConsumer` to restart delivery.
628
+ *
629
+ * @param groupId Consumer group to pause. Defaults to the client's default groupId.
630
+ * @param assignments Topic-partition pairs to pause.
631
+ */
632
+ pauseConsumer(groupId: string | undefined, assignments: Array<{
633
+ topic: string;
634
+ partitions: number[];
635
+ }>): void;
636
+ /**
637
+ * Resume message delivery for previously paused topic-partitions.
638
+ *
639
+ * @param groupId Consumer group to resume. Defaults to the client's default groupId.
640
+ * @param assignments Topic-partition pairs to resume.
641
+ */
642
+ resumeConsumer(groupId: string | undefined, assignments: Array<{
643
+ topic: string;
644
+ partitions: number[];
645
+ }>): void;
490
646
  /**
491
647
  * Drain in-flight handlers, then disconnect all producers, consumers, and admin.
492
648
  * @param drainTimeoutMs Max ms to wait for in-flight handlers (default 30 000).
@@ -575,4 +731,4 @@ interface SubscribeRetryOptions {
575
731
  backoffMs?: number;
576
732
  }
577
733
 
578
- export { decodeHeaders as A, type BatchMessageItem as B, type ClientId as C, type DeduplicationOptions as D, type EnvelopeHeaderOptions as E, extractEnvelope as F, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, getEnvelopeContext as J, type KafkaInstrumentation as K, runWithEnvelopeContext as L, type MessageHeaders as M, topic as N, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type KafkaClientOptions as a, type ConsumerOptions as b, type TopicDescriptor as c, type KafkaHealthResult as d, type BatchMeta as e, type BeforeConsumeResult as f, type ConsumerHandle as g, type ConsumerInterceptor as h, type DlqReason as i, type EventEnvelope as j, HEADER_EVENT_ID as k, HEADER_LAMPORT_CLOCK as l, HEADER_SCHEMA_VERSION as m, HEADER_TIMESTAMP as n, HEADER_TRACEPARENT as o, type InferSchema as p, type KafkaLogger as q, type KafkaMetrics as r, type MessageLostContext as s, type SchemaParseContext as t, type SendOptions as u, type SubscribeRetryOptions as v, type TTopicMessageMap as w, type TopicsFrom as x, type TransactionContext as y, buildEnvelopeHeaders as z };
734
+ export { type TransactionContext as A, type BatchMessageItem as B, type ClientId as C, type DeduplicationOptions as D, type EnvelopeHeaderOptions as E, buildEnvelopeHeaders as F, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, decodeHeaders as J, type KafkaInstrumentation as K, extractEnvelope as L, type MessageHeaders as M, getEnvelopeContext as N, runWithEnvelopeContext as O, topic as P, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type KafkaClientOptions as a, type ConsumerOptions as b, type TopicDescriptor as c, type KafkaHealthResult as d, type BatchMeta as e, type BeforeConsumeResult as f, type CircuitBreakerOptions as g, type ConsumerHandle as h, type ConsumerInterceptor as i, type DlqReason as j, type DlqReplayOptions as k, type EventEnvelope as l, HEADER_EVENT_ID as m, HEADER_LAMPORT_CLOCK as n, HEADER_SCHEMA_VERSION as o, HEADER_TIMESTAMP as p, HEADER_TRACEPARENT as q, type InferSchema as r, type KafkaLogger as s, type KafkaMetrics as t, type MessageLostContext as u, type SchemaParseContext as v, type SendOptions as w, type SubscribeRetryOptions as x, type TTopicMessageMap as y, type TopicsFrom as z };
@@ -277,6 +277,41 @@ interface DeduplicationOptions {
277
277
  */
278
278
  duplicatesTopic?: string;
279
279
  }
280
+ /**
281
+ * Options for the per-partition circuit breaker.
282
+ *
283
+ * The circuit breaker tracks recent message outcomes in a **sliding window** and
284
+ * opens (pauses the partition) when too many failures accumulate:
285
+ *
286
+ * - **CLOSED** — normal operation. Each DLQ route or successful delivery is recorded
287
+ * in the window. When `failuresInWindow >= threshold` the circuit opens.
288
+ * - **OPEN** — the partition is paused. After `recoveryMs` the circuit moves to HALF-OPEN.
289
+ * - **HALF-OPEN** — the partition is resumed. If the next `halfOpenSuccesses` messages
290
+ * succeed the circuit closes; a new failure immediately re-opens it.
291
+ */
292
+ interface CircuitBreakerOptions {
293
+ /**
294
+ * Number of failures within the sliding window required to open the circuit.
295
+ * A failure is any message that ends up in the DLQ.
296
+ * Default: `5`.
297
+ */
298
+ threshold?: number;
299
+ /**
300
+ * Time (ms) to keep the circuit OPEN before attempting recovery (HALF_OPEN).
301
+ * Default: `30_000` (30 s).
302
+ */
303
+ recoveryMs?: number;
304
+ /**
305
+ * Number of outcomes (successes + failures) to keep in the sliding window.
306
+ * Default: `threshold * 2` (minimum `10`).
307
+ */
308
+ windowSize?: number;
309
+ /**
310
+ * Number of consecutive successes in HALF-OPEN state required to close the
311
+ * circuit. Default: `1`.
312
+ */
313
+ halfOpenSuccesses?: number;
314
+ }
280
315
  /** Options for configuring a Kafka consumer. */
281
316
  interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
282
317
  /** Override the default consumer group ID from the constructor. */
@@ -326,6 +361,21 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
326
361
  * Messages without the header are passed through unchanged.
327
362
  */
328
363
  deduplication?: DeduplicationOptions;
364
+ /**
365
+ * Drop messages older than this threshold, measured in milliseconds from
366
+ * the `x-timestamp` header set by the producer.
367
+ *
368
+ * Expired messages are routed to `{topic}.dlq` when `dlq: true`, otherwise
369
+ * `onMessageLost` is called. The handler is never invoked for an expired message.
370
+ */
371
+ messageTtlMs?: number;
372
+ /**
373
+ * Automatically pause a partition when too many failures accumulate within
374
+ * its sliding window, then resume after a recovery window.
375
+ *
376
+ * See `CircuitBreakerOptions` for the sliding-window semantics.
377
+ */
378
+ circuitBreaker?: CircuitBreakerOptions;
329
379
  }
330
380
  /** Configuration for consumer retry behavior. */
331
381
  interface RetryOptions {
@@ -372,8 +422,28 @@ type BeforeConsumeResult = (() => void) | {
372
422
  * - `'validation-error'` — schema validation failed before the handler ran.
373
423
  * - `'lamport-clock-duplicate'` — message was identified as a Lamport-clock duplicate
374
424
  * and `deduplication.strategy` is `'dlq'`.
425
+ * - `'ttl-expired'` — message age exceeded `messageTtlMs` before the handler ran.
375
426
  */
376
- type DlqReason = "handler-error" | "validation-error" | "lamport-clock-duplicate";
427
+ type DlqReason = "handler-error" | "validation-error" | "lamport-clock-duplicate" | "ttl-expired";
428
+ /** Options for `replayDlq`. */
429
+ interface DlqReplayOptions {
430
+ /**
431
+ * Override the target topic to re-publish to.
432
+ * Default: reads the `x-dlq-original-topic` header from each DLQ message.
433
+ */
434
+ targetTopic?: string;
435
+ /**
436
+ * Dry-run mode — log what would be replayed without actually sending.
437
+ * Increments the `replayed` counter so you can see what would happen.
438
+ */
439
+ dryRun?: boolean;
440
+ /**
441
+ * Optional filter — return `false` to skip a message.
442
+ * @param headers All headers on the DLQ message (including `x-dlq-*` metadata).
443
+ * @param value Raw message value (JSON string).
444
+ */
445
+ filter?: (headers: MessageHeaders, value: string) => boolean;
446
+ }
377
447
  /**
378
448
  * Snapshot of internal event counters accumulated since client creation
379
449
  * (or since the last `resetMetrics()` call).
@@ -482,11 +552,97 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
482
552
  getClientId(): ClientId;
483
553
  /**
484
554
  * Return a snapshot of internal event counters (retry / DLQ / dedup).
555
+ * - `getMetrics()` — aggregate across all topics.
556
+ * - `getMetrics(topic)` — counters for a specific topic only; returns all-zero
557
+ * if no events have been observed for that topic yet.
558
+ *
485
559
  * Counters accumulate since client creation or the last `resetMetrics()` call.
486
560
  */
487
- getMetrics(): Readonly<KafkaMetrics>;
488
- /** Reset all internal event counters to zero. */
489
- resetMetrics(): void;
561
+ getMetrics(topic?: string): Readonly<KafkaMetrics>;
562
+ /**
563
+ * Reset internal event counters to zero.
564
+ * - `resetMetrics()` — reset all topics.
565
+ * - `resetMetrics(topic)` — reset a single topic only.
566
+ */
567
+ resetMetrics(topic?: string): void;
568
+ /**
569
+ * Consume all messages currently in `{topic}.dlq`, strip the `x-dlq-*` metadata
570
+ * headers, and re-publish each message to its original topic (or `options.targetTopic`).
571
+ *
572
+ * A temporary consumer group is created and torn down automatically. The DLQ topic
573
+ * itself is not modified — messages remain there after replay.
574
+ *
575
+ * @returns `{ replayed, skipped }` — counts of re-published vs skipped messages.
576
+ */
577
+ replayDlq(topic: string, options?: DlqReplayOptions): Promise<{
578
+ replayed: number;
579
+ skipped: number;
580
+ }>;
581
+ /**
582
+ * Reset committed offsets for a consumer group to the earliest or latest position.
583
+ *
584
+ * The consumer group must be inactive (no running consumers) — Kafka does not
585
+ * allow offset resets while members are actively consuming. Call
586
+ * `stopConsumer(groupId)` first.
587
+ *
588
+ * @param groupId Consumer group to reset. Defaults to the client's default groupId.
589
+ * @param topic Topic to reset.
590
+ * @param position `'earliest'` seeks to the first available offset; `'latest'`
591
+ * seeks past the last message (consumer will only see new messages).
592
+ */
593
+ resetOffsets(groupId: string | undefined, topic: string, position: "earliest" | "latest"): Promise<void>;
594
+ /**
595
+ * Seek specific partitions to explicit offsets.
596
+ * More granular than `resetOffsets` — each partition can target a different offset.
597
+ *
598
+ * The consumer group must be inactive. Assignments for different topics are batched
599
+ * into one admin call per topic.
600
+ *
601
+ * @param groupId Consumer group to seek. Defaults to the client's default groupId.
602
+ * @param assignments Array of `{ topic, partition, offset }` tuples.
603
+ */
604
+ seekToOffset(groupId: string | undefined, assignments: Array<{
605
+ topic: string;
606
+ partition: number;
607
+ offset: string;
608
+ }>): Promise<void>;
609
+ /**
610
+ * Consume messages as an async iterator. Useful for scripts, migrations, and
611
+ * one-off processing where the full `startConsumer` lifecycle is unnecessary.
612
+ *
613
+ * Breaking out of the loop (or calling `return()` on the iterator) stops the
614
+ * underlying consumer automatically.
615
+ *
616
+ * @example
617
+ * ```ts
618
+ * for await (const envelope of kafka.consume('orders')) {
619
+ * await process(envelope);
620
+ * }
621
+ * ```
622
+ */
623
+ consume<K extends keyof T & string>(topic: K, options?: ConsumerOptions<T>): AsyncIterableIterator<EventEnvelope<T[K]>>;
624
+ /**
625
+ * Pause message delivery for specific topic-partitions on a consumer group.
626
+ * The consumer remains connected and its committed offsets are preserved —
627
+ * only polling is suspended. Call `resumeConsumer` to restart delivery.
628
+ *
629
+ * @param groupId Consumer group to pause. Defaults to the client's default groupId.
630
+ * @param assignments Topic-partition pairs to pause.
631
+ */
632
+ pauseConsumer(groupId: string | undefined, assignments: Array<{
633
+ topic: string;
634
+ partitions: number[];
635
+ }>): void;
636
+ /**
637
+ * Resume message delivery for previously paused topic-partitions.
638
+ *
639
+ * @param groupId Consumer group to resume. Defaults to the client's default groupId.
640
+ * @param assignments Topic-partition pairs to resume.
641
+ */
642
+ resumeConsumer(groupId: string | undefined, assignments: Array<{
643
+ topic: string;
644
+ partitions: number[];
645
+ }>): void;
490
646
  /**
491
647
  * Drain in-flight handlers, then disconnect all producers, consumers, and admin.
492
648
  * @param drainTimeoutMs Max ms to wait for in-flight handlers (default 30 000).
@@ -575,4 +731,4 @@ interface SubscribeRetryOptions {
575
731
  backoffMs?: number;
576
732
  }
577
733
 
578
- export { decodeHeaders as A, type BatchMessageItem as B, type ClientId as C, type DeduplicationOptions as D, type EnvelopeHeaderOptions as E, extractEnvelope as F, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, getEnvelopeContext as J, type KafkaInstrumentation as K, runWithEnvelopeContext as L, type MessageHeaders as M, topic as N, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type KafkaClientOptions as a, type ConsumerOptions as b, type TopicDescriptor as c, type KafkaHealthResult as d, type BatchMeta as e, type BeforeConsumeResult as f, type ConsumerHandle as g, type ConsumerInterceptor as h, type DlqReason as i, type EventEnvelope as j, HEADER_EVENT_ID as k, HEADER_LAMPORT_CLOCK as l, HEADER_SCHEMA_VERSION as m, HEADER_TIMESTAMP as n, HEADER_TRACEPARENT as o, type InferSchema as p, type KafkaLogger as q, type KafkaMetrics as r, type MessageLostContext as s, type SchemaParseContext as t, type SendOptions as u, type SubscribeRetryOptions as v, type TTopicMessageMap as w, type TopicsFrom as x, type TransactionContext as y, buildEnvelopeHeaders as z };
734
+ export { type TransactionContext as A, type BatchMessageItem as B, type ClientId as C, type DeduplicationOptions as D, type EnvelopeHeaderOptions as E, buildEnvelopeHeaders as F, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, decodeHeaders as J, type KafkaInstrumentation as K, extractEnvelope as L, type MessageHeaders as M, getEnvelopeContext as N, runWithEnvelopeContext as O, topic as P, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type KafkaClientOptions as a, type ConsumerOptions as b, type TopicDescriptor as c, type KafkaHealthResult as d, type BatchMeta as e, type BeforeConsumeResult as f, type CircuitBreakerOptions as g, type ConsumerHandle as h, type ConsumerInterceptor as i, type DlqReason as j, type DlqReplayOptions as k, type EventEnvelope as l, HEADER_EVENT_ID as m, HEADER_LAMPORT_CLOCK as n, HEADER_SCHEMA_VERSION as o, HEADER_TIMESTAMP as p, HEADER_TRACEPARENT as q, type InferSchema as r, type KafkaLogger as s, type KafkaMetrics as t, type MessageLostContext as u, type SchemaParseContext as v, type SendOptions as w, type SubscribeRetryOptions as x, type TTopicMessageMap as y, type TopicsFrom as z };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@drarzter/kafka-client",
3
- "version": "0.6.7",
3
+ "version": "0.7.0",
4
4
  "description": "Type-safe Kafka client wrapper for NestJS with typed topic-message maps",
5
5
  "license": "MIT",
6
6
  "main": "./dist/index.js",