@drarzter/kafka-client 0.6.6 → 0.6.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +126 -4
- package/dist/{chunk-KCUKXR6B.mjs → chunk-4526Y4PV.mjs} +458 -40
- package/dist/chunk-4526Y4PV.mjs.map +1 -0
- package/dist/core.d.mts +29 -3
- package/dist/core.d.ts +29 -3
- package/dist/core.js +457 -39
- package/dist/core.js.map +1 -1
- package/dist/core.mjs +1 -1
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +457 -39
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1 -1
- package/dist/index.mjs.map +1 -1
- package/dist/otel.d.mts +1 -1
- package/dist/otel.d.ts +1 -1
- package/dist/testing.d.mts +1 -1
- package/dist/testing.d.ts +1 -1
- package/dist/testing.js +11 -0
- package/dist/testing.js.map +1 -1
- package/dist/testing.mjs +11 -0
- package/dist/testing.mjs.map +1 -1
- package/dist/{types-CTwLrJVU.d.mts → types-736Gj0J3.d.mts} +142 -3
- package/dist/{types-CTwLrJVU.d.ts → types-736Gj0J3.d.ts} +142 -3
- package/package.json +1 -1
- package/dist/chunk-KCUKXR6B.mjs.map +0 -1
package/dist/index.mjs
CHANGED
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/nest/kafka.module.ts","../src/nest/kafka.constants.ts","../src/nest/kafka.explorer.ts","../src/nest/kafka.decorator.ts","../src/nest/kafka.health.ts"],"sourcesContent":["import { Module, DynamicModule, Provider, Logger } from \"@nestjs/common\";\nimport { DiscoveryModule } from \"@nestjs/core\";\nimport {\n KafkaClient,\n ClientId,\n GroupId,\n TopicMapConstraint,\n KafkaInstrumentation,\n KafkaClientOptions,\n} from \"../client/kafka.client\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\nimport { KafkaExplorer } from \"./kafka.explorer\";\n\n/** Shared configuration fields for both `register()` and `registerAsync()`. */\ninterface KafkaModuleBaseOptions {\n /** Optional name for multi-client setups. Must match `@InjectKafkaClient(name)`. */\n name?: string;\n /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */\n isGlobal?: boolean;\n}\n\n/** Synchronous configuration for `KafkaModule.register()`. */\nexport interface KafkaModuleOptions extends KafkaModuleBaseOptions {\n /** Unique Kafka client identifier. */\n clientId: ClientId;\n /** Consumer group identifier. */\n groupId: GroupId;\n /** List of Kafka broker addresses. */\n brokers: string[];\n /** Auto-create topics via admin on first use (send/consume). Useful for development. */\n autoCreateTopics?: boolean;\n /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */\n strictSchemas?: boolean;\n /** Number of partitions for auto-created topics. Default: `1`. */\n numPartitions?: number;\n /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */\n instrumentation?: KafkaInstrumentation[];\n /** Called when a message is dropped without being sent to a DLQ. @see `KafkaClientOptions.onMessageLost` */\n onMessageLost?: KafkaClientOptions[\"onMessageLost\"];\n /** Called whenever a consumer group rebalance occurs. @see `KafkaClientOptions.onRebalance` */\n onRebalance?: KafkaClientOptions[\"onRebalance\"];\n}\n\n/** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */\nexport interface KafkaModuleAsyncOptions extends KafkaModuleBaseOptions {\n imports?: any[];\n useFactory: (\n ...args: any[]\n ) => KafkaModuleOptions | Promise<KafkaModuleOptions>;\n inject?: any[];\n}\n\n/**\n * NestJS dynamic module for registering type-safe Kafka clients.\n * Use `register()` for static config or `registerAsync()` for DI-based config.\n */\n@Module({})\nexport class KafkaModule {\n /** Register a Kafka client with static options. */\n static register<T extends TopicMapConstraint<T>>(\n options: KafkaModuleOptions,\n ): DynamicModule {\n const token = getKafkaClientToken(options.name);\n\n const kafkaClientProvider: Provider = {\n provide: token,\n useFactory: () => KafkaModule.buildClient<T>(options),\n };\n\n return {\n global: options.isGlobal ?? false,\n module: KafkaModule,\n imports: [DiscoveryModule],\n providers: [kafkaClientProvider, KafkaExplorer],\n exports: [kafkaClientProvider],\n };\n }\n\n /** Register a Kafka client with async/factory-based options. 
*/\n static registerAsync<T extends TopicMapConstraint<T>>(\n asyncOptions: KafkaModuleAsyncOptions,\n ): DynamicModule {\n const token = getKafkaClientToken(asyncOptions.name);\n\n const kafkaClientProvider: Provider = {\n provide: token,\n useFactory: async (...args: any[]): Promise<KafkaClient<T>> =>\n KafkaModule.buildClient<T>(await asyncOptions.useFactory(...args)),\n inject: asyncOptions.inject || [],\n };\n\n return {\n global: asyncOptions.isGlobal ?? false,\n module: KafkaModule,\n imports: [...(asyncOptions.imports || []), DiscoveryModule],\n providers: [kafkaClientProvider, KafkaExplorer],\n exports: [kafkaClientProvider],\n };\n }\n\n private static async buildClient<T extends TopicMapConstraint<T>>(\n options: KafkaModuleOptions,\n ): Promise<KafkaClient<T>> {\n const client = new KafkaClient<T>(\n options.clientId,\n options.groupId,\n options.brokers,\n {\n autoCreateTopics: options.autoCreateTopics,\n strictSchemas: options.strictSchemas,\n numPartitions: options.numPartitions,\n instrumentation: options.instrumentation,\n onMessageLost: options.onMessageLost,\n onRebalance: options.onRebalance,\n logger: new Logger(`KafkaClient:${options.clientId}`),\n },\n );\n await client.connectProducer();\n return client;\n }\n\n}\n","/** Default DI token for the Kafka client. */\nexport const KAFKA_CLIENT = \"KAFKA_CLIENT\";\n\n/** Returns the DI token for a named (or default) Kafka client instance. */\nexport const getKafkaClientToken = (name?: string): string =>\n name ? `KAFKA_CLIENT_${name}` : KAFKA_CLIENT;\n","import { Inject, Injectable, OnModuleInit, Logger } from \"@nestjs/common\";\nimport { DiscoveryService, ModuleRef } from \"@nestjs/core\";\nimport { KafkaClient } from \"../client/kafka.client\";\nimport {\n KAFKA_SUBSCRIBER_METADATA,\n KafkaSubscriberMetadata,\n} from \"./kafka.decorator\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\n\ninterface SubscriberEntry extends KafkaSubscriberMetadata {\n methodName: string | symbol;\n}\n\n/** Discovers `@SubscribeTo()` decorators and wires them to their Kafka clients on startup. 
*/\n@Injectable()\nexport class KafkaExplorer implements OnModuleInit {\n private readonly logger = new Logger(KafkaExplorer.name);\n\n constructor(\n @Inject(DiscoveryService)\n private readonly discoveryService: DiscoveryService,\n @Inject(ModuleRef)\n private readonly moduleRef: ModuleRef,\n ) {}\n\n async onModuleInit() {\n const providers = this.discoveryService.getProviders();\n\n for (const wrapper of providers) {\n const { instance } = wrapper;\n if (!instance || typeof instance !== \"object\") continue;\n\n const metadata: SubscriberEntry[] | undefined = Reflect.getMetadata(\n KAFKA_SUBSCRIBER_METADATA,\n instance.constructor,\n );\n\n if (!metadata || metadata.length === 0) continue;\n\n for (const entry of metadata) {\n const token = getKafkaClientToken(entry.clientName);\n let client: KafkaClient<any>;\n\n try {\n client = this.moduleRef.get(token, { strict: false });\n } catch {\n this.logger.error(\n `KafkaClient \"${entry.clientName || \"default\"}\" not found for @SubscribeTo on ${instance.constructor.name}.${String(entry.methodName)}`,\n );\n continue;\n }\n\n const handler = (instance as any)[entry.methodName].bind(instance);\n\n const consumerOptions = { ...entry.options };\n if (entry.schemas) {\n consumerOptions.schemas = entry.schemas;\n }\n\n if (entry.batch) {\n await client.startBatchConsumer(\n entry.topics as any,\n async (envelopes: any[], meta: any) => {\n await handler(envelopes, meta);\n },\n consumerOptions,\n );\n } else {\n await client.startConsumer(\n entry.topics as any,\n async (envelope: any) => {\n await handler(envelope);\n },\n consumerOptions,\n );\n }\n\n this.logger.log(\n `Registered @SubscribeTo(${entry.topics.join(\", \")})${entry.batch ? \" [batch]\" : \"\"} on ${instance.constructor.name}.${String(entry.methodName)}`,\n );\n }\n }\n }\n}\n","import { Inject } from \"@nestjs/common\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\nimport { ConsumerOptions } from \"../client/kafka.client\";\nimport { TopicDescriptor, SchemaLike } from \"../client/message/topic\";\n\nexport const KAFKA_SUBSCRIBER_METADATA = \"KAFKA_SUBSCRIBER_METADATA\";\n\nexport interface KafkaSubscriberMetadata {\n topics: string[];\n schemas?: Map<string, SchemaLike>;\n options?: ConsumerOptions;\n clientName?: string;\n batch?: boolean;\n methodName?: string | symbol;\n}\n\n/** Inject a `KafkaClient` instance. Pass a name to target a specific named client. */\nexport const InjectKafkaClient = (name?: string): ParameterDecorator =>\n Inject(getKafkaClientToken(name));\n\n/**\n * Decorator that auto-subscribes a method to Kafka topics on module init.\n * The decorated method receives `(message, topic)` for each consumed message.\n */\nexport const SubscribeTo = (\n topics:\n | string\n | string[]\n | TopicDescriptor\n | TopicDescriptor[]\n | (string | TopicDescriptor)[],\n options?: ConsumerOptions & { clientName?: string; batch?: boolean },\n): MethodDecorator => {\n const arr = Array.isArray(topics) ? topics : [topics];\n const topicsArray = arr.map((t) => (typeof t === \"string\" ? 
t : t.__topic));\n\n // Extract schemas from descriptors that have them\n const schemas = new Map<string, SchemaLike>();\n for (const t of arr) {\n if (typeof t !== \"string\" && t.__schema) {\n schemas.set(t.__topic, t.__schema);\n }\n }\n\n const { clientName, batch, ...consumerOptions } = options || {};\n\n return (target, propertyKey, _descriptor) => {\n const existing: KafkaSubscriberMetadata[] =\n Reflect.getMetadata(KAFKA_SUBSCRIBER_METADATA, target.constructor) || [];\n\n Reflect.defineMetadata(\n KAFKA_SUBSCRIBER_METADATA,\n [\n ...existing,\n {\n topics: topicsArray,\n schemas: schemas.size > 0 ? schemas : undefined,\n options: Object.keys(consumerOptions).length\n ? consumerOptions\n : undefined,\n clientName,\n batch,\n methodName: propertyKey,\n },\n ],\n target.constructor,\n );\n };\n};\n","import { Injectable } from \"@nestjs/common\";\nimport type { IKafkaClient, KafkaHealthResult, TopicMapConstraint } from \"../client/types\";\nexport type { KafkaHealthResult } from \"../client/types\";\n\n/** Health check service. Call `check(client)` to verify broker connectivity. */\n@Injectable()\nexport class KafkaHealthIndicator {\n async check<T extends TopicMapConstraint<T>>(\n client: IKafkaClient<T>,\n ): Promise<KafkaHealthResult> {\n return client.checkStatus();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,QAAiC,UAAAA,eAAc;AACxD,SAAS,uBAAuB;;;ACAzB,IAAM,eAAe;AAGrB,IAAM,sBAAsB,CAAC,SAClC,OAAO,gBAAgB,IAAI,KAAK;;;ACLlC,SAAS,UAAAC,SAAQ,YAA0B,cAAc;AACzD,SAAS,kBAAkB,iBAAiB;;;ACD5C,SAAS,cAAc;AAKhB,IAAM,4BAA4B;AAYlC,IAAM,oBAAoB,CAAC,SAChC,OAAO,oBAAoB,IAAI,CAAC;AAM3B,IAAM,cAAc,CACzB,QAMA,YACoB;AACpB,QAAM,MAAM,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AACpD,QAAM,cAAc,IAAI,IAAI,CAAC,MAAO,OAAO,MAAM,WAAW,IAAI,EAAE,OAAQ;AAG1E,QAAM,UAAU,oBAAI,IAAwB;AAC5C,aAAW,KAAK,KAAK;AACnB,QAAI,OAAO,MAAM,YAAY,EAAE,UAAU;AACvC,cAAQ,IAAI,EAAE,SAAS,EAAE,QAAQ;AAAA,IACnC;AAAA,EACF;AAEA,QAAM,EAAE,YAAY,OAAO,GAAG,gBAAgB,IAAI,WAAW,CAAC;AAE9D,SAAO,CAAC,QAAQ,aAAa,gBAAgB;AAC3C,UAAM,WACJ,QAAQ,YAAY,2BAA2B,OAAO,WAAW,KAAK,CAAC;AAEzE,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,QACE,GAAG;AAAA,QACH;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,QAAQ,OAAO,IAAI,UAAU;AAAA,UACtC,SAAS,OAAO,KAAK,eAAe,EAAE,SAClC,kBACA;AAAA,UACJ;AAAA,UACA;AAAA,UACA,YAAY;AAAA,QACd;AAAA,MACF;AAAA,MACA,OAAO;AAAA,IACT;AAAA,EACF;AACF;;;ADrDO,IAAM,gBAAN,MAA4C;AAAA,EAGjD,YAEmB,kBAEA,WACjB;AAHiB;AAEA;AAAA,EAChB;AAAA,EAPc,SAAS,IAAI,OAAO,cAAc,IAAI;AAAA,EASvD,MAAM,eAAe;AACnB,UAAM,YAAY,KAAK,iBAAiB,aAAa;AAErD,eAAW,WAAW,WAAW;AAC/B,YAAM,EAAE,SAAS,IAAI;AACrB,UAAI,CAAC,YAAY,OAAO,aAAa,SAAU;AAE/C,YAAM,WAA0C,QAAQ;AAAA,QACtD;AAAA,QACA,SAAS;AAAA,MACX;AAEA,UAAI,CAAC,YAAY,SAAS,WAAW,EAAG;AAExC,iBAAW,SAAS,UAAU;AAC5B,cAAM,QAAQ,oBAAoB,MAAM,UAAU;AAClD,YAAI;AAEJ,YAAI;AACF,mBAAS,KAAK,UAAU,IAAI,OAAO,EAAE,QAAQ,MAAM,CAAC;AAAA,QACtD,QAAQ;AACN,eAAK,OAAO;AAAA,YACV,gBAAgB,MAAM,cAAc,SAAS,mCAAmC,SAAS,YAAY,IAAI,IAAI,OAAO,MAAM,UAAU,CAAC;AAAA,UACvI;AACA;AAAA,QACF;AAEA,cAAM,UAAW,SAAiB,MAAM,UAAU,EAAE,KAAK,QAAQ;AAEjE,cAAM,kBAAkB,EAAE,GAAG,MAAM,QAAQ;AAC3C,YAAI,MAAM,SAAS;AACjB,0BAAgB,UAAU,MAAM;AAAA,QAClC;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,OAAO,WAAkB,SAAc;AACrC,oBAAM,QAAQ,WAAW,IAAI;AAAA,YAC/B;AAAA,YACA;AAAA,UACF;AAAA,QACF,OAAO;AACL,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,OAAO,aAAkB;AACvB,oBAAM,QAAQ,QAAQ;AAAA,YACxB;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,2BAA2B,MAAM,OAAO,KAAK,IAAI,CAAC,IAAI,MAAM,QAAQ,aAAa,EAAE,OAAO,SAAS,YAAY,IAAI,IAAI,OAAO,MAAM,UAAU,CAAC;AAAA,QACjJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AApEa,gBAAN;AAAA,EADN,WAAW;AAAA,EAKP,mBAAAC,QAAO,gBAAgB;AAAA,EAEvB,mBAAAA,QAAO,SAAS;AAAA,GA
NR;;;AF0CN,IAAM,cAAN,MAAkB;AAAA;AAAA,EAEvB,OAAO,SACL,SACe;AACf,UAAM,QAAQ,oBAAoB,QAAQ,IAAI;AAE9C,UAAM,sBAAgC;AAAA,MACpC,SAAS;AAAA,MACT,YAAY,MAAM,YAAY,YAAe,OAAO;AAAA,IACtD;AAEA,WAAO;AAAA,MACL,QAAQ,QAAQ,YAAY;AAAA,MAC5B,QAAQ;AAAA,MACR,SAAS,CAAC,eAAe;AAAA,MACzB,WAAW,CAAC,qBAAqB,aAAa;AAAA,MAC9C,SAAS,CAAC,mBAAmB;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,cACL,cACe;AACf,UAAM,QAAQ,oBAAoB,aAAa,IAAI;AAEnD,UAAM,sBAAgC;AAAA,MACpC,SAAS;AAAA,MACT,YAAY,UAAU,SACpB,YAAY,YAAe,MAAM,aAAa,WAAW,GAAG,IAAI,CAAC;AAAA,MACnE,QAAQ,aAAa,UAAU,CAAC;AAAA,IAClC;AAEA,WAAO;AAAA,MACL,QAAQ,aAAa,YAAY;AAAA,MACjC,QAAQ;AAAA,MACR,SAAS,CAAC,GAAI,aAAa,WAAW,CAAC,GAAI,eAAe;AAAA,MAC1D,WAAW,CAAC,qBAAqB,aAAa;AAAA,MAC9C,SAAS,CAAC,mBAAmB;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,aAAqB,YACnB,SACyB;AACzB,UAAM,SAAS,IAAI;AAAA,MACjB,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR;AAAA,QACE,kBAAkB,QAAQ;AAAA,QAC1B,eAAe,QAAQ;AAAA,QACvB,eAAe,QAAQ;AAAA,QACvB,iBAAiB,QAAQ;AAAA,QACzB,eAAe,QAAQ;AAAA,QACvB,aAAa,QAAQ;AAAA,QACrB,QAAQ,IAAIC,QAAO,eAAe,QAAQ,QAAQ,EAAE;AAAA,MACtD;AAAA,IACF;AACA,UAAM,OAAO,gBAAgB;AAC7B,WAAO;AAAA,EACT;AAEF;AAhEa,cAAN;AAAA,EADN,OAAO,CAAC,CAAC;AAAA,GACG;;;AIzDb,SAAS,cAAAC,mBAAkB;AAMpB,IAAM,uBAAN,MAA2B;AAAA,EAChC,MAAM,MACJ,QAC4B;AAC5B,WAAO,OAAO,YAAY;AAAA,EAC5B;AACF;AANa,uBAAN;AAAA,EADNC,YAAW;AAAA,GACC;","names":["Logger","Inject","Inject","Logger","Injectable","Injectable"]}
+
{"version":3,"sources":["../src/nest/kafka.module.ts","../src/nest/kafka.constants.ts","../src/nest/kafka.explorer.ts","../src/nest/kafka.decorator.ts","../src/nest/kafka.health.ts"],"sourcesContent":["import { Module, DynamicModule, Provider, Logger } from \"@nestjs/common\";\nimport { DiscoveryModule } from \"@nestjs/core\";\nimport {\n KafkaClient,\n ClientId,\n GroupId,\n TopicMapConstraint,\n KafkaInstrumentation,\n KafkaClientOptions,\n} from \"../client/kafka.client\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\nimport { KafkaExplorer } from \"./kafka.explorer\";\n\n/** Shared configuration fields for both `register()` and `registerAsync()`. */\ninterface KafkaModuleBaseOptions {\n /** Optional name for multi-client setups. Must match `@InjectKafkaClient(name)`. */\n name?: string;\n /** If true, makes KAFKA_CLIENT available globally without importing KafkaModule in every feature module. */\n isGlobal?: boolean;\n}\n\n/** Synchronous configuration for `KafkaModule.register()`. */\nexport interface KafkaModuleOptions extends KafkaModuleBaseOptions {\n /** Unique Kafka client identifier. */\n clientId: ClientId;\n /** Consumer group identifier. */\n groupId: GroupId;\n /** List of Kafka broker addresses. */\n brokers: string[];\n /** Auto-create topics via admin on first use (send/consume). Useful for development. */\n autoCreateTopics?: boolean;\n /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */\n strictSchemas?: boolean;\n /** Number of partitions for auto-created topics. Default: `1`. */\n numPartitions?: number;\n /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */\n instrumentation?: KafkaInstrumentation[];\n /** Called when a message is dropped without being sent to a DLQ. @see `KafkaClientOptions.onMessageLost` */\n onMessageLost?: KafkaClientOptions[\"onMessageLost\"];\n /** Called whenever a consumer group rebalance occurs. @see `KafkaClientOptions.onRebalance` */\n onRebalance?: KafkaClientOptions[\"onRebalance\"];\n}\n\n/** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */\nexport interface KafkaModuleAsyncOptions extends KafkaModuleBaseOptions {\n imports?: any[];\n useFactory: (\n ...args: any[]\n ) => KafkaModuleOptions | Promise<KafkaModuleOptions>;\n inject?: any[];\n}\n\n/**\n * NestJS dynamic module for registering type-safe Kafka clients.\n * Use `register()` for static config or `registerAsync()` for DI-based config.\n */\n@Module({})\nexport class KafkaModule {\n /** Register a Kafka client with static options. */\n static register<T extends TopicMapConstraint<T>>(\n options: KafkaModuleOptions,\n ): DynamicModule {\n const token = getKafkaClientToken(options.name);\n\n const kafkaClientProvider: Provider = {\n provide: token,\n useFactory: () => KafkaModule.buildClient<T>(options),\n };\n\n return {\n global: options.isGlobal ?? false,\n module: KafkaModule,\n imports: [DiscoveryModule],\n providers: [kafkaClientProvider, KafkaExplorer],\n exports: [kafkaClientProvider],\n };\n }\n\n /** Register a Kafka client with async/factory-based options. 
*/\n static registerAsync<T extends TopicMapConstraint<T>>(\n asyncOptions: KafkaModuleAsyncOptions,\n ): DynamicModule {\n const token = getKafkaClientToken(asyncOptions.name);\n\n const kafkaClientProvider: Provider = {\n provide: token,\n useFactory: async (...args: any[]): Promise<KafkaClient<T>> =>\n KafkaModule.buildClient<T>(await asyncOptions.useFactory(...args)),\n inject: asyncOptions.inject || [],\n };\n\n return {\n global: asyncOptions.isGlobal ?? false,\n module: KafkaModule,\n imports: [...(asyncOptions.imports || []), DiscoveryModule],\n providers: [kafkaClientProvider, KafkaExplorer],\n exports: [kafkaClientProvider],\n };\n }\n\n private static async buildClient<T extends TopicMapConstraint<T>>(\n options: KafkaModuleOptions,\n ): Promise<KafkaClient<T>> {\n const client = new KafkaClient<T>(\n options.clientId,\n options.groupId,\n options.brokers,\n {\n autoCreateTopics: options.autoCreateTopics,\n strictSchemas: options.strictSchemas,\n numPartitions: options.numPartitions,\n instrumentation: options.instrumentation,\n onMessageLost: options.onMessageLost,\n onRebalance: options.onRebalance,\n logger: new Logger(`KafkaClient:${options.clientId}`),\n },\n );\n await client.connectProducer();\n return client;\n }\n}\n","/** Default DI token for the Kafka client. */\nexport const KAFKA_CLIENT = \"KAFKA_CLIENT\";\n\n/** Returns the DI token for a named (or default) Kafka client instance. */\nexport const getKafkaClientToken = (name?: string): string =>\n name ? `KAFKA_CLIENT_${name}` : KAFKA_CLIENT;\n","import { Inject, Injectable, OnModuleInit, Logger } from \"@nestjs/common\";\nimport { DiscoveryService, ModuleRef } from \"@nestjs/core\";\nimport { KafkaClient } from \"../client/kafka.client\";\nimport {\n KAFKA_SUBSCRIBER_METADATA,\n KafkaSubscriberMetadata,\n} from \"./kafka.decorator\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\n\ninterface SubscriberEntry extends KafkaSubscriberMetadata {\n methodName: string | symbol;\n}\n\n/** Discovers `@SubscribeTo()` decorators and wires them to their Kafka clients on startup. 
*/\n@Injectable()\nexport class KafkaExplorer implements OnModuleInit {\n private readonly logger = new Logger(KafkaExplorer.name);\n\n constructor(\n @Inject(DiscoveryService)\n private readonly discoveryService: DiscoveryService,\n @Inject(ModuleRef)\n private readonly moduleRef: ModuleRef,\n ) {}\n\n async onModuleInit() {\n const providers = this.discoveryService.getProviders();\n\n for (const wrapper of providers) {\n const { instance } = wrapper;\n if (!instance || typeof instance !== \"object\") continue;\n\n const metadata: SubscriberEntry[] | undefined = Reflect.getMetadata(\n KAFKA_SUBSCRIBER_METADATA,\n instance.constructor,\n );\n\n if (!metadata || metadata.length === 0) continue;\n\n for (const entry of metadata) {\n const token = getKafkaClientToken(entry.clientName);\n let client: KafkaClient<any>;\n\n try {\n client = this.moduleRef.get(token, { strict: false });\n } catch {\n this.logger.error(\n `KafkaClient \"${entry.clientName || \"default\"}\" not found for @SubscribeTo on ${instance.constructor.name}.${String(entry.methodName)}`,\n );\n continue;\n }\n\n const handler = (instance as any)[entry.methodName].bind(instance);\n\n const consumerOptions = { ...entry.options };\n if (entry.schemas) {\n consumerOptions.schemas = entry.schemas;\n }\n\n if (entry.batch) {\n await client.startBatchConsumer(\n entry.topics as any,\n async (envelopes: any[], meta: any) => {\n await handler(envelopes, meta);\n },\n consumerOptions,\n );\n } else {\n await client.startConsumer(\n entry.topics as any,\n async (envelope: any) => {\n await handler(envelope);\n },\n consumerOptions,\n );\n }\n\n this.logger.log(\n `Registered @SubscribeTo(${entry.topics.join(\", \")})${entry.batch ? \" [batch]\" : \"\"} on ${instance.constructor.name}.${String(entry.methodName)}`,\n );\n }\n }\n }\n}\n","import { Inject } from \"@nestjs/common\";\nimport { getKafkaClientToken } from \"./kafka.constants\";\nimport { ConsumerOptions } from \"../client/kafka.client\";\nimport { TopicDescriptor, SchemaLike } from \"../client/message/topic\";\n\nexport const KAFKA_SUBSCRIBER_METADATA = \"KAFKA_SUBSCRIBER_METADATA\";\n\nexport interface KafkaSubscriberMetadata {\n topics: string[];\n schemas?: Map<string, SchemaLike>;\n options?: ConsumerOptions;\n clientName?: string;\n batch?: boolean;\n methodName?: string | symbol;\n}\n\n/** Inject a `KafkaClient` instance. Pass a name to target a specific named client. */\nexport const InjectKafkaClient = (name?: string): ParameterDecorator =>\n Inject(getKafkaClientToken(name));\n\n/**\n * Decorator that auto-subscribes a method to Kafka topics on module init.\n * The decorated method receives `(message, topic)` for each consumed message.\n */\nexport const SubscribeTo = (\n topics:\n | string\n | string[]\n | TopicDescriptor\n | TopicDescriptor[]\n | (string | TopicDescriptor)[],\n options?: ConsumerOptions & { clientName?: string; batch?: boolean },\n): MethodDecorator => {\n const arr = Array.isArray(topics) ? topics : [topics];\n const topicsArray = arr.map((t) => (typeof t === \"string\" ? 
t : t.__topic));\n\n // Extract schemas from descriptors that have them\n const schemas = new Map<string, SchemaLike>();\n for (const t of arr) {\n if (typeof t !== \"string\" && t.__schema) {\n schemas.set(t.__topic, t.__schema);\n }\n }\n\n const { clientName, batch, ...consumerOptions } = options || {};\n\n return (target, propertyKey, _descriptor) => {\n const existing: KafkaSubscriberMetadata[] =\n Reflect.getMetadata(KAFKA_SUBSCRIBER_METADATA, target.constructor) || [];\n\n Reflect.defineMetadata(\n KAFKA_SUBSCRIBER_METADATA,\n [\n ...existing,\n {\n topics: topicsArray,\n schemas: schemas.size > 0 ? schemas : undefined,\n options: Object.keys(consumerOptions).length\n ? consumerOptions\n : undefined,\n clientName,\n batch,\n methodName: propertyKey,\n },\n ],\n target.constructor,\n );\n };\n};\n","import { Injectable } from \"@nestjs/common\";\nimport type {\n IKafkaClient,\n KafkaHealthResult,\n TopicMapConstraint,\n} from \"../client/types\";\nexport type { KafkaHealthResult } from \"../client/types\";\n\n/** Health check service. Call `check(client)` to verify broker connectivity. */\n@Injectable()\nexport class KafkaHealthIndicator {\n async check<T extends TopicMapConstraint<T>>(\n client: IKafkaClient<T>,\n ): Promise<KafkaHealthResult> {\n return client.checkStatus();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,QAAiC,UAAAA,eAAc;AACxD,SAAS,uBAAuB;;;ACAzB,IAAM,eAAe;AAGrB,IAAM,sBAAsB,CAAC,SAClC,OAAO,gBAAgB,IAAI,KAAK;;;ACLlC,SAAS,UAAAC,SAAQ,YAA0B,cAAc;AACzD,SAAS,kBAAkB,iBAAiB;;;ACD5C,SAAS,cAAc;AAKhB,IAAM,4BAA4B;AAYlC,IAAM,oBAAoB,CAAC,SAChC,OAAO,oBAAoB,IAAI,CAAC;AAM3B,IAAM,cAAc,CACzB,QAMA,YACoB;AACpB,QAAM,MAAM,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AACpD,QAAM,cAAc,IAAI,IAAI,CAAC,MAAO,OAAO,MAAM,WAAW,IAAI,EAAE,OAAQ;AAG1E,QAAM,UAAU,oBAAI,IAAwB;AAC5C,aAAW,KAAK,KAAK;AACnB,QAAI,OAAO,MAAM,YAAY,EAAE,UAAU;AACvC,cAAQ,IAAI,EAAE,SAAS,EAAE,QAAQ;AAAA,IACnC;AAAA,EACF;AAEA,QAAM,EAAE,YAAY,OAAO,GAAG,gBAAgB,IAAI,WAAW,CAAC;AAE9D,SAAO,CAAC,QAAQ,aAAa,gBAAgB;AAC3C,UAAM,WACJ,QAAQ,YAAY,2BAA2B,OAAO,WAAW,KAAK,CAAC;AAEzE,YAAQ;AAAA,MACN;AAAA,MACA;AAAA,QACE,GAAG;AAAA,QACH;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,QAAQ,OAAO,IAAI,UAAU;AAAA,UACtC,SAAS,OAAO,KAAK,eAAe,EAAE,SAClC,kBACA;AAAA,UACJ;AAAA,UACA;AAAA,UACA,YAAY;AAAA,QACd;AAAA,MACF;AAAA,MACA,OAAO;AAAA,IACT;AAAA,EACF;AACF;;;ADrDO,IAAM,gBAAN,MAA4C;AAAA,EAGjD,YAEmB,kBAEA,WACjB;AAHiB;AAEA;AAAA,EAChB;AAAA,EAPc,SAAS,IAAI,OAAO,cAAc,IAAI;AAAA,EASvD,MAAM,eAAe;AACnB,UAAM,YAAY,KAAK,iBAAiB,aAAa;AAErD,eAAW,WAAW,WAAW;AAC/B,YAAM,EAAE,SAAS,IAAI;AACrB,UAAI,CAAC,YAAY,OAAO,aAAa,SAAU;AAE/C,YAAM,WAA0C,QAAQ;AAAA,QACtD;AAAA,QACA,SAAS;AAAA,MACX;AAEA,UAAI,CAAC,YAAY,SAAS,WAAW,EAAG;AAExC,iBAAW,SAAS,UAAU;AAC5B,cAAM,QAAQ,oBAAoB,MAAM,UAAU;AAClD,YAAI;AAEJ,YAAI;AACF,mBAAS,KAAK,UAAU,IAAI,OAAO,EAAE,QAAQ,MAAM,CAAC;AAAA,QACtD,QAAQ;AACN,eAAK,OAAO;AAAA,YACV,gBAAgB,MAAM,cAAc,SAAS,mCAAmC,SAAS,YAAY,IAAI,IAAI,OAAO,MAAM,UAAU,CAAC;AAAA,UACvI;AACA;AAAA,QACF;AAEA,cAAM,UAAW,SAAiB,MAAM,UAAU,EAAE,KAAK,QAAQ;AAEjE,cAAM,kBAAkB,EAAE,GAAG,MAAM,QAAQ;AAC3C,YAAI,MAAM,SAAS;AACjB,0BAAgB,UAAU,MAAM;AAAA,QAClC;AAEA,YAAI,MAAM,OAAO;AACf,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,OAAO,WAAkB,SAAc;AACrC,oBAAM,QAAQ,WAAW,IAAI;AAAA,YAC/B;AAAA,YACA;AAAA,UACF;AAAA,QACF,OAAO;AACL,gBAAM,OAAO;AAAA,YACX,MAAM;AAAA,YACN,OAAO,aAAkB;AACvB,oBAAM,QAAQ,QAAQ;AAAA,YACxB;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAEA,aAAK,OAAO;AAAA,UACV,2BAA2B,MAAM,OAAO,KAAK,IAAI,CAAC,IAAI,MAAM,QAAQ,aAAa,EAAE,OAAO,SAAS,YAAY,IAAI,IAAI,OAAO,MAAM,UAAU,CAAC;AAAA,QACjJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AApEa,gBAAN;AAAA,EADN,WAAW;AAAA,EAKP,mBAAAC,QAAO,gBAAgB;AAAA,EAEvB,mBAAAA,QAAO,SAAS
;AAAA,GANR;;;AF0CN,IAAM,cAAN,MAAkB;AAAA;AAAA,EAEvB,OAAO,SACL,SACe;AACf,UAAM,QAAQ,oBAAoB,QAAQ,IAAI;AAE9C,UAAM,sBAAgC;AAAA,MACpC,SAAS;AAAA,MACT,YAAY,MAAM,YAAY,YAAe,OAAO;AAAA,IACtD;AAEA,WAAO;AAAA,MACL,QAAQ,QAAQ,YAAY;AAAA,MAC5B,QAAQ;AAAA,MACR,SAAS,CAAC,eAAe;AAAA,MACzB,WAAW,CAAC,qBAAqB,aAAa;AAAA,MAC9C,SAAS,CAAC,mBAAmB;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,cACL,cACe;AACf,UAAM,QAAQ,oBAAoB,aAAa,IAAI;AAEnD,UAAM,sBAAgC;AAAA,MACpC,SAAS;AAAA,MACT,YAAY,UAAU,SACpB,YAAY,YAAe,MAAM,aAAa,WAAW,GAAG,IAAI,CAAC;AAAA,MACnE,QAAQ,aAAa,UAAU,CAAC;AAAA,IAClC;AAEA,WAAO;AAAA,MACL,QAAQ,aAAa,YAAY;AAAA,MACjC,QAAQ;AAAA,MACR,SAAS,CAAC,GAAI,aAAa,WAAW,CAAC,GAAI,eAAe;AAAA,MAC1D,WAAW,CAAC,qBAAqB,aAAa;AAAA,MAC9C,SAAS,CAAC,mBAAmB;AAAA,IAC/B;AAAA,EACF;AAAA,EAEA,aAAqB,YACnB,SACyB;AACzB,UAAM,SAAS,IAAI;AAAA,MACjB,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR;AAAA,QACE,kBAAkB,QAAQ;AAAA,QAC1B,eAAe,QAAQ;AAAA,QACvB,eAAe,QAAQ;AAAA,QACvB,iBAAiB,QAAQ;AAAA,QACzB,eAAe,QAAQ;AAAA,QACvB,aAAa,QAAQ;AAAA,QACrB,QAAQ,IAAIC,QAAO,eAAe,QAAQ,QAAQ,EAAE;AAAA,MACtD;AAAA,IACF;AACA,UAAM,OAAO,gBAAgB;AAC7B,WAAO;AAAA,EACT;AACF;AA/Da,cAAN;AAAA,EADN,OAAO,CAAC,CAAC;AAAA,GACG;;;AIzDb,SAAS,cAAAC,mBAAkB;AAUpB,IAAM,uBAAN,MAA2B;AAAA,EAChC,MAAM,MACJ,QAC4B;AAC5B,WAAO,OAAO,YAAY;AAAA,EAC5B;AACF;AANa,uBAAN;AAAA,EADNC,YAAW;AAAA,GACC;","names":["Logger","Inject","Inject","Logger","Injectable","Injectable"]}
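The rewritten index bundle above carries the full NestJS integration source (KafkaModule, KafkaExplorer, @SubscribeTo, @InjectKafkaClient, KafkaHealthIndicator). As a rough orientation sketch only: the module, service, topic map, topic name, and broker address below are invented for illustration, the root-level exports are assumed from the bundled sources, and the clientId/groupId literals are written as plain strings even though the package types them as ClientId/GroupId.

```ts
import { Injectable, Module } from "@nestjs/common";
import {
  KafkaModule,
  SubscribeTo,
  InjectKafkaClient,
  type KafkaClient,
} from "@drarzter/kafka-client";

// Hypothetical topic map; real maps must satisfy the package's TopicMapConstraint.
interface OrderTopics {
  "order.created": { orderId: string };
}

@Injectable()
class OrdersService {
  constructor(
    // Resolves the default KAFKA_CLIENT token provided by KafkaModule below.
    @InjectKafkaClient() private readonly kafka: KafkaClient<OrderTopics>,
  ) {}

  // KafkaExplorer reads this metadata on module init and starts a consumer on
  // the (default) client; the envelope shape is defined by the client, not by this sketch.
  @SubscribeTo("order.created")
  async onOrderCreated(envelope: unknown) {
    console.log("consumed", envelope);
  }
}

@Module({
  imports: [
    // registerAsync builds the client options through DI (inject a ConfigService, etc.).
    KafkaModule.registerAsync({
      isGlobal: true,
      useFactory: () => ({
        clientId: "orders-service",
        groupId: "orders-group",
        brokers: ["localhost:9092"],
        autoCreateTopics: true,
      }),
    }),
  ],
  providers: [OrdersService],
})
export class OrdersModule {}
```

Per the bundled module source, registerAsync defers client construction to the DI factory (and awaits connectProducer before the provider resolves), which is why broker configuration can come from injected services rather than being hard-coded.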
package/dist/otel.d.mts
CHANGED
package/dist/otel.d.ts
CHANGED
package/dist/testing.d.mts
CHANGED
package/dist/testing.d.ts
CHANGED
package/dist/testing.js
CHANGED
@@ -76,6 +76,17 @@ function createMockKafkaClient(mockFactory) {
       stop: mock().mockResolvedValue(void 0)
     }),
     stopConsumer: resolved(void 0),
+    replayDlq: resolved({ replayed: 0, skipped: 0 }),
+    resetOffsets: resolved(void 0),
+    pauseConsumer: mock(),
+    resumeConsumer: mock(),
+    getMetrics: returning({
+      processedCount: 0,
+      retryCount: 0,
+      dlqCount: 0,
+      dedupCount: 0
+    }),
+    resetMetrics: mock(),
     disconnect: resolved(void 0),
     enableGracefulShutdown: mock()
   };
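The hunk above (duplicated in the ESM build under package/dist/testing.mjs below) extends createMockKafkaClient with stubs for DLQ replay, offset resets, pause/resume, and metrics. A minimal sketch of how a test might rely on those defaults, assuming the package exposes a ./testing subpath export (the dist layout suggests it, but package.json exports are not shown here) and using Vitest, which the factory auto-detects; the OrderTopics map and the replayDlq argument are guesses, since IKafkaClient's signatures are not part of this diff.

```ts
import { describe, expect, it } from "vitest";
import { createMockKafkaClient } from "@drarzter/kafka-client/testing";

// Hypothetical topic map used as the mock's generic parameter.
interface OrderTopics {
  "order.created": { orderId: string };
}

describe("createMockKafkaClient defaults (0.6.9)", () => {
  it("exposes zeroed metrics and a no-op DLQ replay", async () => {
    const kafka = createMockKafkaClient<OrderTopics>();

    // getMetrics is wired with mockReturnValue, so the counters come back synchronously.
    expect(kafka.getMetrics()).toEqual({
      processedCount: 0,
      retryCount: 0,
      dlqCount: 0,
      dedupCount: 0,
    });

    // replayDlq is wired with mockResolvedValue; the topic argument is a guess.
    await expect(kafka.replayDlq("order.created")).resolves.toEqual({
      replayed: 0,
      skipped: 0,
    });

    // pauseConsumer/resumeConsumer are bare vi.fn() stubs, so call counts are assertable.
    kafka.pauseConsumer();
    kafka.resumeConsumer();
    expect(kafka.pauseConsumer).toHaveBeenCalledTimes(1);
    expect(kafka.resumeConsumer).toHaveBeenCalledTimes(1);
  });
});
```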
package/dist/testing.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? 
detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. 
`[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACzGA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
+
{"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? 
detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. 
`[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACpHA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
package/dist/testing.mjs
CHANGED
@@ -51,6 +51,17 @@ function createMockKafkaClient(mockFactory) {
       stop: mock().mockResolvedValue(void 0)
     }),
     stopConsumer: resolved(void 0),
+    replayDlq: resolved({ replayed: 0, skipped: 0 }),
+    resetOffsets: resolved(void 0),
+    pauseConsumer: mock(),
+    resumeConsumer: mock(),
+    getMetrics: returning({
+      processedCount: 0,
+      retryCount: 0,
+      dlqCount: 0,
+      dedupCount: 0
+    }),
+    resetMetrics: mock(),
     disconnect: resolved(void 0),
     enableGracefulShutdown: mock()
   };
package/dist/testing.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? 
detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. 
`[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;AAcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACzGA;AAAA,EACE;AAAA,OAEK;AACP,SAAS,eAAe;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,eAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
+
{"version":3,"sources":["../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? 
detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. 
`[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;AAcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;ACpHA;AAAA,EACE;AAAA,OAEK;AACP,SAAS,eAAe;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,eAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
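The updated testing bundle above now stubs the 0.6.9 additions (`replayDlq`, `getMetrics`, `resetMetrics`, `resetOffsets`, `pauseConsumer`, `resumeConsumer`) in `createMockKafkaClient`. A minimal Vitest sketch exercising those stubs; the `MyTopics` map and the `@drarzter/kafka-client/testing` import path are assumptions for illustration, only the default return values come from the mock source above:

```ts
import { describe, expect, it } from "vitest";
import { createMockKafkaClient } from "@drarzter/kafka-client/testing";

// Hypothetical topic map for illustration only.
type MyTopics = { "order.created": { orderId: string } };

describe("mock client defaults (0.6.9)", () => {
  it("stubs the new metrics and DLQ-replay methods", async () => {
    const kafka = createMockKafkaClient<MyTopics>();

    // Defaults taken from the mock source in testing.mjs above.
    expect(kafka.getMetrics()).toEqual({
      processedCount: 0,
      retryCount: 0,
      dlqCount: 0,
      dedupCount: 0,
    });
    await expect(kafka.replayDlq("order.created")).resolves.toEqual({
      replayed: 0,
      skipped: 0,
    });

    // pauseConsumer / resumeConsumer / resetMetrics are plain no-op mocks.
    kafka.pauseConsumer("mock-group", [{ topic: "order.created", partitions: [0] }]);
    kafka.resumeConsumer("mock-group", [{ topic: "order.created", partitions: [0] }]);
    kafka.resetMetrics();
  });
});
```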
@@ -229,8 +229,13 @@ interface BatchMessageItem<V> {
 interface BatchMeta {
     /** Partition number for this batch. */
     partition: number;
-    /**
-
+    /**
+     * Highest offset available on the broker for this partition.
+     * `null` when the message is being replayed via a retry topic consumer —
+     * in that path the broker high-watermark is not accessible without an admin
+     * call. Do not use this for lag calculation in the retry path.
+     */
+    highWatermark: string | null;
     /** Send a heartbeat to the broker to prevent session timeout. */
     heartbeat(): Promise<void>;
     /** Mark an offset as processed (for manual offset management). */
@@ -361,6 +366,47 @@ type BeforeConsumeResult = (() => void) | {
     cleanup?(): void;
     wrap?(fn: () => Promise<void>): Promise<void>;
 };
+/**
+ * Reason a message was sent to the DLQ.
+ * - `'handler-error'` — the consumer handler threw after all retry attempts.
+ * - `'validation-error'` — schema validation failed before the handler ran.
+ * - `'lamport-clock-duplicate'` — message was identified as a Lamport-clock duplicate
+ *   and `deduplication.strategy` is `'dlq'`.
+ */
+type DlqReason = "handler-error" | "validation-error" | "lamport-clock-duplicate";
+/** Options for `replayDlq`. */
+interface DlqReplayOptions {
+    /**
+     * Override the target topic to re-publish to.
+     * Default: reads the `x-dlq-original-topic` header from each DLQ message.
+     */
+    targetTopic?: string;
+    /**
+     * Dry-run mode — log what would be replayed without actually sending.
+     * Increments the `replayed` counter so you can see what would happen.
+     */
+    dryRun?: boolean;
+    /**
+     * Optional filter — return `false` to skip a message.
+     * @param headers All headers on the DLQ message (including `x-dlq-*` metadata).
+     * @param value Raw message value (JSON string).
+     */
+    filter?: (headers: MessageHeaders, value: string) => boolean;
+}
+/**
+ * Snapshot of internal event counters accumulated since client creation
+ * (or since the last `resetMetrics()` call).
+ */
+interface KafkaMetrics {
+    /** Total messages successfully processed by the consumer handler. */
+    processedCount: number;
+    /** Total retry attempts routed — covers both in-process retries and retry-topic hops. */
+    retryCount: number;
+    /** Total messages sent to a DLQ topic. */
+    dlqCount: number;
+    /** Total duplicate messages detected by the Lamport clock. */
+    dedupCount: number;
+}
 /**
  * Client-wide instrumentation hooks for both send and consume paths.
  * Use this for cross-cutting concerns like tracing and metrics.
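The new `DlqReplayOptions` shape lends itself to a cautious replay: dry-run first, and filter on the `x-dlq-original-topic` metadata header documented above. A hedged sketch; the topic name and the root-level type export are assumptions:

```ts
import type { DlqReplayOptions } from "@drarzter/kafka-client";

// Illustrative options: log what would be replayed without sending anything,
// and only consider messages that were originally published to order.created.
const replayOptions: DlqReplayOptions = {
  dryRun: true,
  filter: (headers) => headers["x-dlq-original-topic"] === "order.created",
};
```

Dropping `dryRun` and passing the same object to `replayDlq` (declared further down in this diff) then performs the actual re-publish.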
@@ -381,6 +427,25 @@ interface KafkaInstrumentation {
     beforeConsume?(envelope: EventEnvelope<any>): BeforeConsumeResult | void;
     /** Called when the consumer handler throws. */
     onConsumeError?(envelope: EventEnvelope<any>, error: Error): void;
+    /**
+     * Called when a message is queued for retry.
+     * Fires for both in-process retries (before the backoff sleep) and
+     * retry-topic routing (EOS and non-EOS paths).
+     */
+    onRetry?(envelope: EventEnvelope<any>, attempt: number, maxRetries: number): void;
+    /** Called when a message is routed to a DLQ topic. */
+    onDlq?(envelope: EventEnvelope<any>, reason: DlqReason): void;
+    /**
+     * Called when a duplicate message is detected via the Lamport clock.
+     * Fires regardless of the configured `deduplication.strategy`.
+     */
+    onDuplicate?(envelope: EventEnvelope<any>, strategy: "drop" | "dlq" | "topic"): void;
+    /**
+     * Called after the consumer handler successfully processes a message.
+     * Use this as a success counter for error-rate calculations.
+     * Fires for both single-message and batch consumers (once per envelope).
+     */
+    onMessage?(envelope: EventEnvelope<any>): void;
 }
 /** Context passed to the `transaction()` callback with type-safe send methods. */
 interface TransactionContext<T extends TopicMapConstraint<T>> {
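The four new hooks slot into the existing `instrumentation` array; a sketch that feeds them into plain in-memory counters (the counter names and `console` logging are illustrative, only the hook signatures come from this diff):

```ts
import type { KafkaInstrumentation } from "@drarzter/kafka-client";

// Illustrative: simple in-memory counters fed by the new lifecycle hooks.
let retries = 0;
let deadLettered = 0;
let duplicates = 0;
let succeeded = 0;

const lifecycleCounters: KafkaInstrumentation = {
  onRetry: (_envelope, attempt, maxRetries) => {
    retries += 1;
    console.warn(`retry attempt ${attempt} of ${maxRetries}`);
  },
  onDlq: (_envelope, reason) => {
    deadLettered += 1;
    console.error(`message dead-lettered: ${reason}`);
  },
  onDuplicate: (_envelope, strategy) => {
    duplicates += 1;
    console.info(`Lamport-clock duplicate handled via "${strategy}"`);
  },
  // Success counter; pairs with onConsumeError for error-rate calculations.
  onMessage: () => {
    succeeded += 1;
  },
};
```

An object like this goes into the `instrumentation` array on `KafkaClientOptions` next to any hooks already registered there.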
@@ -434,6 +499,69 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
     sendBatch<K extends keyof T>(topic: K, messages: Array<BatchMessageItem<T[K]>>): Promise<void>;
     transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
     getClientId(): ClientId;
+    /**
+     * Return a snapshot of internal event counters (retry / DLQ / dedup).
+     * - `getMetrics()` — aggregate across all topics.
+     * - `getMetrics(topic)` — counters for a specific topic only; returns all-zero
+     *   if no events have been observed for that topic yet.
+     *
+     * Counters accumulate since client creation or the last `resetMetrics()` call.
+     */
+    getMetrics(topic?: string): Readonly<KafkaMetrics>;
+    /**
+     * Reset internal event counters to zero.
+     * - `resetMetrics()` — reset all topics.
+     * - `resetMetrics(topic)` — reset a single topic only.
+     */
+    resetMetrics(topic?: string): void;
+    /**
+     * Consume all messages currently in `{topic}.dlq`, strip the `x-dlq-*` metadata
+     * headers, and re-publish each message to its original topic (or `options.targetTopic`).
+     *
+     * A temporary consumer group is created and torn down automatically. The DLQ topic
+     * itself is not modified — messages remain there after replay.
+     *
+     * @returns `{ replayed, skipped }` — counts of re-published vs skipped messages.
+     */
+    replayDlq(topic: string, options?: DlqReplayOptions): Promise<{
+        replayed: number;
+        skipped: number;
+    }>;
+    /**
+     * Reset committed offsets for a consumer group to the earliest or latest position.
+     *
+     * The consumer group must be inactive (no running consumers) — Kafka does not
+     * allow offset resets while members are actively consuming. Call
+     * `stopConsumer(groupId)` first.
+     *
+     * @param groupId Consumer group to reset. Defaults to the client's default groupId.
+     * @param topic Topic to reset.
+     * @param position `'earliest'` seeks to the first available offset; `'latest'`
+     *   seeks past the last message (consumer will only see new messages).
+     */
+    resetOffsets(groupId: string | undefined, topic: string, position: "earliest" | "latest"): Promise<void>;
+    /**
+     * Pause message delivery for specific topic-partitions on a consumer group.
+     * The consumer remains connected and its committed offsets are preserved —
+     * only polling is suspended. Call `resumeConsumer` to restart delivery.
+     *
+     * @param groupId Consumer group to pause. Defaults to the client's default groupId.
+     * @param assignments Topic-partition pairs to pause.
+     */
+    pauseConsumer(groupId: string | undefined, assignments: Array<{
+        topic: string;
+        partitions: number[];
+    }>): void;
+    /**
+     * Resume message delivery for previously paused topic-partitions.
+     *
+     * @param groupId Consumer group to resume. Defaults to the client's default groupId.
+     * @param assignments Topic-partition pairs to resume.
+     */
+    resumeConsumer(groupId: string | undefined, assignments: Array<{
+        topic: string;
+        partitions: number[];
+    }>): void;
     /**
      * Drain in-flight handlers, then disconnect all producers, consumers, and admin.
      * @param drainTimeoutMs Max ms to wait for in-flight handlers (default 30 000).
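Together the new `IKafkaClient` members cover a typical pause, replay, resume recovery flow; a sketch under assumed names (`MyTopics`, `order.created`, partition numbers), with only the method signatures taken from the declarations above:

```ts
import type { IKafkaClient } from "@drarzter/kafka-client";

// Hypothetical topic map and client instance for illustration.
type MyTopics = { "order.created": { orderId: string } };
declare const kafka: IKafkaClient<MyTopics>;

async function recoverOrderCreated(): Promise<void> {
  // 1. Inspect per-topic counters accumulated by the retry/DLQ machinery.
  const metrics = kafka.getMetrics("order.created");
  console.log(`retries=${metrics.retryCount} dlq=${metrics.dlqCount}`);

  // 2. Suspend delivery on the affected partitions (offsets are preserved).
  kafka.pauseConsumer(undefined, [{ topic: "order.created", partitions: [0, 1] }]);

  // 3. Re-drive everything currently sitting in order.created.dlq.
  const { replayed, skipped } = await kafka.replayDlq("order.created");
  console.log(`replayed=${replayed}, skipped=${skipped}`);

  // 4. Resume polling and start the counters from a clean baseline.
  kafka.resumeConsumer(undefined, [{ topic: "order.created", partitions: [0, 1] }]);
  kafka.resetMetrics("order.created");
}
```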
@@ -484,6 +612,17 @@ interface KafkaClientOptions {
     numPartitions?: number;
     /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */
     instrumentation?: KafkaInstrumentation[];
+    /**
+     * Override the transactional producer ID used by `transaction()`.
+     * Defaults to `${clientId}-tx`.
+     *
+     * The transactional ID must be **unique per producer instance** across the
+     * entire Kafka cluster. Two `KafkaClient` instances with the same ID will
+     * cause Kafka to fence one of the producers — the fenced producer will fail
+     * on the next `transaction()` call. Set a distinct value per replica when
+     * running multiple instances of the same service.
+     */
+    transactionalId?: string;
     /**
      * Called when a message is dropped without being sent to a DLQ.
      * Fires when the handler throws after all retries, or schema validation fails — and `dlq` is not enabled.
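Because two producers sharing a transactional ID fence each other, a common pattern is to derive the override from the replica identity. A sketch assuming the host name is exposed via the `HOSTNAME` environment variable (a convention of container runtimes, not of this package):

```ts
import type { KafkaClientOptions } from "@drarzter/kafka-client";

// Illustrative: one transactional ID per running replica, e.g.
// "orders-service-tx-orders-7f9c" in Kubernetes, "orders-service-tx-pid-4242" locally.
const replica = process.env.HOSTNAME ?? `pid-${process.pid}`;

const txOverride: Pick<KafkaClientOptions, "transactionalId"> = {
  transactionalId: `orders-service-tx-${replica}`,
};
```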
@@ -511,4 +650,4 @@ interface SubscribeRetryOptions {
     backoffMs?: number;
 }
 
-export {
+export { buildEnvelopeHeaders as A, type BatchMessageItem as B, type ClientId as C, type DeduplicationOptions as D, type EnvelopeHeaderOptions as E, decodeHeaders as F, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, extractEnvelope as J, type KafkaInstrumentation as K, getEnvelopeContext as L, type MessageHeaders as M, runWithEnvelopeContext as N, topic as O, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type KafkaClientOptions as a, type ConsumerOptions as b, type TopicDescriptor as c, type KafkaHealthResult as d, type BatchMeta as e, type BeforeConsumeResult as f, type ConsumerHandle as g, type ConsumerInterceptor as h, type DlqReason as i, type DlqReplayOptions as j, type EventEnvelope as k, HEADER_EVENT_ID as l, HEADER_LAMPORT_CLOCK as m, HEADER_SCHEMA_VERSION as n, HEADER_TIMESTAMP as o, HEADER_TRACEPARENT as p, type InferSchema as q, type KafkaLogger as r, type KafkaMetrics as s, type MessageLostContext as t, type SchemaParseContext as u, type SendOptions as v, type SubscribeRetryOptions as w, type TTopicMessageMap as x, type TopicsFrom as y, type TransactionContext as z };