@drarzter/kafka-client 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,319 @@
+ /**
+  * Any validation library with a `.parse()` method.
+  * Works with Zod, Valibot, ArkType, or any custom validator.
+  *
+  * @example
+  * ```ts
+  * import { z } from 'zod';
+  * const schema: SchemaLike<{ id: string }> = z.object({ id: z.string() });
+  * ```
+  */
+ interface SchemaLike<T = any> {
+     parse(data: unknown): T;
+ }
+ /** Infer the output type from a SchemaLike. */
+ type InferSchema<S extends SchemaLike> = S extends SchemaLike<infer T> ? T : never;
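Since `SchemaLike` is purely structural, a hand-rolled validator works just as well as Zod. A minimal sketch; the `OrderPayload` type and the error message are illustrative, not part of the package:

```ts
interface OrderPayload {
  orderId: string;
  amount: number;
}

// Hypothetical hand-written validator satisfying SchemaLike<OrderPayload>.
// parse() must return the typed value or throw on invalid input.
const orderSchema: SchemaLike<OrderPayload> = {
  parse(data: unknown): OrderPayload {
    const d = data as Partial<OrderPayload> | null;
    if (typeof d?.orderId !== 'string' || typeof d?.amount !== 'number') {
      throw new Error('Invalid OrderPayload');
    }
    return { orderId: d.orderId, amount: d.amount };
  },
};

// InferSchema recovers the payload type from any SchemaLike value:
type Order = InferSchema<typeof orderSchema>; // { orderId: string; amount: number }
```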
+ /**
+  * A typed topic descriptor that pairs a topic name with its message type.
+  * Created via the `topic()` factory function.
+  *
+  * @typeParam N - The literal topic name string.
+  * @typeParam M - The message payload type for this topic.
+  */
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
+     readonly __topic: N;
+     /** @internal Phantom type — never has a real value at runtime. */
+     readonly __type: M;
+     /** Runtime schema validator. Present only when created via `topic().schema()`. */
+     readonly __schema?: SchemaLike<M>;
+ }
+ /**
+  * Define a typed topic descriptor.
+  *
+  * @example
+  * ```ts
+  * // Without schema — type provided explicitly:
+  * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
+  *
+  * // With schema — type inferred from schema:
+  * const OrderCreated = topic('order.created').schema(z.object({
+  *   orderId: z.string(),
+  *   amount: z.number(),
+  * }));
+  *
+  * // Use with KafkaClient:
+  * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
+  *
+  * // Use with @SubscribeTo:
+  * @SubscribeTo(OrderCreated)
+  * async handleOrder(msg) { ... }
+  * ```
+  */
+ declare function topic<N extends string>(name: N): {
+     <M extends Record<string, any>>(): TopicDescriptor<N, M>;
+     schema<S extends SchemaLike<Record<string, any>>>(schema: S): TopicDescriptor<N, InferSchema<S>>;
+ };
+ /**
+  * Build a topic-message map type from a union of TopicDescriptors.
+  *
+  * @example
+  * ```ts
+  * const OrderCreated = topic('order.created')<{ orderId: string }>();
+  * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
+  *
+  * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
+  * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
+  * ```
+  */
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
+     [K in D as K["__topic"]]: K["__type"];
+ };
+
+ /**
+  * Mapping of topic names to their message types.
+  * Define this interface to get type-safe publish/subscribe across your app.
+  *
+  * @example
+  * ```ts
+  * // with explicit extends (IDE hints for values)
+  * interface MyTopics extends TTopicMessageMap {
+  *   "orders.created": { orderId: string; amount: number };
+  *   "users.updated": { userId: string; name: string };
+  * }
+  *
+  * // or plain interface / type — works the same
+  * interface MyTopics {
+  *   "orders.created": { orderId: string; amount: number };
+  * }
+  * ```
+  */
+ type TTopicMessageMap = {
+     [topic: string]: Record<string, any>;
+ };
+ /**
+  * Generic constraint for topic-message maps.
+  * Works with both `type` aliases and `interface` declarations.
+  */
+ type TopicMapConstraint<T> = {
+     [K in keyof T]: Record<string, any>;
+ };
+ type ClientId = string;
+ type GroupId = string;
+ type MessageHeaders = Record<string, string>;
+ /** Options for sending a single message. */
+ interface SendOptions {
+     /** Partition key for message routing. */
+     key?: string;
+     /** Custom headers attached to the message (merged with auto-generated envelope headers). */
+     headers?: MessageHeaders;
+     /** Override the auto-propagated correlation ID (default: inherited from ALS context or new UUID). */
+     correlationId?: string;
+     /** Schema version for the payload. Default: `1`. */
+     schemaVersion?: number;
+     /** Override the auto-generated event ID (UUID v4). */
+     eventId?: string;
+ }
+ /** Shape of each item in a `sendBatch` call. */
+ interface BatchMessageItem<V> {
+     value: V;
+     key?: string;
+     headers?: MessageHeaders;
+     correlationId?: string;
+     schemaVersion?: number;
+     eventId?: string;
+ }
+ /** Metadata exposed to batch consumer handlers. */
+ interface BatchMeta {
+     /** Partition number for this batch. */
+     partition: number;
+     /** Highest offset available on the broker for this partition. */
+     highWatermark: string;
+     /** Send a heartbeat to the broker to prevent session timeout. */
+     heartbeat(): Promise<void>;
+     /** Mark an offset as processed (for manual offset management). */
+     resolveOffset(offset: string): void;
+     /** Commit offsets if the auto-commit threshold has been reached. */
+     commitOffsetsIfNecessary(): Promise<void>;
+ }
+ /** Options for configuring a Kafka consumer. */
+ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
+     /** Override the default consumer group ID from the constructor. */
+     groupId?: string;
+     /** Start reading from earliest offset. Default: `false`. */
+     fromBeginning?: boolean;
+     /** Automatically commit offsets. Default: `true`. */
+     autoCommit?: boolean;
+     /** Retry policy for failed message processing. */
+     retry?: RetryOptions;
+     /** Send failed messages to a Dead Letter Queue (`<topic>.dlq`). */
+     dlq?: boolean;
+     /** Interceptors called before/after each message. */
+     interceptors?: ConsumerInterceptor<T>[];
+     /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
+     schemas?: Map<string, SchemaLike>;
+     /** Retry config for `consumer.subscribe()` when the topic doesn't exist yet. */
+     subscribeRetry?: SubscribeRetryOptions;
+ }
+ /** Configuration for consumer retry behavior. */
+ interface RetryOptions {
+     /** Maximum number of retry attempts before giving up. */
+     maxRetries: number;
+     /** Base delay between retries in ms (multiplied by attempt number). Default: `1000`. */
+     backoffMs?: number;
+ }
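A sketch of how these options plug into a consumer, assuming a hypothetical `MyTopics` map and an already-constructed client (`IKafkaClient` is declared further down in this file):

```ts
interface MyTopics {
  'orders.created': { orderId: string; amount: number };
}

declare const kafka: IKafkaClient<MyTopics>;

await kafka.startConsumer(
  ['orders.created'],
  async (envelope) => {
    // envelope.payload is typed as { orderId: string; amount: number }
    console.log(envelope.correlationId, envelope.payload.orderId);
  },
  {
    groupId: 'orders-worker',
    fromBeginning: true,
    // Up to 3 retries with linear backoff (1s, 2s, 3s per the doc comment),
    // after which the message is routed to `orders.created.dlq`:
    retry: { maxRetries: 3, backoffMs: 1000 },
    dlq: true,
  },
);
```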
+ /**
+  * Interceptor hooks for consumer message processing.
+  * All methods are optional — implement only what you need.
+  *
+  * Interceptors are per-consumer. For client-wide hooks (e.g. OTel),
+  * use `KafkaInstrumentation` instead.
+  */
+ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap> {
+     /** Called before the message handler. */
+     before?(envelope: EventEnvelope<T[keyof T]>): Promise<void> | void;
+     /** Called after the message handler succeeds. */
+     after?(envelope: EventEnvelope<T[keyof T]>): Promise<void> | void;
+     /** Called when the message handler throws. */
+     onError?(envelope: EventEnvelope<T[keyof T]>, error: Error): Promise<void> | void;
+ }
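A minimal interceptor sketch that measures handler duration; the `WeakMap` bookkeeping is illustrative, not part of the package:

```ts
const startedAt = new WeakMap<object, number>();

const timingInterceptor: ConsumerInterceptor = {
  before(envelope) {
    startedAt.set(envelope, Date.now());
  },
  after(envelope) {
    const ms = Date.now() - (startedAt.get(envelope) ?? Date.now());
    console.log(`[${envelope.topic}] ${envelope.eventId} handled in ${ms}ms`);
  },
  onError(envelope, error) {
    console.error(`[${envelope.topic}] ${envelope.eventId} failed:`, error.message);
  },
};

// Registered per consumer via ConsumerOptions:
// await kafka.startConsumer(['orders.created'], handler, { interceptors: [timingInterceptor] });
```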
+ /**
+  * Client-wide instrumentation hooks for both send and consume paths.
+  * Use this for cross-cutting concerns like tracing and metrics.
+  *
+  * @see `otelInstrumentation()` from `@drarzter/kafka-client/otel`
+  */
+ interface KafkaInstrumentation {
+     /** Called before sending — can mutate `headers` (e.g. inject `traceparent`). */
+     beforeSend?(topic: string, headers: MessageHeaders): void;
+     /** Called after a successful send. */
+     afterSend?(topic: string): void;
+     /** Called before the consumer handler. Return a cleanup function called after the handler. */
+     beforeConsume?(envelope: EventEnvelope<any>): (() => void) | void;
+     /** Called when the consumer handler throws. */
+     onConsumeError?(envelope: EventEnvelope<any>, error: Error): void;
+ }
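A sketch of a client-wide hook that counts sends and consumes; the counters and the `x-app-version` header are purely illustrative, and real tracing setups would use `otelInstrumentation()` as the doc comment suggests:

```ts
const counters = new Map<string, number>();
const bump = (key: string) => counters.set(key, (counters.get(key) ?? 0) + 1);

const metrics: KafkaInstrumentation = {
  beforeSend(topic, headers) {
    headers['x-app-version'] = '1.0.0'; // headers are still mutable at this point
  },
  afterSend(topic) {
    bump(`send:${topic}`);
  },
  beforeConsume(envelope) {
    // The returned cleanup function runs after the handler finishes.
    return () => bump(`consume:${envelope.topic}`);
  },
  onConsumeError(envelope, error) {
    bump(`error:${envelope.topic}`);
    console.error(`consume failed on ${envelope.topic}:`, error.message);
  },
};
// Passed client-wide via KafkaClientOptions.instrumentation (declared below).
```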
+ /** Context passed to the `transaction()` callback with type-safe send methods. */
+ interface TransactionContext<T extends TopicMapConstraint<T>> {
+     send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
+     send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
+     sendBatch<K extends keyof T>(topic: K, messages: Array<BatchMessageItem<T[K]>>): Promise<void>;
+     sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<BatchMessageItem<D["__type"]>>): Promise<void>;
+ }
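A transaction sketch with hypothetical topic names; under Kafka's transactional semantics, both sends become visible to read-committed consumers together, or not at all:

```ts
interface PaymentTopics {
  'payments.captured': { paymentId: string };
  'orders.completed': { orderId: string; completedAt: string };
}

declare const client: IKafkaClient<PaymentTopics>;

await client.transaction(async (ctx) => {
  await ctx.send('payments.captured', { paymentId: 'p-1' });
  await ctx.send('orders.completed', {
    orderId: 'o-1',
    completedAt: new Date().toISOString(),
  });
  // If either send (or this callback) throws, the transaction aborts and
  // neither message is exposed to read-committed consumers.
});
```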
+ /** Interface describing all public methods of the Kafka client. */
+ interface IKafkaClient<T extends TopicMapConstraint<T>> {
+     checkStatus(): Promise<{
+         topics: string[];
+     }>;
+     startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (envelope: EventEnvelope<T[K[number]]>) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+     startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (envelope: EventEnvelope<D["__type"]>) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+     startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (envelopes: EventEnvelope<T[K[number]]>[], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+     startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (envelopes: EventEnvelope<D["__type"]>[], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+     stopConsumer(): Promise<void>;
+     sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
+     sendBatch<K extends keyof T>(topic: K, messages: Array<BatchMessageItem<T[K]>>): Promise<void>;
+     transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
+     getClientId: () => ClientId;
+     disconnect(): Promise<void>;
+ }
+ /**
+  * Logger interface for KafkaClient.
+  * Compatible with NestJS Logger, console, winston, pino, or any custom logger.
+  */
+ interface KafkaLogger {
+     log(message: string): void;
+     warn(message: string, ...args: any[]): void;
+     error(message: string, ...args: any[]): void;
+ }
+ /** Options for `KafkaClient` constructor. */
+ interface KafkaClientOptions {
+     /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. Useful for development — not recommended in production. */
+     autoCreateTopics?: boolean;
+     /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+     strictSchemas?: boolean;
+     /** Custom logger. Defaults to console with `[KafkaClient:<clientId>]` prefix. */
+     logger?: KafkaLogger;
+     /** Number of partitions for auto-created topics. Default: `1`. */
+     numPartitions?: number;
+     /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */
+     instrumentation?: KafkaInstrumentation[];
+ }
+ /** Options for consumer subscribe retry when topic doesn't exist yet. */
+ interface SubscribeRetryOptions {
+     /** Maximum number of subscribe attempts. Default: `5`. */
+     retries?: number;
+     /** Delay between retries in ms. Default: `5000`. */
+     backoffMs?: number;
+ }
+
+ declare const HEADER_EVENT_ID = "x-event-id";
+ declare const HEADER_CORRELATION_ID = "x-correlation-id";
+ declare const HEADER_TIMESTAMP = "x-timestamp";
+ declare const HEADER_SCHEMA_VERSION = "x-schema-version";
+ declare const HEADER_TRACEPARENT = "traceparent";
+ /**
+  * Typed wrapper combining a parsed message payload with Kafka metadata
+  * and envelope headers.
+  *
+  * On **send**, the library auto-generates envelope headers
+  * (`x-event-id`, `x-correlation-id`, `x-timestamp`, `x-schema-version`).
+  *
+  * On **consume**, the library extracts those headers and assembles
+  * an `EventEnvelope` that is passed to the handler.
+  */
+ interface EventEnvelope<T> {
+     /** Deserialized + validated message body. */
+     payload: T;
+     /** Topic the message was produced to / consumed from. */
+     topic: string;
+     /** Kafka partition (consume-side only, `-1` on send). */
+     partition: number;
+     /** Kafka offset (consume-side only, empty string on send). */
+     offset: string;
+     /** ISO-8601 timestamp set by the producer. */
+     timestamp: string;
+     /** Unique ID for this event (UUID v4). */
+     eventId: string;
+     /** Correlation ID — auto-propagated via AsyncLocalStorage. */
+     correlationId: string;
+     /** Schema version of the payload. */
+     schemaVersion: number;
+     /** W3C Trace Context `traceparent` header (set by OTel instrumentation). */
+     traceparent?: string;
+     /** All decoded Kafka headers for extensibility. */
+     headers: MessageHeaders;
+ }
+ interface EnvelopeCtx {
+     correlationId: string;
+     traceparent?: string;
+ }
+ /** Read the current envelope context (correlationId / traceparent) from ALS. */
+ declare function getEnvelopeContext(): EnvelopeCtx | undefined;
+ /** Execute `fn` inside an envelope context so nested sends inherit correlationId. */
+ declare function runWithEnvelopeContext<R>(ctx: EnvelopeCtx, fn: () => R): R;
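A sketch of seeding the ALS context manually, which is useful when work starts outside a consumer handler (a cron job, an HTTP request) but downstream sends should share one correlation ID:

```ts
import { randomUUID } from 'node:crypto';

await runWithEnvelopeContext({ correlationId: randomUUID() }, async () => {
  console.log(getEnvelopeContext()?.correlationId); // the UUID seeded above

  // Any sendMessage/sendBatch issued here inherits the correlationId from
  // ALS rather than minting a fresh one, per the priority documented on
  // buildEnvelopeHeaders below: explicit option, then ALS, then new UUID.
});
```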
+ /** Options accepted by `buildEnvelopeHeaders`. */
+ interface EnvelopeHeaderOptions {
+     correlationId?: string;
+     schemaVersion?: number;
+     eventId?: string;
+     headers?: MessageHeaders;
+ }
+ /**
+  * Generate envelope headers for the send path.
+  *
+  * Priority for `correlationId`:
+  * explicit option → ALS context → new UUID.
+  */
+ declare function buildEnvelopeHeaders(options?: EnvelopeHeaderOptions): MessageHeaders;
+ /**
+  * Decode kafkajs headers (`Record<string, Buffer | string | undefined>`)
+  * into plain `Record<string, string>`.
+  */
+ declare function decodeHeaders(raw: Record<string, Buffer | string | (Buffer | string)[] | undefined> | undefined): MessageHeaders;
+ /**
+  * Build an `EventEnvelope` from a consumed kafkajs message.
+  * Tolerates missing envelope headers — generates defaults so messages
+  * from non-envelope producers still work.
+  */
+ declare function extractEnvelope<T>(payload: T, headers: MessageHeaders, topic: string, partition: number, offset: string): EventEnvelope<T>;
+
+ export { type BatchMessageItem as B, type ClientId as C, type EnvelopeHeaderOptions as E, type GroupId as G, HEADER_CORRELATION_ID as H, type IKafkaClient as I, type KafkaInstrumentation as K, type MessageHeaders as M, type RetryOptions as R, type SchemaLike as S, type TopicMapConstraint as T, type ConsumerOptions as a, type TopicDescriptor as b, type BatchMeta as c, type ConsumerInterceptor as d, type EventEnvelope as e, HEADER_EVENT_ID as f, HEADER_SCHEMA_VERSION as g, HEADER_TIMESTAMP as h, HEADER_TRACEPARENT as i, type InferSchema as j, type KafkaClientOptions as k, type KafkaLogger as l, type SendOptions as m, type SubscribeRetryOptions as n, type TTopicMessageMap as o, type TopicsFrom as p, type TransactionContext as q, buildEnvelopeHeaders as r, decodeHeaders as s, extractEnvelope as t, getEnvelopeContext as u, runWithEnvelopeContext as v, topic as w };
package/dist/index.d.mts CHANGED
@@ -1,5 +1,7 @@
- import { TopicMapConstraint, ClientId, GroupId, SchemaLike, ConsumerOptions, TopicDescriptor, KafkaClient } from './core.mjs';
- export { BatchMeta, ConsumerInterceptor, IKafkaClient, InferSchema, KafkaClientOptions, KafkaLogger, KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError, MessageHeaders, RetryOptions, SendOptions, SubscribeRetryOptions, TTopicMessageMap, TopicsFrom, TransactionContext, topic } from './core.mjs';
+ import { KafkaClient } from './core.mjs';
+ export { KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError } from './core.mjs';
+ import { T as TopicMapConstraint, C as ClientId, G as GroupId, K as KafkaInstrumentation, S as SchemaLike, a as ConsumerOptions, b as TopicDescriptor } from './envelope-QK1trQu4.mjs';
+ export { B as BatchMessageItem, c as BatchMeta, d as ConsumerInterceptor, E as EnvelopeHeaderOptions, e as EventEnvelope, H as HEADER_CORRELATION_ID, f as HEADER_EVENT_ID, g as HEADER_SCHEMA_VERSION, h as HEADER_TIMESTAMP, i as HEADER_TRACEPARENT, I as IKafkaClient, j as InferSchema, k as KafkaClientOptions, l as KafkaLogger, M as MessageHeaders, R as RetryOptions, m as SendOptions, n as SubscribeRetryOptions, o as TTopicMessageMap, p as TopicsFrom, q as TransactionContext, r as buildEnvelopeHeaders, s as decodeHeaders, t as extractEnvelope, u as getEnvelopeContext, v as runWithEnvelopeContext, w as topic } from './envelope-QK1trQu4.mjs';
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
  import { DiscoveryService, ModuleRef } from '@nestjs/core';

@@ -17,6 +19,12 @@ interface KafkaModuleOptions {
      isGlobal?: boolean;
      /** Auto-create topics via admin on first use (send/consume). Useful for development. */
      autoCreateTopics?: boolean;
+     /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+     strictSchemas?: boolean;
+     /** Number of partitions for auto-created topics. Default: `1`. */
+     numPartitions?: number;
+     /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */
+     instrumentation?: KafkaInstrumentation[];
  }
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
  interface KafkaModuleAsyncOptions {
@@ -86,4 +94,4 @@ declare class KafkaHealthIndicator {
      check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
  }

- export { ClientId, ConsumerOptions, GroupId, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, SchemaLike, SubscribeTo, TopicDescriptor, TopicMapConstraint, getKafkaClientToken };
+ export { ClientId, ConsumerOptions, GroupId, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaInstrumentation, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, SchemaLike, SubscribeTo, TopicDescriptor, TopicMapConstraint, getKafkaClientToken };
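A sketch of wiring the three new module options into a NestJS application. `KafkaModule.register()` is assumed here as the synchronous counterpart of the documented `registerAsync()`, connection fields are elided, and the zero-argument `otelInstrumentation()` call is illustrative:

```ts
import { Module } from '@nestjs/common';
import { KafkaModule } from '@drarzter/kafka-client';
import { otelInstrumentation } from '@drarzter/kafka-client/otel';

@Module({
  imports: [
    KafkaModule.register({
      // ...broker/client connection options elided...
      isGlobal: true,
      autoCreateTopics: true,                   // dev convenience; see caveat above
      strictSchemas: true,                      // validate string-keyed sends against registered schemas
      numPartitions: 3,                         // for auto-created topics (default 1)
      instrumentation: [otelInstrumentation()], // client-wide OTel hooks
    }),
  ],
})
export class AppModule {}
```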
package/dist/index.d.ts CHANGED
@@ -1,5 +1,7 @@
- import { TopicMapConstraint, ClientId, GroupId, SchemaLike, ConsumerOptions, TopicDescriptor, KafkaClient } from './core.js';
- export { BatchMeta, ConsumerInterceptor, IKafkaClient, InferSchema, KafkaClientOptions, KafkaLogger, KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError, MessageHeaders, RetryOptions, SendOptions, SubscribeRetryOptions, TTopicMessageMap, TopicsFrom, TransactionContext, topic } from './core.js';
+ import { KafkaClient } from './core.js';
+ export { KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError } from './core.js';
+ import { T as TopicMapConstraint, C as ClientId, G as GroupId, K as KafkaInstrumentation, S as SchemaLike, a as ConsumerOptions, b as TopicDescriptor } from './envelope-QK1trQu4.js';
+ export { B as BatchMessageItem, c as BatchMeta, d as ConsumerInterceptor, E as EnvelopeHeaderOptions, e as EventEnvelope, H as HEADER_CORRELATION_ID, f as HEADER_EVENT_ID, g as HEADER_SCHEMA_VERSION, h as HEADER_TIMESTAMP, i as HEADER_TRACEPARENT, I as IKafkaClient, j as InferSchema, k as KafkaClientOptions, l as KafkaLogger, M as MessageHeaders, R as RetryOptions, m as SendOptions, n as SubscribeRetryOptions, o as TTopicMessageMap, p as TopicsFrom, q as TransactionContext, r as buildEnvelopeHeaders, s as decodeHeaders, t as extractEnvelope, u as getEnvelopeContext, v as runWithEnvelopeContext, w as topic } from './envelope-QK1trQu4.js';
  import { DynamicModule, OnModuleInit } from '@nestjs/common';
  import { DiscoveryService, ModuleRef } from '@nestjs/core';

@@ -17,6 +19,12 @@ interface KafkaModuleOptions {
      isGlobal?: boolean;
      /** Auto-create topics via admin on first use (send/consume). Useful for development. */
      autoCreateTopics?: boolean;
+     /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+     strictSchemas?: boolean;
+     /** Number of partitions for auto-created topics. Default: `1`. */
+     numPartitions?: number;
+     /** Client-wide instrumentation hooks (e.g. OTel). Applied to both send and consume paths. */
+     instrumentation?: KafkaInstrumentation[];
  }
  /** Async configuration for `KafkaModule.registerAsync()` with dependency injection. */
  interface KafkaModuleAsyncOptions {
@@ -86,4 +94,4 @@ declare class KafkaHealthIndicator {
      check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
  }

- export { ClientId, ConsumerOptions, GroupId, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, SchemaLike, SubscribeTo, TopicDescriptor, TopicMapConstraint, getKafkaClientToken };
+ export { ClientId, ConsumerOptions, GroupId, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaInstrumentation, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, type KafkaSubscriberMetadata, SchemaLike, SubscribeTo, TopicDescriptor, TopicMapConstraint, getKafkaClientToken };