@drarzter/kafka-client 0.2.2 → 0.3.0

package/dist/core.d.ts ADDED
@@ -0,0 +1,338 @@
+ /**
+  * Any validation library with a `.parse()` method.
+  * Works with Zod, Valibot, ArkType, or any custom validator.
+  *
+  * @example
+  * ```ts
+  * import { z } from 'zod';
+  * const schema: SchemaLike<{ id: string }> = z.object({ id: z.string() });
+  * ```
+  */
+ interface SchemaLike<T = any> {
+   parse(data: unknown): T;
+ }
+ /** Infer the output type from a SchemaLike. */
+ type InferSchema<S extends SchemaLike> = S extends SchemaLike<infer T> ? T : never;
+ /**
+  * A typed topic descriptor that pairs a topic name with its message type.
+  * Created via the `topic()` factory function.
+  *
+  * @typeParam N - The literal topic name string.
+  * @typeParam M - The message payload type for this topic.
+  */
+ interface TopicDescriptor<N extends string = string, M extends Record<string, any> = Record<string, any>> {
+   readonly __topic: N;
+   /** @internal Phantom type — never has a real value at runtime. */
+   readonly __type: M;
+   /** Runtime schema validator. Present only when created via `topic().schema()`. */
+   readonly __schema?: SchemaLike<M>;
+ }
+ /**
+  * Define a typed topic descriptor.
+  *
+  * @example
+  * ```ts
+  * // Without schema — type provided explicitly:
+  * const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
+  *
+  * // With schema — type inferred from schema:
+  * const OrderCreated = topic('order.created').schema(z.object({
+  *   orderId: z.string(),
+  *   amount: z.number(),
+  * }));
+  *
+  * // Use with KafkaClient:
+  * await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
+  *
+  * // Use with @SubscribeTo:
+  * @SubscribeTo(OrderCreated)
+  * async handleOrder(msg) { ... }
+  * ```
+  */
+ declare function topic<N extends string>(name: N): {
+   <M extends Record<string, any>>(): TopicDescriptor<N, M>;
+   schema<S extends SchemaLike<Record<string, any>>>(schema: S): TopicDescriptor<N, InferSchema<S>>;
+ };
+ /**
+  * Build a topic-message map type from a union of TopicDescriptors.
+  *
+  * @example
+  * ```ts
+  * const OrderCreated = topic('order.created')<{ orderId: string }>();
+  * const OrderCompleted = topic('order.completed')<{ completedAt: string }>();
+  *
+  * type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;
+  * // { 'order.created': { orderId: string }; 'order.completed': { completedAt: string } }
+  * ```
+  */
+ type TopicsFrom<D extends TopicDescriptor<any, any>> = {
+   [K in D as K["__topic"]]: K["__type"];
+ };
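Taken together, these pieces compose as in the following sketch. It is illustrative only: the broker address, client ID, and group ID are placeholder values, not part of the package.

```ts
import { KafkaClient, topic, type TopicsFrom } from '@drarzter/kafka-client';

// Declare typed topics once; share them between producers and consumers.
const OrderCreated = topic('order.created')<{ orderId: string; amount: number }>();
const OrderCompleted = topic('order.completed')<{ completedAt: string }>();

// Derive the topic-to-message map from the descriptors.
type MyTopics = TopicsFrom<typeof OrderCreated | typeof OrderCompleted>;

// clientId, groupId, and broker list below are placeholder values.
const kafka = new KafkaClient<MyTopics>('orders-svc', 'orders-group', ['localhost:9092']);

// Both the descriptor and the payload shape are checked at compile time.
await kafka.sendMessage(OrderCreated, { orderId: '123', amount: 100 });
```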
+
+ /**
+  * Mapping of topic names to their message types.
+  * Define this interface to get type-safe publish/subscribe across your app.
+  *
+  * @example
+  * ```ts
+  * // with explicit extends (IDE hints for values)
+  * interface MyTopics extends TTopicMessageMap {
+  *   "orders.created": { orderId: string; amount: number };
+  *   "users.updated": { userId: string; name: string };
+  * }
+  *
+  * // or plain interface / type — works the same
+  * interface MyTopics {
+  *   "orders.created": { orderId: string; amount: number };
+  * }
+  * ```
+  */
+ type TTopicMessageMap = {
+   [topic: string]: Record<string, any>;
+ };
+ /**
+  * Generic constraint for topic-message maps.
+  * Works with both `type` aliases and `interface` declarations.
+  */
+ type TopicMapConstraint<T> = {
+   [K in keyof T]: Record<string, any>;
+ };
+ type ClientId = string;
+ type GroupId = string;
+ type MessageHeaders = Record<string, string>;
+ /** Options for sending a single message. */
+ interface SendOptions {
+   /** Partition key for message routing. */
+   key?: string;
+   /** Custom headers attached to the message. */
+   headers?: MessageHeaders;
+ }
+ /** Metadata exposed to batch consumer handlers. */
+ interface BatchMeta {
+   /** Partition number for this batch. */
+   partition: number;
+   /** Highest offset available on the broker for this partition. */
+   highWatermark: string;
+   /** Send a heartbeat to the broker to prevent session timeout. */
+   heartbeat(): Promise<void>;
+   /** Mark an offset as processed (for manual offset management). */
+   resolveOffset(offset: string): void;
+   /** Commit offsets if the auto-commit threshold has been reached. */
+   commitOffsetsIfNecessary(): Promise<void>;
+ }
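A batch handler can use these callbacks to keep long batches alive. A minimal sketch, reusing the `kafka` client and descriptors from the sketch above (`processOrder` is a hypothetical application function):

```ts
// Consume in batches; heartbeat between messages so a slow batch
// does not exceed the broker's session timeout.
await kafka.startBatchConsumer([OrderCreated], async (messages, topic, meta) => {
  console.log(`[${topic}] partition ${meta.partition}, high watermark ${meta.highWatermark}`);
  for (const message of messages) {
    await processOrder(message); // hypothetical application function
    await meta.heartbeat();
  }
}, { fromBeginning: true });
```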
+ /** Options for configuring a Kafka consumer. */
+ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
+   /** Override the default consumer group ID from the constructor. */
+   groupId?: string;
+   /** Start reading from earliest offset. Default: `false`. */
+   fromBeginning?: boolean;
+   /** Automatically commit offsets. Default: `true`. */
+   autoCommit?: boolean;
+   /** Retry policy for failed message processing. */
+   retry?: RetryOptions;
+   /** Send failed messages to a Dead Letter Queue (`<topic>.dlq`). */
+   dlq?: boolean;
+   /** Interceptors called before/after each message. */
+   interceptors?: ConsumerInterceptor<T>[];
+   /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
+   schemas?: Map<string, SchemaLike>;
+   /** Retry config for `consumer.subscribe()` when the topic doesn't exist yet. */
+   subscribeRetry?: SubscribeRetryOptions;
+ }
+ /** Configuration for consumer retry behavior. */
+ interface RetryOptions {
+   /** Maximum number of retry attempts before giving up. */
+   maxRetries: number;
+   /** Base delay between retries in ms (multiplied by attempt number). Default: `1000`. */
+   backoffMs?: number;
+ }
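Combined, `retry` and `dlq` give per-message retry with a dead-letter fallback. A sketch, assuming the same client as above and a hypothetical `handleOrder`:

```ts
// Retry each failing message up to 3 times; per the backoffMs doc above,
// the delays are 500 ms, 1000 ms, 1500 ms. After that the message is
// forwarded to `order.created.dlq`.
await kafka.startConsumer([OrderCreated], async (message) => {
  await handleOrder(message); // hypothetical handler; a throw triggers the retry path
}, {
  retry: { maxRetries: 3, backoffMs: 500 },
  dlq: true,
});
```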
+ /**
+  * Interceptor hooks for consumer message processing.
+  * All methods are optional — implement only what you need.
+  */
+ interface ConsumerInterceptor<T extends TopicMapConstraint<T> = TTopicMessageMap> {
+   /** Called before the message handler. */
+   before?(message: T[keyof T], topic: string): Promise<void> | void;
+   /** Called after the message handler succeeds. */
+   after?(message: T[keyof T], topic: string): Promise<void> | void;
+   /** Called when the message handler throws. */
+   onError?(message: T[keyof T], topic: string, error: Error): Promise<void> | void;
+ }
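A minimal logging interceptor, assuming the `MyTopics` map and client from the first sketch (`handleOrder` again hypothetical):

```ts
import type { ConsumerInterceptor } from '@drarzter/kafka-client';

// Log arrivals and failures; `after` is simply omitted, since all hooks
// are optional.
const logging: ConsumerInterceptor<MyTopics> = {
  before: (message, topic) => console.log(`[${topic}] received`, message),
  onError: (message, topic, error) => console.error(`[${topic}] failed:`, error.message),
};

await kafka.startConsumer([OrderCreated], handleOrder, { interceptors: [logging] });
```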
+ /** Context passed to the `transaction()` callback with type-safe send methods. */
+ interface TransactionContext<T extends TopicMapConstraint<T>> {
+   send<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
+   send<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
+   sendBatch<K extends keyof T>(topic: K, messages: Array<{
+     value: T[K];
+     key?: string;
+     headers?: MessageHeaders;
+   }>): Promise<void>;
+   sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
+     value: D["__type"];
+     key?: string;
+     headers?: MessageHeaders;
+   }>): Promise<void>;
+ }
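Inside `transaction()` both the string-key and descriptor overloads are available. A sketch reusing the earlier descriptors:

```ts
// Either both sends commit together, or the transaction is aborted
// and neither message becomes visible.
await kafka.transaction(async (ctx) => {
  await ctx.send(OrderCreated, { orderId: '123', amount: 100 }, { key: '123' });
  await ctx.send('order.completed', { completedAt: new Date().toISOString() });
});
```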
+ /** Interface describing all public methods of the Kafka client. */
+ interface IKafkaClient<T extends TopicMapConstraint<T>> {
+   checkStatus(): Promise<{
+     topics: string[];
+   }>;
+   startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   stopConsumer(): Promise<void>;
+   sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
+   sendBatch<K extends keyof T>(topic: K, messages: Array<{
+     value: T[K];
+     key?: string;
+     headers?: MessageHeaders;
+   }>): Promise<void>;
+   transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
+   getClientId: () => ClientId;
+   disconnect(): Promise<void>;
+ }
+ /**
+  * Logger interface for KafkaClient.
+  * Compatible with NestJS Logger, console, winston, pino, or any custom logger.
+  */
+ interface KafkaLogger {
+   log(message: string): void;
+   warn(message: string, ...args: any[]): void;
+   error(message: string, ...args: any[]): void;
+ }
+ /** Options for `KafkaClient` constructor. */
+ interface KafkaClientOptions {
+   /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. Useful for development — not recommended in production. */
+   autoCreateTopics?: boolean;
+   /** When `true`, messages sent with a plain string topic key are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+   strictSchemas?: boolean;
+   /** Custom logger. Defaults to console with `[KafkaClient:<clientId>]` prefix. */
+   logger?: KafkaLogger;
+   /** Number of partitions for auto-created topics. Default: `1`. */
+   numPartitions?: number;
+ }
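An illustrative constructor call exercising these options; all values are placeholders, and plain `console` already satisfies `KafkaLogger`:

```ts
const client = new KafkaClient<MyTopics>('orders-svc', 'orders-group', ['localhost:9092'], {
  autoCreateTopics: true, // development convenience; not recommended in production
  numPartitions: 3,       // partitions for any auto-created topic
  logger: console,        // anything with log/warn/error works
});
```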
+ /** Options for retrying `consumer.subscribe()` when the topic doesn't exist yet. */
+ interface SubscribeRetryOptions {
+   /** Maximum number of subscribe attempts. Default: `5`. */
+   retries?: number;
+   /** Delay between retries in ms. Default: `5000`. */
+   backoffMs?: number;
+ }
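Useful when another service is responsible for creating the topic. A sketch with assumed values:

```ts
// Keep attempting to subscribe for up to roughly 20 seconds
// (10 attempts, 2000 ms apart) while `order.created` is being created.
await kafka.startConsumer([OrderCreated], handleOrder, {
  subscribeRetry: { retries: 10, backoffMs: 2000 },
});
```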
+
+ /**
+  * Type-safe Kafka client.
+  * Wraps kafkajs with JSON serialization, retries, DLQ, transactions, and interceptors.
+  *
+  * @typeParam T - Topic-to-message type mapping for compile-time safety.
+  */
+ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClient<T> {
+   private readonly kafka;
+   private readonly producer;
+   private readonly consumers;
+   private readonly admin;
+   private readonly logger;
+   private readonly autoCreateTopicsEnabled;
+   private readonly strictSchemasEnabled;
+   private readonly numPartitions;
+   private readonly ensuredTopics;
+   private readonly defaultGroupId;
+   private readonly schemaRegistry;
+   private readonly runningConsumers;
+   private isAdminConnected;
+   readonly clientId: ClientId;
+   constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
+   /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
+   sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
+   sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
+   /** Send multiple typed messages in one call. Accepts a topic key or a TopicDescriptor. */
+   sendBatch<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, messages: Array<{
+     value: D["__type"];
+     key?: string;
+     headers?: MessageHeaders;
+   }>): Promise<void>;
+   sendBatch<K extends keyof T>(topic: K, messages: Array<{
+     value: T[K];
+     key?: string;
+     headers?: MessageHeaders;
+   }>): Promise<void>;
+   /** Execute multiple sends atomically. Commits on success, aborts on error. */
+   transaction(fn: (ctx: TransactionContext<T>) => Promise<void>): Promise<void>;
+   /** Connect the idempotent producer. Called automatically by `KafkaModule.register()`. */
+   connectProducer(): Promise<void>;
+   disconnectProducer(): Promise<void>;
+   /** Subscribe to topics and start consuming messages with the given handler. */
+   startConsumer<K extends Array<keyof T>>(topics: K, handleMessage: (message: T[K[number]], topic: K[number]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   startConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleMessage: (message: D["__type"], topic: D["__topic"]) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   /** Subscribe to topics and consume messages in batches. */
+   startBatchConsumer<K extends Array<keyof T>>(topics: K, handleBatch: (messages: T[K[number]][], topic: K[number], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   startBatchConsumer<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(topics: D[], handleBatch: (messages: D["__type"][], topic: D["__topic"], meta: BatchMeta) => Promise<void>, options?: ConsumerOptions<T>): Promise<void>;
+   stopConsumer(): Promise<void>;
+   /** Check broker connectivity and return available topics. */
+   checkStatus(): Promise<{
+     topics: string[];
+   }>;
+   getClientId(): ClientId;
+   /** Gracefully disconnect producer, all consumers, and admin. */
+   disconnect(): Promise<void>;
+   private getOrCreateConsumer;
+   private resolveTopicName;
+   private ensureTopic;
+   /** Register schema from descriptor into global registry (side-effect). */
+   private registerSchema;
+   /** Validate message against schema. Pure — no side-effects on registry. */
+   private validateMessage;
+   /**
+    * Build a kafkajs-ready send payload.
+    * Handles: topic resolution, schema registration, validation, JSON serialization.
+    */
+   private buildSendPayload;
+   /** Shared consumer setup: groupId check, schema map, connect, subscribe. */
+   private setupConsumer;
+   private buildSchemaMap;
+   /** Parse raw message as JSON. Returns null on failure (logs error). */
+   private parseJsonMessage;
+   /**
+    * Validate a parsed message against the schema map.
+    * On failure: logs error, sends to DLQ if enabled, calls interceptor.onError.
+    * Returns validated message or null.
+    */
+   private validateWithSchema;
+   /**
+    * Execute a handler with retry, interceptors, and DLQ support.
+    * Used by both single-message and batch consumers.
+    */
+   private executeWithRetry;
+   private sendToDlq;
+   private subscribeWithRetry;
+   private sleep;
+ }
+
+ /** Error thrown when a consumer message handler fails. */
+ declare class KafkaProcessingError extends Error {
+   readonly topic: string;
+   readonly originalMessage: unknown;
+   readonly cause?: Error;
+   constructor(message: string, topic: string, originalMessage: unknown, options?: {
+     cause?: Error;
+   });
+ }
+ /** Error thrown when schema validation fails on send or consume. */
+ declare class KafkaValidationError extends Error {
+   readonly topic: string;
+   readonly originalMessage: unknown;
+   readonly cause?: Error;
+   constructor(topic: string, originalMessage: unknown, options?: {
+     cause?: Error;
+   });
+ }
+ /** Error thrown when all retry attempts are exhausted for a message. */
+ declare class KafkaRetryExhaustedError extends KafkaProcessingError {
+   readonly attempts: number;
+   constructor(topic: string, originalMessage: unknown, attempts: number, options?: {
+     cause?: Error;
+   });
+ }
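A sketch of handling validation failures on the producer side, assuming `OrderCreated` was declared with a schema via `topic(...).schema(...)`:

```ts
import { KafkaValidationError } from '@drarzter/kafka-client';

try {
  // The cast slips past compile-time checks so the runtime schema rejects it.
  await kafka.sendMessage(OrderCreated, { orderId: 123 as any, amount: 100 });
} catch (error) {
  if (error instanceof KafkaValidationError) {
    console.error(`schema rejected message for ${error.topic}`, error.cause);
  } else {
    throw error; // broker/connection errors propagate unchanged
  }
}
```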
+
+ export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, KafkaClient, type KafkaClientOptions, type KafkaLogger, KafkaProcessingError, KafkaRetryExhaustedError, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, type SubscribeRetryOptions, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, topic };