@drarzter/kafka-client 0.2.0 → 0.2.2
- package/README.md +25 -1
- package/dist/index.d.mts +44 -7
- package/dist/index.d.ts +44 -7
- package/dist/index.js +263 -278
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +263 -278
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -331,6 +331,13 @@ await kafka.startConsumer(['orders'], auditHandler, { groupId: 'orders-audit' })
 async auditOrders(message) { ... }
 ```
 
+**Important:** You cannot mix `eachMessage` and `eachBatch` consumers on the same `groupId`. The library throws a clear error if you try:
+
+```text
+Cannot use eachBatch on consumer group "my-group" — it is already running with eachMessage.
+Use a different groupId for this consumer.
+```
+
 ### Named clients
 
 Register multiple named clients for different bounded contexts:
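What that constraint looks like at the call site, as a minimal sketch: `handleOne` and `handleMany` are hypothetical handlers, and it assumes `startBatchConsumer` takes the same `(topics, handler, options)` arguments as `startConsumer`:

```typescript
// Hypothetical handlers; only the groupId wiring matters here.
const handleOne = async (message: unknown) => { /* per-message work */ };
const handleMany = async (messages: unknown[]) => { /* per-batch work */ };

// This group now runs with eachMessage semantics.
await kafka.startConsumer(['orders'], handleOne, { groupId: 'orders-main' });

// Throws: the group is already running with eachMessage.
await kafka.startBatchConsumer(['orders'], handleMany, { groupId: 'orders-main' });

// Fine: a distinct groupId for the batch-style consumer.
await kafka.startBatchConsumer(['orders'], handleMany, { groupId: 'orders-batch' });
```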
@@ -529,6 +536,8 @@ Options for `sendMessage()` — the third argument:
 | `dlq` | `false` | Send to `{topic}.dlq` after all retries exhausted |
 | `interceptors` | `[]` | Array of before/after/onError hooks |
 | `batch` | `false` | (decorator only) Use `startBatchConsumer` instead of `startConsumer` |
+| `subscribeRetry.retries` | `5` | Max attempts for `consumer.subscribe()` when topic doesn't exist yet |
+| `subscribeRetry.backoffMs` | `5000` | Delay between subscribe retry attempts (ms) |
 
 ### Module options
 
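A call-site sketch of the new `subscribeRetry` options, reusing the README's `handleOrder` handler; the topic name and values are placeholders:

```typescript
// Keep retrying consumer.subscribe() while the topic is still being created:
// up to 10 attempts 3 seconds apart, instead of the default 5 attempts at 5000 ms.
await kafka.startConsumer(['orders'], handleOrder, {
  groupId: 'orders-main',
  subscribeRetry: { retries: 10, backoffMs: 3000 },
});
```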
@@ -541,7 +550,8 @@ Passed to `KafkaModule.register()` or returned from `registerAsync()` factory:
 | `brokers` | — | Array of broker addresses (required) |
 | `name` | — | Named client identifier for multi-client setups |
 | `isGlobal` | `false` | Make the client available in all modules without re-importing |
-| `autoCreateTopics` | `false` | Auto-create topics on first send |
+| `autoCreateTopics` | `false` | Auto-create topics on first send (dev only) |
+| `strictSchemas` | `true` | Validate string topic keys against schemas registered via TopicDescriptor |
 
 **Module-scoped** (default) — import `KafkaModule` in each module that needs it:
 
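A register-time sketch combining both new flags, assuming the usual NestJS module wiring; the broker address is a placeholder:

```typescript
import { Module } from '@nestjs/common';
import { KafkaModule } from '@drarzter/kafka-client';

@Module({
  imports: [
    KafkaModule.register({
      brokers: ['localhost:9092'],
      isGlobal: true,
      autoCreateTopics: true, // convenient in dev; keep false in production
      strictSchemas: false,   // opt out: string topic keys bypass schema validation
    }),
  ],
})
export class AppModule {}
```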
@@ -676,6 +686,20 @@ async handleOrder(message) {
 }
 ```
 
+### Strict schema mode
+
+By default (`strictSchemas: true`), once a schema is registered via a TopicDescriptor, string topic keys are also validated against it:
+
+```typescript
+// First call registers the schema in the internal registry
+await kafka.sendMessage(OrderCreated, { orderId: '1', userId: '2', amount: 100 });
+
+// Now this is ALSO validated — throws if data doesn't match OrderCreated's schema
+await kafka.sendMessage('order.created', { orderId: 123, userId: null, amount: -5 });
+```
+
+Disable with `strictSchemas: false` in `KafkaModule.register()` options if you want the old behavior (string topics bypass validation).
+
 ### Bring your own validator
 
 Any object with `parse(data: unknown): T` works:
package/dist/index.d.mts
CHANGED
@@ -139,6 +139,8 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
     interceptors?: ConsumerInterceptor<T>[];
     /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
     schemas?: Map<string, SchemaLike>;
+    /** Retry config for `consumer.subscribe()` when the topic doesn't exist yet. */
+    subscribeRetry?: SubscribeRetryOptions;
 }
 /** Configuration for consumer retry behavior. */
 interface RetryOptions {
@@ -196,8 +198,17 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
 }
 /** Options for `KafkaClient` constructor. */
 interface KafkaClientOptions {
-    /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. */
+    /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. Useful for development — not recommended in production. */
     autoCreateTopics?: boolean;
+    /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+    strictSchemas?: boolean;
+}
+/** Options for consumer subscribe retry when topic doesn't exist yet. */
+interface SubscribeRetryOptions {
+    /** Maximum number of subscribe attempts. Default: `5`. */
+    retries?: number;
+    /** Delay between retries in ms. Default: `5000`. */
+    backoffMs?: number;
 }
 
 /**
@@ -213,15 +224,14 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
     private readonly admin;
     private readonly logger;
     private readonly autoCreateTopicsEnabled;
+    private readonly strictSchemasEnabled;
     private readonly ensuredTopics;
     private readonly defaultGroupId;
+    private readonly schemaRegistry;
+    private readonly runningConsumers;
     private isAdminConnected;
     readonly clientId: ClientId;
     constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
-    private getOrCreateConsumer;
-    private resolveTopicName;
-    private ensureTopic;
-    private validateMessage;
     /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
     sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
     sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
@@ -255,9 +265,36 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
     getClientId(): ClientId;
     /** Gracefully disconnect producer, all consumers, and admin. */
     disconnect(): Promise<void>;
+    private getOrCreateConsumer;
+    private resolveTopicName;
+    private ensureTopic;
+    /** Register schema from descriptor into global registry (side-effect). */
+    private registerSchema;
+    /** Validate message against schema. Pure — no side-effects on registry. */
+    private validateMessage;
+    /**
+     * Build a kafkajs-ready send payload.
+     * Handles: topic resolution, schema registration, validation, JSON serialization.
+     */
+    private buildSendPayload;
+    /** Shared consumer setup: groupId check, schema map, connect, subscribe. */
+    private setupConsumer;
     private buildSchemaMap;
-
+    /** Parse raw message as JSON. Returns null on failure (logs error). */
+    private parseJsonMessage;
+    /**
+     * Validate a parsed message against the schema map.
+     * On failure: logs error, sends to DLQ if enabled, calls interceptor.onError.
+     * Returns validated message or null.
+     */
+    private validateWithSchema;
+    /**
+     * Execute a handler with retry, interceptors, and DLQ support.
+     * Used by both single-message and batch consumers.
+     */
+    private executeWithRetry;
     private sendToDlq;
+    private subscribeWithRetry;
     private sleep;
 }
 
@@ -370,4 +407,4 @@ declare class KafkaHealthIndicator {
     check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
 }
 
-export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
+export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, type SubscribeRetryOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
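The declarations above also allow constructing a client directly, outside NestJS. A minimal sketch: `ClientId` and `GroupId` are exported branded types, so the string casts and the `TTopicMessageMap` type argument below are assumptions about how they are meant to be satisfied:

```typescript
import {
  KafkaClient,
  type ClientId,
  type GroupId,
  type TTopicMessageMap,
} from '@drarzter/kafka-client';

// KafkaClientOptions carries both new flags; values here are placeholders.
const client = new KafkaClient<TTopicMessageMap>(
  'billing-service' as ClientId,
  'billing-group' as GroupId,
  ['localhost:9092'],
  { autoCreateTopics: true, strictSchemas: true },
);
```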
package/dist/index.d.ts
CHANGED
@@ -139,6 +139,8 @@ interface ConsumerOptions<T extends TopicMapConstraint<T> = TTopicMessageMap> {
     interceptors?: ConsumerInterceptor<T>[];
     /** @internal Schema map populated by @SubscribeTo when descriptors have schemas. */
     schemas?: Map<string, SchemaLike>;
+    /** Retry config for `consumer.subscribe()` when the topic doesn't exist yet. */
+    subscribeRetry?: SubscribeRetryOptions;
 }
 /** Configuration for consumer retry behavior. */
 interface RetryOptions {
@@ -196,8 +198,17 @@ interface IKafkaClient<T extends TopicMapConstraint<T>> {
 }
 /** Options for `KafkaClient` constructor. */
 interface KafkaClientOptions {
-    /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. */
+    /** Auto-create topics via admin before the first `sendMessage`, `sendBatch`, or `transaction` for each topic. Useful for development — not recommended in production. */
     autoCreateTopics?: boolean;
+    /** When `true`, string topic keys are validated against any schema previously registered via a TopicDescriptor. Default: `true`. */
+    strictSchemas?: boolean;
+}
+/** Options for consumer subscribe retry when topic doesn't exist yet. */
+interface SubscribeRetryOptions {
+    /** Maximum number of subscribe attempts. Default: `5`. */
+    retries?: number;
+    /** Delay between retries in ms. Default: `5000`. */
+    backoffMs?: number;
 }
 
 /**
@@ -213,15 +224,14 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
     private readonly admin;
     private readonly logger;
     private readonly autoCreateTopicsEnabled;
+    private readonly strictSchemasEnabled;
     private readonly ensuredTopics;
     private readonly defaultGroupId;
+    private readonly schemaRegistry;
+    private readonly runningConsumers;
     private isAdminConnected;
     readonly clientId: ClientId;
     constructor(clientId: ClientId, groupId: GroupId, brokers: string[], options?: KafkaClientOptions);
-    private getOrCreateConsumer;
-    private resolveTopicName;
-    private ensureTopic;
-    private validateMessage;
     /** Send a single typed message. Accepts a topic key or a TopicDescriptor. */
     sendMessage<D extends TopicDescriptor<string & keyof T, T[string & keyof T]>>(descriptor: D, message: D["__type"], options?: SendOptions): Promise<void>;
     sendMessage<K extends keyof T>(topic: K, message: T[K], options?: SendOptions): Promise<void>;
@@ -255,9 +265,36 @@ declare class KafkaClient<T extends TopicMapConstraint<T>> implements IKafkaClie
     getClientId(): ClientId;
     /** Gracefully disconnect producer, all consumers, and admin. */
     disconnect(): Promise<void>;
+    private getOrCreateConsumer;
+    private resolveTopicName;
+    private ensureTopic;
+    /** Register schema from descriptor into global registry (side-effect). */
+    private registerSchema;
+    /** Validate message against schema. Pure — no side-effects on registry. */
+    private validateMessage;
+    /**
+     * Build a kafkajs-ready send payload.
+     * Handles: topic resolution, schema registration, validation, JSON serialization.
+     */
+    private buildSendPayload;
+    /** Shared consumer setup: groupId check, schema map, connect, subscribe. */
+    private setupConsumer;
     private buildSchemaMap;
-
+    /** Parse raw message as JSON. Returns null on failure (logs error). */
+    private parseJsonMessage;
+    /**
+     * Validate a parsed message against the schema map.
+     * On failure: logs error, sends to DLQ if enabled, calls interceptor.onError.
+     * Returns validated message or null.
+     */
+    private validateWithSchema;
+    /**
+     * Execute a handler with retry, interceptors, and DLQ support.
+     * Used by both single-message and batch consumers.
+     */
+    private executeWithRetry;
     private sendToDlq;
+    private subscribeWithRetry;
     private sleep;
 }
 
@@ -370,4 +407,4 @@ declare class KafkaHealthIndicator {
     check<T extends TopicMapConstraint<T>>(client: KafkaClient<T>): Promise<KafkaHealthResult>;
 }
 
-export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };
+export { type BatchMeta, type ClientId, type ConsumerInterceptor, type ConsumerOptions, type GroupId, type IKafkaClient, type InferSchema, InjectKafkaClient, KAFKA_CLIENT, KAFKA_SUBSCRIBER_METADATA, KafkaClient, type KafkaClientOptions, KafkaExplorer, KafkaHealthIndicator, type KafkaHealthResult, KafkaModule, type KafkaModuleAsyncOptions, type KafkaModuleOptions, KafkaProcessingError, KafkaRetryExhaustedError, type KafkaSubscriberMetadata, KafkaValidationError, type MessageHeaders, type RetryOptions, type SchemaLike, type SendOptions, type SubscribeRetryOptions, SubscribeTo, type TTopicMessageMap, type TopicDescriptor, type TopicMapConstraint, type TopicsFrom, type TransactionContext, getKafkaClientToken, topic };