@loipv/nestjs-kafka 0.0.7 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/README.md +428 -32
  2. package/dist/health/kafka-health-indicator.js +3 -4
  3. package/dist/health/kafka-health-indicator.js.map +1 -1
  4. package/dist/index.d.ts +1 -0
  5. package/dist/index.js +3 -1
  6. package/dist/index.js.map +1 -1
  7. package/dist/interfaces/consumer-options.interface.d.ts +6 -4
  8. package/dist/interfaces/kafka-module-options.interface.d.ts +12 -4
  9. package/dist/interfaces/kafka-module-options.interface.js.map +1 -1
  10. package/dist/interfaces/message.interface.d.ts +4 -4
  11. package/dist/interfaces/message.interface.js.map +1 -1
  12. package/dist/kafka.module.js +9 -0
  13. package/dist/kafka.module.js.map +1 -1
  14. package/dist/services/batch-processor.service.d.ts +3 -1
  15. package/dist/services/batch-processor.service.js +0 -1
  16. package/dist/services/batch-processor.service.js.map +1 -1
  17. package/dist/services/circuit-breaker.service.js +2 -1
  18. package/dist/services/circuit-breaker.service.js.map +1 -1
  19. package/dist/services/consumer-registry.service.d.ts +7 -2
  20. package/dist/services/consumer-registry.service.js +228 -126
  21. package/dist/services/consumer-registry.service.js.map +1 -1
  22. package/dist/services/dlq-metrics.service.js +3 -1
  23. package/dist/services/dlq-metrics.service.js.map +1 -1
  24. package/dist/services/dlq-retry.service.js +12 -9
  25. package/dist/services/dlq-retry.service.js.map +1 -1
  26. package/dist/services/dlq.service.d.ts +3 -1
  27. package/dist/services/dlq.service.js.map +1 -1
  28. package/dist/services/idempotency.service.d.ts +3 -1
  29. package/dist/services/idempotency.service.js.map +1 -1
  30. package/dist/services/index.d.ts +1 -0
  31. package/dist/services/index.js +1 -0
  32. package/dist/services/index.js.map +1 -1
  33. package/dist/services/kafka-client.service.d.ts +3 -1
  34. package/dist/services/kafka-client.service.js +51 -15
  35. package/dist/services/kafka-client.service.js.map +1 -1
  36. package/dist/services/kafka-core.service.d.ts +4 -1
  37. package/dist/services/kafka-core.service.js +29 -32
  38. package/dist/services/kafka-core.service.js.map +1 -1
  39. package/dist/services/pressure-manager.service.d.ts +3 -1
  40. package/dist/services/pressure-manager.service.js.map +1 -1
  41. package/dist/services/tracing.service.d.ts +57 -0
  42. package/dist/services/tracing.service.js +286 -0
  43. package/dist/services/tracing.service.js.map +1 -0
  44. package/dist/tsconfig.build.tsbuildinfo +1 -1
  45. package/package.json +20 -5
package/README.md CHANGED
@@ -1,6 +1,8 @@
1
1
  # @loipv/nestjs-kafka
2
2
 
3
- A production-ready NestJS module for Kafka client and consumer functionality built on top of [KafkaJS](https://kafka.js.org/). This library provides enterprise-grade features including intelligent batch processing, idempotency guarantees, key-based grouping, and automatic pressure management.
3
+ A production-ready NestJS module for Kafka client and consumer functionality built on top of [confluent-kafka-javascript](https://github.com/confluentinc/confluent-kafka-javascript). This library provides enterprise-grade features including intelligent batch processing, idempotency guarantees, key-based grouping, and automatic pressure management.
4
+
5
+ > **Note:** Starting from v1.0.0, this library uses `@confluentinc/kafka-javascript` instead of `kafkajs` for better performance and official Confluent support. See [Migration Guide](#migration-guide-from-v0x-to-v1x) for upgrade instructions.
4
6
 
5
7
  ## Features
6
8
 
@@ -11,13 +13,14 @@ A production-ready NestJS module for Kafka client and consumer functionality bui
11
13
  - **Back Pressure**: Automatic pause/resume when consumers are overwhelmed
12
14
  - **Idempotency**: In-memory duplicate prevention with TTL
13
15
  - **Dead Letter Queue (DLQ)**: Automatic retry with exponential backoff
16
+ - **OpenTelemetry Tracing**: Distributed tracing across produce → consume with same trace ID
14
17
  - **Health Checks**: Integration with `@nestjs/terminus`
15
18
  - **Graceful Shutdown**: Proper cleanup on application shutdown
16
19
 
17
20
  ## Installation
18
21
 
19
22
  ```bash
20
- npm install @loipv/nestjs-kafka kafkajs
23
+ npm install @loipv/nestjs-kafka @confluentinc/kafka-javascript
21
24
  ```
22
25
 
23
26
  ### Peer Dependencies
@@ -28,6 +31,22 @@ Make sure you have the following peer dependencies installed:
28
31
  npm install @nestjs/common @nestjs/core @nestjs/terminus reflect-metadata rxjs
29
32
  ```
30
33
 
34
+ ### Platform Support
35
+
36
+ confluent-kafka-javascript is built on librdkafka (C library). Supported platforms:
37
+ - **Linux**: x64, arm64
38
+ - **macOS**: arm64 (Apple Silicon)
39
+ - **Windows**: x64
40
+ - **Node.js**: 18, 20, 21, 22
41
+
42
+ ### Optional: OpenTelemetry Tracing
43
+
44
+ For distributed tracing support:
45
+
46
+ ```bash
47
+ npm install @opentelemetry/api @opentelemetry/sdk-trace-node @opentelemetry/auto-instrumentations-node
48
+ ```
49
+
31
50
  ## Quick Start
32
51
 
33
52
  ### 1. Import KafkaModule
@@ -71,7 +90,7 @@ export class OrderModule {}
71
90
  ```typescript
72
91
  import { Injectable } from '@nestjs/common';
73
92
  import { Consumer } from '@loipv/nestjs-kafka';
74
- import { KafkaMessage } from 'kafkajs';
93
+ import { KafkaMessage } from '@confluentinc/kafka-javascript/kafkajs';
75
94
 
76
95
  @Injectable()
77
96
  export class OrderConsumer {
@@ -454,6 +473,34 @@ async handleOrder(message: KafkaMessage) {
454
473
 
455
474
  Use this to temporarily disable a consumer without removing the code.
456
475
 
476
+ ### Partition Assignment Strategy
477
+
478
+ Control how partitions are assigned to consumers in a consumer group:
479
+
480
+ ```typescript
481
+ @Consumer('orders', {
482
+ groupId: 'order-processors',
483
+ // Use cooperative-sticky for minimal rebalancing disruption
484
+ partitionAssigners: ['cooperative-sticky'],
485
+ })
486
+ async handleOrder(message: KafkaMessage) {
487
+ // Process order
488
+ }
489
+
490
+ // Multiple strategies (first one is primary)
491
+ @Consumer('events', {
492
+ partitionAssigners: ['roundrobin', 'range'],
493
+ })
494
+ async handleEvent(message: KafkaMessage) {
495
+ // Process event
496
+ }
497
+ ```
498
+
499
+ **Available strategies:**
500
+ - `'roundrobin'` - Assigns partitions in round-robin fashion across consumers
501
+ - `'range'` - Assigns partitions based on ranges (default)
502
+ - `'cooperative-sticky'` - Cooperative rebalancing with sticky assignment (recommended for minimal disruption during rebalancing)
503
+
457
504
  ### Auto-Deserialization
458
505
 
459
506
  Messages are automatically deserialized by default:
@@ -479,40 +526,47 @@ async handleBinary(message: KafkaMessage) {
479
526
  }
480
527
  ```
481
528
 
482
- ### Retry & Restart on Failure
529
+ ### Retry Mechanism (Without DLQ)
483
530
 
484
- Control consumer restart behavior when errors occur:
531
+ When **NOT using DLQ**, the library implements an in-memory retry mechanism with exponential backoff:
485
532
 
486
533
  ```typescript
487
- // Disable restart on failure
488
- @Consumer('critical-topic', {
534
+ @Consumer('orders', {
489
535
  retry: {
490
- restartOnFailure: false,
536
+ retries: 3, // Default: 3 retries
537
+ initialRetryTime: 1000, // Default: 1000ms
538
+ multiplier: 2, // Default: 2 (exponential backoff)
539
+ skipMessageOnMaxRetries: false, // Default: false (throw error)
491
540
  },
492
541
  })
493
- async handleCritical(message: KafkaMessagePayload) {
494
- // Consumer will NOT restart if this throws
542
+ async handleOrders(message: KafkaMessagePayload) {
543
+ // If this fails, it will retry: 1s, 2s, 4s delays
544
+ // After 3 retries, error is thrown (consumer may stop/restart)
495
545
  }
496
546
 
497
- // Custom restart logic
498
- @Consumer('orders', {
547
+ // Skip message to avoid blocking consumer (useful for multi-topic consumers)
548
+ @Consumer('non-critical-logs', {
499
549
  retry: {
500
- retries: 10,
501
- maxRetryTime: 60000,
502
- restartOnFailure: async (error) => {
503
- // Don't restart on authentication errors
504
- if (error.message.includes('authentication')) {
505
- return false;
506
- }
507
- return true; // Restart for other errors
508
- },
550
+ retries: 5,
551
+ skipMessageOnMaxRetries: true, // Skip message after max retries
509
552
  },
510
553
  })
511
- async handleOrders(message: KafkaMessagePayload) {
512
- // Process order
554
+ async handleLogs(message: KafkaMessagePayload) {
555
+ // If this fails 5 times, message is skipped (offset committed)
556
+ // Consumer continues processing next message
513
557
  }
514
558
  ```
515
559
 
560
+ **Important Notes:**
561
+ - **Default behavior** (`skipMessageOnMaxRetries: false`): Error is thrown after max retries, ensuring no message is silently dropped
562
+ - **Skip mode** (`skipMessageOnMaxRetries: true`): Message is skipped after max retries to prevent consumer blocking
563
+ - **With DLQ**: Messages are sent to DLQ topic after max retries (recommended approach)
564
+
565
+ **When to use skip mode:**
566
+ - Multi-topic consumers where one failing topic shouldn't block others
567
+ - Non-critical messages that can be safely dropped
568
+ - Development/debugging environments
569
+
516
570
  ### All Consumer Options
517
571
 
518
572
  ```typescript
@@ -566,14 +620,16 @@ interface ConsumerOptions {
566
620
  autoCommitInterval?: number;
567
621
  fromBeginning?: boolean; // Default: false
568
622
 
569
- // Retry & restart on failure
623
+ // Partition assignment strategy
624
+ partitionAssigners?: PartitionAssigner[]; // 'roundrobin' | 'range' | 'cooperative-sticky'
625
+
626
+ // Retry options
570
627
  retry?: {
571
628
  retries?: number; // Default: 5
572
629
  maxRetryTime?: number; // Default: 30000
573
630
  initialRetryTime?: number; // Default: 300
574
- factor?: number; // Default: 0.2
575
- multiplier?: number; // Default: 2
576
- restartOnFailure?: boolean | ((error: Error) => Promise<boolean>);
631
+ multiplier?: number; // Default: 2 (exponential backoff)
632
+ skipMessageOnMaxRetries?: boolean; // Default: false (throw error after max retries)
577
633
  };
578
634
  }
579
635
  ```
@@ -606,13 +662,26 @@ await kafka.sendMultiTopicBatch([
606
662
  await kafka.sendQueued('topic', { value: 'message' });
607
663
  ```
608
664
 
609
- ### Send Options
665
+ ### Producer Options
666
+
667
+ > **Note:** In v1.0.0+, `acks`, `timeout`, and `compression` are configured at the producer level in `KafkaModule.forRoot()`, not per-send call.
610
668
 
611
669
  ```typescript
612
- await kafka.send('topic', message, {
613
- acks: -1, // -1 (all), 0 (none), 1 (leader only)
614
- timeout: 30000,
615
- compression: 1, // 0=None, 1=GZIP, 2=Snappy, 3=LZ4, 4=ZSTD
670
+ // Configure producer options at module level
671
+ KafkaModule.forRoot({
672
+ clientId: 'my-app',
673
+ brokers: ['localhost:9092'],
674
+ producer: {
675
+ acks: -1, // -1 (all), 0 (none), 1 (leader only)
676
+ timeout: 30000,
677
+ compression: 1, // 0=None, 1=GZIP, 2=Snappy, 3=LZ4, 4=ZSTD
678
+ },
679
+ });
680
+
681
+ // Send messages (acks/timeout/compression configured above)
682
+ await kafka.send('topic', {
683
+ key: 'message-key',
684
+ value: { data: 'value' },
616
685
  });
617
686
  ```
618
687
 
@@ -680,6 +749,213 @@ export class HealthController {
680
749
  }
681
750
  ```
682
751
 
752
+ ## OpenTelemetry Tracing
753
+
754
+ The library supports distributed tracing with OpenTelemetry, allowing you to trace messages from producer to consumer with the same trace ID.
755
+
756
+ ### How It Works
757
+
758
+ 1. **Producer**: When sending a message, the library creates a span and injects the trace context (W3C Trace Context format) into Kafka message headers
759
+ 2. **Consumer**: When receiving a message, the library extracts the trace context from headers and creates a child span linked to the producer's trace
760
+ 3. **Batch Consumer**: For batch processing, the first message's trace becomes the parent, and all other messages are added as span links
761
+
762
+ ```
763
+ ┌─────────────────┐ ┌─────────────────┐
764
+ │ HTTP Request │ │ Consumer App │
765
+ │ TraceID: abc │ │ │
766
+ │ ┌───────────┐ │ Kafka Topic │ ┌───────────┐ │
767
+ │ │ publish │──┼─────────────────────────┼──│ process │ │
768
+ │ │ span │ │ Headers: │ │ span │ │
769
+ │ │ │ │ traceparent: 00-abc... │ │ │ │
770
+ │ └───────────┘ │ │ └───────────┘ │
771
+ │ │ │ TraceID: abc │
772
+ └─────────────────┘ └─────────────────┘
773
+ ```
774
+
775
+ ### Prerequisites
776
+
777
+ 1. Install OpenTelemetry packages:
778
+
779
+ ```bash
780
+ npm install @opentelemetry/api @opentelemetry/sdk-node @opentelemetry/exporter-trace-otlp-http
781
+ ```
782
+
783
+ 2. **IMPORTANT**: Initialize OpenTelemetry SDK **BEFORE** your NestJS app starts:
784
+
785
+ ```typescript
786
+ // tracing.ts
787
+ import { NodeSDK } from '@opentelemetry/sdk-node';
788
+ import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
789
+ import { Resource } from '@opentelemetry/resources';
790
+ import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions';
791
+
792
+ const sdk = new NodeSDK({
793
+ resource: new Resource({
794
+ [ATTR_SERVICE_NAME]: 'my-kafka-service',
795
+ }),
796
+ traceExporter: new OTLPTraceExporter({
797
+ url: 'http://localhost:4318/v1/traces', // Jaeger/OTLP endpoint
798
+ }),
799
+ });
800
+
801
+ sdk.start();
802
+ ```
803
+
804
+ ```typescript
805
+ // main.ts
806
+ import './tracing'; // MUST be first import
807
+ import { NestFactory } from '@nestjs/core';
808
+ import { AppModule } from './app.module';
809
+
810
+ async function bootstrap() {
811
+ const app = await NestFactory.create(AppModule);
812
+ await app.listen(3000);
813
+ }
814
+ bootstrap();
815
+ ```
816
+
817
+ ### Important: Disable KafkaJS Auto-Instrumentation
818
+
819
+ If you're using `@opentelemetry/auto-instrumentations-node`, you **MUST** disable the KafkaJS auto-instrumentation to use this library's tracing. Otherwise, you'll get duplicate spans and the library's spans (with `consumer.group` attribute) won't be used.
820
+
821
+ ```typescript
822
+ // tracing.ts
823
+ import { NodeSDK } from '@opentelemetry/sdk-node';
824
+ import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
825
+
826
+ const sdk = new NodeSDK({
827
+ instrumentations: [
828
+ getNodeAutoInstrumentations({
829
+ // Disable kafkajs auto-instrumentation
830
+ '@opentelemetry/instrumentation-kafkajs': {
831
+ enabled: false,
832
+ },
833
+ }),
834
+ ],
835
+ // ... other config
836
+ });
837
+
838
+ sdk.start();
839
+ ```
840
+
841
+ ### Enable Tracing
842
+
843
+ ```typescript
844
+ // app.module.ts
845
+ import { Module } from '@nestjs/common';
846
+ import { KafkaModule, ConsumerModule } from '@loipv/nestjs-kafka';
847
+
848
+ @Module({
849
+ imports: [
850
+ KafkaModule.forRoot({
851
+ clientId: 'my-app',
852
+ brokers: ['localhost:9092'],
853
+ tracing: {
854
+ enabled: true, // Required: enable tracing
855
+ tracerName: 'my-kafka-service', // Optional: custom tracer name
856
+ tracerVersion: '1.0.0', // Optional: custom tracer version
857
+ },
858
+ }),
859
+ ConsumerModule.forRoot(),
860
+ ],
861
+ })
862
+ export class AppModule {}
863
+ ```
864
+
865
+ ### Tracing Options
866
+
867
+ | Option | Type | Default | Description |
868
+ |--------|------|---------|-------------|
869
+ | `enabled` | boolean | `false` | Enable OpenTelemetry tracing |
870
+ | `tracerName` | string | `'@loipv/nestjs-kafka'` | Custom tracer name |
871
+ | `tracerVersion` | string | `'0.0.1'` | Custom tracer version |
872
+
873
+ ### Span Names
874
+
875
+ Span names include the consumer group for easy identification in tracing UI:
876
+
877
+ | Type | Span Name Format | Example |
878
+ |------|------------------|---------|
879
+ | Producer | `{topic} publish` | `orders publish` |
880
+ | Consumer | `{groupId} {topic} process` | `order-group orders process` |
881
+ | Batch Consumer | `{groupId} {topic} process batch` | `order-group orders process batch` |
882
+
883
+ ### Span Attributes (OpenTelemetry Semantic Conventions v1.24+)
884
+
885
+ **Producer Span:**
886
+
887
+ | Attribute | Example | Description |
888
+ |-----------|---------|-------------|
889
+ | `messaging.system` | `kafka` | Messaging system |
890
+ | `messaging.destination.name` | `orders` | Topic name |
891
+ | `messaging.operation.name` | `publish` | Operation name |
892
+ | `messaging.operation.type` | `publish` | Operation type |
893
+ | `messaging.destination.partition.id` | `0` | Partition (if specified) |
894
+ | `messaging.kafka.message.key` | `customer-123` | Message key (if present) |
895
+
896
+ **Consumer Span:**
897
+
898
+ | Attribute | Example | Description |
899
+ |-----------|---------|-------------|
900
+ | `messaging.system` | `kafka` | Messaging system |
901
+ | `messaging.destination.name` | `orders` | Topic name |
902
+ | `messaging.destination.partition.id` | `0` | Partition number |
903
+ | `messaging.operation.name` | `process` | Operation name |
904
+ | `messaging.operation.type` | `process` | Operation type |
905
+ | `messaging.kafka.offset` | `12345` | Message offset |
906
+ | `messaging.kafka.consumer.group` | `order-group` | Consumer group ID |
907
+ | `messaging.kafka.message.key` | `customer-123` | Message key (if present) |
908
+
909
+ **Batch Consumer Span (additional):**
910
+
911
+ | Attribute | Example | Description |
912
+ |-----------|---------|-------------|
913
+ | `messaging.batch.message_count` | `100` | Number of messages in batch |
914
+
915
+ ### Batch Tracing with Links
916
+
917
+ For batch consumers (`batch: true`), messages from different requests/traces are processed together. The library handles this by:
918
+
919
+ 1. **First message's trace** becomes the **parent** (for trace continuity)
920
+ 2. **All other messages** are added as **span links** (shows relationship in tracing UI)
921
+
922
+ ```
923
+ Request A (trace-A) ──publish──▶ Message 1 ──┐
924
+ Request B (trace-B) ──publish──▶ Message 2 ──┼──▶ Batch Consumer Span
925
+ Request C (trace-C) ──publish──▶ Message 3 ──┘ │
926
+ ├── Parent: trace-A
927
+ └── Links: [trace-B, trace-C]
928
+ ```
929
+
930
+ This allows you to:
931
+ - Follow the full trace from the first message's request
932
+ - See all related traces via span links in your tracing UI (Jaeger, Zipkin, etc.)
933
+
934
+ ### Trace Context Propagation
935
+
936
+ The library uses W3C Trace Context format for propagation via Kafka headers:
937
+
938
+ | Header | Format | Example |
939
+ |--------|--------|---------|
940
+ | `traceparent` | `{version}-{traceId}-{spanId}-{flags}` | `00-abc123...-def456...-01` |
941
+ | `tracestate` | Vendor-specific state | `vendor=value` |
942
+
943
+ ### Troubleshooting
944
+
945
+ **No spans appearing:**
946
+ 1. Ensure `tracing.enabled: true` in KafkaModule options
947
+ 2. Ensure OpenTelemetry SDK is initialized **before** NestJS app starts
948
+ 3. Ensure `@opentelemetry/api` is installed
949
+ 4. If using auto-instrumentations, disable `@opentelemetry/instrumentation-kafkajs`
950
+
951
+ **Missing `messaging.kafka.consumer.group` attribute:**
952
+ - You're likely using `@opentelemetry/instrumentation-kafkajs` which creates its own spans
953
+ - Disable it and use this library's TracingService instead
954
+
955
+ **Duplicate spans:**
956
+ - Both auto-instrumentation and this library are creating spans
957
+ - Disable `@opentelemetry/instrumentation-kafkajs` in auto-instrumentations config
958
+
683
959
  ## API Reference
684
960
 
685
961
  ### Exports
@@ -694,6 +970,7 @@ export { Consumer } from './decorators';
694
970
 
695
971
  // Services
696
972
  export { KafkaClient } from './services/kafka-client.service';
973
+ export { TracingService } from './services/tracing.service';
697
974
  export { DlqMetricsService } from './services/dlq-metrics.service';
698
975
  export { CircuitBreakerService } from './services/circuit-breaker.service';
699
976
 
@@ -704,6 +981,7 @@ export { KafkaHealthIndicator } from './health/kafka-health-indicator';
704
981
  export {
705
982
  KafkaModuleOptions,
706
983
  KafkaModuleAsyncOptions,
984
+ TracingOptions,
707
985
  ConsumerOptions,
708
986
  DlqOptions,
709
987
  DlqRetryOptions,
@@ -713,6 +991,124 @@ export {
713
991
  } from './interfaces';
714
992
  ```
715
993
 
994
+ ## Migration Guide (from v0.x to v1.x)
995
+
996
+ v1.0.0 introduces a **breaking change**: migrating from `kafkajs` to `@confluentinc/kafka-javascript` for better performance and official Confluent support.
997
+
998
+ ### Why Migrate?
999
+
1000
+ - **Performance**: confluent-kafka-javascript is built on librdkafka (C library) - significantly better performance
1001
+ - **Commercial Support**: Official Confluent support
1002
+ - **Active Development**: More active development compared to kafkajs
1003
+
1004
+ ### Breaking Changes
1005
+
1006
+ #### 1. Install Dependencies
1007
+
1008
+ ```bash
1009
+ # Remove kafkajs, add confluent-kafka-javascript
1010
+ npm uninstall kafkajs
1011
+ npm install @confluentinc/kafka-javascript
1012
+ ```
1013
+
1014
+ #### 2. Update Imports
1015
+
1016
+ ```typescript
1017
+ // Before (v0.x)
1018
+ import { KafkaMessage } from 'kafkajs';
1019
+
1020
+ // After (v1.x)
1021
+ import { KafkaMessage } from '@confluentinc/kafka-javascript/kafkajs';
1022
+ ```
1023
+
1024
+ #### 3. Producer Options Moved to Module Level
1025
+
1026
+ `acks`, `timeout`, and `compression` are now configured at the producer level, not per-send call.
1027
+
1028
+ ```typescript
1029
+ // Before (v0.x) - per-send options
1030
+ await kafka.send('topic', message, {
1031
+ acks: -1,
1032
+ timeout: 30000,
1033
+ compression: 1,
1034
+ });
1035
+
1036
+ // After (v1.x) - producer-level options
1037
+ KafkaModule.forRoot({
1038
+ clientId: 'my-app',
1039
+ brokers: ['localhost:9092'],
1040
+ producer: {
1041
+ acks: -1,
1042
+ timeout: 30000,
1043
+ compression: 1,
1044
+ },
1045
+ });
1046
+
1047
+ await kafka.send('topic', message); // No acks/timeout/compression
1048
+ ```
1049
+
1050
+ #### 4. autoCommitThreshold Removed
1051
+
1052
+ `autoCommitThreshold` is not supported in confluent-kafka-javascript. Remove this option from your consumer configuration.
1053
+
1054
+ ```typescript
1055
+ // Before (v0.x)
1056
+ @Consumer('topic', {
1057
+ autoCommitThreshold: 100, // Not supported
1058
+ })
1059
+
1060
+ // After (v1.x)
1061
+ @Consumer('topic', {
1062
+ // autoCommitThreshold removed - use autoCommitInterval instead
1063
+ autoCommitInterval: 5000,
1064
+ })
1065
+ ```
1066
+
1067
+ #### 5. Retry Options Changes
1068
+
1069
+ The following retry options have been removed as they are not supported in confluent-kafka-javascript:
1070
+
1071
+ - `retry.restartOnFailure` - Consumer restart control is handled by the library internally
1072
+ - `retry.factor` - Use `retry.multiplier` for exponential backoff
1073
+
1074
+ ```typescript
1075
+ // Before (v0.x)
1076
+ @Consumer('topic', {
1077
+ retry: {
1078
+ restartOnFailure: false, // Removed
1079
+ factor: 0.2, // Removed
1080
+ },
1081
+ })
1082
+
1083
+ // After (v1.x)
1084
+ @Consumer('topic', {
1085
+ retry: {
1086
+ retries: 3,
1087
+ multiplier: 2, // Use multiplier for backoff
1088
+ },
1089
+ })
1090
+ ```
1091
+
1092
+ #### 6. Platform Requirements
1093
+
1094
+ confluent-kafka-javascript only supports:
1095
+ - **Linux**: x64, arm64
1096
+ - **macOS**: arm64 (Apple Silicon)
1097
+ - **Windows**: x64
1098
+ - **Node.js**: 18, 20, 21, 22
1099
+
1100
+ ### No Changes Required
1101
+
1102
+ The following features work the same way:
1103
+ - `@Consumer()` decorator syntax
1104
+ - `KafkaClient.send()`, `sendBatch()`, `sendMultiTopicBatch()`, `sendQueued()`
1105
+ - DLQ configuration and retry
1106
+ - OpenTelemetry tracing (new in v1.0.0 — opt-in, no migration needed)
1107
+ - Health checks
1108
+ - Multi-connection support
1109
+ - Batch processing and key grouping
1110
+ - Idempotency and back pressure
1111
+
716
1112
  ## License
717
1113
 
718
1114
  MIT
@@ -46,12 +46,11 @@ let KafkaHealthIndicator = class KafkaHealthIndicator {
46
46
  try {
47
47
  const admin = this.kafkaCore.getKafka().admin();
48
48
  await admin.connect();
49
- const clusterInfo = await admin.describeCluster();
49
+ const topics = await admin.listTopics();
50
50
  await admin.disconnect();
51
51
  const details = {
52
- brokers: clusterInfo.brokers.length,
53
- controller: clusterInfo.controller,
54
- clusterId: clusterInfo.clusterId,
52
+ connected: true,
53
+ topicCount: topics.length,
55
54
  };
56
55
  if (this.healthIndicatorService) {
57
56
  return this.healthIndicatorService.check(key).up(details);
@@ -1 +1 @@
1
- {"version":3,"file":"kafka-health-indicator.js","sourceRoot":"","sources":["../../lib/health/kafka-health-indicator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,2CAAsD;AACtD,2EAA+D;AAC/D,uEAAkE;AA2C3D,IAAM,oBAAoB,GAA1B,MAAM,oBAAoB;IAEZ;IACA;IACY;IAH/B,YACmB,WAAwB,EACxB,SAA2B,EACf,sBAA+C;QAF3D,gBAAW,GAAX,WAAW,CAAa;QACxB,cAAS,GAAT,SAAS,CAAkB;QACf,2BAAsB,GAAtB,sBAAsB,CAAyB;IAC1E,CAAC;IAKL,SAAS,CAAC,GAAW;QACnB,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,EAAE,CAAC;QAE/C,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;YAChC,MAAM,SAAS,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACzD,IAAI,SAAS,EAAE,CAAC;gBACd,OAAO,SAAS,CAAC,EAAE,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAC3C,CAAC;YACD,OAAO,SAAS,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,iCAAiC,EAAE,CAAC,CAAC;QAC1F,CAAC;QAGD,OAAO;YACL,CAAC,GAAG,CAAC,EAAE;gBACL,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM;gBACjC,SAAS,EAAE,SAAS;gBACpB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,iCAAiC,EAAE,CAAC;aACrE;SACF,CAAC;IACJ,CAAC;IAKD,KAAK,CAAC,YAAY,CAAC,GAAW;QAC5B,IAAI,CAAC;YACH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,CAAC;YAChD,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;YAEtB,MAAM,WAAW,GAAG,MAAM,KAAK,CAAC,eAAe,EAAE,CAAC;YAClD,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YAEzB,MAAM,OAAO,GAAG;gBACd,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,MAAM;gBACnC,UAAU,EAAE,WAAW,CAAC,UAAU;gBAClC,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;YAEF,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC;YAC5D,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC;QACjD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,YAAY,GAAG,EAAE,KAAK,EAAG,KAAe,CAAC,OAAO,EAAE,CAAC;YAEzD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACnE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,EAAE,CAAC;QACxD,CAAC;IACH,CAAC;IAKD,KAAK,CAAC,gBAAgB,CACpB,GAAW,EACX,OAAe,EACf,SAAiB,IAAI;QAErB,IA
AI,CAAC;YACH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,CAAC;YAChD,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;YAEtB,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,YAAY,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YACtD,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YAEzB,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,KAAK,MAAM,WAAW,IAAI,OAAO,EAAE,CAAC;gBAClC,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE,CAAC;oBAC/C,MAAM,MAAM,GAAG,QAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;oBAC9C,QAAQ,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;gBAClC,CAAC;YACH,CAAC;YAED,MAAM,SAAS,GAAG,QAAQ,GAAG,MAAM,CAAC;YACpC,MAAM,OAAO,GAAG,EAAE,OAAO,EAAE,GAAG,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;YAEnD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,MAAM,SAAS,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;gBACzD,OAAO,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YACrE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC;QACtE,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,YAAY,GAAG,EAAE,KAAK,EAAG,KAAe,CAAC,OAAO,EAAE,CAAC;YAEzD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACnE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,EAAE,CAAC;QACxD,CAAC;IACH,CAAC;CACF,CAAA;AA1GY,oDAAoB;+BAApB,oBAAoB;IADhC,IAAA,mBAAU,GAAE;IAKR,WAAA,IAAA,iBAAQ,GAAE,CAAA;qCAFmB,kCAAW;QACb,qCAAgB;GAHnC,oBAAoB,CA0GhC"}
1
+ {"version":3,"file":"kafka-health-indicator.js","sourceRoot":"","sources":["../../lib/health/kafka-health-indicator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,2CAAsD;AACtD,2EAA+D;AAC/D,uEAAkE;AA2C3D,IAAM,oBAAoB,GAA1B,MAAM,oBAAoB;IAEZ;IACA;IACY;IAH/B,YACmB,WAAwB,EACxB,SAA2B,EACf,sBAA+C;QAF3D,gBAAW,GAAX,WAAW,CAAa;QACxB,cAAS,GAAT,SAAS,CAAkB;QACf,2BAAsB,GAAtB,sBAAsB,CAAyB;IAC1E,CAAC;IAKL,SAAS,CAAC,GAAW;QACnB,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,SAAS,EAAE,CAAC;QAE/C,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;YAChC,MAAM,SAAS,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACzD,IAAI,SAAS,EAAE,CAAC;gBACd,OAAO,SAAS,CAAC,EAAE,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAC3C,CAAC;YACD,OAAO,SAAS,CAAC,IAAI,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,iCAAiC,EAAE,CAAC,CAAC;QAC1F,CAAC;QAGD,OAAO;YACL,CAAC,GAAG,CAAC,EAAE;gBACL,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM;gBACjC,SAAS,EAAE,SAAS;gBACpB,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,iCAAiC,EAAE,CAAC;aACrE;SACF,CAAC;IACJ,CAAC;IAOD,KAAK,CAAC,YAAY,CAAC,GAAW;QAC5B,IAAI,CAAC;YACH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,CAAC;YAChD,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;YAGtB,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YACxC,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YAEzB,MAAM,OAAO,GAAG;gBACd,SAAS,EAAE,IAAI;gBACf,UAAU,EAAE,MAAM,CAAC,MAAM;aAC1B,CAAC;YAEF,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC;YAC5D,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC;QACjD,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,YAAY,GAAG,EAAE,KAAK,EAAG,KAAe,CAAC,OAAO,EAAE,CAAC;YAEzD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACnE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,EAAE,CAAC;QACxD,CAAC;IACH,CAAC;IAKD,KAAK,CAAC,gBAAgB,CACpB,GAAW,EACX,OAAe,EACf,SAAiB,IAAI;QAErB,IAAI,CAAC;YACH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,
EAAE,CAAC,KAAK,EAAE,CAAC;YAChD,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;YAEtB,MAAM,OAAO,GAAG,MAAM,KAAK,CAAC,YAAY,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC;YACtD,MAAM,KAAK,CAAC,UAAU,EAAE,CAAC;YAEzB,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,KAAK,MAAM,WAAW,IAAI,OAAO,EAAE,CAAC;gBAClC,KAAK,MAAM,SAAS,IAAI,WAAW,CAAC,UAAU,EAAE,CAAC;oBAC/C,MAAM,MAAM,GAAG,QAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;oBAC9C,QAAQ,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;gBAClC,CAAC;YACH,CAAC;YAED,MAAM,SAAS,GAAG,QAAQ,GAAG,MAAM,CAAC;YACpC,MAAM,OAAO,GAAG,EAAE,OAAO,EAAE,GAAG,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;YAEnD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,MAAM,SAAS,GAAG,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;gBACzD,OAAO,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YACrE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE,GAAG,OAAO,EAAE,EAAE,CAAC;QACtE,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,YAAY,GAAG,EAAE,KAAK,EAAG,KAAe,CAAC,OAAO,EAAE,CAAC;YAEzD,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBAChC,OAAO,IAAI,CAAC,sBAAsB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACnE,CAAC;YAED,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,YAAY,EAAE,EAAE,CAAC;QACxD,CAAC;IACH,CAAC;CACF,CAAA;AA5GY,oDAAoB;+BAApB,oBAAoB;IADhC,IAAA,mBAAU,GAAE;IAKR,WAAA,IAAA,iBAAQ,GAAE,CAAA;qCAFmB,kCAAW;QACb,qCAAgB;GAHnC,oBAAoB,CA4GhC"}
package/dist/index.d.ts CHANGED
@@ -2,5 +2,6 @@ export { KafkaModule } from './kafka.module';
2
2
  export { ConsumerModule } from './consumer.module';
3
3
  export { Consumer, InjectKafkaClient } from './decorators';
4
4
  export { KafkaClient, ConnectionBoundClient, } from './services/kafka-client.service';
5
+ export { TracingService } from './services/tracing.service';
5
6
  export { KafkaHealthIndicator } from './health/kafka-health-indicator';
6
7
  export * from './interfaces';
package/dist/index.js CHANGED
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
14
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
15
  };
16
16
  Object.defineProperty(exports, "__esModule", { value: true });
17
- exports.KafkaHealthIndicator = exports.ConnectionBoundClient = exports.KafkaClient = exports.InjectKafkaClient = exports.Consumer = exports.ConsumerModule = exports.KafkaModule = void 0;
17
+ exports.KafkaHealthIndicator = exports.TracingService = exports.ConnectionBoundClient = exports.KafkaClient = exports.InjectKafkaClient = exports.Consumer = exports.ConsumerModule = exports.KafkaModule = void 0;
18
18
  var kafka_module_1 = require("./kafka.module");
19
19
  Object.defineProperty(exports, "KafkaModule", { enumerable: true, get: function () { return kafka_module_1.KafkaModule; } });
20
20
  var consumer_module_1 = require("./consumer.module");
@@ -25,6 +25,8 @@ Object.defineProperty(exports, "InjectKafkaClient", { enumerable: true, get: fun
25
25
  var kafka_client_service_1 = require("./services/kafka-client.service");
26
26
  Object.defineProperty(exports, "KafkaClient", { enumerable: true, get: function () { return kafka_client_service_1.KafkaClient; } });
27
27
  Object.defineProperty(exports, "ConnectionBoundClient", { enumerable: true, get: function () { return kafka_client_service_1.ConnectionBoundClient; } });
28
+ var tracing_service_1 = require("./services/tracing.service");
29
+ Object.defineProperty(exports, "TracingService", { enumerable: true, get: function () { return tracing_service_1.TracingService; } });
28
30
  var kafka_health_indicator_1 = require("./health/kafka-health-indicator");
29
31
  Object.defineProperty(exports, "KafkaHealthIndicator", { enumerable: true, get: function () { return kafka_health_indicator_1.KafkaHealthIndicator; } });
30
32
  __exportStar(require("./interfaces"), exports);
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../lib/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;AACA,+CAA6C;AAApC,2GAAA,WAAW,OAAA;AACpB,qDAAmD;AAA1C,iHAAA,cAAc,OAAA;AAGvB,2CAA2D;AAAlD,sGAAA,QAAQ,OAAA;AAAE,+GAAA,iBAAiB,OAAA;AAGpC,wEAGyC;AAFvC,mHAAA,WAAW,OAAA;AACX,6HAAA,qBAAqB,OAAA;AAIvB,0EAAuE;AAA9D,8HAAA,oBAAoB,OAAA;AAG7B,+CAA6B"}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../lib/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;AACA,+CAA6C;AAApC,2GAAA,WAAW,OAAA;AACpB,qDAAmD;AAA1C,iHAAA,cAAc,OAAA;AAGvB,2CAA2D;AAAlD,sGAAA,QAAQ,OAAA;AAAE,+GAAA,iBAAiB,OAAA;AAGpC,wEAGyC;AAFvC,mHAAA,WAAW,OAAA;AACX,6HAAA,qBAAqB,OAAA;AAEvB,8DAA4D;AAAnD,iHAAA,cAAc,OAAA;AAGvB,0EAAuE;AAA9D,8HAAA,oBAAoB,OAAA;AAG7B,+CAA6B"}
@@ -1,4 +1,6 @@
1
- import { KafkaMessage } from 'kafkajs';
1
+ import { KafkaJS } from '@confluentinc/kafka-javascript';
2
+ type KafkaMessage = KafkaJS.KafkaMessage;
3
+ export type PartitionAssigner = 'roundrobin' | 'range' | 'cooperative-sticky';
2
4
  export interface DlqRetryOptions {
3
5
  enabled?: boolean;
4
6
  maxRetries?: number;
@@ -21,9 +23,8 @@ export interface ConsumerRetryOptions {
21
23
  retries?: number;
22
24
  maxRetryTime?: number;
23
25
  initialRetryTime?: number;
24
- factor?: number;
25
26
  multiplier?: number;
26
- restartOnFailure?: boolean | ((error: Error) => Promise<boolean>);
27
+ skipMessageOnMaxRetries?: boolean;
27
28
  }
28
29
  export interface ConsumerOptions {
29
30
  topic?: string;
@@ -47,9 +48,9 @@ export interface ConsumerOptions {
47
48
  dlq?: DlqOptions;
48
49
  autoCommit?: boolean;
49
50
  autoCommitInterval?: number;
50
- autoCommitThreshold?: number;
51
51
  fromBeginning?: boolean;
52
52
  allowAutoTopicCreation?: boolean;
53
+ partitionAssigners?: PartitionAssigner[];
53
54
  retry?: ConsumerRetryOptions;
54
55
  }
55
56
  export interface ConsumerMetadata {
@@ -73,3 +74,4 @@ export interface PressureManagerOptions {
73
74
  maxQueueSize: number;
74
75
  checkIntervalMs?: number;
75
76
  }
77
+ export {};