kafka-ts 0.0.1-beta → 0.0.3-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188)
  1. package/.github/workflows/release.yml +17 -0
  2. package/.prettierrc +3 -2
  3. package/LICENSE +1 -1
  4. package/README.md +48 -35
  5. package/docker-compose.yml +102 -102
  6. package/examples/package-lock.json +28 -27
  7. package/examples/package.json +12 -12
  8. package/examples/src/client.ts +6 -6
  9. package/examples/src/consumer.ts +9 -8
  10. package/examples/src/create-topic.ts +23 -16
  11. package/examples/src/producer.ts +7 -7
  12. package/examples/src/replicator.ts +4 -4
  13. package/examples/src/utils/delay.ts +1 -0
  14. package/examples/src/utils/json.ts +1 -1
  15. package/examples/tsconfig.json +2 -2
  16. package/package.json +21 -15
  17. package/src/__snapshots__/request-handler.test.ts.snap +9 -718
  18. package/src/api/api-versions.ts +2 -2
  19. package/src/api/create-topics.ts +2 -2
  20. package/src/api/delete-topics.ts +2 -2
  21. package/src/api/fetch.ts +3 -3
  22. package/src/api/find-coordinator.ts +2 -2
  23. package/src/api/heartbeat.ts +2 -2
  24. package/src/api/index.ts +18 -18
  25. package/src/api/init-producer-id.ts +2 -2
  26. package/src/api/join-group.ts +3 -3
  27. package/src/api/leave-group.ts +2 -2
  28. package/src/api/list-offsets.ts +3 -3
  29. package/src/api/metadata.ts +3 -3
  30. package/src/api/offset-commit.ts +2 -2
  31. package/src/api/offset-fetch.ts +2 -2
  32. package/src/api/produce.ts +3 -3
  33. package/src/api/sasl-authenticate.ts +2 -2
  34. package/src/api/sasl-handshake.ts +2 -2
  35. package/src/api/sync-group.ts +2 -2
  36. package/src/broker.ts +9 -9
  37. package/src/client.ts +6 -6
  38. package/src/{request-handler.test.ts → cluster.test.ts} +72 -69
  39. package/src/cluster.ts +7 -7
  40. package/src/connection.ts +17 -15
  41. package/src/consumer/consumer-group.ts +14 -14
  42. package/src/consumer/consumer-metadata.ts +2 -2
  43. package/src/consumer/consumer.ts +84 -82
  44. package/src/consumer/fetch-manager.ts +179 -0
  45. package/src/consumer/fetcher.ts +57 -0
  46. package/src/consumer/offset-manager.ts +6 -6
  47. package/src/consumer/processor.ts +47 -0
  48. package/src/distributors/assignments-to-replicas.test.ts +7 -7
  49. package/src/distributors/assignments-to-replicas.ts +1 -1
  50. package/src/distributors/messages-to-topic-partition-leaders.test.ts +6 -6
  51. package/src/index.ts +4 -3
  52. package/src/metadata.ts +4 -4
  53. package/src/producer/producer.ts +8 -8
  54. package/src/types.ts +2 -0
  55. package/src/utils/api.ts +4 -4
  56. package/src/utils/debug.ts +2 -2
  57. package/src/utils/decoder.ts +4 -4
  58. package/src/utils/encoder.ts +6 -6
  59. package/src/utils/error.ts +3 -3
  60. package/src/utils/retrier.ts +1 -1
  61. package/src/utils/tracer.ts +7 -4
  62. package/tsconfig.json +16 -16
  63. package/dist/api/api-versions.d.ts +0 -9
  64. package/dist/api/api-versions.js +0 -24
  65. package/dist/api/create-topics.d.ts +0 -38
  66. package/dist/api/create-topics.js +0 -53
  67. package/dist/api/delete-topics.d.ts +0 -18
  68. package/dist/api/delete-topics.js +0 -33
  69. package/dist/api/fetch.d.ts +0 -77
  70. package/dist/api/fetch.js +0 -106
  71. package/dist/api/find-coordinator.d.ts +0 -21
  72. package/dist/api/find-coordinator.js +0 -39
  73. package/dist/api/heartbeat.d.ts +0 -11
  74. package/dist/api/heartbeat.js +0 -27
  75. package/dist/api/index.d.ts +0 -573
  76. package/dist/api/index.js +0 -164
  77. package/dist/api/init-producer-id.d.ts +0 -13
  78. package/dist/api/init-producer-id.js +0 -29
  79. package/dist/api/join-group.d.ts +0 -34
  80. package/dist/api/join-group.js +0 -51
  81. package/dist/api/leave-group.d.ts +0 -19
  82. package/dist/api/leave-group.js +0 -39
  83. package/dist/api/list-offsets.d.ts +0 -29
  84. package/dist/api/list-offsets.js +0 -48
  85. package/dist/api/metadata.d.ts +0 -40
  86. package/dist/api/metadata.js +0 -58
  87. package/dist/api/offset-commit.d.ts +0 -28
  88. package/dist/api/offset-commit.js +0 -48
  89. package/dist/api/offset-fetch.d.ts +0 -33
  90. package/dist/api/offset-fetch.js +0 -57
  91. package/dist/api/produce.d.ts +0 -53
  92. package/dist/api/produce.js +0 -129
  93. package/dist/api/sasl-authenticate.d.ts +0 -11
  94. package/dist/api/sasl-authenticate.js +0 -23
  95. package/dist/api/sasl-handshake.d.ts +0 -6
  96. package/dist/api/sasl-handshake.js +0 -19
  97. package/dist/api/sync-group.d.ts +0 -24
  98. package/dist/api/sync-group.js +0 -36
  99. package/dist/broker.d.ts +0 -29
  100. package/dist/broker.js +0 -60
  101. package/dist/client.d.ts +0 -23
  102. package/dist/client.js +0 -36
  103. package/dist/cluster.d.ts +0 -24
  104. package/dist/cluster.js +0 -72
  105. package/dist/connection.d.ts +0 -25
  106. package/dist/connection.js +0 -155
  107. package/dist/consumer/consumer-group.d.ts +0 -36
  108. package/dist/consumer/consumer-group.js +0 -182
  109. package/dist/consumer/consumer-metadata.d.ts +0 -7
  110. package/dist/consumer/consumer-metadata.js +0 -14
  111. package/dist/consumer/consumer.d.ts +0 -37
  112. package/dist/consumer/consumer.js +0 -178
  113. package/dist/consumer/metadata.d.ts +0 -24
  114. package/dist/consumer/metadata.js +0 -64
  115. package/dist/consumer/offset-manager.d.ts +0 -22
  116. package/dist/consumer/offset-manager.js +0 -56
  117. package/dist/distributors/assignments-to-replicas.d.ts +0 -17
  118. package/dist/distributors/assignments-to-replicas.js +0 -60
  119. package/dist/distributors/assignments-to-replicas.test.d.ts +0 -1
  120. package/dist/distributors/assignments-to-replicas.test.js +0 -40
  121. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +0 -17
  122. package/dist/distributors/messages-to-topic-partition-leaders.js +0 -15
  123. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +0 -1
  124. package/dist/distributors/messages-to-topic-partition-leaders.test.js +0 -30
  125. package/dist/examples/src/replicator.js +0 -34
  126. package/dist/examples/src/utils/json.js +0 -5
  127. package/dist/index.d.ts +0 -3
  128. package/dist/index.js +0 -19
  129. package/dist/metadata.d.ts +0 -24
  130. package/dist/metadata.js +0 -89
  131. package/dist/producer/producer.d.ts +0 -19
  132. package/dist/producer/producer.js +0 -111
  133. package/dist/request-handler.d.ts +0 -16
  134. package/dist/request-handler.js +0 -67
  135. package/dist/request-handler.test.d.ts +0 -1
  136. package/dist/request-handler.test.js +0 -340
  137. package/dist/src/api/api-versions.js +0 -18
  138. package/dist/src/api/create-topics.js +0 -46
  139. package/dist/src/api/delete-topics.js +0 -26
  140. package/dist/src/api/fetch.js +0 -95
  141. package/dist/src/api/find-coordinator.js +0 -34
  142. package/dist/src/api/heartbeat.js +0 -22
  143. package/dist/src/api/index.js +0 -38
  144. package/dist/src/api/init-producer-id.js +0 -24
  145. package/dist/src/api/join-group.js +0 -48
  146. package/dist/src/api/leave-group.js +0 -30
  147. package/dist/src/api/list-offsets.js +0 -39
  148. package/dist/src/api/metadata.js +0 -47
  149. package/dist/src/api/offset-commit.js +0 -39
  150. package/dist/src/api/offset-fetch.js +0 -44
  151. package/dist/src/api/produce.js +0 -119
  152. package/dist/src/api/sync-group.js +0 -31
  153. package/dist/src/broker.js +0 -35
  154. package/dist/src/connection.js +0 -21
  155. package/dist/src/consumer/consumer-group.js +0 -131
  156. package/dist/src/consumer/consumer.js +0 -103
  157. package/dist/src/consumer/metadata.js +0 -52
  158. package/dist/src/consumer/offset-manager.js +0 -23
  159. package/dist/src/index.js +0 -19
  160. package/dist/src/producer/producer.js +0 -84
  161. package/dist/src/request-handler.js +0 -57
  162. package/dist/src/request-handler.test.js +0 -321
  163. package/dist/src/types.js +0 -2
  164. package/dist/src/utils/api.js +0 -5
  165. package/dist/src/utils/decoder.js +0 -161
  166. package/dist/src/utils/encoder.js +0 -137
  167. package/dist/src/utils/error.js +0 -10
  168. package/dist/types.d.ts +0 -9
  169. package/dist/types.js +0 -2
  170. package/dist/utils/api.d.ts +0 -9
  171. package/dist/utils/api.js +0 -5
  172. package/dist/utils/debug.d.ts +0 -2
  173. package/dist/utils/debug.js +0 -11
  174. package/dist/utils/decoder.d.ts +0 -29
  175. package/dist/utils/decoder.js +0 -147
  176. package/dist/utils/delay.d.ts +0 -1
  177. package/dist/utils/delay.js +0 -5
  178. package/dist/utils/encoder.d.ts +0 -28
  179. package/dist/utils/encoder.js +0 -122
  180. package/dist/utils/error.d.ts +0 -11
  181. package/dist/utils/error.js +0 -27
  182. package/dist/utils/memo.d.ts +0 -1
  183. package/dist/utils/memo.js +0 -16
  184. package/dist/utils/retrier.d.ts +0 -10
  185. package/dist/utils/retrier.js +0 -22
  186. package/dist/utils/tracer.d.ts +0 -1
  187. package/dist/utils/tracer.js +0 -26
  188. package/examples/node_modules/.package-lock.json +0 -22
package/.github/workflows/release.yml ADDED
@@ -0,0 +1,17 @@
+ name: Publish package
+ on:
+ release:
+ types: [published]
+ jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: '20.x'
+ registry-url: 'https://registry.npmjs.org'
+ - run: npm ci
+ - run: npm publish
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
package/.prettierrc CHANGED
@@ -3,5 +3,6 @@
  "tabWidth": 4,
  "endOfLine": "lf",
  "semi": true,
- "trailingComma": "all"
- }
+ "trailingComma": "all",
+ "singleQuote": true
+ }
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
  The MIT License

- Copyright (c) 2018 Túlio Ornelas (ornelas.tulio@gmail.com)
+ Copyright (c) 2024 Priit Käärd (priit@hashtatic.com)

  Permission is hereby granted, free of charge,
  to any person obtaining a copy of this software and
package/README.md CHANGED
@@ -1,13 +1,13 @@
  # KafkaTS

- **Please note that this project is still in early development and is not yet ready for production use. The interface before stable release is subject to change.**
-
  **KafkaTS** is a Apache Kafka client library for Node.js. It provides both a low-level API for communicating directly with the Apache Kafka cluster and high-level APIs for publishing and subscribing to Kafka topics.

+ **Please note that this project is still in early development and is not yet ready for production use. The interface before stable release is subject to change.**
+
  ## Installation

  ```bash
- npm install kafkats
+ npm install kafka-ts
  ```

  ## Quick start
@@ -16,8 +16,8 @@ npm install kafkats

  ```typescript
  export const kafka = createKafkaClient({
- clientId: "my-app",
- bootstrapServers: [{ host: "localhost", port: 9092 }],
+ clientId: 'my-app',
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
  });
  ```

@@ -26,7 +26,7 @@ export const kafka = createKafkaClient({
  ```typescript
  const consumer = await kafka.startConsumer({
  groupId: 'my-consumer-group'.
- topics: ["my-topic"],
+ topics: ['my-topic'],
  onMessage: (message) => {
  console.log(message);
  },
@@ -36,36 +36,47 @@ const consumer = await kafka.startConsumer({
  #### Producing messages

  ```typescript
- export const producer = kafka.createProcucer();
+ export const producer = kafka.createProducer();

- await producer.send([{ topic: "example-topic-f", partition: 0, key: null, value: line }]);
+ await producer.send([{ topic: 'my-topic', partition: 0, key: 'key', value: 'value' }]);
  ```

  #### Low-level API

  ```typescript
  const cluster = kafka.createCluster();
- await cluster.connect();
-
- const { controllerId } = await cluster.sendRequest(API.METADATA, {
- allowTopicAutoCreation: false,
- includeTopicAuthorizedOperations: false,
- topics: [],
- });
-
- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
- validateOnly: false,
- timeoutMs: 10_000,
- topics: [
- {
- name: "my-topic",
- numPartitions: 10,
- replicationFactor: 3,
- assignments: [],
- configs: [],
- },
- ],
- });
+ await cluster.connect();
+
+ const { controllerId } = await cluster.sendRequest(API.METADATA, {
+ allowTopicAutoCreation: false,
+ includeTopicAuthorizedOperations: false,
+ topics: [],
+ });
+
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
+ validateOnly: false,
+ timeoutMs: 10_000,
+ topics: [
+ {
+ name: 'my-topic',
+ numPartitions: 10,
+ replicationFactor: 3,
+ assignments: [],
+ configs: [],
+ },
+ ],
+ });
+
+ await cluster.disconnect();
+ ```
+
+ #### Graceful shutdown
+
+ ```typescript
+ process.once('SIGTERM', async () => {
+ await consumer.close(); // waits for the consumer to finish processing the last batch and disconnects
+ await producer.close();
+ });
  ```

  See the [examples](./examples) for more detailed examples.
@@ -77,12 +88,14 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu

  ### New features compared to kafkajs

- * **Static consumer membership** - Rebalancing during rolling deployments cause delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue processing based on the existing assignment.
- * **Consuming messages without consumer groups** - When you don't need for the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
+ - **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
+ - **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
+ - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.

  ## Backlog

- - [ ] Consumer concurrency control
- - [ ] Partitioner
- - [ ] API versioning
- - [ ] SCRAM support
+ Minimal set of features required before a stable release:
+
+ - Partitioner (Currently have to specify the partition on producer.send())
+ - API versioning (Currently only tested against Kafka 3.7+)
+ - SASL SCRAM support (+ pluggable authentication providers)
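Editor's note: as a rough sketch of the two consumption modes the updated README describes, the snippet below combines only the options visible in this diff (`groupId`, `groupInstanceId`, `topics`, `onMessage`). The `fromBeginning` flag on the group-less consumer is a hypothetical option name used purely for illustration; the exact option is not shown in this diff.

```typescript
import { kafka } from './client';

(async () => {
    // Static membership: a fixed groupInstanceId lets a restarted instance rejoin
    // with its previous partition assignment instead of triggering a rebalance.
    const groupConsumer = await kafka.startConsumer({
        groupId: 'my-consumer-group',
        groupInstanceId: 'my-consumer-group-instance-0',
        topics: ['my-topic'],
        onMessage: (message) => {
            console.log(message);
        },
    });

    // Group-less consumption: without a groupId the client does not track offsets,
    // so each run starts from the beginning or from the latest partition offset.
    // NOTE: `fromBeginning` is an assumed option name, not confirmed by this diff.
    const standaloneConsumer = await kafka.startConsumer({
        topics: ['my-topic'],
        fromBeginning: true,
        onMessage: (message) => {
            console.log(message);
        },
    });
})();
```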
package/docker-compose.yml CHANGED
@@ -1,104 +1,104 @@
  # kafka with raft:
  services:
- kafka-0:
- container_name: kafka-0
- image: apache/kafka:3.7.1
- ports:
- - "9092:9092"
- - "29092:29092"
- environment:
- KAFKA_NODE_ID: 0
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
- kafka-1:
- container_name: kafka-1
- image: apache/kafka:3.7.1
- ports:
- - "9093:9093"
- - "29093:29093"
- environment:
- KAFKA_NODE_ID: 1
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
- kafka-2:
- container_name: kafka-2
- image: apache/kafka:3.7.1
- ports:
- - "9094:9094"
- - "29094:29094"
- environment:
- KAFKA_NODE_ID: 2
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-0:
+ container_name: kafka-0
+ image: apache/kafka:3.7.1
+ ports:
+ - '9092:9092'
+ - '29092:29092'
+ environment:
+ KAFKA_NODE_ID: 0
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-1:
+ container_name: kafka-1
+ image: apache/kafka:3.7.1
+ ports:
+ - '9093:9093'
+ - '29093:29093'
+ environment:
+ KAFKA_NODE_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-2:
+ container_name: kafka-2
+ image: apache/kafka:3.7.1
+ ports:
+ - '9094:9094'
+ - '29094:29094'
+ environment:
+ KAFKA_NODE_ID: 2
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
package/examples/package-lock.json CHANGED
@@ -1,30 +1,31 @@
  {
- "name": "examples",
- "version": "1.0.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "examples",
- "version": "1.0.0",
- "license": "ISC",
- "dependencies": {
- "kafkats": "file:../"
- }
- },
- "..": {
- "version": "1.0.0",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.12.12",
- "prettier": "^3.2.5",
- "typescript": "^5.4.5",
- "vitest": "^1.6.0"
- }
- },
- "node_modules/kafkats": {
- "resolved": "..",
- "link": true
+ "name": "examples",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "examples",
+ "version": "1.0.0",
+ "license": "ISC",
+ "dependencies": {
+ "kafka-ts": "file:../"
+ }
+ },
+ "..": {
+ "name": "kafka-ts",
+ "version": "0.0.1-beta",
+ "license": "MIT",
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "prettier": "^3.2.5",
+ "typescript": "^5.4.5",
+ "vitest": "^1.6.0"
+ }
+ },
+ "node_modules/kafka-ts": {
+ "resolved": "..",
+ "link": true
+ }
  }
- }
  }
package/examples/package.json CHANGED
@@ -1,14 +1,14 @@
  {
- "name": "examples",
- "version": "1.0.0",
- "description": "",
- "main": "dist/replicator.js",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "dependencies": {
- "kafkats": "file:../"
- },
- "author": "",
- "license": "ISC"
+ "name": "examples",
+ "version": "1.0.0",
+ "description": "",
+ "main": "dist/replicator.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "dependencies": {
+ "kafka-ts": "file:../"
+ },
+ "author": "",
+ "license": "ISC"
  }
package/examples/src/client.ts CHANGED
@@ -1,9 +1,9 @@
- import { readFileSync } from "fs";
- import { createKafkaClient } from "kafkats";
+ import { readFileSync } from 'fs';
+ import { createKafkaClient } from 'kafka-ts';

  export const kafka = createKafkaClient({
- clientId: "examples",
- bootstrapServers: [{ host: "localhost", port: 9092 }],
- sasl: { mechanism: "PLAIN", username: "admin", password: "admin" },
- ssl: { ca: readFileSync("../certs/ca.crt").toString() },
+ clientId: 'examples',
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
+ sasl: { mechanism: 'PLAIN', username: 'admin', password: 'admin' },
+ ssl: { ca: readFileSync('../certs/ca.crt').toString() },
  });
package/examples/src/consumer.ts CHANGED
@@ -1,17 +1,18 @@
- import { kafka } from "./client";
+ import { kafka } from './client';

  (async () => {
  const consumer = await kafka.startConsumer({
- groupId: "example-group",
- groupInstanceId: "example-group-instance",
- topics: ["example-topic-f"],
- allowTopicAutoCreation: true,
- onMessage: (message) => {
- console.log(message);
+ groupId: 'example-group',
+ groupInstanceId: 'example-group-instance',
+ topics: ['my-topic'],
+ onBatch: (batch) => {
+ console.log(batch);
  },
+ granularity: 'broker',
+ concurrency: 10,
  });

- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  await consumer.close();
  });
  })();
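Editor's note: the example above moves from a per-message `onMessage` handler to `onBatch` with `granularity` and `concurrency` options (backed by the new fetch-manager.ts, fetcher.ts and processor.ts modules in this release). As a hedged sketch of how such a batch callback might be consumed, assuming `batch` is an array of messages with the fields used elsewhere in these examples (topic, partition, value); the exact shape and commit semantics are not spelled out in this diff:

```typescript
import { kafka } from './client';

(async () => {
    const consumer = await kafka.startConsumer({
        groupId: 'example-group',
        topics: ['my-topic'],
        // Assumption: the batch argument is an array of consumed messages.
        onBatch: async (batch) => {
            for (const message of batch) {
                // Process each message; the batch presumably counts as handled
                // once this callback resolves.
                console.log(message.topic, message.partition, message.value);
            }
        },
        granularity: 'broker', // as in the example above; semantics not documented in this diff
        concurrency: 10,
    });

    process.on('SIGINT', async () => {
        await consumer.close();
    });
})();
```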
package/examples/src/create-topic.ts CHANGED
@@ -1,5 +1,5 @@
- import { kafka } from "./client";
- import { API } from "kafkats";
+ import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
+ import { kafka } from './client';

  (async () => {
  const cluster = kafka.createCluster();
@@ -11,24 +11,31 @@ import { API } from "kafkats";
  topics: [],
  });

- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
- validateOnly: false,
- timeoutMs: 10_000,
- topics: [
- {
- name: "my-topic",
- numPartitions: 10,
- replicationFactor: 3,
- assignments: [],
- configs: [],
- },
- ],
- });
+ try {
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
+ validateOnly: false,
+ timeoutMs: 10_000,
+ topics: [
+ {
+ name: 'my-topic',
+ numPartitions: 10,
+ replicationFactor: 3,
+ assignments: [],
+ configs: [],
+ },
+ ],
+ });
+ } catch (error) {
+ if ((error as KafkaTSApiError).errorCode === API_ERROR.TOPIC_ALREADY_EXISTS) {
+ return;
+ }
+ throw error;
+ }

  const metadata = await cluster.sendRequestToNode(controllerId)(API.METADATA, {
  allowTopicAutoCreation: false,
  includeTopicAuthorizedOperations: false,
- topics: [{ id: null, name: "my-topic" }],
+ topics: [{ id: null, name: 'my-topic' }],
  });

  console.log(metadata);
package/examples/src/producer.ts CHANGED
@@ -1,24 +1,24 @@
- import { createInterface } from "readline";
- import { kafka } from "./client";
+ import { createInterface } from 'readline';
+ import { kafka } from './client';

  const producer = kafka.createProducer({ allowTopicAutoCreation: true });

  const rl = createInterface({ input: process.stdin, output: process.stdout });

- process.stdout.write("> ");
- rl.on("line", async (line) => {
+ process.stdout.write('> ');
+ rl.on('line', async (line) => {
  await producer.send([
  {
- topic: "example-topic-f",
+ topic: 'example-topic-f',
  key: null,
  value: line,
  partition: 0,
  },
  ]);
- process.stdout.write("> ");
+ process.stdout.write('> ');
  });

- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  rl.close();
  await producer.close();
  });
package/examples/src/replicator.ts CHANGED
@@ -1,7 +1,7 @@
- import { kafka } from "./client";
+ import { kafka } from './client';

  (async () => {
- const topic = "example-topic";
+ const topic = 'example-topic';

  const producer = kafka.createProducer({ allowTopicAutoCreation: true });
  const consumer = await kafka.startConsumer({
@@ -10,7 +10,7 @@ import { kafka } from "./client";
  await producer.send(
  messages.map((message) => ({
  ...message,
- headers: { "X-Replicated": "true" },
+ headers: { 'X-Replicated': 'true' },
  topic: `${message.topic}-replicated`,
  offset: 0n,
  })),
@@ -18,7 +18,7 @@ import { kafka } from "./client";
  console.log(`Replicated ${messages.length} messages`);
  },
  });
- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  await consumer.close();
  await producer.close();
  });
package/examples/src/utils/delay.ts ADDED
@@ -0,0 +1 @@
+ export const delay = (delayMs: number) => new Promise<void>((resolve) => setTimeout(resolve, delayMs));
package/examples/src/utils/json.ts CHANGED
@@ -1 +1 @@
- export const serializer = (_: string, value: unknown) => (typeof value === "bigint" ? value.toString() : value);
+ export const serializer = (_: string, value: unknown) => (typeof value === 'bigint' ? value.toString() : value);
package/examples/tsconfig.json CHANGED
@@ -3,5 +3,5 @@
  "compilerOptions": {
  "outDir": "dist",
  "inlineSourceMap": true
- },
- }
+ }
+ }