kafka-ts 0.0.1-beta → 0.0.1-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195)
  1. package/.github/workflows/release.yml +17 -0
  2. package/.prettierrc +3 -2
  3. package/LICENSE +1 -1
  4. package/README.md +56 -35
  5. package/docker-compose.yml +108 -102
  6. package/examples/package-lock.json +28 -27
  7. package/examples/package.json +12 -12
  8. package/examples/src/client.ts +6 -6
  9. package/examples/src/consumer.ts +9 -8
  10. package/examples/src/create-topic.ts +22 -16
  11. package/examples/src/producer.ts +7 -7
  12. package/examples/src/replicator.ts +4 -4
  13. package/examples/src/utils/delay.ts +1 -0
  14. package/examples/src/utils/json.ts +1 -1
  15. package/examples/tsconfig.json +2 -2
  16. package/package.json +22 -15
  17. package/scripts/create-scram-user.sh +5 -0
  18. package/scripts/generate-certs.sh +1 -0
  19. package/scripts/kafka-local.properties +33 -0
  20. package/src/__snapshots__/request-handler.test.ts.snap +9 -718
  21. package/src/api/api-versions.ts +2 -2
  22. package/src/api/create-topics.ts +2 -2
  23. package/src/api/delete-topics.ts +2 -2
  24. package/src/api/fetch.ts +3 -3
  25. package/src/api/find-coordinator.ts +2 -2
  26. package/src/api/heartbeat.ts +2 -2
  27. package/src/api/index.ts +18 -18
  28. package/src/api/init-producer-id.ts +2 -2
  29. package/src/api/join-group.ts +3 -3
  30. package/src/api/leave-group.ts +2 -2
  31. package/src/api/list-offsets.ts +3 -3
  32. package/src/api/metadata.ts +3 -3
  33. package/src/api/offset-commit.ts +2 -2
  34. package/src/api/offset-fetch.ts +2 -2
  35. package/src/api/produce.ts +3 -3
  36. package/src/api/sasl-authenticate.ts +2 -2
  37. package/src/api/sasl-handshake.ts +2 -2
  38. package/src/api/sync-group.ts +2 -2
  39. package/src/auth/index.ts +2 -0
  40. package/src/auth/plain.ts +10 -0
  41. package/src/auth/scram.ts +52 -0
  42. package/src/broker.ts +12 -14
  43. package/src/client.ts +7 -7
  44. package/src/{request-handler.test.ts → cluster.test.ts} +73 -69
  45. package/src/cluster.ts +8 -8
  46. package/src/connection.ts +17 -15
  47. package/src/consumer/consumer-group.ts +14 -14
  48. package/src/consumer/consumer-metadata.ts +2 -2
  49. package/src/consumer/consumer.ts +84 -82
  50. package/src/consumer/fetch-manager.ts +179 -0
  51. package/src/consumer/fetcher.ts +57 -0
  52. package/src/consumer/offset-manager.ts +6 -6
  53. package/src/consumer/processor.ts +47 -0
  54. package/src/distributors/assignments-to-replicas.test.ts +7 -7
  55. package/src/distributors/assignments-to-replicas.ts +1 -1
  56. package/src/distributors/messages-to-topic-partition-leaders.test.ts +6 -6
  57. package/src/index.ts +6 -3
  58. package/src/metadata.ts +4 -4
  59. package/src/producer/producer.ts +8 -8
  60. package/src/types.ts +2 -0
  61. package/src/utils/api.ts +4 -4
  62. package/src/utils/crypto.ts +15 -0
  63. package/src/utils/debug.ts +2 -2
  64. package/src/utils/decoder.ts +4 -4
  65. package/src/utils/encoder.ts +6 -6
  66. package/src/utils/error.ts +3 -3
  67. package/src/utils/retrier.ts +1 -1
  68. package/src/utils/tracer.ts +7 -4
  69. package/tsconfig.json +16 -16
  70. package/dist/api/api-versions.d.ts +0 -9
  71. package/dist/api/api-versions.js +0 -24
  72. package/dist/api/create-topics.d.ts +0 -38
  73. package/dist/api/create-topics.js +0 -53
  74. package/dist/api/delete-topics.d.ts +0 -18
  75. package/dist/api/delete-topics.js +0 -33
  76. package/dist/api/fetch.d.ts +0 -77
  77. package/dist/api/fetch.js +0 -106
  78. package/dist/api/find-coordinator.d.ts +0 -21
  79. package/dist/api/find-coordinator.js +0 -39
  80. package/dist/api/heartbeat.d.ts +0 -11
  81. package/dist/api/heartbeat.js +0 -27
  82. package/dist/api/index.d.ts +0 -573
  83. package/dist/api/index.js +0 -164
  84. package/dist/api/init-producer-id.d.ts +0 -13
  85. package/dist/api/init-producer-id.js +0 -29
  86. package/dist/api/join-group.d.ts +0 -34
  87. package/dist/api/join-group.js +0 -51
  88. package/dist/api/leave-group.d.ts +0 -19
  89. package/dist/api/leave-group.js +0 -39
  90. package/dist/api/list-offsets.d.ts +0 -29
  91. package/dist/api/list-offsets.js +0 -48
  92. package/dist/api/metadata.d.ts +0 -40
  93. package/dist/api/metadata.js +0 -58
  94. package/dist/api/offset-commit.d.ts +0 -28
  95. package/dist/api/offset-commit.js +0 -48
  96. package/dist/api/offset-fetch.d.ts +0 -33
  97. package/dist/api/offset-fetch.js +0 -57
  98. package/dist/api/produce.d.ts +0 -53
  99. package/dist/api/produce.js +0 -129
  100. package/dist/api/sasl-authenticate.d.ts +0 -11
  101. package/dist/api/sasl-authenticate.js +0 -23
  102. package/dist/api/sasl-handshake.d.ts +0 -6
  103. package/dist/api/sasl-handshake.js +0 -19
  104. package/dist/api/sync-group.d.ts +0 -24
  105. package/dist/api/sync-group.js +0 -36
  106. package/dist/broker.d.ts +0 -29
  107. package/dist/broker.js +0 -60
  108. package/dist/client.d.ts +0 -23
  109. package/dist/client.js +0 -36
  110. package/dist/cluster.d.ts +0 -24
  111. package/dist/cluster.js +0 -72
  112. package/dist/connection.d.ts +0 -25
  113. package/dist/connection.js +0 -155
  114. package/dist/consumer/consumer-group.d.ts +0 -36
  115. package/dist/consumer/consumer-group.js +0 -182
  116. package/dist/consumer/consumer-metadata.d.ts +0 -7
  117. package/dist/consumer/consumer-metadata.js +0 -14
  118. package/dist/consumer/consumer.d.ts +0 -37
  119. package/dist/consumer/consumer.js +0 -178
  120. package/dist/consumer/metadata.d.ts +0 -24
  121. package/dist/consumer/metadata.js +0 -64
  122. package/dist/consumer/offset-manager.d.ts +0 -22
  123. package/dist/consumer/offset-manager.js +0 -56
  124. package/dist/distributors/assignments-to-replicas.d.ts +0 -17
  125. package/dist/distributors/assignments-to-replicas.js +0 -60
  126. package/dist/distributors/assignments-to-replicas.test.d.ts +0 -1
  127. package/dist/distributors/assignments-to-replicas.test.js +0 -40
  128. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +0 -17
  129. package/dist/distributors/messages-to-topic-partition-leaders.js +0 -15
  130. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +0 -1
  131. package/dist/distributors/messages-to-topic-partition-leaders.test.js +0 -30
  132. package/dist/examples/src/replicator.js +0 -34
  133. package/dist/examples/src/utils/json.js +0 -5
  134. package/dist/index.d.ts +0 -3
  135. package/dist/index.js +0 -19
  136. package/dist/metadata.d.ts +0 -24
  137. package/dist/metadata.js +0 -89
  138. package/dist/producer/producer.d.ts +0 -19
  139. package/dist/producer/producer.js +0 -111
  140. package/dist/request-handler.d.ts +0 -16
  141. package/dist/request-handler.js +0 -67
  142. package/dist/request-handler.test.d.ts +0 -1
  143. package/dist/request-handler.test.js +0 -340
  144. package/dist/src/api/api-versions.js +0 -18
  145. package/dist/src/api/create-topics.js +0 -46
  146. package/dist/src/api/delete-topics.js +0 -26
  147. package/dist/src/api/fetch.js +0 -95
  148. package/dist/src/api/find-coordinator.js +0 -34
  149. package/dist/src/api/heartbeat.js +0 -22
  150. package/dist/src/api/index.js +0 -38
  151. package/dist/src/api/init-producer-id.js +0 -24
  152. package/dist/src/api/join-group.js +0 -48
  153. package/dist/src/api/leave-group.js +0 -30
  154. package/dist/src/api/list-offsets.js +0 -39
  155. package/dist/src/api/metadata.js +0 -47
  156. package/dist/src/api/offset-commit.js +0 -39
  157. package/dist/src/api/offset-fetch.js +0 -44
  158. package/dist/src/api/produce.js +0 -119
  159. package/dist/src/api/sync-group.js +0 -31
  160. package/dist/src/broker.js +0 -35
  161. package/dist/src/connection.js +0 -21
  162. package/dist/src/consumer/consumer-group.js +0 -131
  163. package/dist/src/consumer/consumer.js +0 -103
  164. package/dist/src/consumer/metadata.js +0 -52
  165. package/dist/src/consumer/offset-manager.js +0 -23
  166. package/dist/src/index.js +0 -19
  167. package/dist/src/producer/producer.js +0 -84
  168. package/dist/src/request-handler.js +0 -57
  169. package/dist/src/request-handler.test.js +0 -321
  170. package/dist/src/types.js +0 -2
  171. package/dist/src/utils/api.js +0 -5
  172. package/dist/src/utils/decoder.js +0 -161
  173. package/dist/src/utils/encoder.js +0 -137
  174. package/dist/src/utils/error.js +0 -10
  175. package/dist/types.d.ts +0 -9
  176. package/dist/types.js +0 -2
  177. package/dist/utils/api.d.ts +0 -9
  178. package/dist/utils/api.js +0 -5
  179. package/dist/utils/debug.d.ts +0 -2
  180. package/dist/utils/debug.js +0 -11
  181. package/dist/utils/decoder.d.ts +0 -29
  182. package/dist/utils/decoder.js +0 -147
  183. package/dist/utils/delay.d.ts +0 -1
  184. package/dist/utils/delay.js +0 -5
  185. package/dist/utils/encoder.d.ts +0 -28
  186. package/dist/utils/encoder.js +0 -122
  187. package/dist/utils/error.d.ts +0 -11
  188. package/dist/utils/error.js +0 -27
  189. package/dist/utils/memo.d.ts +0 -1
  190. package/dist/utils/memo.js +0 -16
  191. package/dist/utils/retrier.d.ts +0 -10
  192. package/dist/utils/retrier.js +0 -22
  193. package/dist/utils/tracer.d.ts +0 -1
  194. package/dist/utils/tracer.js +0 -26
  195. package/examples/node_modules/.package-lock.json +0 -22
package/.github/workflows/release.yml ADDED
@@ -0,0 +1,17 @@
+ name: Publish package
+ on:
+ release:
+ types: [published]
+ jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-node@v4
+ with:
+ node-version: '20.x'
+ registry-url: 'https://registry.npmjs.org'
+ - run: npm ci
+ - run: npm publish
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
package/.prettierrc CHANGED
@@ -3,5 +3,6 @@
  "tabWidth": 4,
  "endOfLine": "lf",
  "semi": true,
- "trailingComma": "all"
- }
+ "trailingComma": "all",
+ "singleQuote": true
+ }
package/LICENSE CHANGED
@@ -1,6 +1,6 @@
  The MIT License

- Copyright (c) 2018 Túlio Ornelas (ornelas.tulio@gmail.com)
+ Copyright (c) 2024 Priit Käärd (priit@hashtatic.com)

  Permission is hereby granted, free of charge,
  to any person obtaining a copy of this software and
package/README.md CHANGED
@@ -1,13 +1,13 @@
  # KafkaTS

- **Please note that this project is still in early development and is not yet ready for production use. The interface before stable release is subject to change.**
-
  **KafkaTS** is a Apache Kafka client library for Node.js. It provides both a low-level API for communicating directly with the Apache Kafka cluster and high-level APIs for publishing and subscribing to Kafka topics.

+ **Please note that this project is still in early development and is not yet ready for production use. The interface before stable release is subject to change.**
+
  ## Installation

  ```bash
- npm install kafkats
+ npm install kafka-ts
  ```

  ## Quick start
@@ -16,8 +16,8 @@ npm install kafkats

  ```typescript
  export const kafka = createKafkaClient({
- clientId: "my-app",
- bootstrapServers: [{ host: "localhost", port: 9092 }],
+ clientId: 'my-app',
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
  });
  ```

@@ -26,7 +26,7 @@ export const kafka = createKafkaClient({
  ```typescript
  const consumer = await kafka.startConsumer({
  groupId: 'my-consumer-group'.
- topics: ["my-topic"],
+ topics: ['my-topic'],
  onMessage: (message) => {
  console.log(message);
  },
@@ -36,36 +36,47 @@ const consumer = await kafka.startConsumer({
  #### Producing messages

  ```typescript
- export const producer = kafka.createProcucer();
+ export const producer = kafka.createProducer();

- await producer.send([{ topic: "example-topic-f", partition: 0, key: null, value: line }]);
+ await producer.send([{ topic: 'my-topic', partition: 0, key: 'key', value: 'value' }]);
  ```

  #### Low-level API

  ```typescript
  const cluster = kafka.createCluster();
- await cluster.connect();
-
- const { controllerId } = await cluster.sendRequest(API.METADATA, {
- allowTopicAutoCreation: false,
- includeTopicAuthorizedOperations: false,
- topics: [],
- });
-
- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
- validateOnly: false,
- timeoutMs: 10_000,
- topics: [
- {
- name: "my-topic",
- numPartitions: 10,
- replicationFactor: 3,
- assignments: [],
- configs: [],
- },
- ],
- });
+ await cluster.connect();
+
+ const { controllerId } = await cluster.sendRequest(API.METADATA, {
+ allowTopicAutoCreation: false,
+ includeTopicAuthorizedOperations: false,
+ topics: [],
+ });
+
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
+ validateOnly: false,
+ timeoutMs: 10_000,
+ topics: [
+ {
+ name: 'my-topic',
+ numPartitions: 10,
+ replicationFactor: 3,
+ assignments: [],
+ configs: [],
+ },
+ ],
+ });
+
+ await cluster.disconnect();
+ ```
+
+ #### Graceful shutdown
+
+ ```typescript
+ process.once('SIGTERM', async () => {
+ await consumer.close(); // waits for the consumer to finish processing the last batch and disconnects
+ await producer.close();
+ });
  ```

  See the [examples](./examples) for more detailed examples.
@@ -77,12 +88,22 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu

  ### New features compared to kafkajs

- * **Static consumer membership** - Rebalancing during rolling deployments cause delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue processing based on the existing assignment.
- * **Consuming messages without consumer groups** - When you don't need for the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
+ - **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
+ - **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
+ - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.
+
+
+ ## Supported SASL mechanisms
+
+ - PLAIN
+ - SCRAM-SHA-256
+ - SCRAM-SHA-512
+
+ Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.

  ## Backlog

- - [ ] Consumer concurrency control
- - [ ] Partitioner
- - [ ] API versioning
- - [ ] SCRAM support
+ Minimal set of features left to implement before a stable release:
+
+ - Partitioner (Currently have to specify the partition on producer.send())
+ - API versioning (Currently only tested against Kafka 3.7+)
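
Note on the "Consuming messages without consumer groups" bullet added above: a minimal sketch of what that looks like with the API surface shown in this diff (`createKafkaClient`, `startConsumer`, `onMessage`, `close`). Omitting `groupId` is the only change assumed here; any option for picking the starting offset is left out because its name does not appear in this diff.

```typescript
import { createKafkaClient } from 'kafka-ts';

const kafka = createKafkaClient({
    clientId: 'my-app',
    bootstrapServers: [{ host: 'localhost', port: 9092 }],
});

(async () => {
    // No groupId/groupInstanceId: per the README, offsets are not tracked in a consumer group,
    // so each run starts from the beginning or from the latest partition offset.
    const consumer = await kafka.startConsumer({
        topics: ['my-topic'],
        onMessage: (message) => {
            console.log(message);
        },
    });

    process.once('SIGTERM', async () => {
        await consumer.close(); // waits for the in-flight batch, then disconnects
    });
})();
```
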
package/docker-compose.yml CHANGED
@@ -1,104 +1,110 @@
  # kafka with raft:
  services:
- kafka-0:
- container_name: kafka-0
- image: apache/kafka:3.7.1
- ports:
- - "9092:9092"
- - "29092:29092"
- environment:
- KAFKA_NODE_ID: 0
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
- kafka-1:
- container_name: kafka-1
- image: apache/kafka:3.7.1
- ports:
- - "9093:9093"
- - "29093:29093"
- environment:
- KAFKA_NODE_ID: 1
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
- kafka-2:
- container_name: kafka-2
- image: apache/kafka:3.7.1
- ports:
- - "9094:9094"
- - "29094:29094"
- environment:
- KAFKA_NODE_ID: 2
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
- KAFKA_PROCESS_ROLES: 'broker,controller'
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
- KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
- KAFKA_SSL_KEY_PASSWORD: 'password'
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
- KAFKA_SSL_CLIENT_AUTH: 'required'
- volumes:
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-0:
+ container_name: kafka-0
+ image: apache/kafka:3.7.1
+ ports:
+ - '9092:9092'
+ - '29092:29092'
+ environment:
+ KAFKA_NODE_ID: 0
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-256_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-512_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-1:
+ container_name: kafka-1
+ image: apache/kafka:3.7.1
+ ports:
+ - '9093:9093'
+ - '29093:29093'
+ environment:
+ KAFKA_NODE_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-256_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-512_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-2:
+ container_name: kafka-2
+ image: apache/kafka:3.7.1
+ ports:
+ - '9094:9094'
+ - '29094:29094'
+ environment:
+ KAFKA_NODE_ID: 2
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-256_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_LISTENER_NAME_EXTERNAL_SCRAM-SHA-512_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.scram.ScramLoginModule required;'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
package/examples/package-lock.json CHANGED
@@ -1,30 +1,31 @@
  {
- "name": "examples",
- "version": "1.0.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "examples",
- "version": "1.0.0",
- "license": "ISC",
- "dependencies": {
- "kafkats": "file:../"
- }
- },
- "..": {
- "version": "1.0.0",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.12.12",
- "prettier": "^3.2.5",
- "typescript": "^5.4.5",
- "vitest": "^1.6.0"
- }
- },
- "node_modules/kafkats": {
- "resolved": "..",
- "link": true
+ "name": "examples",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "examples",
+ "version": "1.0.0",
+ "license": "ISC",
+ "dependencies": {
+ "kafka-ts": "file:../"
+ }
+ },
+ "..": {
+ "name": "kafka-ts",
+ "version": "0.0.1-beta",
+ "license": "MIT",
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "prettier": "^3.2.5",
+ "typescript": "^5.4.5",
+ "vitest": "^1.6.0"
+ }
+ },
+ "node_modules/kafka-ts": {
+ "resolved": "..",
+ "link": true
+ }
  }
- }
  }
package/examples/package.json CHANGED
@@ -1,14 +1,14 @@
  {
- "name": "examples",
- "version": "1.0.0",
- "description": "",
- "main": "dist/replicator.js",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "dependencies": {
- "kafkats": "file:../"
- },
- "author": "",
- "license": "ISC"
+ "name": "examples",
+ "version": "1.0.0",
+ "description": "",
+ "main": "dist/replicator.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "dependencies": {
+ "kafka-ts": "file:../"
+ },
+ "author": "",
+ "license": "ISC"
  }
package/examples/src/client.ts CHANGED
@@ -1,9 +1,9 @@
- import { readFileSync } from "fs";
- import { createKafkaClient } from "kafkats";
+ import { readFileSync } from 'fs';
+ import { createKafkaClient, saslScramSha512 } from 'kafka-ts';

  export const kafka = createKafkaClient({
- clientId: "examples",
- bootstrapServers: [{ host: "localhost", port: 9092 }],
- sasl: { mechanism: "PLAIN", username: "admin", password: "admin" },
- ssl: { ca: readFileSync("../certs/ca.crt").toString() },
+ clientId: 'examples',
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
+ sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
+ ssl: { ca: readFileSync('../certs/ca.crt').toString() },
  });
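
The client example above replaces the inline `{ mechanism: 'PLAIN', ... }` config with the new `saslScramSha512` helper and keeps the CA from the generated certs, matching the SASL_SSL listener in docker-compose.yml. Below is an illustrative, environment-driven variant; only `createKafkaClient` and `saslScramSha512` come from this diff, and every environment variable name and default is an assumption.

```typescript
import { readFileSync } from 'fs';
import { createKafkaClient, saslScramSha512 } from 'kafka-ts';

// Hypothetical env-driven version of examples/src/client.ts; variable names are illustrative.
// KAFKA_BROKERS is expected as a comma-separated "host:port" list.
const bootstrapServers = (process.env.KAFKA_BROKERS ?? 'localhost:9092').split(',').map((broker) => {
    const [host, port] = broker.split(':');
    return { host, port: Number(port) };
});

export const kafka = createKafkaClient({
    clientId: process.env.KAFKA_CLIENT_ID ?? 'examples',
    bootstrapServers,
    // SCRAM-SHA-512 over the SASL_SSL listener configured in docker-compose.yml.
    sasl: saslScramSha512({
        username: process.env.KAFKA_USERNAME ?? 'admin',
        password: process.env.KAFKA_PASSWORD ?? 'admin',
    }),
    // The local brokers use a self-signed certificate, so its CA must be passed explicitly.
    ssl: { ca: readFileSync(process.env.KAFKA_CA_PATH ?? '../certs/ca.crt').toString() },
});
```
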
package/examples/src/consumer.ts CHANGED
@@ -1,17 +1,18 @@
- import { kafka } from "./client";
+ import { kafka } from './client';

  (async () => {
  const consumer = await kafka.startConsumer({
- groupId: "example-group",
- groupInstanceId: "example-group-instance",
- topics: ["example-topic-f"],
- allowTopicAutoCreation: true,
- onMessage: (message) => {
- console.log(message);
+ groupId: 'example-group',
+ groupInstanceId: 'example-group-instance',
+ topics: ['my-topic'],
+ onBatch: (batch) => {
+ console.log(batch);
  },
+ granularity: 'broker',
+ concurrency: 10,
  });

- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  await consumer.close();
  });
  })();
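
The consumer example above only handles SIGINT, while the new README section recommends closing both the consumer and the producer on SIGTERM. A combined shutdown sketch using only calls that appear in this diff (`startConsumer`, `createProducer`, `close`); wiring both signals through one handler is an illustrative choice, not something the package prescribes.

```typescript
import { kafka } from './client';

(async () => {
    const producer = kafka.createProducer({ allowTopicAutoCreation: true });
    const consumer = await kafka.startConsumer({
        groupId: 'example-group',
        topics: ['my-topic'],
        onBatch: (batch) => {
            console.log(batch);
        },
    });

    // Close the consumer first so the in-flight batch finishes while the producer is still usable,
    // then close the producer.
    const shutdown = async () => {
        await consumer.close();
        await producer.close();
        process.exit(0);
    };
    process.once('SIGTERM', shutdown);
    process.once('SIGINT', shutdown);
})();
```
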
package/examples/src/create-topic.ts CHANGED
@@ -1,5 +1,5 @@
- import { kafka } from "./client";
- import { API } from "kafkats";
+ import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
+ import { kafka } from './client';

  (async () => {
  const cluster = kafka.createCluster();
@@ -11,24 +11,30 @@ import { API } from "kafkats";
  topics: [],
  });

- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
- validateOnly: false,
- timeoutMs: 10_000,
- topics: [
- {
- name: "my-topic",
- numPartitions: 10,
- replicationFactor: 3,
- assignments: [],
- configs: [],
- },
- ],
- });
+ try {
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
+ validateOnly: false,
+ timeoutMs: 10_000,
+ topics: [
+ {
+ name: 'my-topic',
+ numPartitions: 10,
+ replicationFactor: 3,
+ assignments: [],
+ configs: [],
+ },
+ ],
+ });
+ } catch (error) {
+ if ((error as KafkaTSApiError).errorCode !== API_ERROR.TOPIC_ALREADY_EXISTS) {
+ throw error;
+ }
+ }

  const metadata = await cluster.sendRequestToNode(controllerId)(API.METADATA, {
  allowTopicAutoCreation: false,
  includeTopicAuthorizedOperations: false,
- topics: [{ id: null, name: "my-topic" }],
+ topics: [{ id: null, name: 'my-topic' }],
  });

  console.log(metadata);
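
The updated create-topic example treats `TOPIC_ALREADY_EXISTS` as success, making topic creation idempotent. The same pattern can be folded into a small helper; this sketch only composes calls that appear in this diff (`createCluster`, `connect`, `sendRequest`, `sendRequestToNode`, `disconnect`, `API_ERROR`, `KafkaTSApiError`), while the helper name and parameters are illustrative.

```typescript
import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
import { kafka } from './client';

// Illustrative helper: create a topic if it does not exist yet.
const ensureTopic = async (name: string, numPartitions: number, replicationFactor: number) => {
    const cluster = kafka.createCluster();
    await cluster.connect();
    try {
        // Ask any broker for the current controller, then send CREATE_TOPICS to it.
        const { controllerId } = await cluster.sendRequest(API.METADATA, {
            allowTopicAutoCreation: false,
            includeTopicAuthorizedOperations: false,
            topics: [],
        });
        await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
            validateOnly: false,
            timeoutMs: 10_000,
            topics: [{ name, numPartitions, replicationFactor, assignments: [], configs: [] }],
        });
    } catch (error) {
        // An existing topic is fine; anything else is re-thrown.
        if ((error as KafkaTSApiError).errorCode !== API_ERROR.TOPIC_ALREADY_EXISTS) {
            throw error;
        }
    } finally {
        await cluster.disconnect();
    }
};

ensureTopic('my-topic', 10, 3).catch(console.error);
```
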
package/examples/src/producer.ts CHANGED
@@ -1,24 +1,24 @@
- import { createInterface } from "readline";
- import { kafka } from "./client";
+ import { createInterface } from 'readline';
+ import { kafka } from './client';

  const producer = kafka.createProducer({ allowTopicAutoCreation: true });

  const rl = createInterface({ input: process.stdin, output: process.stdout });

- process.stdout.write("> ");
- rl.on("line", async (line) => {
+ process.stdout.write('> ');
+ rl.on('line', async (line) => {
  await producer.send([
  {
- topic: "example-topic-f",
+ topic: 'example-topic-f',
  key: null,
  value: line,
  partition: 0,
  },
  ]);
- process.stdout.write("> ");
+ process.stdout.write('> ');
  });

- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  rl.close();
  await producer.close();
  });
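
Since the partitioner is still on the backlog ("Currently have to specify the partition on producer.send()"), callers pick partitions themselves, as the producer example above does with a hard-coded `partition: 0`. One illustrative way to spread keys across partitions in the meantime; the hashing scheme and partition count are arbitrary and not part of the library.

```typescript
import { createHash } from 'crypto';
import { kafka } from './client';

const producer = kafka.createProducer({ allowTopicAutoCreation: true });

// Must match the topic's real partition count (10 in the create-topic example).
const NUM_PARTITIONS = 10;

// Stable key -> partition mapping until a built-in partitioner ships; md5 is just an example.
const partitionForKey = (key: string) =>
    createHash('md5').update(key).digest().readUInt32BE(0) % NUM_PARTITIONS;

export const sendKeyed = (topic: string, key: string, value: string) =>
    producer.send([{ topic, partition: partitionForKey(key), key, value }]);
```
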
package/examples/src/replicator.ts CHANGED
@@ -1,7 +1,7 @@
- import { kafka } from "./client";
+ import { kafka } from './client';

  (async () => {
- const topic = "example-topic";
+ const topic = 'example-topic';

  const producer = kafka.createProducer({ allowTopicAutoCreation: true });
  const consumer = await kafka.startConsumer({
@@ -10,7 +10,7 @@ import { kafka } from "./client";
  await producer.send(
  messages.map((message) => ({
  ...message,
- headers: { "X-Replicated": "true" },
+ headers: { 'X-Replicated': 'true' },
  topic: `${message.topic}-replicated`,
  offset: 0n,
  })),
@@ -18,7 +18,7 @@ import { kafka } from "./client";
  console.log(`Replicated ${messages.length} messages`);
  },
  });
- process.on("SIGINT", async () => {
+ process.on('SIGINT', async () => {
  await consumer.close();
  await producer.close();
  });