kafka-ts 0.0.2-beta → 0.0.3-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/.github/workflows/release.yml +14 -14
  2. package/.prettierrc +3 -2
  3. package/README.md +43 -33
  4. package/docker-compose.yml +102 -102
  5. package/examples/package-lock.json +28 -28
  6. package/examples/package.json +12 -12
  7. package/examples/src/client.ts +6 -6
  8. package/examples/src/consumer.ts +9 -8
  9. package/examples/src/create-topic.ts +23 -16
  10. package/examples/src/producer.ts +7 -7
  11. package/examples/src/replicator.ts +4 -4
  12. package/examples/src/utils/delay.ts +1 -0
  13. package/examples/src/utils/json.ts +1 -1
  14. package/examples/tsconfig.json +2 -2
  15. package/package.json +21 -19
  16. package/src/api/api-versions.ts +2 -2
  17. package/src/api/create-topics.ts +2 -2
  18. package/src/api/delete-topics.ts +2 -2
  19. package/src/api/fetch.ts +3 -3
  20. package/src/api/find-coordinator.ts +2 -2
  21. package/src/api/heartbeat.ts +2 -2
  22. package/src/api/index.ts +18 -18
  23. package/src/api/init-producer-id.ts +2 -2
  24. package/src/api/join-group.ts +3 -3
  25. package/src/api/leave-group.ts +2 -2
  26. package/src/api/list-offsets.ts +3 -3
  27. package/src/api/metadata.ts +3 -3
  28. package/src/api/offset-commit.ts +2 -2
  29. package/src/api/offset-fetch.ts +2 -2
  30. package/src/api/produce.ts +3 -3
  31. package/src/api/sasl-authenticate.ts +2 -2
  32. package/src/api/sasl-handshake.ts +2 -2
  33. package/src/api/sync-group.ts +2 -2
  34. package/src/broker.ts +9 -9
  35. package/src/client.ts +6 -6
  36. package/src/cluster.test.ts +68 -68
  37. package/src/cluster.ts +7 -7
  38. package/src/connection.ts +17 -15
  39. package/src/consumer/consumer-group.ts +14 -14
  40. package/src/consumer/consumer-metadata.ts +2 -2
  41. package/src/consumer/consumer.ts +84 -82
  42. package/src/consumer/fetch-manager.ts +179 -0
  43. package/src/consumer/fetcher.ts +57 -0
  44. package/src/consumer/offset-manager.ts +6 -6
  45. package/src/consumer/processor.ts +47 -0
  46. package/src/distributors/assignments-to-replicas.test.ts +7 -7
  47. package/src/distributors/assignments-to-replicas.ts +1 -1
  48. package/src/distributors/messages-to-topic-partition-leaders.test.ts +6 -6
  49. package/src/index.ts +4 -3
  50. package/src/metadata.ts +4 -4
  51. package/src/producer/producer.ts +8 -8
  52. package/src/types.ts +2 -0
  53. package/src/utils/api.ts +4 -4
  54. package/src/utils/debug.ts +2 -2
  55. package/src/utils/decoder.ts +4 -4
  56. package/src/utils/encoder.ts +6 -6
  57. package/src/utils/error.ts +3 -3
  58. package/src/utils/retrier.ts +1 -1
  59. package/src/utils/tracer.ts +7 -4
  60. package/tsconfig.json +16 -16
@@ -1,17 +1,17 @@
1
1
  name: Publish package
2
2
  on:
3
- release:
4
- types: [published]
3
+ release:
4
+ types: [published]
5
5
  jobs:
6
- build:
7
- runs-on: ubuntu-latest
8
- steps:
9
- - uses: actions/checkout@v4
10
- - uses: actions/setup-node@v4
11
- with:
12
- node-version: '20.x'
13
- registry-url: 'https://registry.npmjs.org'
14
- - run: npm ci
15
- - run: npm publish
16
- env:
17
- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
6
+ build:
7
+ runs-on: ubuntu-latest
8
+ steps:
9
+ - uses: actions/checkout@v4
10
+ - uses: actions/setup-node@v4
11
+ with:
12
+ node-version: '20.x'
13
+ registry-url: 'https://registry.npmjs.org'
14
+ - run: npm ci
15
+ - run: npm publish
16
+ env:
17
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
package/.prettierrc CHANGED
@@ -3,5 +3,6 @@
3
3
  "tabWidth": 4,
4
4
  "endOfLine": "lf",
5
5
  "semi": true,
6
- "trailingComma": "all"
7
- }
6
+ "trailingComma": "all",
7
+ "singleQuote": true
8
+ }
package/README.md CHANGED
@@ -16,8 +16,8 @@ npm install kafka-ts
16
16
 
17
17
  ```typescript
18
18
  export const kafka = createKafkaClient({
19
- clientId: "my-app",
20
- bootstrapServers: [{ host: "localhost", port: 9092 }],
19
+ clientId: 'my-app',
20
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
21
21
  });
22
22
  ```
23
23
 
@@ -26,7 +26,7 @@ export const kafka = createKafkaClient({
26
26
  ```typescript
27
27
  const consumer = await kafka.startConsumer({
28
28
  groupId: 'my-consumer-group',
29
- topics: ["my-topic"],
29
+ topics: ['my-topic'],
30
30
  onMessage: (message) => {
31
31
  console.log(message);
32
32
  },
@@ -36,36 +36,47 @@ const consumer = await kafka.startConsumer({
36
36
  #### Producing messages
37
37
 
38
38
  ```typescript
39
- export const producer = kafka.createProcucer();
39
+ export const producer = kafka.createProducer();
40
40
 
41
- await producer.send([{ topic: "example-topic-f", partition: 0, key: null, value: line }]);
41
+ await producer.send([{ topic: 'my-topic', partition: 0, key: 'key', value: 'value' }]);
42
42
  ```
43
43
 
44
44
  #### Low-level API
45
45
 
46
46
  ```typescript
47
47
  const cluster = kafka.createCluster();
48
- await cluster.connect();
49
-
50
- const { controllerId } = await cluster.sendRequest(API.METADATA, {
51
- allowTopicAutoCreation: false,
52
- includeTopicAuthorizedOperations: false,
53
- topics: [],
54
- });
55
-
56
- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
57
- validateOnly: false,
58
- timeoutMs: 10_000,
59
- topics: [
60
- {
61
- name: "my-topic",
62
- numPartitions: 10,
63
- replicationFactor: 3,
64
- assignments: [],
65
- configs: [],
66
- },
67
- ],
68
- });
48
+ await cluster.connect();
49
+
50
+ const { controllerId } = await cluster.sendRequest(API.METADATA, {
51
+ allowTopicAutoCreation: false,
52
+ includeTopicAuthorizedOperations: false,
53
+ topics: [],
54
+ });
55
+
56
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
57
+ validateOnly: false,
58
+ timeoutMs: 10_000,
59
+ topics: [
60
+ {
61
+ name: 'my-topic',
62
+ numPartitions: 10,
63
+ replicationFactor: 3,
64
+ assignments: [],
65
+ configs: [],
66
+ },
67
+ ],
68
+ });
69
+
70
+ await cluster.disconnect();
71
+ ```
72
+
73
+ #### Graceful shutdown
74
+
75
+ ```typescript
76
+ process.once('SIGTERM', async () => {
77
+ await consumer.close(); // waits for the consumer to finish processing the last batch and disconnects
78
+ await producer.close();
79
+ });
69
80
  ```
70
81
 
71
82
  See the [examples](./examples) for more detailed examples.
@@ -77,15 +88,14 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu
77
88
 
78
89
  ### New features compared to kafkajs
79
90
 
80
- * **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
81
- * **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
82
- * **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.
91
+ - **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
92
+ - **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
93
+ - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.
83
94
 
84
95
  ## Backlog
85
96
 
86
97
  Minimal set of features required before a stable release:
87
98
 
88
- - Consumer concurrency control (Currently each relevant broker is polled in sequence)
89
- - Partitioner (Currently have to specify the partition on producer.send())
90
- - API versioning (Currently only tested against Kafka 3.7+)
91
- - SASL SCRAM support (+ pluggable authentication providers)
99
+ - Partitioner (Currently have to specify the partition on producer.send())
100
+ - API versioning (Currently only tested against Kafka 3.7+)
101
+ - SASL SCRAM support (+ pluggable authentication providers)
@@ -1,104 +1,104 @@
1
1
  # kafka with raft:
2
2
  services:
3
- kafka-0:
4
- container_name: kafka-0
5
- image: apache/kafka:3.7.1
6
- ports:
7
- - "9092:9092"
8
- - "29092:29092"
9
- environment:
10
- KAFKA_NODE_ID: 0
11
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
12
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
13
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
14
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
15
- KAFKA_PROCESS_ROLES: 'broker,controller'
16
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
17
- KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
18
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
19
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
20
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
21
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
22
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
23
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
24
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
25
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
26
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
27
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
28
- KAFKA_SSL_KEY_PASSWORD: 'password'
29
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
30
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
31
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
32
- KAFKA_SSL_CLIENT_AUTH: 'required'
33
- volumes:
34
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
35
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
36
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
37
- kafka-1:
38
- container_name: kafka-1
39
- image: apache/kafka:3.7.1
40
- ports:
41
- - "9093:9093"
42
- - "29093:29093"
43
- environment:
44
- KAFKA_NODE_ID: 1
45
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
46
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
47
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
48
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
49
- KAFKA_PROCESS_ROLES: 'broker,controller'
50
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
51
- KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
52
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
53
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
54
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
55
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
56
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
57
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
58
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
59
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
60
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
61
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
62
- KAFKA_SSL_KEY_PASSWORD: 'password'
63
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
64
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
65
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
66
- KAFKA_SSL_CLIENT_AUTH: 'required'
67
- volumes:
68
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
69
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
70
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
71
- kafka-2:
72
- container_name: kafka-2
73
- image: apache/kafka:3.7.1
74
- ports:
75
- - "9094:9094"
76
- - "29094:29094"
77
- environment:
78
- KAFKA_NODE_ID: 2
79
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
80
- KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
81
- KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
82
- KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
83
- KAFKA_PROCESS_ROLES: 'broker,controller'
84
- KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
85
- KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
86
- KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
87
- KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
88
- CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
89
- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
90
- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
91
- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
92
- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
93
- KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
94
- KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
95
- KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
96
- KAFKA_SSL_KEY_PASSWORD: 'password'
97
- KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
98
- KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
99
- KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
100
- KAFKA_SSL_CLIENT_AUTH: 'required'
101
- volumes:
102
- - ./log4j.properties:/etc/kafka/docker/log4j.properties
103
- - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
104
- - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
3
+ kafka-0:
4
+ container_name: kafka-0
5
+ image: apache/kafka:3.7.1
6
+ ports:
7
+ - '9092:9092'
8
+ - '29092:29092'
9
+ environment:
10
+ KAFKA_NODE_ID: 0
11
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
12
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
13
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
14
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
15
+ KAFKA_PROCESS_ROLES: 'broker,controller'
16
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
17
+ KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
18
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
19
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
20
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
21
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
22
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
23
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
24
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
25
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
26
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
27
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
28
+ KAFKA_SSL_KEY_PASSWORD: 'password'
29
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
30
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
31
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
32
+ KAFKA_SSL_CLIENT_AUTH: 'required'
33
+ volumes:
34
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
35
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
36
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
37
+ kafka-1:
38
+ container_name: kafka-1
39
+ image: apache/kafka:3.7.1
40
+ ports:
41
+ - '9093:9093'
42
+ - '29093:29093'
43
+ environment:
44
+ KAFKA_NODE_ID: 1
45
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
46
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
47
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
48
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
49
+ KAFKA_PROCESS_ROLES: 'broker,controller'
50
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
51
+ KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
52
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
53
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
54
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
55
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
56
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
57
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
58
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
59
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
60
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
61
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
62
+ KAFKA_SSL_KEY_PASSWORD: 'password'
63
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
64
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
65
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
66
+ KAFKA_SSL_CLIENT_AUTH: 'required'
67
+ volumes:
68
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
69
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
70
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
71
+ kafka-2:
72
+ container_name: kafka-2
73
+ image: apache/kafka:3.7.1
74
+ ports:
75
+ - '9094:9094'
76
+ - '29094:29094'
77
+ environment:
78
+ KAFKA_NODE_ID: 2
79
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
80
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
81
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
82
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
83
+ KAFKA_PROCESS_ROLES: 'broker,controller'
84
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
85
+ KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
86
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
87
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
88
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
89
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
90
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
91
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
92
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
93
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
94
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
95
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
96
+ KAFKA_SSL_KEY_PASSWORD: 'password'
97
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
98
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
99
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
100
+ KAFKA_SSL_CLIENT_AUTH: 'required'
101
+ volumes:
102
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
103
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
104
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
@@ -1,31 +1,31 @@
1
1
  {
2
- "name": "examples",
3
- "version": "1.0.0",
4
- "lockfileVersion": 3,
5
- "requires": true,
6
- "packages": {
7
- "": {
8
- "name": "examples",
9
- "version": "1.0.0",
10
- "license": "ISC",
11
- "dependencies": {
12
- "kafka-ts": "file:../"
13
- }
14
- },
15
- "..": {
16
- "name": "kafka-ts",
17
- "version": "0.0.1-beta",
18
- "license": "MIT",
19
- "devDependencies": {
20
- "@types/node": "^20.12.12",
21
- "prettier": "^3.2.5",
22
- "typescript": "^5.4.5",
23
- "vitest": "^1.6.0"
24
- }
25
- },
26
- "node_modules/kafka-ts": {
27
- "resolved": "..",
28
- "link": true
2
+ "name": "examples",
3
+ "version": "1.0.0",
4
+ "lockfileVersion": 3,
5
+ "requires": true,
6
+ "packages": {
7
+ "": {
8
+ "name": "examples",
9
+ "version": "1.0.0",
10
+ "license": "ISC",
11
+ "dependencies": {
12
+ "kafka-ts": "file:../"
13
+ }
14
+ },
15
+ "..": {
16
+ "name": "kafka-ts",
17
+ "version": "0.0.1-beta",
18
+ "license": "MIT",
19
+ "devDependencies": {
20
+ "@types/node": "^20.12.12",
21
+ "prettier": "^3.2.5",
22
+ "typescript": "^5.4.5",
23
+ "vitest": "^1.6.0"
24
+ }
25
+ },
26
+ "node_modules/kafka-ts": {
27
+ "resolved": "..",
28
+ "link": true
29
+ }
29
30
  }
30
- }
31
31
  }
@@ -1,14 +1,14 @@
1
1
  {
2
- "name": "examples",
3
- "version": "1.0.0",
4
- "description": "",
5
- "main": "dist/replicator.js",
6
- "scripts": {
7
- "test": "echo \"Error: no test specified\" && exit 1"
8
- },
9
- "dependencies": {
10
- "kafka-ts": "file:../"
11
- },
12
- "author": "",
13
- "license": "ISC"
2
+ "name": "examples",
3
+ "version": "1.0.0",
4
+ "description": "",
5
+ "main": "dist/replicator.js",
6
+ "scripts": {
7
+ "test": "echo \"Error: no test specified\" && exit 1"
8
+ },
9
+ "dependencies": {
10
+ "kafka-ts": "file:../"
11
+ },
12
+ "author": "",
13
+ "license": "ISC"
14
14
  }
@@ -1,9 +1,9 @@
1
- import { readFileSync } from "fs";
2
- import { createKafkaClient } from "kafka-ts";
1
+ import { readFileSync } from 'fs';
2
+ import { createKafkaClient } from 'kafka-ts';
3
3
 
4
4
  export const kafka = createKafkaClient({
5
- clientId: "examples",
6
- bootstrapServers: [{ host: "localhost", port: 9092 }],
7
- sasl: { mechanism: "PLAIN", username: "admin", password: "admin" },
8
- ssl: { ca: readFileSync("../certs/ca.crt").toString() },
5
+ clientId: 'examples',
6
+ bootstrapServers: [{ host: 'localhost', port: 9092 }],
7
+ sasl: { mechanism: 'PLAIN', username: 'admin', password: 'admin' },
8
+ ssl: { ca: readFileSync('../certs/ca.crt').toString() },
9
9
  });
@@ -1,17 +1,18 @@
1
- import { kafka } from "./client";
1
+ import { kafka } from './client';
2
2
 
3
3
  (async () => {
4
4
  const consumer = await kafka.startConsumer({
5
- groupId: "example-group",
6
- groupInstanceId: "example-group-instance",
7
- topics: ["example-topic-f"],
8
- allowTopicAutoCreation: true,
9
- onMessage: (message) => {
10
- console.log(message);
5
+ groupId: 'example-group',
6
+ groupInstanceId: 'example-group-instance',
7
+ topics: ['my-topic'],
8
+ onBatch: (batch) => {
9
+ console.log(batch);
11
10
  },
11
+ granularity: 'broker',
12
+ concurrency: 10,
12
13
  });
13
14
 
14
- process.on("SIGINT", async () => {
15
+ process.on('SIGINT', async () => {
15
16
  await consumer.close();
16
17
  });
17
18
  })();
@@ -1,5 +1,5 @@
1
- import { kafka } from "./client";
2
- import { API } from "kafka-ts";
1
+ import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
2
+ import { kafka } from './client';
3
3
 
4
4
  (async () => {
5
5
  const cluster = kafka.createCluster();
@@ -11,24 +11,31 @@ import { API } from "kafka-ts";
11
11
  topics: [],
12
12
  });
13
13
 
14
- await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
15
- validateOnly: false,
16
- timeoutMs: 10_000,
17
- topics: [
18
- {
19
- name: "my-topic",
20
- numPartitions: 10,
21
- replicationFactor: 3,
22
- assignments: [],
23
- configs: [],
24
- },
25
- ],
26
- });
14
+ try {
15
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
16
+ validateOnly: false,
17
+ timeoutMs: 10_000,
18
+ topics: [
19
+ {
20
+ name: 'my-topic',
21
+ numPartitions: 10,
22
+ replicationFactor: 3,
23
+ assignments: [],
24
+ configs: [],
25
+ },
26
+ ],
27
+ });
28
+ } catch (error) {
29
+ if ((error as KafkaTSApiError).errorCode === API_ERROR.TOPIC_ALREADY_EXISTS) {
30
+ return;
31
+ }
32
+ throw error;
33
+ }
27
34
 
28
35
  const metadata = await cluster.sendRequestToNode(controllerId)(API.METADATA, {
29
36
  allowTopicAutoCreation: false,
30
37
  includeTopicAuthorizedOperations: false,
31
- topics: [{ id: null, name: "my-topic" }],
38
+ topics: [{ id: null, name: 'my-topic' }],
32
39
  });
33
40
 
34
41
  console.log(metadata);
@@ -1,24 +1,24 @@
1
- import { createInterface } from "readline";
2
- import { kafka } from "./client";
1
+ import { createInterface } from 'readline';
2
+ import { kafka } from './client';
3
3
 
4
4
  const producer = kafka.createProducer({ allowTopicAutoCreation: true });
5
5
 
6
6
  const rl = createInterface({ input: process.stdin, output: process.stdout });
7
7
 
8
- process.stdout.write("> ");
9
- rl.on("line", async (line) => {
8
+ process.stdout.write('> ');
9
+ rl.on('line', async (line) => {
10
10
  await producer.send([
11
11
  {
12
- topic: "example-topic-f",
12
+ topic: 'example-topic-f',
13
13
  key: null,
14
14
  value: line,
15
15
  partition: 0,
16
16
  },
17
17
  ]);
18
- process.stdout.write("> ");
18
+ process.stdout.write('> ');
19
19
  });
20
20
 
21
- process.on("SIGINT", async () => {
21
+ process.on('SIGINT', async () => {
22
22
  rl.close();
23
23
  await producer.close();
24
24
  });
@@ -1,7 +1,7 @@
1
- import { kafka } from "./client";
1
+ import { kafka } from './client';
2
2
 
3
3
  (async () => {
4
- const topic = "example-topic";
4
+ const topic = 'example-topic';
5
5
 
6
6
  const producer = kafka.createProducer({ allowTopicAutoCreation: true });
7
7
  const consumer = await kafka.startConsumer({
@@ -10,7 +10,7 @@ import { kafka } from "./client";
10
10
  await producer.send(
11
11
  messages.map((message) => ({
12
12
  ...message,
13
- headers: { "X-Replicated": "true" },
13
+ headers: { 'X-Replicated': 'true' },
14
14
  topic: `${message.topic}-replicated`,
15
15
  offset: 0n,
16
16
  })),
@@ -18,7 +18,7 @@ import { kafka } from "./client";
18
18
  console.log(`Replicated ${messages.length} messages`);
19
19
  },
20
20
  });
21
- process.on("SIGINT", async () => {
21
+ process.on('SIGINT', async () => {
22
22
  await consumer.close();
23
23
  await producer.close();
24
24
  });
@@ -0,0 +1 @@
1
+ export const delay = (delayMs: number) => new Promise<void>((resolve) => setTimeout(resolve, delayMs));
@@ -1 +1 @@
1
- export const serializer = (_: string, value: unknown) => (typeof value === "bigint" ? value.toString() : value);
1
+ export const serializer = (_: string, value: unknown) => (typeof value === 'bigint' ? value.toString() : value);
@@ -3,5 +3,5 @@
3
3
  "compilerOptions": {
4
4
  "outDir": "dist",
5
5
  "inlineSourceMap": true
6
- },
7
- }
6
+ }
7
+ }