kafka-ts 0.0.1-beta.4 → 0.0.1-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156)
  1. package/.github/workflows/release.yml +19 -6
  2. package/README.md +15 -21
  3. package/dist/api/api-versions.d.ts +9 -0
  4. package/dist/api/api-versions.js +24 -0
  5. package/dist/api/create-topics.d.ts +38 -0
  6. package/dist/api/create-topics.js +53 -0
  7. package/dist/api/delete-topics.d.ts +18 -0
  8. package/dist/api/delete-topics.js +33 -0
  9. package/dist/api/fetch.d.ts +84 -0
  10. package/dist/api/fetch.js +142 -0
  11. package/dist/api/find-coordinator.d.ts +21 -0
  12. package/dist/api/find-coordinator.js +39 -0
  13. package/dist/api/heartbeat.d.ts +11 -0
  14. package/dist/api/heartbeat.js +27 -0
  15. package/dist/api/index.d.ts +578 -0
  16. package/dist/api/index.js +165 -0
  17. package/dist/api/init-producer-id.d.ts +13 -0
  18. package/dist/api/init-producer-id.js +29 -0
  19. package/dist/api/join-group.d.ts +34 -0
  20. package/dist/api/join-group.js +51 -0
  21. package/dist/api/leave-group.d.ts +19 -0
  22. package/dist/api/leave-group.js +39 -0
  23. package/dist/api/list-offsets.d.ts +29 -0
  24. package/dist/api/list-offsets.js +48 -0
  25. package/dist/api/metadata.d.ts +40 -0
  26. package/dist/api/metadata.js +58 -0
  27. package/dist/api/offset-commit.d.ts +28 -0
  28. package/dist/api/offset-commit.js +48 -0
  29. package/dist/api/offset-fetch.d.ts +33 -0
  30. package/dist/api/offset-fetch.js +57 -0
  31. package/dist/api/produce.d.ts +54 -0
  32. package/dist/api/produce.js +126 -0
  33. package/dist/api/sasl-authenticate.d.ts +11 -0
  34. package/dist/api/sasl-authenticate.js +23 -0
  35. package/dist/api/sasl-handshake.d.ts +6 -0
  36. package/dist/api/sasl-handshake.js +19 -0
  37. package/dist/api/sync-group.d.ts +24 -0
  38. package/dist/api/sync-group.js +36 -0
  39. package/dist/auth/index.d.ts +2 -0
  40. package/dist/auth/index.js +8 -0
  41. package/dist/auth/plain.d.ts +5 -0
  42. package/dist/auth/plain.js +12 -0
  43. package/dist/auth/scram.d.ts +9 -0
  44. package/dist/auth/scram.js +40 -0
  45. package/dist/broker.d.ts +30 -0
  46. package/dist/broker.js +55 -0
  47. package/dist/client.d.ts +23 -0
  48. package/dist/client.js +36 -0
  49. package/dist/cluster.d.ts +27 -0
  50. package/dist/cluster.js +70 -0
  51. package/dist/cluster.test.d.ts +1 -0
  52. package/dist/cluster.test.js +345 -0
  53. package/dist/codecs/gzip.d.ts +2 -0
  54. package/dist/codecs/gzip.js +8 -0
  55. package/dist/codecs/index.d.ts +2 -0
  56. package/dist/codecs/index.js +17 -0
  57. package/dist/codecs/none.d.ts +2 -0
  58. package/dist/codecs/none.js +7 -0
  59. package/dist/codecs/types.d.ts +5 -0
  60. package/dist/codecs/types.js +2 -0
  61. package/dist/connection.d.ts +26 -0
  62. package/dist/connection.js +175 -0
  63. package/dist/consumer/consumer-group.d.ts +41 -0
  64. package/dist/consumer/consumer-group.js +217 -0
  65. package/dist/consumer/consumer-metadata.d.ts +7 -0
  66. package/dist/consumer/consumer-metadata.js +14 -0
  67. package/dist/consumer/consumer.d.ts +44 -0
  68. package/dist/consumer/consumer.js +225 -0
  69. package/dist/consumer/fetch-manager.d.ts +33 -0
  70. package/dist/consumer/fetch-manager.js +140 -0
  71. package/dist/consumer/fetcher.d.ts +25 -0
  72. package/dist/consumer/fetcher.js +64 -0
  73. package/dist/consumer/offset-manager.d.ts +22 -0
  74. package/dist/consumer/offset-manager.js +66 -0
  75. package/dist/consumer/processor.d.ts +19 -0
  76. package/dist/consumer/processor.js +59 -0
  77. package/dist/distributors/assignments-to-replicas.d.ts +16 -0
  78. package/dist/distributors/assignments-to-replicas.js +59 -0
  79. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  80. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  81. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  82. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  83. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  84. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  85. package/dist/distributors/partitioner.d.ts +7 -0
  86. package/dist/distributors/partitioner.js +23 -0
  87. package/dist/index.d.ts +9 -0
  88. package/dist/index.js +26 -0
  89. package/dist/metadata.d.ts +24 -0
  90. package/dist/metadata.js +106 -0
  91. package/dist/producer/producer.d.ts +24 -0
  92. package/dist/producer/producer.js +131 -0
  93. package/dist/types.d.ts +11 -0
  94. package/dist/types.js +2 -0
  95. package/dist/utils/api.d.ts +9 -0
  96. package/dist/utils/api.js +5 -0
  97. package/dist/utils/crypto.d.ts +8 -0
  98. package/dist/utils/crypto.js +18 -0
  99. package/dist/utils/decoder.d.ts +30 -0
  100. package/dist/utils/decoder.js +152 -0
  101. package/dist/utils/delay.d.ts +1 -0
  102. package/dist/utils/delay.js +5 -0
  103. package/dist/utils/encoder.d.ts +28 -0
  104. package/dist/utils/encoder.js +125 -0
  105. package/dist/utils/error.d.ts +11 -0
  106. package/dist/utils/error.js +27 -0
  107. package/dist/utils/logger.d.ts +9 -0
  108. package/dist/utils/logger.js +32 -0
  109. package/dist/utils/memo.d.ts +1 -0
  110. package/dist/utils/memo.js +16 -0
  111. package/dist/utils/murmur2.d.ts +3 -0
  112. package/dist/utils/murmur2.js +40 -0
  113. package/dist/utils/retrier.d.ts +10 -0
  114. package/dist/utils/retrier.js +22 -0
  115. package/dist/utils/tracer.d.ts +5 -0
  116. package/dist/utils/tracer.js +39 -0
  117. package/docker-compose.yml +3 -3
  118. package/examples/package-lock.json +3501 -3
  119. package/examples/package.json +8 -1
  120. package/examples/src/benchmark/common.ts +98 -0
  121. package/examples/src/benchmark/kafka-ts.ts +67 -0
  122. package/examples/src/benchmark/kafkajs.ts +51 -0
  123. package/examples/src/client.ts +4 -1
  124. package/examples/src/opentelemetry.ts +46 -0
  125. package/examples/src/producer.ts +11 -11
  126. package/package.json +4 -2
  127. package/scripts/create-scram-user.sh +4 -2
  128. package/scripts/generate-certs.sh +2 -0
  129. package/src/__snapshots__/cluster.test.ts.snap +35 -185
  130. package/src/api/fetch.ts +6 -1
  131. package/src/api/index.ts +3 -1
  132. package/src/api/metadata.ts +1 -1
  133. package/src/api/produce.ts +7 -10
  134. package/src/cluster.test.ts +2 -2
  135. package/src/cluster.ts +9 -16
  136. package/src/connection.ts +28 -15
  137. package/src/consumer/consumer-group.ts +35 -15
  138. package/src/consumer/consumer.ts +28 -18
  139. package/src/consumer/fetch-manager.ts +29 -45
  140. package/src/consumer/fetcher.ts +21 -14
  141. package/src/consumer/offset-manager.ts +18 -7
  142. package/src/consumer/processor.ts +14 -10
  143. package/src/distributors/assignments-to-replicas.ts +1 -3
  144. package/src/index.ts +1 -1
  145. package/src/metadata.ts +4 -0
  146. package/src/producer/producer.ts +11 -6
  147. package/src/utils/decoder.ts +0 -4
  148. package/src/utils/encoder.ts +26 -19
  149. package/src/utils/logger.ts +4 -4
  150. package/src/utils/tracer.ts +39 -23
  151. package/certs/ca.key +0 -52
  152. package/certs/ca.srl +0 -1
  153. package/certs/kafka.crt +0 -29
  154. package/certs/kafka.csr +0 -26
  155. package/certs/kafka.key +0 -52
  156. package/src/utils/mutex.ts +0 -31
package/.github/workflows/release.yml CHANGED
@@ -1,17 +1,30 @@
 name: Publish package
 on:
-    release:
-        types: [published]
+    push:
+        tags:
+            - 'v*'
 jobs:
     build:
         runs-on: ubuntu-latest
+        permissions:
+            contents: write
         steps:
-            - uses: actions/checkout@v4
-            - uses: actions/setup-node@v4
+            - name: Checkout
+              uses: actions/checkout@v4
+            - name: Setup node
+              uses: actions/setup-node@v4
              with:
                  node-version: '20.x'
                  registry-url: 'https://registry.npmjs.org'
-            - run: npm ci
-            - run: npm publish
+            - name: Install dependencies
+              run: npm ci
+            - name: Build
+              run: npm run build
+            - name: Create release
+              env:
+                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+              run: gh release create ${{ github.ref_name }} --generate-notes
+            - name: Publish npm package
+              run: npm publish
              env:
                  NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
package/README.md CHANGED
@@ -2,7 +2,7 @@
 
 **KafkaTS** is an Apache Kafka client library for Node.js. It provides both a low-level API for communicating directly with the Apache Kafka cluster and high-level APIs for publishing and subscribing to Kafka topics.
 
-**Please note that this project is still in early development and is not yet ready for production use. The interface before stable release is subject to change.**
+Supported Kafka versions: 3.7.0 and later
 
 ## Installation
 
@@ -96,12 +96,20 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu
 
 ### `createKafkaClient()`
 
-| Name             | Type                   | Required | Default | Description                                           |
-| ---------------- | ---------------------- | -------- | ------- | ----------------------------------------------------- |
-| clientId         | string                 | false    | _null_  | The client id used for all requests.                  |
-| bootstrapServers | TcpSocketConnectOpts[] | true     |         | List of kafka brokers for initial cluster discovery.  |
-| sasl             | SASLProvider           | false    |         | SASL provider (see "Supported SASL mechanisms" below) |
-| ssl              | TLSSocketOptions       | false    |         | SSL configuration.                                    |
+| Name             | Type                   | Required | Default | Description                                          |
+| ---------------- | ---------------------- | -------- | ------- | ---------------------------------------------------- |
+| clientId         | string                 | false    | _null_  | The client id used for all requests.                 |
+| bootstrapServers | TcpSocketConnectOpts[] | true     |         | List of kafka brokers for initial cluster discovery. |
+| sasl             | SASLProvider           | false    |         | SASL provider                                        |
+| ssl              | TLSSocketOptions       | false    |         | SSL configuration.                                   |
+
+#### Supported SASL mechanisms
+
+- PLAIN: `saslPlain({ username, password })`
+- SCRAM-SHA-256: `saslScramSha256({ username, password })`
+- SCRAM-SHA-512: `saslScramSha512({ username, password })`
+
+Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.
 
 ### `kafka.startConsumer()`
 
@@ -151,17 +159,3 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu
 | key     | Buffer \| null         | false    | _null_  | Message key     |
 | value   | Buffer \| null         | true     |         | Message value   |
 | headers | Record<string, string> | false    | _null_  | Message headers |
-
-### Supported SASL mechanisms
-
-- PLAIN: `saslPlain({ username, password })`
-- SCRAM-SHA-256: `saslScramSha256({ username, password })`
-- SCRAM-SHA-512: `saslScramSha512({ username, password })`
-
-Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.
-
-## Backlog
-
-Minimal set of features left to implement before a stable release:
-
-- API versioning (Currently only tested against Kafka 3.7+)
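The relocated options table and SASL list above document the client entry point. A minimal usage sketch of those options follows; the export names (`createKafkaClient`, `saslScramSha512`) come from the README itself, while the broker address and credentials are placeholders:

```ts
// Hypothetical usage sketch of the createKafkaClient() options documented
// above. host/port and credentials are placeholders, not real defaults.
import { createKafkaClient, saslScramSha512 } from 'kafka-ts';

const kafka = createKafkaClient({
    clientId: 'example-app', // optional; sent with every request
    bootstrapServers: [{ host: 'localhost', port: 9092 }], // TcpSocketConnectOpts[]
    sasl: saslScramSha512({ username: 'admin', password: 'secret' }), // optional SASLProvider
});
```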
package/dist/api/api-versions.d.ts ADDED
@@ -0,0 +1,9 @@
+export declare const API_VERSIONS: import("../utils/api.js").Api<unknown, {
+    errorCode: number;
+    versions: {
+        apiKey: number;
+        minVersion: number;
+        maxVersion: number;
+    }[];
+    throttleTimeMs: number;
+}>;
package/dist/api/api-versions.js ADDED
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.API_VERSIONS = void 0;
+const api_js_1 = require("../utils/api.js");
+const error_js_1 = require("../utils/error.js");
+exports.API_VERSIONS = (0, api_js_1.createApi)({
+    apiKey: 18,
+    apiVersion: 2,
+    request: (encoder) => encoder,
+    response: (decoder) => {
+        const result = {
+            errorCode: decoder.readInt16(),
+            versions: decoder.readArray((version) => ({
+                apiKey: version.readInt16(),
+                minVersion: version.readInt16(),
+                maxVersion: version.readInt16(),
+            })),
+            throttleTimeMs: decoder.readInt32(),
+        };
+        if (result.errorCode)
+            throw new error_js_1.KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});
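Every module added under dist/api follows this same shape: a `createApi({ apiKey, apiVersion, request, response })` call pairing an encoder callback with a decoder callback. A sketch of the contract as it can be inferred from these call sites; the actual definition lives in package/src/utils/api.ts and may differ:

```ts
// Inferred sketch only; the real Api type and createApi helper are defined
// in src/utils/api.ts, and Encoder/Decoder in src/utils/encoder|decoder.ts.
import type { Encoder } from './utils/encoder';
import type { Decoder } from './utils/decoder';

export type Api<Request, Response> = {
    apiKey: number; // Kafka protocol API key (e.g. 18 = ApiVersions)
    apiVersion: number; // pinned schema version for request/response
    request: (encoder: Encoder, data: Request) => Encoder; // writes the request body
    response: (decoder: Decoder) => Response | Promise<Response>; // parses the response body
};

// The helper appears to simply brand the definition object with these types.
export const createApi = <Request, Response>(api: Api<Request, Response>): Api<Request, Response> => api;
```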
package/dist/api/create-topics.d.ts ADDED
@@ -0,0 +1,38 @@
+export declare const CREATE_TOPICS: import("../utils/api").Api<{
+    topics: {
+        name: string;
+        numPartitions: number;
+        replicationFactor: number;
+        assignments: {
+            partitionIndex: number;
+            brokerIds: number[];
+        }[];
+        configs: {
+            name: string;
+            value: string | null;
+        }[];
+    }[];
+    timeoutMs: number;
+    validateOnly: boolean;
+}, {
+    _tag: void;
+    throttleTimeMs: number;
+    topics: {
+        name: string | null;
+        topicId: string;
+        errorCode: number;
+        errorMessage: string | null;
+        numPartitions: number;
+        replicationFactor: number;
+        configs: {
+            name: string | null;
+            value: string | null;
+            readOnly: boolean;
+            configSource: number;
+            isSensitive: boolean;
+            _tag: void;
+        }[];
+        _tag: void;
+    }[];
+    _tag2: void;
+}>;
package/dist/api/create-topics.js ADDED
@@ -0,0 +1,53 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CREATE_TOPICS = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.CREATE_TOPICS = (0, api_1.createApi)({
+    apiKey: 19,
+    apiVersion: 7,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeCompactArray(data.topics, (encoder, topic) => encoder
+        .writeCompactString(topic.name)
+        .writeInt32(topic.numPartitions)
+        .writeInt16(topic.replicationFactor)
+        .writeCompactArray(topic.assignments, (encoder, assignment) => encoder
+        .writeInt32(assignment.partitionIndex)
+        .writeCompactArray(assignment.brokerIds, (encoder, brokerId) => encoder.writeInt32(brokerId))
+        .writeUVarInt(0))
+        .writeCompactArray(topic.configs, (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0))
+        .writeUVarInt(0))
+        .writeInt32(data.timeoutMs)
+        .writeBoolean(data.validateOnly)
+        .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            topics: decoder.readCompactArray((topic) => ({
+                name: topic.readCompactString(),
+                topicId: topic.readUUID(),
+                errorCode: topic.readInt16(),
+                errorMessage: topic.readCompactString(),
+                numPartitions: topic.readInt32(),
+                replicationFactor: topic.readInt16(),
+                configs: topic.readCompactArray((config) => ({
+                    name: config.readCompactString(),
+                    value: config.readCompactString(),
+                    readOnly: config.readBoolean(),
+                    configSource: config.readInt8(),
+                    isSensitive: config.readBoolean(),
+                    _tag: config.readTagBuffer(),
+                })),
+                _tag: topic.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.topics.forEach((topic) => {
+            if (topic.errorCode)
+                throw new error_1.KafkaTSApiError(topic.errorCode, topic.errorMessage, result);
+        });
+        return result;
+    },
+});
package/dist/api/delete-topics.d.ts ADDED
@@ -0,0 +1,18 @@
+export declare const DELETE_TOPICS: import("../utils/api").Api<{
+    topics: {
+        name: string | null;
+        topicId: string | null;
+    }[];
+    timeoutMs: number;
+}, {
+    _tag: void;
+    throttleTimeMs: number;
+    responses: {
+        name: string | null;
+        topicId: string;
+        errorCode: number;
+        errorMessage: string | null;
+        _tag: void;
+    }[];
+    _tag2: void;
+}>;
package/dist/api/delete-topics.js ADDED
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DELETE_TOPICS = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.DELETE_TOPICS = (0, api_1.createApi)({
+    apiKey: 20,
+    apiVersion: 6,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeCompactArray(data.topics, (encoder, topic) => encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0))
+        .writeInt32(data.timeoutMs)
+        .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            responses: decoder.readCompactArray((decoder) => ({
+                name: decoder.readCompactString(),
+                topicId: decoder.readUUID(),
+                errorCode: decoder.readInt16(),
+                errorMessage: decoder.readCompactString(),
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.responses.forEach((response) => {
+            if (response.errorCode)
+                throw new error_1.KafkaTSApiError(response.errorCode, response.errorMessage, result);
+        });
+        return result;
+    },
+});
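CREATE_TOPICS and DELETE_TOPICS share an error convention: the whole response is decoded first, then any entry with a non-zero errorCode is thrown as a KafkaTSApiError carrying the decoded result. A hedged sketch of caller-side handling; the `kafka-ts` import path and the `errorCode` property are assumptions based on the constructor calls above, and `deleteTopicSomehow` is a hypothetical stand-in:

```ts
// Hedged sketch; assumes KafkaTSApiError is exported by the package and
// exposes the errorCode passed to its constructor in the modules above.
import { KafkaTSApiError } from 'kafka-ts';

declare function deleteTopicSomehow(name: string): Promise<void>; // hypothetical caller of DELETE_TOPICS

async function removeTopic(name: string) {
    try {
        await deleteTopicSomehow(name);
    } catch (error) {
        if (error instanceof KafkaTSApiError) {
            // Broker-reported failure (e.g. unknown topic), not a transport error.
            console.error(`Kafka error ${error.errorCode}: ${error.message}`);
            return;
        }
        throw error;
    }
}
```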
package/dist/api/fetch.d.ts ADDED
@@ -0,0 +1,84 @@
+/// <reference types="node" />
+export declare const enum IsolationLevel {
+    READ_UNCOMMITTED = 0,
+    READ_COMMITTED = 1
+}
+export type FetchResponse = Awaited<ReturnType<(typeof FETCH)['response']>>;
+export declare const FETCH: import("../utils/api").Api<{
+    maxWaitMs: number;
+    minBytes: number;
+    maxBytes: number;
+    isolationLevel: IsolationLevel;
+    sessionId: number;
+    sessionEpoch: number;
+    topics: {
+        topicId: string;
+        partitions: {
+            partition: number;
+            currentLeaderEpoch: number;
+            fetchOffset: bigint;
+            lastFetchedEpoch: number;
+            logStartOffset: bigint;
+            partitionMaxBytes: number;
+        }[];
+    }[];
+    forgottenTopicsData: {
+        topicId: string;
+        partitions: number[];
+    }[];
+    rackId: string;
+}, {
+    responses: {
+        partitions: {
+            records: {
+                records: {
+                    attributes: number;
+                    timestampDelta: bigint;
+                    offsetDelta: number;
+                    key: Buffer | null;
+                    value: Buffer | null;
+                    headers: {
+                        key: Buffer | null;
+                        value: Buffer | null;
+                    }[];
+                }[];
+                compression: number;
+                timestampType: string;
+                isTransactional: boolean;
+                isControlBatch: boolean;
+                hasDeleteHorizonMs: boolean;
+                baseOffset: bigint;
+                batchLength: number;
+                partitionLeaderEpoch: number;
+                magic: number;
+                crc: number;
+                attributes: number;
+                lastOffsetDelta: number;
+                baseTimestamp: bigint;
+                maxTimestamp: bigint;
+                producerId: bigint;
+                producerEpoch: number;
+                baseSequence: number;
+            }[];
+            partitionIndex: number;
+            errorCode: number;
+            highWatermark: bigint;
+            lastStableOffset: bigint;
+            logStartOffset: bigint;
+            abortedTransactions: {
+                producerId: bigint;
+                firstOffset: bigint;
+                _tag: void;
+            }[];
+            preferredReadReplica: number;
+            _tag: void;
+        }[];
+        topicId: string;
+        _tag: void;
+    }[];
+    _tag: void;
+    throttleTimeMs: number;
+    errorCode: number;
+    sessionId: number;
+    _tag2: void;
+}>;
package/dist/api/fetch.js ADDED
@@ -0,0 +1,142 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FETCH = void 0;
+const codecs_1 = require("../codecs");
+const api_1 = require("../utils/api");
+const decoder_1 = require("../utils/decoder");
+const error_1 = require("../utils/error");
+exports.FETCH = (0, api_1.createApi)({
+    apiKey: 1,
+    apiVersion: 16,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeInt32(data.maxWaitMs)
+        .writeInt32(data.minBytes)
+        .writeInt32(data.maxBytes)
+        .writeInt8(data.isolationLevel)
+        .writeInt32(data.sessionId)
+        .writeInt32(data.sessionEpoch)
+        .writeCompactArray(data.topics, (encoder, topic) => encoder
+        .writeUUID(topic.topicId)
+        .writeCompactArray(topic.partitions, (encoder, partition) => encoder
+        .writeInt32(partition.partition)
+        .writeInt32(partition.currentLeaderEpoch)
+        .writeInt64(partition.fetchOffset)
+        .writeInt32(partition.lastFetchedEpoch)
+        .writeInt64(partition.logStartOffset)
+        .writeInt32(partition.partitionMaxBytes)
+        .writeUVarInt(0))
+        .writeUVarInt(0))
+        .writeCompactArray(data.forgottenTopicsData, (encoder, forgottenTopic) => encoder
+        .writeUUID(forgottenTopic.topicId)
+        .writeCompactArray(forgottenTopic.partitions, (encoder, partition) => encoder.writeInt32(partition))
+        .writeUVarInt(0))
+        .writeCompactString(data.rackId)
+        .writeUVarInt(0),
+    response: async (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            sessionId: decoder.readInt32(),
+            responses: decoder.readCompactArray((response) => ({
+                topicId: response.readUUID(),
+                partitions: response.readCompactArray((partition) => ({
+                    partitionIndex: partition.readInt32(),
+                    errorCode: partition.readInt16(),
+                    highWatermark: partition.readInt64(),
+                    lastStableOffset: partition.readInt64(),
+                    logStartOffset: partition.readInt64(),
+                    abortedTransactions: partition.readCompactArray((abortedTransaction) => ({
+                        producerId: abortedTransaction.readInt64(),
+                        firstOffset: abortedTransaction.readInt64(),
+                        _tag: abortedTransaction.readTagBuffer(),
+                    })),
+                    preferredReadReplica: partition.readInt32(),
+                    records: decodeRecordBatch(partition),
+                    _tag: partition.readTagBuffer(),
+                })),
+                _tag: response.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode)
+            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
+        result.responses.forEach((response) => {
+            response.partitions.forEach((partition) => {
+                if (partition.errorCode)
+                    throw new error_1.KafkaTSApiError(partition.errorCode, null, result);
+            });
+        });
+        const decompressedResponses = await Promise.all(result.responses.map(async (response) => ({
+            ...response,
+            partitions: await Promise.all(response.partitions.map(async (partition) => ({
+                ...partition,
+                records: await Promise.all(partition.records.map(async ({ recordsLength, compressedRecords, ...record }) => {
+                    const { decompress } = (0, codecs_1.findCodec)(record.compression);
+                    const decompressedRecords = await decompress(compressedRecords);
+                    const decompressedDecoder = new decoder_1.Decoder(Buffer.concat([recordsLength, decompressedRecords]));
+                    return { ...record, records: decodeRecord(decompressedDecoder) };
+                })),
+            }))),
+        })));
+        return { ...result, responses: decompressedResponses };
+    },
+});
+const decodeRecordBatch = (decoder) => {
+    const size = decoder.readUVarInt() - 1;
+    if (size <= 0) {
+        return [];
+    }
+    const recordBatchDecoder = new decoder_1.Decoder(decoder.read(size));
+    const results = [];
+    while (recordBatchDecoder.getBufferLength() > recordBatchDecoder.getOffset() + 12) {
+        const baseOffset = recordBatchDecoder.readInt64();
+        const batchLength = recordBatchDecoder.readInt32();
+        if (!batchLength) {
+            continue;
+        }
+        const batchDecoder = new decoder_1.Decoder(recordBatchDecoder.read(batchLength));
+        const result = {
+            baseOffset,
+            batchLength,
+            partitionLeaderEpoch: batchDecoder.readInt32(),
+            magic: batchDecoder.readInt8(),
+            crc: batchDecoder.readUInt32(),
+            attributes: batchDecoder.readInt16(),
+            lastOffsetDelta: batchDecoder.readInt32(),
+            baseTimestamp: batchDecoder.readInt64(),
+            maxTimestamp: batchDecoder.readInt64(),
+            producerId: batchDecoder.readInt64(),
+            producerEpoch: batchDecoder.readInt16(),
+            baseSequence: batchDecoder.readInt32(),
+            recordsLength: batchDecoder.read(4),
+            compressedRecords: batchDecoder.read(),
+        };
+        const compression = result.attributes & 0x07;
+        const timestampType = (result.attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime';
+        const isTransactional = !!((result.attributes & 0x10) >> 4);
+        const isControlBatch = !!((result.attributes & 0x20) >> 5);
+        const hasDeleteHorizonMs = !!((result.attributes & 0x40) >> 6);
+        results.push({
+            ...result,
+            compression,
+            timestampType,
+            isTransactional,
+            isControlBatch,
+            hasDeleteHorizonMs,
+        });
+    }
+    return results;
+};
+const decodeRecord = (decoder) => decoder.readRecords((record) => ({
+    attributes: record.readInt8(),
+    timestampDelta: record.readVarLong(),
+    offsetDelta: record.readVarInt(),
+    key: record.readVarIntBuffer(),
+    value: record.readVarIntBuffer(),
+    headers: record.readVarIntArray((header) => ({
+        key: header.readVarIntBuffer(),
+        value: header.readVarIntBuffer(),
+    })),
+}));
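The batch-level `attributes` field is an int16 bit field, which `decodeRecordBatch` above unpacks with bit masks. A worked example using the same masks on a sample value:

```ts
// Worked example of the mask logic in decodeRecordBatch, for attributes = 0x1a
// (binary 0001 1010). Codec numbering follows the Kafka record batch format.
const attributes = 0x1a;
console.log(attributes & 0x07); // 2 -> compression codec (2 = snappy)
console.log((attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime'); // 'LogAppendTime'
console.log(!!((attributes & 0x10) >> 4)); // true  -> transactional batch
console.log(!!((attributes & 0x20) >> 5)); // false -> not a control batch
console.log(!!((attributes & 0x40) >> 6)); // false -> no delete horizon
```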
package/dist/api/find-coordinator.d.ts ADDED
@@ -0,0 +1,21 @@
+export declare const KEY_TYPE: {
+    GROUP: number;
+    TRANSACTION: number;
+};
+export declare const FIND_COORDINATOR: import("../utils/api").Api<{
+    keyType: number;
+    keys: string[];
+}, {
+    _tag: void;
+    throttleTimeMs: number;
+    coordinators: {
+        key: string | null;
+        nodeId: number;
+        host: string;
+        port: number;
+        errorCode: number;
+        errorMessage: string | null;
+        _tag: void;
+    }[];
+    _tag2: void;
+}>;
package/dist/api/find-coordinator.js ADDED
@@ -0,0 +1,39 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FIND_COORDINATOR = exports.KEY_TYPE = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.KEY_TYPE = {
+    GROUP: 0,
+    TRANSACTION: 1,
+};
+exports.FIND_COORDINATOR = (0, api_1.createApi)({
+    apiKey: 10,
+    apiVersion: 4,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeInt8(data.keyType)
+        .writeCompactArray(data.keys, (encoder, key) => encoder.writeCompactString(key))
+        .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            coordinators: decoder.readCompactArray((decoder) => ({
+                key: decoder.readCompactString(),
+                nodeId: decoder.readInt32(),
+                host: decoder.readCompactString(),
+                port: decoder.readInt32(),
+                errorCode: decoder.readInt16(),
+                errorMessage: decoder.readCompactString(),
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.coordinators.forEach((coordinator) => {
+            if (coordinator.errorCode)
+                throw new error_1.KafkaTSApiError(coordinator.errorCode, coordinator.errorMessage, result);
+        });
+        return result;
+    },
+});
package/dist/api/heartbeat.d.ts ADDED
@@ -0,0 +1,11 @@
+export declare const HEARTBEAT: import("../utils/api").Api<{
+    groupId: string;
+    generationId: number;
+    memberId: string;
+    groupInstanceId: string | null;
+}, {
+    _tag: void;
+    throttleTimeMs: number;
+    errorCode: number;
+    _tag2: void;
+}>;
package/dist/api/heartbeat.js ADDED
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HEARTBEAT = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.HEARTBEAT = (0, api_1.createApi)({
+    apiKey: 12,
+    apiVersion: 4,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeCompactString(data.groupId)
+        .writeInt32(data.generationId)
+        .writeCompactString(data.memberId)
+        .writeCompactString(data.groupInstanceId)
+        .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode)
+            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});