kafka-ts 0.0.1-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196):
  1. package/.prettierrc +7 -0
  2. package/LICENSE +24 -0
  3. package/README.md +88 -0
  4. package/certs/ca.crt +29 -0
  5. package/certs/ca.key +52 -0
  6. package/certs/ca.srl +1 -0
  7. package/certs/kafka.crt +29 -0
  8. package/certs/kafka.csr +26 -0
  9. package/certs/kafka.key +52 -0
  10. package/certs/kafka.keystore.jks +0 -0
  11. package/certs/kafka.truststore.jks +0 -0
  12. package/dist/api/api-versions.d.ts +9 -0
  13. package/dist/api/api-versions.js +24 -0
  14. package/dist/api/create-topics.d.ts +38 -0
  15. package/dist/api/create-topics.js +53 -0
  16. package/dist/api/delete-topics.d.ts +18 -0
  17. package/dist/api/delete-topics.js +33 -0
  18. package/dist/api/fetch.d.ts +77 -0
  19. package/dist/api/fetch.js +106 -0
  20. package/dist/api/find-coordinator.d.ts +21 -0
  21. package/dist/api/find-coordinator.js +39 -0
  22. package/dist/api/heartbeat.d.ts +11 -0
  23. package/dist/api/heartbeat.js +27 -0
  24. package/dist/api/index.d.ts +573 -0
  25. package/dist/api/index.js +164 -0
  26. package/dist/api/init-producer-id.d.ts +13 -0
  27. package/dist/api/init-producer-id.js +29 -0
  28. package/dist/api/join-group.d.ts +34 -0
  29. package/dist/api/join-group.js +51 -0
  30. package/dist/api/leave-group.d.ts +19 -0
  31. package/dist/api/leave-group.js +39 -0
  32. package/dist/api/list-offsets.d.ts +29 -0
  33. package/dist/api/list-offsets.js +48 -0
  34. package/dist/api/metadata.d.ts +40 -0
  35. package/dist/api/metadata.js +58 -0
  36. package/dist/api/offset-commit.d.ts +28 -0
  37. package/dist/api/offset-commit.js +48 -0
  38. package/dist/api/offset-fetch.d.ts +33 -0
  39. package/dist/api/offset-fetch.js +57 -0
  40. package/dist/api/produce.d.ts +53 -0
  41. package/dist/api/produce.js +129 -0
  42. package/dist/api/sasl-authenticate.d.ts +11 -0
  43. package/dist/api/sasl-authenticate.js +23 -0
  44. package/dist/api/sasl-handshake.d.ts +6 -0
  45. package/dist/api/sasl-handshake.js +19 -0
  46. package/dist/api/sync-group.d.ts +24 -0
  47. package/dist/api/sync-group.js +36 -0
  48. package/dist/broker.d.ts +29 -0
  49. package/dist/broker.js +60 -0
  50. package/dist/client.d.ts +23 -0
  51. package/dist/client.js +36 -0
  52. package/dist/cluster.d.ts +24 -0
  53. package/dist/cluster.js +72 -0
  54. package/dist/connection.d.ts +25 -0
  55. package/dist/connection.js +155 -0
  56. package/dist/consumer/consumer-group.d.ts +36 -0
  57. package/dist/consumer/consumer-group.js +182 -0
  58. package/dist/consumer/consumer-metadata.d.ts +7 -0
  59. package/dist/consumer/consumer-metadata.js +14 -0
  60. package/dist/consumer/consumer.d.ts +37 -0
  61. package/dist/consumer/consumer.js +178 -0
  62. package/dist/consumer/metadata.d.ts +24 -0
  63. package/dist/consumer/metadata.js +64 -0
  64. package/dist/consumer/offset-manager.d.ts +22 -0
  65. package/dist/consumer/offset-manager.js +56 -0
  66. package/dist/distributors/assignments-to-replicas.d.ts +17 -0
  67. package/dist/distributors/assignments-to-replicas.js +60 -0
  68. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  69. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  70. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  71. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  72. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  73. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  74. package/dist/examples/src/replicator.js +34 -0
  75. package/dist/examples/src/utils/json.js +5 -0
  76. package/dist/index.d.ts +3 -0
  77. package/dist/index.js +19 -0
  78. package/dist/metadata.d.ts +24 -0
  79. package/dist/metadata.js +89 -0
  80. package/dist/producer/producer.d.ts +19 -0
  81. package/dist/producer/producer.js +111 -0
  82. package/dist/request-handler.d.ts +16 -0
  83. package/dist/request-handler.js +67 -0
  84. package/dist/request-handler.test.d.ts +1 -0
  85. package/dist/request-handler.test.js +340 -0
  86. package/dist/src/api/api-versions.js +18 -0
  87. package/dist/src/api/create-topics.js +46 -0
  88. package/dist/src/api/delete-topics.js +26 -0
  89. package/dist/src/api/fetch.js +95 -0
  90. package/dist/src/api/find-coordinator.js +34 -0
  91. package/dist/src/api/heartbeat.js +22 -0
  92. package/dist/src/api/index.js +38 -0
  93. package/dist/src/api/init-producer-id.js +24 -0
  94. package/dist/src/api/join-group.js +48 -0
  95. package/dist/src/api/leave-group.js +30 -0
  96. package/dist/src/api/list-offsets.js +39 -0
  97. package/dist/src/api/metadata.js +47 -0
  98. package/dist/src/api/offset-commit.js +39 -0
  99. package/dist/src/api/offset-fetch.js +44 -0
  100. package/dist/src/api/produce.js +119 -0
  101. package/dist/src/api/sync-group.js +31 -0
  102. package/dist/src/broker.js +35 -0
  103. package/dist/src/connection.js +21 -0
  104. package/dist/src/consumer/consumer-group.js +131 -0
  105. package/dist/src/consumer/consumer.js +103 -0
  106. package/dist/src/consumer/metadata.js +52 -0
  107. package/dist/src/consumer/offset-manager.js +23 -0
  108. package/dist/src/index.js +19 -0
  109. package/dist/src/producer/producer.js +84 -0
  110. package/dist/src/request-handler.js +57 -0
  111. package/dist/src/request-handler.test.js +321 -0
  112. package/dist/src/types.js +2 -0
  113. package/dist/src/utils/api.js +5 -0
  114. package/dist/src/utils/decoder.js +161 -0
  115. package/dist/src/utils/encoder.js +137 -0
  116. package/dist/src/utils/error.js +10 -0
  117. package/dist/types.d.ts +9 -0
  118. package/dist/types.js +2 -0
  119. package/dist/utils/api.d.ts +9 -0
  120. package/dist/utils/api.js +5 -0
  121. package/dist/utils/debug.d.ts +2 -0
  122. package/dist/utils/debug.js +11 -0
  123. package/dist/utils/decoder.d.ts +29 -0
  124. package/dist/utils/decoder.js +147 -0
  125. package/dist/utils/delay.d.ts +1 -0
  126. package/dist/utils/delay.js +5 -0
  127. package/dist/utils/encoder.d.ts +28 -0
  128. package/dist/utils/encoder.js +122 -0
  129. package/dist/utils/error.d.ts +11 -0
  130. package/dist/utils/error.js +27 -0
  131. package/dist/utils/memo.d.ts +1 -0
  132. package/dist/utils/memo.js +16 -0
  133. package/dist/utils/retrier.d.ts +10 -0
  134. package/dist/utils/retrier.js +22 -0
  135. package/dist/utils/tracer.d.ts +1 -0
  136. package/dist/utils/tracer.js +26 -0
  137. package/docker-compose.yml +104 -0
  138. package/examples/node_modules/.package-lock.json +22 -0
  139. package/examples/package-lock.json +30 -0
  140. package/examples/package.json +14 -0
  141. package/examples/src/client.ts +9 -0
  142. package/examples/src/consumer.ts +17 -0
  143. package/examples/src/create-topic.ts +37 -0
  144. package/examples/src/producer.ts +24 -0
  145. package/examples/src/replicator.ts +25 -0
  146. package/examples/src/utils/json.ts +1 -0
  147. package/examples/tsconfig.json +7 -0
  148. package/log4j.properties +95 -0
  149. package/package.json +17 -0
  150. package/scripts/generate-certs.sh +24 -0
  151. package/src/__snapshots__/request-handler.test.ts.snap +1687 -0
  152. package/src/api/api-versions.ts +21 -0
  153. package/src/api/create-topics.ts +78 -0
  154. package/src/api/delete-topics.ts +42 -0
  155. package/src/api/fetch.ts +143 -0
  156. package/src/api/find-coordinator.ts +39 -0
  157. package/src/api/heartbeat.ts +33 -0
  158. package/src/api/index.ts +164 -0
  159. package/src/api/init-producer-id.ts +35 -0
  160. package/src/api/join-group.ts +67 -0
  161. package/src/api/leave-group.ts +48 -0
  162. package/src/api/list-offsets.ts +65 -0
  163. package/src/api/metadata.ts +66 -0
  164. package/src/api/offset-commit.ts +67 -0
  165. package/src/api/offset-fetch.ts +74 -0
  166. package/src/api/produce.ts +173 -0
  167. package/src/api/sasl-authenticate.ts +21 -0
  168. package/src/api/sasl-handshake.ts +16 -0
  169. package/src/api/sync-group.ts +54 -0
  170. package/src/broker.ts +74 -0
  171. package/src/client.ts +47 -0
  172. package/src/cluster.ts +87 -0
  173. package/src/connection.ts +141 -0
  174. package/src/consumer/consumer-group.ts +209 -0
  175. package/src/consumer/consumer-metadata.ts +14 -0
  176. package/src/consumer/consumer.ts +229 -0
  177. package/src/consumer/offset-manager.ts +93 -0
  178. package/src/distributors/assignments-to-replicas.test.ts +43 -0
  179. package/src/distributors/assignments-to-replicas.ts +85 -0
  180. package/src/distributors/messages-to-topic-partition-leaders.test.ts +32 -0
  181. package/src/distributors/messages-to-topic-partition-leaders.ts +19 -0
  182. package/src/index.ts +3 -0
  183. package/src/metadata.ts +122 -0
  184. package/src/producer/producer.ts +132 -0
  185. package/src/request-handler.test.ts +366 -0
  186. package/src/types.ts +9 -0
  187. package/src/utils/api.ts +11 -0
  188. package/src/utils/debug.ts +9 -0
  189. package/src/utils/decoder.ts +168 -0
  190. package/src/utils/delay.ts +1 -0
  191. package/src/utils/encoder.ts +141 -0
  192. package/src/utils/error.ts +21 -0
  193. package/src/utils/memo.ts +12 -0
  194. package/src/utils/retrier.ts +39 -0
  195. package/src/utils/tracer.ts +28 -0
  196. package/tsconfig.json +17 -0
@@ -0,0 +1,65 @@
1
import { createApi } from "../utils/api";
import { KafkaTSApiError } from "../utils/error";
import { IsolationLevel } from "./fetch";

// ListOffsets (apiKey 2, apiVersion 8): resolves partition offsets by
// timestamp. v8 is a "flexible" protocol version, so strings/arrays use the
// compact encoding and every structure ends with an (empty) tagged-fields
// section — that is what each writeUVarInt(0)/readTagBuffer() call is.
export const LIST_OFFSETS = createApi({
    apiKey: 2,
    apiVersion: 8,
    // Encodes the request body. Field order must match the Kafka wire format
    // exactly; do not reorder these calls.
    request: (
        encoder,
        data: {
            // -1 identifies a normal (non-replica) client per the protocol spec
            replicaId: number;
            isolationLevel: IsolationLevel;
            topics: {
                name: string;
                partitions: {
                    partitionIndex: number;
                    currentLeaderEpoch: number;
                    // protocol sentinel timestamps: -1n = latest, -2n = earliest
                    timestamp: bigint;
                }[];
            }[];
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeInt32(data.replicaId)
            .writeInt8(data.isolationLevel)
            .writeCompactArray(data.topics, (encoder, topic) =>
                encoder
                    .writeCompactString(topic.name)
                    .writeCompactArray(topic.partitions, (encoder, partition) =>
                        encoder
                            .writeInt32(partition.partitionIndex)
                            .writeInt32(partition.currentLeaderEpoch)
                            .writeInt64(partition.timestamp)
                            .writeUVarInt(0), // partition tagged fields
                    )
                    .writeUVarInt(0), // topic tagged fields
            )
            .writeUVarInt(0), // request tagged fields
    // Decodes the response; read order mirrors the wire layout exactly.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            throttleTimeMs: decoder.readInt32(),
            topics: decoder.readCompactArray((decoder) => ({
                name: decoder.readCompactString()!,
                partitions: decoder.readCompactArray((decoder) => ({
                    partitionIndex: decoder.readInt32(),
                    errorCode: decoder.readInt16(),
                    timestamp: decoder.readInt64(),
                    offset: decoder.readInt64(),
                    leaderEpoch: decoder.readInt32(),
                    _tag: decoder.readTagBuffer(),
                })),
                _tag: decoder.readTagBuffer(),
            })),
            _tag2: decoder.readTagBuffer(),
        };
        // Surface the first per-partition error as an exception; the fully
        // decoded response is attached for debugging.
        result.topics.forEach((topic) => {
            topic.partitions.forEach((partition) => {
                if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
            });
        });
        return result;
    },
});
@@ -0,0 +1,66 @@
1
import { createApi } from "../utils/api";
import { KafkaTSApiError } from "../utils/error";

// Convenience alias for the decoded Metadata response shape.
export type Metadata = ReturnType<(typeof METADATA)["response"]>;

// Metadata (apiKey 3, apiVersion 12): fetches the cluster layout — brokers,
// topics, and per-partition leader/replica information. v12 is a flexible
// version (compact encodings + empty tagged-fields markers throughout).
export const METADATA = createApi({
    apiKey: 3,
    apiVersion: 12,
    request: (
        encoder,
        data: {
            // null requests metadata for all topics
            topics: { id: string | null; name: string }[] | null;
            allowTopicAutoCreation: boolean;
            includeTopicAuthorizedOperations: boolean;
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeCompactArray(data.topics, (encoder, topic) =>
                encoder.writeUUID(topic.id).writeCompactString(topic.name).writeUVarInt(0),
            )
            .writeBoolean(data.allowTopicAutoCreation)
            .writeBoolean(data.includeTopicAuthorizedOperations)
            .writeUVarInt(0),
    // Decodes the response; read order mirrors the wire layout exactly.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            throttleTimeMs: decoder.readInt32(),
            brokers: decoder.readCompactArray((broker) => ({
                nodeId: broker.readInt32(),
                host: broker.readCompactString()!,
                port: broker.readInt32(),
                rack: broker.readCompactString(),
                _tag: broker.readTagBuffer(),
            })),
            clusterId: decoder.readCompactString(),
            controllerId: decoder.readInt32(),
            topics: decoder.readCompactArray((topic) => ({
                errorCode: topic.readInt16(),
                name: topic.readCompactString()!,
                topicId: topic.readUUID(),
                isInternal: topic.readBoolean(),
                partitions: topic.readCompactArray((partition) => ({
                    errorCode: partition.readInt16(),
                    partitionIndex: partition.readInt32(),
                    leaderId: partition.readInt32(),
                    leaderEpoch: partition.readInt32(),
                    replicaNodes: partition.readCompactArray((node) => node.readInt32()),
                    isrNodes: partition.readCompactArray((node) => node.readInt32()),
                    offlineReplicas: partition.readCompactArray((node) => node.readInt32()),
                    _tag: partition.readTagBuffer(),
                })),
                topicAuthorizedOperations: topic.readInt32(),
                _tag: topic.readTagBuffer(),
            })),
            _tag2: decoder.readTagBuffer(),
        };
        // Raise the first topic- or partition-level error as an exception.
        result.topics.forEach((topic) => {
            if (topic.errorCode) throw new KafkaTSApiError(topic.errorCode, null, result);
            topic.partitions.forEach((partition) => {
                if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
            });
        });
        return result;
    },
});
@@ -0,0 +1,67 @@
1
import { createApi } from "../utils/api";
import { KafkaTSApiError } from "../utils/error";

// OffsetCommit (apiKey 8, apiVersion 9): persists consumed offsets for a
// consumer group. v9 is a flexible version (compact encodings + empty
// tagged-fields markers via writeUVarInt(0)/readTagBuffer()).
export const OFFSET_COMMIT = createApi({
    apiKey: 8,
    apiVersion: 9,
    // Encodes the request body; field order must match the wire format.
    request: (
        encoder,
        data: {
            groupId: string;
            // classic groups send the generation id here; KIP-848 groups the member epoch
            generationIdOrMemberEpoch: number;
            memberId: string;
            groupInstanceId: string | null; // non-null only for static membership
            topics: {
                name: string;
                partitions: {
                    partitionIndex: number;
                    committedOffset: bigint;
                    committedLeaderEpoch: number;
                    committedMetadata: string | null;
                }[];
            }[];
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeCompactString(data.groupId)
            .writeInt32(data.generationIdOrMemberEpoch)
            .writeCompactString(data.memberId)
            .writeCompactString(data.groupInstanceId)
            .writeCompactArray(data.topics, (encoder, topic) =>
                encoder
                    .writeCompactString(topic.name)
                    .writeCompactArray(topic.partitions, (encoder, partition) =>
                        encoder
                            .writeInt32(partition.partitionIndex)
                            .writeInt64(partition.committedOffset)
                            .writeInt32(partition.committedLeaderEpoch)
                            .writeCompactString(partition.committedMetadata)
                            .writeUVarInt(0), // partition tagged fields
                    )
                    .writeUVarInt(0), // topic tagged fields
            )
            .writeUVarInt(0), // request tagged fields
    // Decodes the response; read order mirrors the wire layout exactly.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            throttleTimeMs: decoder.readInt32(),
            topics: decoder.readCompactArray((decoder) => ({
                name: decoder.readCompactString(),
                partitions: decoder.readCompactArray((decoder) => ({
                    partitionIndex: decoder.readInt32(),
                    errorCode: decoder.readInt16(),
                    _tag: decoder.readTagBuffer(),
                })),
                _tag: decoder.readTagBuffer(),
            })),
            _tag2: decoder.readTagBuffer(),
        };
        // Surface the first per-partition error as an exception.
        result.topics.forEach((topic) => {
            topic.partitions.forEach((partition) => {
                if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
            });
        });
        return result;
    },
});
@@ -0,0 +1,74 @@
1
import { createApi } from "../utils/api";
import { KafkaTSApiError } from "../utils/error";

// OffsetFetch (apiKey 9, apiVersion 9): reads the last committed offsets for
// one or more consumer groups. v9 is a flexible version (compact encodings +
// empty tagged-fields markers).
export const OFFSET_FETCH = createApi({
    apiKey: 9,
    apiVersion: 9,
    // Encodes the request body; field order must match the wire format.
    request: (
        encoder,
        data: {
            groups: {
                groupId: string;
                memberId: string | null;
                memberEpoch: number;
                topics: {
                    name: string;
                    partitionIndexes: number[];
                }[];
            }[];
            // true = wait for pending transactional commits before answering
            requireStable: boolean;
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeCompactArray(data.groups, (encoder, group) =>
                encoder
                    .writeCompactString(group.groupId)
                    .writeCompactString(group.memberId)
                    .writeInt32(group.memberEpoch)
                    .writeCompactArray(group.topics, (encoder, topic) =>
                        encoder
                            .writeCompactString(topic.name)
                            .writeCompactArray(topic.partitionIndexes, (encoder, partitionIndex) =>
                                encoder.writeInt32(partitionIndex),
                            )
                            .writeUVarInt(0), // topic tagged fields
                    )
                    .writeUVarInt(0), // group tagged fields
            )
            .writeBoolean(data.requireStable)
            .writeUVarInt(0), // request tagged fields
    // Decodes the response; read order mirrors the wire layout exactly.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            throttleTimeMs: decoder.readInt32(),
            groups: decoder.readCompactArray((decoder) => ({
                groupId: decoder.readCompactString(),
                topics: decoder.readCompactArray((decoder) => ({
                    name: decoder.readCompactString()!,
                    partitions: decoder.readCompactArray((decoder) => ({
                        partitionIndex: decoder.readInt32(),
                        committedOffset: decoder.readInt64(),
                        committedLeaderEpoch: decoder.readInt32(),
                        committedMetadata: decoder.readCompactString(),
                        errorCode: decoder.readInt16(),
                        _tag: decoder.readTagBuffer(),
                    })),
                    _tag: decoder.readTagBuffer(),
                })),
                errorCode: decoder.readInt16(),
                _tag: decoder.readTagBuffer(),
            })),
            _tag2: decoder.readTagBuffer(),
        };
        // Raise the first group- or partition-level error as an exception.
        result.groups.forEach((group) => {
            if (group.errorCode) throw new KafkaTSApiError(group.errorCode, null, result);
            group.topics.forEach((topic) => {
                topic.partitions.forEach((partition) => {
                    if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
                });
            });
        });
        return result;
    },
});
@@ -0,0 +1,173 @@
1
import { createApi } from "../utils/api.js";
import { Encoder } from "../utils/encoder.js";
import { KafkaTSApiError } from "../utils/error.js";

// Produce (apiKey 0, apiVersion 10): appends record batches to partitions.
// Each partition's payload is a v2 ("magic byte 2") record batch, built here
// from three nested encoders: per-record body -> batch body -> batch header.
// The byte layout and field order must not change.
export const PRODUCE = createApi({
    apiKey: 0,
    apiVersion: 10,
    request: (
        encoder,
        data: {
            transactionalId: string | null; // null for non-transactional produce
            acks: number; // 0, 1, or -1 (all in-sync replicas)
            timeoutMs: number;
            topicData: {
                name: string;
                partitionData: {
                    index: number;
                    baseOffset: bigint;
                    partitionLeaderEpoch: number;
                    attributes: number; // compression / timestamp-type bits
                    lastOffsetDelta: number;
                    baseTimestamp: bigint;
                    maxTimestamp: bigint;
                    producerId: bigint; // -1n when idempotence is not used
                    producerEpoch: number;
                    baseSequence: number;
                    records: {
                        attributes: number;
                        // per-record values are deltas against the batch base
                        timestampDelta: bigint;
                        offsetDelta: number;
                        key: string | null;
                        value: string | null;
                        headers: {
                            key: string;
                            value: string;
                        }[];
                    }[];
                }[];
            }[];
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeCompactString(data.transactionalId)
            .writeInt16(data.acks)
            .writeInt32(data.timeoutMs)
            .writeCompactArray(data.topicData, (encoder, topic) =>
                encoder
                    .writeCompactString(topic.name)
                    .writeCompactArray(topic.partitionData, (encoder, partition) => {
                        // Batch body: everything the CRC covers (attributes
                        // through the records array), per the v2 batch format.
                        const batchBody = new Encoder()
                            .writeInt16(partition.attributes)
                            .writeInt32(partition.lastOffsetDelta)
                            .writeInt64(partition.baseTimestamp)
                            .writeInt64(partition.maxTimestamp)
                            .writeInt64(partition.producerId)
                            .writeInt16(partition.producerEpoch)
                            .writeInt32(partition.baseSequence)
                            .writeArray(partition.records, (encoder, record) => {
                                // Each record is length-prefixed with a varint,
                                // so its body is serialized separately first.
                                const recordBody = new Encoder()
                                    .writeInt8(record.attributes)
                                    .writeVarLong(record.timestampDelta)
                                    .writeVarInt(record.offsetDelta)
                                    .writeVarIntString(record.key)
                                    .writeVarIntString(record.value)
                                    .writeVarIntArray(record.headers, (encoder, header) =>
                                        encoder.writeVarIntString(header.key).writeVarIntString(header.value),
                                    )
                                    .value();

                                return encoder.writeVarInt(recordBody.length).write(recordBody);
                            })
                            .value();

                        // Batch header: leader epoch, magic, then the CRC-32C
                        // of the body (the CRC field covers attributes onward).
                        const batchHeader = new Encoder()
                            .writeInt32(partition.partitionLeaderEpoch)
                            .writeInt8(2) // magic byte
                            .writeUInt32(unsigned(crc32C(batchBody)))
                            .write(batchBody)
                            .value();

                        // Full batch: baseOffset + batchLength + header/body.
                        const batch = new Encoder()
                            .writeInt64(partition.baseOffset)
                            .writeInt32(batchHeader.length)
                            .write(batchHeader)
                            .value();

                        return encoder
                            .writeInt32(partition.index)
                            .writeUVarInt(batch.length + 1) // batch size (compact bytes length is N + 1)
                            .write(batch)
                            .writeUVarInt(0); // partition tagged fields
                    })
                    .writeUVarInt(0), // topic tagged fields
            )
            .writeUVarInt(0), // request tagged fields
    // Decodes the response; read order mirrors the wire layout exactly.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            responses: decoder.readCompactArray((response) => ({
                name: response.readCompactString(),
                partitionResponses: response.readCompactArray((partitionResponse) => ({
                    index: partitionResponse.readInt32(),
                    errorCode: partitionResponse.readInt16(),
                    baseOffset: partitionResponse.readInt64(),
                    logAppendTime: partitionResponse.readInt64(),
                    logStartOffset: partitionResponse.readInt64(),
                    recordErrors: partitionResponse.readCompactArray((recordError) => ({
                        batchIndex: recordError.readInt32(),
                        batchIndexError: recordError.readInt16(),
                        _tag: recordError.readTagBuffer(),
                    })),
                    errorMessage: partitionResponse.readCompactString(),
                    _tag: partitionResponse.readTagBuffer(),
                })),
                _tag: response.readTagBuffer(),
            })),
            throttleTimeMs: decoder.readInt32(),
            _tag2: decoder.readTagBuffer(),
        };
        // Surface the first per-partition error together with the broker's
        // error message, if any.
        result.responses.forEach((topic) => {
            topic.partitionResponses.forEach((partition) => {
                if (partition.errorCode !== 0) {
                    throw new KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
                }
            });
        });
        return result;
    },
});
131
+
132
+ const unsigned = (value: number) => Uint32Array.from([value])[0];
133
+
134
+ const crc32C = (buffer: Buffer) => {
135
+ let crc = 0 ^ -1;
136
+ for (let i = 0; i < buffer.length; i++) {
137
+ crc = T[(crc ^ buffer[i]) & 0xff] ^ (crc >>> 8);
138
+ }
139
+
140
+ return (crc ^ -1) >>> 0;
141
+ };
142
+
143
+ const T = new Int32Array([
144
+ 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf,
145
+ 0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
146
+ 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57,
147
+ 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
148
+ 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e,
149
+ 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,
150
+ 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 0x7da08661, 0x8fcb0562, 0x9c9bf696,
151
+ 0x6ef07595, 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
152
+ 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198, 0x5125dad3,
153
+ 0xa34e59d0, 0xb01eaa24, 0x42752927, 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,
154
+ 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, 0x61c69362, 0x93ad1061, 0x80fde395,
155
+ 0x72966096, 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
156
+ 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 0xb602c312,
157
+ 0x44694011, 0x5739b3e5, 0xa55230e6, 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
158
+ 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 0x456cac67, 0xb7072f64, 0xa457dc90,
159
+ 0x563c5f93, 0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
160
+ 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 0x1871a4d8,
161
+ 0xea1a27db, 0xf94ad42f, 0x0b21572c, 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,
162
+ 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e,
163
+ 0x3bc21e9d, 0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
164
+ 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19,
165
+ 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,
166
+ 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, 0x8ecee914, 0x7ca56a17, 0x6ff599e3,
167
+ 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
168
+ 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a,
169
+ 0x115b2b19, 0x020bd8ed, 0xf0605bee, 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,
170
+ 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, 0xf36e6f75, 0x0105ec76, 0x12551f82,
171
+ 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
172
+ 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,
173
+ ]);
@@ -0,0 +1,21 @@
1
+ import { createApi } from "../utils/api";
2
+ import { KafkaTSApiError } from "../utils/error";
3
+
4
+ export const SASL_AUTHENTICATE = createApi({
5
+ apiKey: 36,
6
+ apiVersion: 2,
7
+ request: (encoder, data: { authBytes: Buffer }) =>
8
+ encoder.writeUVarInt(0).writeCompactBytes(data.authBytes).writeUVarInt(0),
9
+ response: (decoder) => {
10
+ const result = {
11
+ _tag: decoder.readTagBuffer(),
12
+ errorCode: decoder.readInt16(),
13
+ errorMessage: decoder.readCompactString(),
14
+ authBytes: decoder.readCompactBytes(),
15
+ sessionLifetimeMs: decoder.readInt64(),
16
+ _tag2: decoder.readTagBuffer(),
17
+ };
18
+ if (result.errorCode) throw new KafkaTSApiError(result.errorCode, result.errorMessage, result);
19
+ return result;
20
+ },
21
+ });
@@ -0,0 +1,16 @@
1
+ import { createApi } from "../utils/api";
2
+ import { KafkaTSApiError } from "../utils/error";
3
+
4
+ export const SASL_HANDSHAKE = createApi({
5
+ apiKey: 17,
6
+ apiVersion: 1,
7
+ request: (encoder, data: { mechanism: string }) => encoder.writeString(data.mechanism),
8
+ response: (decoder) => {
9
+ const result = {
10
+ errorCode: decoder.readInt16(),
11
+ mechanisms: decoder.readArray((mechanism) => mechanism.readString()),
12
+ };
13
+ if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
14
+ return result;
15
+ },
16
+ });
@@ -0,0 +1,54 @@
1
import { createApi } from "../utils/api";
import { KafkaTSApiError } from "../utils/error";

// Topic name -> assigned partition indexes.
export type Assignment = { [topic: string]: number[] };

// One group member's partition assignment, as distributed by the group leader.
export type MemberAssignment = {
    memberId: string;
    assignment: Assignment;
};

// SyncGroup (apiKey 14, apiVersion 5): the group leader submits all member
// assignments; every member receives its own assignment back. v5 is a
// flexible version (compact encodings + empty tagged-fields markers).
// NOTE: assignments are serialized as JSON strings here, not as the Kafka
// ConsumerProtocol binary format — both sides of this client must agree.
export const SYNC_GROUP = createApi({
    apiKey: 14,
    apiVersion: 5,
    request: (
        encoder,
        data: {
            groupId: string;
            generationId: number;
            memberId: string;
            groupInstanceId: string | null;
            protocolType: string | null;
            protocolName: string | null;
            // empty for non-leader members
            assignments: MemberAssignment[];
        },
    ) =>
        encoder
            .writeUVarInt(0) // empty tagged fields
            .writeCompactString(data.groupId)
            .writeInt32(data.generationId)
            .writeCompactString(data.memberId)
            .writeCompactString(data.groupInstanceId)
            .writeCompactString(data.protocolType)
            .writeCompactString(data.protocolName)
            .writeCompactArray(data.assignments, (encoder, assignment) =>
                encoder
                    .writeCompactString(assignment.memberId)
                    .writeCompactString(JSON.stringify(assignment.assignment))
                    .writeUVarInt(0), // assignment tagged fields
            )
            .writeUVarInt(0), // request tagged fields
    // Decodes the response; read order mirrors the wire layout exactly.
    // `assignments` is the JSON string for THIS member (parse at the caller).
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(),
            throttleTimeMs: decoder.readInt32(),
            errorCode: decoder.readInt16(),
            protocolType: decoder.readCompactString(),
            protocolName: decoder.readCompactString(),
            assignments: decoder.readCompactString()!,
            _tag2: decoder.readTagBuffer(),
        };
        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
        return result;
    },
});
package/src/broker.ts ADDED
@@ -0,0 +1,74 @@
1
import { TcpSocketConnectOpts } from "net";
import { TLSSocketOptions } from "tls";
import { API } from "./api";
import { Connection, SendRequest } from "./connection";
import { KafkaTSError } from "./utils/error";
import { memo } from "./utils/memo";

// SASL configuration. Only the PLAIN mechanism is implemented below.
export type SASLOptions = { mechanism: "PLAIN"; username: string; password: string };

type BrokerOptions = {
    clientId: string | null;
    options: TcpSocketConnectOpts; // TCP endpoint of this broker
    sasl: SASLOptions | null; // null = no authentication
    ssl: TLSSocketOptions | null; // null = plaintext socket
};

/**
 * A single Kafka broker: owns one Connection and performs the connect-time
 * handshake (API version validation, then optional SASL authentication).
 */
export class Broker {
    private connection: Connection;
    // Bound to the connection so callers can issue raw API requests directly.
    public sendRequest: SendRequest;

    constructor(private options: BrokerOptions) {
        this.connection = new Connection({
            clientId: this.options.clientId,
            connection: this.options.options,
            ssl: this.options.ssl,
        });
        this.sendRequest = this.connection.sendRequest.bind(this.connection);
    }

    // Opens the socket and runs the full handshake sequence in order.
    public async connect() {
        await this.connection.connect();
        await this.validateApiVersions();
        await this.saslHandshake();
        await this.saslAuthenticate();
        return this;
    }

    // Memoized connect so repeated callers share one connection attempt.
    // NOTE(review): presumably memo caches the promise, meaning a failed
    // attempt could stay cached — confirm against utils/memo.
    public ensureConnected = memo(() => this.connect());

    public async disconnect() {
        await this.connection.disconnect();
    }

    // Asks the broker which API versions it supports and fails fast when any
    // API this client implements falls outside the broker's supported range.
    private async validateApiVersions() {
        const { versions } = await this.sendRequest(API.API_VERSIONS, {});

        const apiByKey = Object.fromEntries(Object.values(API).map((api) => [api.apiKey, api]));
        versions.forEach(({ apiKey, minVersion, maxVersion }) => {
            if (!apiByKey[apiKey]) {
                return; // broker advertises an API this client does not use
            }
            const { apiVersion } = apiByKey[apiKey];
            if (apiVersion < minVersion || apiVersion > maxVersion) {
                throw new KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker`);
            }
        });
    }

    // SASL step 1: announce the mechanism. No-op when SASL is disabled.
    private async saslHandshake() {
        if (!this.options.sasl) {
            return;
        }
        await this.sendRequest(API.SASL_HANDSHAKE, { mechanism: this.options.sasl.mechanism });
    }

    // SASL step 2: send credentials. PLAIN payload per RFC 4616 is
    // "<authzid>\0<authcid>\0<password>" — null joins as an empty authzid.
    private async saslAuthenticate() {
        if (this.options.sasl?.mechanism !== "PLAIN") {
            return;
        }
        const { username, password } = this.options.sasl;
        const authBytes = [null, username, password].join("\u0000");
        await this.sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
    }
}
package/src/client.ts ADDED
@@ -0,0 +1,47 @@
1
+ import { TcpSocketConnectOpts } from "net";
2
+ import { TLSSocketOptions } from "tls";
3
+ import { SASLOptions } from "./broker";
4
+ import { Cluster } from "./cluster";
5
+ import { Consumer, ConsumerOptions } from "./consumer/consumer";
6
+ import { Producer, ProducerOptions } from "./producer/producer";
7
+
8
+ type ClientOptions = {
9
+ clientId?: string | null;
10
+ bootstrapServers: TcpSocketConnectOpts[];
11
+ sasl?: SASLOptions | null;
12
+ ssl?: TLSSocketOptions | null;
13
+ };
14
+
15
+ export class Client {
16
+ private options: Required<ClientOptions>;
17
+
18
+ constructor(options: ClientOptions) {
19
+ this.options = {
20
+ ...options,
21
+ clientId: options.clientId ?? null,
22
+ sasl: options.sasl ?? null,
23
+ ssl: options.ssl ?? null,
24
+ };
25
+ }
26
+
27
+ public async startConsumer(options: ConsumerOptions) {
28
+ const consumer = new Consumer(this.createCluster(), options);
29
+ await consumer.start();
30
+ return consumer;
31
+ }
32
+
33
+ public createProducer(options: ProducerOptions) {
34
+ return new Producer(this.createCluster(), options);
35
+ }
36
+
37
+ public createCluster() {
38
+ return new Cluster({
39
+ clientId: this.options.clientId,
40
+ bootstrapServers: this.options.bootstrapServers,
41
+ sasl: this.options.sasl,
42
+ ssl: this.options.ssl,
43
+ });
44
+ }
45
+ }
46
+
47
+ export const createKafkaClient = (options: ClientOptions) => new Client(options);