kafka-ts 1.1.6 → 1.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/api/index.d.ts +122 -121
  2. package/dist/api/index.js +24 -1
  3. package/dist/broker.d.ts +0 -1
  4. package/dist/broker.js +5 -8
  5. package/dist/cluster.d.ts +2 -2
  6. package/dist/cluster.js +48 -23
  7. package/dist/connection.js +9 -13
  8. package/dist/consumer/consumer-group.d.ts +2 -1
  9. package/dist/consumer/consumer-group.js +75 -36
  10. package/dist/consumer/consumer.d.ts +3 -0
  11. package/dist/consumer/consumer.js +47 -13
  12. package/dist/consumer/metadata.d.ts +24 -0
  13. package/dist/consumer/metadata.js +64 -0
  14. package/dist/examples/src/replicator.js +34 -0
  15. package/dist/examples/src/utils/json.js +5 -0
  16. package/dist/producer/producer.d.ts +3 -1
  17. package/dist/producer/producer.js +85 -86
  18. package/dist/request-handler.d.ts +16 -0
  19. package/dist/request-handler.js +67 -0
  20. package/dist/request-handler.test.d.ts +1 -0
  21. package/dist/request-handler.test.js +340 -0
  22. package/dist/src/api/api-versions.js +18 -0
  23. package/dist/src/api/create-topics.js +46 -0
  24. package/dist/src/api/delete-topics.js +26 -0
  25. package/dist/src/api/fetch.js +95 -0
  26. package/dist/src/api/find-coordinator.js +34 -0
  27. package/dist/src/api/heartbeat.js +22 -0
  28. package/dist/src/api/index.js +38 -0
  29. package/dist/src/api/init-producer-id.js +24 -0
  30. package/dist/src/api/join-group.js +48 -0
  31. package/dist/src/api/leave-group.js +30 -0
  32. package/dist/src/api/list-offsets.js +39 -0
  33. package/dist/src/api/metadata.js +47 -0
  34. package/dist/src/api/offset-commit.js +39 -0
  35. package/dist/src/api/offset-fetch.js +44 -0
  36. package/dist/src/api/produce.js +119 -0
  37. package/dist/src/api/sync-group.js +31 -0
  38. package/dist/src/broker.js +35 -0
  39. package/dist/src/connection.js +21 -0
  40. package/dist/src/consumer/consumer-group.js +131 -0
  41. package/dist/src/consumer/consumer.js +103 -0
  42. package/dist/src/consumer/metadata.js +52 -0
  43. package/dist/src/consumer/offset-manager.js +23 -0
  44. package/dist/src/index.js +19 -0
  45. package/dist/src/producer/producer.js +84 -0
  46. package/dist/src/request-handler.js +57 -0
  47. package/dist/src/request-handler.test.js +321 -0
  48. package/dist/src/types.js +2 -0
  49. package/dist/src/utils/api.js +5 -0
  50. package/dist/src/utils/decoder.js +161 -0
  51. package/dist/src/utils/encoder.js +137 -0
  52. package/dist/src/utils/error.js +10 -0
  53. package/dist/utils/debug.d.ts +2 -0
  54. package/dist/utils/debug.js +11 -0
  55. package/dist/utils/error.d.ts +1 -4
  56. package/dist/utils/error.js +5 -9
  57. package/dist/utils/logger.d.ts +7 -0
  58. package/dist/utils/logger.js +20 -5
  59. package/dist/utils/memo.d.ts +1 -0
  60. package/dist/utils/memo.js +16 -0
  61. package/dist/utils/mutex.d.ts +3 -0
  62. package/dist/utils/mutex.js +32 -0
  63. package/package.json +1 -1
@@ -0,0 +1,16 @@
1
+ import { Connection } from "./connection";
2
+ import { Api } from "./utils/api";
3
+ type RequestHandlerOptions = {
4
+ clientId: string | null;
5
+ };
6
+ export declare class RequestHandler {
7
+ private connection;
8
+ private options;
9
+ private queue;
10
+ private currentBuffer;
11
+ constructor(connection: Connection, options: RequestHandlerOptions);
12
+ private handleData;
13
+ sendRequest<Request, Response>(api: Api<Request, Response>, args: Request): Promise<Response>;
14
+ }
15
+ export type SendRequest = typeof RequestHandler.prototype.sendRequest;
16
+ export {};
@@ -0,0 +1,67 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.RequestHandler = void 0;
7
+ const node_assert_1 = __importDefault(require("node:assert"));
8
+ const decoder_1 = require("./utils/decoder");
9
+ const encoder_1 = require("./utils/encoder");
10
+ class RequestHandler {
11
+ connection;
12
+ options;
13
+ queue = {};
14
+ currentBuffer = null;
15
+ constructor(connection, options) {
16
+ this.connection = connection;
17
+ this.options = options;
18
+ this.connection.on("data", this.handleData);
19
+ }
20
+ handleData(buffer) {
21
+ this.currentBuffer = this.currentBuffer ? Buffer.concat([this.currentBuffer, buffer]) : buffer;
22
+ if (this.currentBuffer.length < 4) {
23
+ return;
24
+ }
25
+ const decoder = (0, decoder_1.createDecoder)({ buffer: this.currentBuffer });
26
+ const size = decoder.readInt32();
27
+ if (size > decoder.buffer.length) {
28
+ return;
29
+ }
30
+ const correlationId = decoder.readInt32();
31
+ const request = this.queue[correlationId];
32
+ delete this.queue[correlationId];
33
+ request.callback(decoder);
34
+ // debug(handleData.name, 'Response offsets', {
35
+ // offset: decoder.offset,
36
+ // length: decoder.buffer.length,
37
+ // rest: decoder.buffer.subarray(decoder.offset, decoder.buffer.length)?.toString(),
38
+ // });
39
+ (0, node_assert_1.default)(decoder.offset - 4 === size, `Buffer not correctly consumed: ${decoder.offset - 4} !== ${buffer.length}`);
40
+ this.currentBuffer = null;
41
+ }
42
+ async sendRequest(api, args) {
43
+ const correlationId = Math.floor(Math.random() * 1000000);
44
+ const encoder = (0, encoder_1.createEncoder)()
45
+ .writeInt16(api.apiKey)
46
+ .writeInt16(api.apiVersion)
47
+ .writeInt32(correlationId)
48
+ .writeString(this.options.clientId);
49
+ const request = api.request(encoder, args).value();
50
+ const buffer = (0, encoder_1.createEncoder)().writeInt32(request.length).write(request).value();
51
+ return new Promise(async (resolve, reject) => {
52
+ await this.connection.write(buffer);
53
+ this.queue[correlationId] = {
54
+ callback: (decoder) => {
55
+ try {
56
+ const response = api.response(decoder);
57
+ resolve(response);
58
+ }
59
+ catch (error) {
60
+ reject(error);
61
+ }
62
+ },
63
+ };
64
+ });
65
+ }
66
+ }
67
+ exports.RequestHandler = RequestHandler;
@@ -0,0 +1 @@
1
+ export declare const kafka: import("./client").Client;
@@ -0,0 +1,340 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.kafka = void 0;
4
+ const crypto_1 = require("crypto");
5
+ const fs_1 = require("fs");
6
+ const vitest_1 = require("vitest");
7
+ const api_1 = require("./api");
8
+ const find_coordinator_1 = require("./api/find-coordinator");
9
+ const client_1 = require("./client");
10
+ exports.kafka = (0, client_1.createKafkaClient)({
11
+ clientId: "kafka-ts",
12
+ bootstrapServers: [{ host: "localhost", port: 9092 }],
13
+ sasl: { mechanism: "PLAIN", username: "admin", password: "admin" },
14
+ ssl: { ca: (0, fs_1.readFileSync)("./certs/ca.crt").toString() },
15
+ });
16
+ vitest_1.describe.sequential("Request handler", () => {
17
+ const groupId = (0, crypto_1.randomBytes)(16).toString("hex");
18
+ let cluster;
19
+ (0, vitest_1.beforeAll)(async () => {
20
+ cluster = await exports.kafka.createCluster().connect();
21
+ const metadataResult = await cluster.sendRequest(api_1.API.METADATA, {
22
+ topics: null,
23
+ allowTopicAutoCreation: false,
24
+ includeTopicAuthorizedOperations: false,
25
+ });
26
+ if (metadataResult.topics.some((topic) => topic.name === "kafka-ts-test-topic")) {
27
+ await cluster.sendRequest(api_1.API.DELETE_TOPICS, {
28
+ topics: [{ name: "kafka-ts-test-topic", topicId: null }],
29
+ timeoutMs: 10000,
30
+ });
31
+ }
32
+ });
33
+ (0, vitest_1.afterAll)(async () => {
34
+ await cluster.disconnect();
35
+ });
36
+ (0, vitest_1.it)("should request api versions", async () => {
37
+ const result = await cluster.sendRequest(api_1.API.API_VERSIONS, {});
38
+ (0, vitest_1.expect)(result).toMatchSnapshot();
39
+ });
40
+ let topicId = "d6718d178e1b47c886441ad2d19faea5";
41
+ (0, vitest_1.it)("should create topics", async () => {
42
+ const result = await cluster.sendRequest(api_1.API.CREATE_TOPICS, {
43
+ topics: [
44
+ {
45
+ name: "kafka-ts-test-topic",
46
+ numPartitions: 1,
47
+ replicationFactor: 1,
48
+ assignments: [],
49
+ configs: [],
50
+ },
51
+ ],
52
+ timeoutMs: 10000,
53
+ validateOnly: false,
54
+ });
55
+ topicId = result.topics[0].topicId;
56
+ result.topics.forEach((topic) => {
57
+ topic.topicId = "Any<UUID>";
58
+ });
59
+ (0, vitest_1.expect)(result).toMatchSnapshot();
60
+ await new Promise((resolve) => setTimeout(resolve, 1000));
61
+ });
62
+ (0, vitest_1.it)("should request metadata for all topics", async () => {
63
+ const result = await cluster.sendRequest(api_1.API.METADATA, {
64
+ topics: null,
65
+ allowTopicAutoCreation: false,
66
+ includeTopicAuthorizedOperations: false,
67
+ });
68
+ result.controllerId = 0;
69
+ result.topics.forEach((topic) => {
70
+ topic.topicId = "Any<UUID>";
71
+ topic.partitions.forEach((partition) => {
72
+ partition.leaderId = 0;
73
+ partition.isrNodes = [0];
74
+ partition.replicaNodes = [0];
75
+ });
76
+ });
77
+ (0, vitest_1.expect)(result).toMatchSnapshot();
78
+ });
79
+ let leaderId = 0;
80
+ (0, vitest_1.it)("should request metadata for a topic", async () => {
81
+ const result = await cluster.sendRequest(api_1.API.METADATA, {
82
+ topics: [{ id: topicId, name: "kafka-ts-test-topic" }],
83
+ allowTopicAutoCreation: false,
84
+ includeTopicAuthorizedOperations: false,
85
+ });
86
+ leaderId = result.topics[0].partitions[0].leaderId;
87
+ result.controllerId = 0;
88
+ result.topics.forEach((topic) => {
89
+ topic.topicId = "Any<UUID>";
90
+ topic.partitions.forEach((partition) => {
91
+ partition.leaderId = 0;
92
+ partition.isrNodes = [0];
93
+ partition.replicaNodes = [0];
94
+ });
95
+ });
96
+ (0, vitest_1.expect)(result).toMatchSnapshot();
97
+ });
98
+ (0, vitest_1.it)("should init producer id", async () => {
99
+ const result = await cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
100
+ transactionalId: null,
101
+ transactionTimeoutMs: 0,
102
+ producerId: 0n,
103
+ producerEpoch: 0,
104
+ });
105
+ result.producerId = 0n;
106
+ (0, vitest_1.expect)(result).toMatchSnapshot();
107
+ });
108
+ (0, vitest_1.it)("should produce messages", async () => {
109
+ const now = Date.now();
110
+ const result = await cluster.sendRequestToNode(leaderId)(api_1.API.PRODUCE, {
111
+ transactionalId: null,
112
+ timeoutMs: 10000,
113
+ acks: 1,
114
+ topicData: [
115
+ {
116
+ name: "kafka-ts-test-topic",
117
+ partitionData: [
118
+ {
119
+ index: 0,
120
+ baseOffset: 0n,
121
+ partitionLeaderEpoch: 0,
122
+ attributes: 0,
123
+ baseSequence: 0,
124
+ baseTimestamp: BigInt(now),
125
+ lastOffsetDelta: 0,
126
+ maxTimestamp: BigInt(now),
127
+ producerEpoch: 0,
128
+ producerId: 9n,
129
+ records: [
130
+ {
131
+ attributes: 0,
132
+ offsetDelta: 0,
133
+ timestampDelta: 0n,
134
+ key: "key",
135
+ value: "value",
136
+ headers: [
137
+ {
138
+ key: "header-key",
139
+ value: "header-value",
140
+ },
141
+ ],
142
+ },
143
+ ],
144
+ },
145
+ ],
146
+ },
147
+ ],
148
+ });
149
+ (0, vitest_1.expect)(result).toMatchSnapshot();
150
+ });
151
+ (0, vitest_1.it)("should fetch messages", async () => {
152
+ const result = await cluster.sendRequestToNode(leaderId)(api_1.API.FETCH, {
153
+ maxWaitMs: 100,
154
+ minBytes: 1,
155
+ maxBytes: 10485760,
156
+ isolationLevel: 1,
157
+ sessionId: 0,
158
+ sessionEpoch: -1,
159
+ topics: [
160
+ {
161
+ topicId,
162
+ partitions: [
163
+ {
164
+ partition: 0,
165
+ currentLeaderEpoch: -1,
166
+ fetchOffset: 0n,
167
+ lastFetchedEpoch: 0,
168
+ logStartOffset: -1n,
169
+ partitionMaxBytes: 10485760,
170
+ },
171
+ ],
172
+ },
173
+ ],
174
+ forgottenTopicsData: [],
175
+ rackId: "",
176
+ });
177
+ result.responses.forEach((response) => {
178
+ response.topicId = "Any<UUID>";
179
+ response.partitions.forEach((partition) => {
180
+ partition.records.forEach((record) => {
181
+ (0, vitest_1.expect)(record.baseTimestamp).toBeGreaterThan(1721926744730n);
182
+ (0, vitest_1.expect)(record.maxTimestamp).toBeGreaterThan(1721926744730n);
183
+ (0, vitest_1.expect)(record.crc).toBeGreaterThan(0);
184
+ record.baseTimestamp = 0n;
185
+ record.maxTimestamp = 0n;
186
+ record.crc = 0;
187
+ });
188
+ });
189
+ });
190
+ (0, vitest_1.expect)(result).toMatchSnapshot();
191
+ });
192
+ let coordinatorId = -1;
193
+ (0, vitest_1.it)("should find coordinator", async () => {
194
+ const result = await cluster.sendRequest(api_1.API.FIND_COORDINATOR, { keyType: find_coordinator_1.KEY_TYPE.GROUP, keys: [groupId] });
195
+ result.coordinators.forEach((coordinator) => {
196
+ coordinator.key = "Any<String>";
197
+ });
198
+ coordinatorId = result.coordinators[0].nodeId;
199
+ result.coordinators.forEach((coordinator) => {
200
+ coordinator.nodeId = 1;
201
+ coordinator.port = 9093;
202
+ });
203
+ (0, vitest_1.expect)(result).toMatchSnapshot();
204
+ });
205
+ let memberId = "";
206
+ (0, vitest_1.it)("should fail join group request with new memberId", async () => {
207
+ try {
208
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.JOIN_GROUP, {
209
+ groupId,
210
+ sessionTimeoutMs: 30000,
211
+ rebalanceTimeoutMs: 60000,
212
+ memberId,
213
+ groupInstanceId: null,
214
+ protocolType: "consumer",
215
+ protocols: [
216
+ {
217
+ name: "RoundRobinAssigner",
218
+ metadata: { version: 0, topics: ["kafka-ts-test-topic"] },
219
+ },
220
+ ],
221
+ reason: null,
222
+ });
223
+ (0, vitest_1.expect)(false, "Should throw an error").toBe(true);
224
+ }
225
+ catch (error) {
226
+ const { response } = error;
227
+ memberId = response.memberId;
228
+ response.memberId = "Any<UUID>";
229
+ (0, vitest_1.expect)(response).toMatchSnapshot();
230
+ }
231
+ });
232
+ (0, vitest_1.it)("should join group", async () => {
233
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.JOIN_GROUP, {
234
+ groupId,
235
+ sessionTimeoutMs: 30000,
236
+ rebalanceTimeoutMs: 60000,
237
+ memberId,
238
+ groupInstanceId: null,
239
+ protocolType: "consumer",
240
+ protocols: [
241
+ {
242
+ name: "RoundRobinAssigner",
243
+ metadata: { version: 0, topics: ["kafka-ts-test-topic"] },
244
+ },
245
+ ],
246
+ reason: null,
247
+ });
248
+ result.memberId = "Any<UUID>";
249
+ result.leader = "Any<UUID>";
250
+ result.members.forEach((member) => {
251
+ member.memberId = "Any<UUID>";
252
+ });
253
+ (0, vitest_1.expect)(result).toMatchSnapshot();
254
+ });
255
+ (0, vitest_1.it)("should sync group", async () => {
256
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.SYNC_GROUP, {
257
+ groupId,
258
+ generationId: 1,
259
+ memberId,
260
+ groupInstanceId: null,
261
+ protocolType: "consumer",
262
+ protocolName: "RoundRobinAssigner",
263
+ assignments: [
264
+ {
265
+ memberId,
266
+ assignment: { "kafka-test-topic": [0] },
267
+ },
268
+ ],
269
+ });
270
+ (0, vitest_1.expect)(result).toMatchSnapshot();
271
+ });
272
+ (0, vitest_1.it)("should commit offsets", async () => {
273
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.OFFSET_COMMIT, {
274
+ groupId,
275
+ generationIdOrMemberEpoch: 1,
276
+ memberId,
277
+ groupInstanceId: null,
278
+ topics: [
279
+ {
280
+ name: "kafka-ts-test-topic",
281
+ partitions: [
282
+ { partitionIndex: 0, committedOffset: 1n, committedLeaderEpoch: 0, committedMetadata: null },
283
+ ],
284
+ },
285
+ ],
286
+ });
287
+ (0, vitest_1.expect)(result).toMatchSnapshot();
288
+ });
289
+ (0, vitest_1.it)("should fetch offsets", async () => {
290
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.OFFSET_FETCH, {
291
+ groups: [
292
+ {
293
+ groupId,
294
+ memberId,
295
+ memberEpoch: 0,
296
+ topics: [
297
+ {
298
+ name: "kafka-ts-test-topic",
299
+ partitionIndexes: [0],
300
+ },
301
+ ],
302
+ },
303
+ ],
304
+ requireStable: false,
305
+ });
306
+ result.groups.forEach((group) => {
307
+ group.groupId = "Any<String>";
308
+ });
309
+ (0, vitest_1.expect)(result).toMatchSnapshot();
310
+ });
311
+ (0, vitest_1.it)("should heartbeat", async () => {
312
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.HEARTBEAT, {
313
+ groupId,
314
+ generationId: 1,
315
+ memberId,
316
+ groupInstanceId: null,
317
+ });
318
+ (0, vitest_1.expect)(result).toMatchSnapshot();
319
+ });
320
+ (0, vitest_1.it)("should leave group", async () => {
321
+ const result = await cluster.sendRequestToNode(coordinatorId)(api_1.API.LEAVE_GROUP, {
322
+ groupId,
323
+ members: [{ memberId, groupInstanceId: null, reason: null }],
324
+ });
325
+ result.members.forEach((member) => {
326
+ member.memberId = "Any<UUID>";
327
+ });
328
+ (0, vitest_1.expect)(result).toMatchSnapshot();
329
+ });
330
+ (0, vitest_1.it)("should delete topics", async () => {
331
+ const result = await cluster.sendRequest(api_1.API.DELETE_TOPICS, {
332
+ topics: [{ name: "kafka-ts-test-topic", topicId: null }],
333
+ timeoutMs: 10000,
334
+ });
335
+ result.responses.forEach((response) => {
336
+ response.topicId = "Any<UUID>";
337
+ });
338
+ (0, vitest_1.expect)(result).toMatchSnapshot();
339
+ });
340
+ });
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.API_VERSIONS = void 0;
4
+ const api_js_1 = require("../utils/api.js");
5
+ exports.API_VERSIONS = (0, api_js_1.createApi)({
6
+ apiKey: 18,
7
+ apiVersion: 2,
8
+ request: (encoder) => encoder.value(),
9
+ response: (decoder) => ({
10
+ errorCode: decoder.readInt16(),
11
+ versions: decoder.readArray((version) => ({
12
+ apiKey: version.readInt16(),
13
+ minVersion: version.readInt16(),
14
+ maxVersion: version.readInt16(),
15
+ })),
16
+ throttleTimeMs: decoder.readInt32(),
17
+ }),
18
+ });
@@ -0,0 +1,46 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.CREATE_TOPICS = void 0;
4
+ const api_1 = require("../utils/api");
5
+ exports.CREATE_TOPICS = (0, api_1.createApi)({
6
+ apiKey: 19,
7
+ apiVersion: 7,
8
+ request: (encoder, data) => encoder
9
+ .writeUVarInt(0)
10
+ .writeCompactArray(data.topics, (encoder, topic) => encoder
11
+ .writeCompactString(topic.name)
12
+ .writeInt32(topic.numPartitions)
13
+ .writeInt16(topic.replicationFactor)
14
+ .writeCompactArray(topic.assignments, (encoder, assignment) => encoder
15
+ .writeInt32(assignment.partitionIndex)
16
+ .writeCompactArray(assignment.brokerIds, (encoder, brokerId) => encoder.writeInt32(brokerId))
17
+ .writeUVarInt(0))
18
+ .writeCompactArray(topic.configs, (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0))
19
+ .writeUVarInt(0))
20
+ .writeInt32(data.timeoutMs)
21
+ .writeBoolean(data.validateOnly)
22
+ .writeUVarInt(0)
23
+ .value(),
24
+ response: (decoder) => ({
25
+ _tag: decoder.readTagBuffer(),
26
+ throttleTimeMs: decoder.readInt32(),
27
+ topics: decoder.readCompactArray((topic) => ({
28
+ name: topic.readCompactString(),
29
+ topicId: topic.readUUID(),
30
+ errorCode: topic.readInt16(),
31
+ errorMessage: topic.readCompactString(),
32
+ numPartitions: topic.readInt32(),
33
+ replicationFactor: topic.readInt16(),
34
+ configs: topic.readCompactArray((config) => ({
35
+ name: config.readCompactString(),
36
+ value: config.readCompactString(),
37
+ readOnly: config.readBoolean(),
38
+ configSource: config.readInt8(),
39
+ isSensitive: config.readBoolean(),
40
+ _tag: config.readTagBuffer(),
41
+ })),
42
+ _tag: topic.readTagBuffer(),
43
+ })),
44
+ _tag2: decoder.readTagBuffer(),
45
+ }),
46
+ });
@@ -0,0 +1,26 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DELETE_TOPICS = void 0;
4
+ const api_1 = require("../utils/api");
5
+ exports.DELETE_TOPICS = (0, api_1.createApi)({
6
+ apiKey: 20,
7
+ apiVersion: 6,
8
+ request: (encoder, data) => encoder
9
+ .writeUVarInt(0)
10
+ .writeCompactArray(data.topics, (encoder, topic) => encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0))
11
+ .writeInt32(data.timeoutMs)
12
+ .writeUVarInt(0)
13
+ .value(),
14
+ response: (decoder) => ({
15
+ _tag: decoder.readTagBuffer(),
16
+ throttleTimeMs: decoder.readInt32(),
17
+ responses: decoder.readCompactArray((decoder) => ({
18
+ name: decoder.readCompactString(),
19
+ topicId: decoder.readUUID(),
20
+ errorCode: decoder.readInt16(),
21
+ errorMessage: decoder.readCompactString(),
22
+ _tag: decoder.readTagBuffer(),
23
+ })),
24
+ _tag2: decoder.readTagBuffer(),
25
+ }),
26
+ });
@@ -0,0 +1,95 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.FETCH = void 0;
4
+ const api_1 = require("../utils/api");
5
+ exports.FETCH = (0, api_1.createApi)({
6
+ apiKey: 1,
7
+ apiVersion: 16,
8
+ request: (encoder, data) => encoder
9
+ .writeUVarInt(0)
10
+ .writeInt32(data.maxWaitMs)
11
+ .writeInt32(data.minBytes)
12
+ .writeInt32(data.maxBytes)
13
+ .writeInt8(data.isolationLevel)
14
+ .writeInt32(data.sessionId)
15
+ .writeInt32(data.sessionEpoch)
16
+ .writeCompactArray(data.topics, (encoder, topic) => encoder
17
+ .writeUUID(topic.topicId)
18
+ .writeCompactArray(topic.partitions, (encoder, partition) => encoder
19
+ .writeInt32(partition.partition)
20
+ .writeInt32(partition.currentLeaderEpoch)
21
+ .writeInt64(partition.fetchOffset)
22
+ .writeInt32(partition.lastFetchedEpoch)
23
+ .writeInt64(partition.logStartOffset)
24
+ .writeInt32(partition.partitionMaxBytes)
25
+ .writeUVarInt(0))
26
+ .writeUVarInt(0))
27
+ .writeCompactArray(data.forgottenTopicsData, (encoder, forgottenTopic) => encoder
28
+ .writeUUID(forgottenTopic.topicId)
29
+ .writeCompactArray(forgottenTopic.partitions, (encoder, partition) => encoder.writeInt32(partition))
30
+ .writeUVarInt(0))
31
+ .writeCompactString(data.rackId)
32
+ .writeUVarInt(0)
33
+ .value(),
34
+ response: (decoder) => ({
35
+ _tag: decoder.readTagBuffer(),
36
+ throttleTimeMs: decoder.readInt32(),
37
+ errorCode: decoder.readInt16(),
38
+ sessionId: decoder.readInt32(),
39
+ responses: decoder.readCompactArray((response) => ({
40
+ topicId: response.readUUID(),
41
+ partitions: response.readCompactArray((partition) => ({
42
+ partitionIndex: partition.readInt32(),
43
+ errorCode: partition.readInt16(),
44
+ highWatermark: partition.readInt64(),
45
+ lastStableOffset: partition.readInt64(),
46
+ logStartOffset: partition.readInt64(),
47
+ abortedTransactions: partition.readCompactArray((abortedTransaction) => ({
48
+ producerId: abortedTransaction.readInt64(),
49
+ firstOffset: abortedTransaction.readInt64(),
50
+ _tag: abortedTransaction.readTagBuffer(),
51
+ })),
52
+ preferredReadReplica: partition.readInt32(),
53
+ records: decodeRecords(partition),
54
+ _tag: partition.readTagBuffer(),
55
+ })),
56
+ _tag: response.readTagBuffer(),
57
+ })),
58
+ _tag2: decoder.readTagBuffer(),
59
+ }),
60
+ });
61
+ const decodeRecords = (decoder) => {
62
+ const size = decoder.readUVarInt() - 1;
63
+ if (size <= 0) {
64
+ return [];
65
+ }
66
+ const results = [];
67
+ while (decoder.buffer.length > decoder.offset + 49) {
68
+ results.push({
69
+ baseOffset: decoder.readInt64(),
70
+ batchLength: decoder.readInt32(),
71
+ partitionLeaderEpoch: decoder.readInt32(),
72
+ magic: decoder.readInt8(),
73
+ crc: decoder.readUInt32(),
74
+ attributes: decoder.readInt16(),
75
+ lastOffsetDelta: decoder.readInt32(),
76
+ baseTimestamp: decoder.readInt64(),
77
+ maxTimestamp: decoder.readInt64(),
78
+ producerId: decoder.readInt64(),
79
+ producerEpoch: decoder.readInt16(),
80
+ baseSequence: decoder.readInt32(),
81
+ records: decoder.readRecords((record) => ({
82
+ attributes: record.readInt8(),
83
+ timestampDelta: record.readVarLong(),
84
+ offsetDelta: record.readVarInt(),
85
+ key: record.read(Math.ceil((record.readVarInt() - 1) / 2))?.toString() || null,
86
+ value: record.read(Math.ceil((record.readVarInt() - 1) / 2))?.toString() || null,
87
+ headers: record.readCompactArray((header) => ({
88
+ key: header.read(Math.ceil((header.readVarInt() - 1) / 2))?.toString(),
89
+ value: header.read(Math.ceil((header.readVarInt() - 1) / 2))?.toString(),
90
+ })),
91
+ })),
92
+ });
93
+ }
94
+ return results;
95
+ };
@@ -0,0 +1,34 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.FIND_COORDINATOR = exports.KEY_TYPE = void 0;
4
+ const api_1 = require("../utils/api");
5
+ exports.KEY_TYPE = {
6
+ GROUP: 0,
7
+ TRANSACTION: 1,
8
+ };
9
+ exports.FIND_COORDINATOR = (0, api_1.createApi)({
10
+ apiKey: 10,
11
+ apiVersion: 4,
12
+ request: (encoder, data) => {
13
+ return encoder
14
+ .writeUVarInt(0)
15
+ .writeInt8(data.keyType)
16
+ .writeCompactArray(data.keys, (encoder, key) => encoder.writeCompactString(key))
17
+ .writeUVarInt(0)
18
+ .value();
19
+ },
20
+ response: (decoder) => ({
21
+ _tag: decoder.readTagBuffer(),
22
+ throttleTimeMs: decoder.readInt32(),
23
+ coordinators: decoder.readCompactArray((decoder) => ({
24
+ key: decoder.readCompactString(),
25
+ nodeId: decoder.readInt32(),
26
+ host: decoder.readCompactString(),
27
+ port: decoder.readInt32(),
28
+ errorCode: decoder.readInt16(),
29
+ errorMessage: decoder.readCompactString(),
30
+ _tag: decoder.readTagBuffer(),
31
+ })),
32
+ _tag2: decoder.readTagBuffer(),
33
+ }),
34
+ });
@@ -0,0 +1,22 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.HEARTBEAT = void 0;
4
+ const api_1 = require("../utils/api");
5
+ exports.HEARTBEAT = (0, api_1.createApi)({
6
+ apiKey: 12,
7
+ apiVersion: 4,
8
+ request: (encoder, data) => encoder
9
+ .writeUVarInt(0)
10
+ .writeCompactString(data.groupId)
11
+ .writeInt32(data.generationId)
12
+ .writeCompactString(data.memberId)
13
+ .writeCompactString(data.groupInstanceId)
14
+ .writeUVarInt(0)
15
+ .value(),
16
+ response: (decoder) => ({
17
+ _tag: decoder.readTagBuffer(),
18
+ throttleTimeMs: decoder.readInt32(),
19
+ errorCode: decoder.readInt16(),
20
+ _tag2: decoder.readTagBuffer(),
21
+ }),
22
+ });