kafka-ts 1.3.1-beta.1 → 1.3.1-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. package/README.md +7 -7
  2. package/dist/api/alter-configs.d.ts +26 -0
  3. package/dist/api/alter-configs.js +33 -0
  4. package/dist/api/api-versions.d.ts +5 -2
  5. package/dist/api/api-versions.js +13 -0
  6. package/dist/api/create-topics.d.ts +14 -12
  7. package/dist/api/create-topics.js +104 -12
  8. package/dist/api/delete-topics.d.ts +10 -8
  9. package/dist/api/delete-topics.js +61 -7
  10. package/dist/api/fetch.d.ts +15 -12
  11. package/dist/api/fetch.js +131 -13
  12. package/dist/api/find-coordinator.d.ts +9 -7
  13. package/dist/api/find-coordinator.js +63 -5
  14. package/dist/api/heartbeat.d.ts +7 -5
  15. package/dist/api/heartbeat.js +42 -4
  16. package/dist/api/index.d.ts +47 -118
  17. package/dist/api/init-producer-id.d.ts +7 -5
  18. package/dist/api/init-producer-id.js +53 -9
  19. package/dist/api/join-group.d.ts +9 -7
  20. package/dist/api/join-group.js +95 -6
  21. package/dist/api/leave-group.d.ts +8 -6
  22. package/dist/api/leave-group.js +49 -6
  23. package/dist/api/list-offsets.d.ts +9 -7
  24. package/dist/api/list-offsets.js +85 -8
  25. package/dist/api/metadata.d.ts +10 -9
  26. package/dist/api/metadata.js +109 -8
  27. package/dist/api/offset-commit.d.ts +10 -8
  28. package/dist/api/offset-commit.js +88 -8
  29. package/dist/api/offset-fetch.d.ts +11 -9
  30. package/dist/api/offset-fetch.js +94 -9
  31. package/dist/api/produce.d.ts +8 -10
  32. package/dist/api/produce.js +132 -38
  33. package/dist/api/sasl-authenticate.d.ts +8 -6
  34. package/dist/api/sasl-authenticate.js +43 -3
  35. package/dist/api/sasl-handshake.d.ts +7 -4
  36. package/dist/api/sasl-handshake.js +10 -0
  37. package/dist/api/sync-group.d.ts +7 -5
  38. package/dist/api/sync-group.js +62 -5
  39. package/dist/broker.js +6 -5
  40. package/dist/cluster.test.js +17 -14
  41. package/dist/connection.d.ts +11 -1
  42. package/dist/connection.js +27 -2
  43. package/dist/consumer/consumer.js +13 -9
  44. package/dist/consumer/metadata.d.ts +24 -0
  45. package/dist/consumer/metadata.js +64 -0
  46. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  47. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  48. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  49. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  50. package/dist/examples/src/replicator.js +34 -0
  51. package/dist/examples/src/utils/json.js +5 -0
  52. package/dist/request-handler.d.ts +16 -0
  53. package/dist/request-handler.js +67 -0
  54. package/dist/request-handler.test.d.ts +1 -0
  55. package/dist/request-handler.test.js +340 -0
  56. package/dist/src/api/api-versions.js +18 -0
  57. package/dist/src/api/create-topics.js +46 -0
  58. package/dist/src/api/delete-topics.js +26 -0
  59. package/dist/src/api/fetch.js +95 -0
  60. package/dist/src/api/find-coordinator.js +34 -0
  61. package/dist/src/api/heartbeat.js +22 -0
  62. package/dist/src/api/index.js +38 -0
  63. package/dist/src/api/init-producer-id.js +24 -0
  64. package/dist/src/api/join-group.js +48 -0
  65. package/dist/src/api/leave-group.js +30 -0
  66. package/dist/src/api/list-offsets.js +39 -0
  67. package/dist/src/api/metadata.js +47 -0
  68. package/dist/src/api/offset-commit.js +39 -0
  69. package/dist/src/api/offset-fetch.js +44 -0
  70. package/dist/src/api/produce.js +119 -0
  71. package/dist/src/api/sync-group.js +31 -0
  72. package/dist/src/broker.js +35 -0
  73. package/dist/src/connection.js +21 -0
  74. package/dist/src/consumer/consumer-group.js +131 -0
  75. package/dist/src/consumer/consumer.js +103 -0
  76. package/dist/src/consumer/metadata.js +52 -0
  77. package/dist/src/consumer/offset-manager.js +23 -0
  78. package/dist/src/index.js +19 -0
  79. package/dist/src/producer/producer.js +84 -0
  80. package/dist/src/request-handler.js +57 -0
  81. package/dist/src/request-handler.test.js +321 -0
  82. package/dist/src/types.js +2 -0
  83. package/dist/src/utils/api.js +5 -0
  84. package/dist/src/utils/decoder.js +161 -0
  85. package/dist/src/utils/encoder.js +137 -0
  86. package/dist/src/utils/error.js +10 -0
  87. package/dist/utils/api.d.ts +4 -1
  88. package/dist/utils/cached.d.ts +3 -0
  89. package/dist/utils/cached.js +19 -0
  90. package/dist/utils/debug.d.ts +2 -0
  91. package/dist/utils/debug.js +11 -0
  92. package/dist/utils/decoder.d.ts +2 -2
  93. package/dist/utils/decoder.js +14 -1
  94. package/dist/utils/encoder.d.ts +1 -0
  95. package/dist/utils/encoder.js +14 -0
  96. package/dist/utils/lock.d.ts +8 -0
  97. package/dist/utils/lock.js +44 -0
  98. package/dist/utils/memo.d.ts +1 -0
  99. package/dist/utils/memo.js +16 -0
  100. package/dist/utils/mutex.d.ts +3 -0
  101. package/dist/utils/mutex.js +32 -0
  102. package/package.json +1 -1
package/dist/api/offset-fetch.js CHANGED
@@ -3,23 +3,108 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.OFFSET_FETCH = void 0;
  const api_1 = require("../utils/api");
  const error_1 = require("../utils/error");
+ /*
+ OffsetFetch Request (Version: 1) => group_id [topics]
+   group_id => STRING
+   topics => name [partition_indexes]
+     name => STRING
+     partition_indexes => INT32
+
+ OffsetFetch Response (Version: 1) => [topics]
+   topics => name [partitions]
+     name => STRING
+     partitions => partition_index committed_offset metadata error_code
+       partition_index => INT32
+       committed_offset => INT64
+       metadata => NULLABLE_STRING
+       error_code => INT16
+ */
+ const OFFSET_FETCH_V1 = (0, api_1.createApi)({
+     apiKey: 9,
+     apiVersion: 1,
+     requestHeaderVersion: 1,
+     responseHeaderVersion: 0,
+     request: (encoder, data) => encoder
+         .writeString(data.groups[0].groupId)
+         .writeArray(data.groups[0].topics, (encoder, topic) => encoder
+             .writeString(topic.name)
+             .writeArray(topic.partitionIndexes, (encoder, partitionIndex) => encoder.writeInt32(partitionIndex))),
+     response: (decoder) => {
+         const result = {
+             throttleTimeMs: 0,
+             groups: [
+                 {
+                     groupId: '', // Not provided in v1 response
+                     topics: decoder.readArray((decoder) => ({
+                         name: decoder.readString(),
+                         partitions: decoder.readArray((decoder) => ({
+                             partitionIndex: decoder.readInt32(),
+                             committedOffset: decoder.readInt64(),
+                             committedLeaderEpoch: -1,
+                             committedMetadata: decoder.readString(),
+                             errorCode: decoder.readInt16(),
+                             tags: {},
+                         })),
+                         tags: {},
+                     })),
+                     errorCode: 0,
+                     tags: {},
+                 },
+             ],
+             tags: {},
+         };
+         result.groups.forEach((group) => {
+             group.topics.forEach((topic) => {
+                 topic.partitions.forEach((partition) => {
+                     if (partition.errorCode)
+                         throw new error_1.KafkaTSApiError(partition.errorCode, null, result);
+                 });
+             });
+         });
+         return result;
+     },
+ });
+ /*
+ OffsetFetch Request (Version: 8) => [groups] require_stable _tagged_fields
+   groups => group_id [topics] _tagged_fields
+     group_id => COMPACT_STRING
+     topics => name [partition_indexes] _tagged_fields
+       name => COMPACT_STRING
+       partition_indexes => INT32
+   require_stable => BOOLEAN
+
+ OffsetFetch Response (Version: 8) => throttle_time_ms [groups] _tagged_fields
+   throttle_time_ms => INT32
+   groups => group_id [topics] error_code _tagged_fields
+     group_id => COMPACT_STRING
+     topics => name [partitions] _tagged_fields
+       name => COMPACT_STRING
+       partitions => partition_index committed_offset committed_leader_epoch metadata error_code _tagged_fields
+         partition_index => INT32
+         committed_offset => INT64
+         committed_leader_epoch => INT32
+         metadata => COMPACT_NULLABLE_STRING
+         error_code => INT16
+     error_code => INT16
+ */
  exports.OFFSET_FETCH = (0, api_1.createApi)({
      apiKey: 9,
      apiVersion: 8,
+     fallback: OFFSET_FETCH_V1,
+     requestHeaderVersion: 2,
+     responseHeaderVersion: 1,
      request: (encoder, data) => encoder
-         .writeUVarInt(0)
          .writeCompactArray(data.groups, (encoder, group) => encoder
              .writeCompactString(group.groupId)
              .writeCompactArray(group.topics, (encoder, topic) => encoder
                  .writeCompactString(topic.name)
                  .writeCompactArray(topic.partitionIndexes, (encoder, partitionIndex) => encoder.writeInt32(partitionIndex))
-                 .writeUVarInt(0))
-             .writeUVarInt(0))
+                 .writeTagBuffer())
+             .writeTagBuffer())
          .writeBoolean(data.requireStable)
-         .writeUVarInt(0),
+         .writeTagBuffer(),
      response: (decoder) => {
          const result = {
-             _tag: decoder.readTagBuffer(),
              throttleTimeMs: decoder.readInt32(),
              groups: decoder.readCompactArray((decoder) => ({
                  groupId: decoder.readCompactString(),
@@ -31,14 +116,14 @@ exports.OFFSET_FETCH = (0, api_1.createApi)({
                          committedLeaderEpoch: decoder.readInt32(),
                          committedMetadata: decoder.readCompactString(),
                          errorCode: decoder.readInt16(),
-                         _tag: decoder.readTagBuffer(),
+                         tags: decoder.readTagBuffer(),
                      })),
-                     _tag: decoder.readTagBuffer(),
+                     tags: decoder.readTagBuffer(),
                  })),
                  errorCode: decoder.readInt16(),
-                 _tag: decoder.readTagBuffer(),
+                 tags: decoder.readTagBuffer(),
              })),
-             _tag2: decoder.readTagBuffer(),
+             tags: decoder.readTagBuffer(),
          };
          result.groups.forEach((group) => {
              if (group.errorCode)
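
The `_tag`/`writeUVarInt(0)` pairs this file drops in favor of `writeTagBuffer()`/`readTagBuffer()` are Kafka's KIP-482 tagged fields: every flexible-version request and response section ends with a tag buffer, and an empty one is a single zero varint, which is why the old `writeUVarInt(0)` calls produced the same bytes. A minimal sketch of that wire layout, standalone TypeScript rather than kafka-ts source, with illustrative helper names:

```typescript
// KIP-482 tagged fields: <count><tag><size><bytes>..., all varint-framed.
type TaggedFields = Record<number, Buffer>;

const writeUVarInt = (value: number, out: number[]) => {
    // unsigned LEB128, as used by Kafka's flexible versions
    do {
        let byte = value & 0x7f;
        value >>>= 7;
        if (value !== 0) byte |= 0x80;
        out.push(byte);
    } while (value !== 0);
};

const encodeTagBuffer = (tags: TaggedFields = {}): Buffer => {
    const out: number[] = [];
    const entries = Object.entries(tags)
        .map(([tag, data]) => [Number(tag), data] as const)
        .sort(([a], [b]) => a - b); // tags must be written in ascending order
    writeUVarInt(entries.length, out); // number of tagged fields
    for (const [tag, data] of entries) {
        writeUVarInt(tag, out);         // field tag
        writeUVarInt(data.length, out); // field size
        out.push(...data);              // raw field bytes
    }
    return Buffer.from(out);
};

console.log(encodeTagBuffer());                           // <Buffer 00> (empty tag buffer)
console.log(encodeTagBuffer({ 1: Buffer.from([0xff]) })); // <Buffer 01 01 01 ff>
```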
package/dist/api/produce.d.ts CHANGED
@@ -1,4 +1,4 @@
- export declare const PRODUCE: import("../utils/api.js").Api<{
+ type ProduceRequest = {
      transactionalId: string | null;
      acks: number;
      timeoutMs: number;
@@ -28,10 +28,10 @@ export declare const PRODUCE: import("../utils/api.js").Api<{
              }[];
          }[];
      }[];
- }, {
-     _tag: void;
+ };
+ type ProduceResponse = {
      responses: {
-         name: string | null;
+         name: string;
          partitionResponses: {
              index: number;
              errorCode: number;
@@ -40,14 +40,12 @@ export declare const PRODUCE: import("../utils/api.js").Api<{
              logStartOffset: bigint;
              recordErrors: {
                  batchIndex: number;
-                 batchIndexError: number;
-                 _tag: void;
+                 batchIndexErrorMessage: string | null;
              }[];
              errorMessage: string | null;
-             _tag: void;
          }[];
-         _tag: void;
      }[];
      throttleTimeMs: number;
-     _tag2: void;
- }>;
+ };
+ export declare const PRODUCE: import("../utils/api.js").Api<ProduceRequest, ProduceResponse>;
+ export {};
package/dist/api/produce.js CHANGED
@@ -4,56 +4,150 @@ exports.PRODUCE = void 0;
  const api_js_1 = require("../utils/api.js");
  const encoder_js_1 = require("../utils/encoder.js");
  const error_js_1 = require("../utils/error.js");
+ const createBatch = (partition) => {
+     const batchBody = new encoder_js_1.Encoder()
+         .writeInt16(partition.attributes)
+         .writeInt32(partition.lastOffsetDelta)
+         .writeInt64(partition.baseTimestamp)
+         .writeInt64(partition.maxTimestamp)
+         .writeInt64(partition.producerId)
+         .writeInt16(partition.producerEpoch)
+         .writeInt32(partition.baseSequence)
+         .writeArray(partition.records, (encoder, record) => {
+             const recordBody = new encoder_js_1.Encoder()
+                 .writeInt8(record.attributes)
+                 .writeVarLong(record.timestampDelta)
+                 .writeVarInt(record.offsetDelta)
+                 .writeVarIntString(record.key)
+                 .writeVarIntString(record.value)
+                 .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntString(header.key).writeVarIntString(header.value));
+             return encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody);
+         })
+         .value();
+     const batchHeader = new encoder_js_1.Encoder()
+         .writeInt32(partition.partitionLeaderEpoch)
+         .writeInt8(2) // magic byte
+         .writeUInt32(unsigned(crc32C(batchBody)))
+         .write(batchBody);
+     const batch = new encoder_js_1.Encoder()
+         .writeInt64(partition.baseOffset)
+         .writeInt32(batchHeader.getBufferLength())
+         .writeEncoder(batchHeader);
+     return batch;
+ };
+ /*
+ Produce Request (Version: 3) => transactional_id acks timeout_ms [topic_data]
+   transactional_id => NULLABLE_STRING
+   acks => INT16
+   timeout_ms => INT32
+   topic_data => name [partition_data]
+     name => STRING
+     partition_data => index records
+       index => INT32
+       records => RECORDS
+
+ Produce Response (Version: 3) => [responses] throttle_time_ms
+   responses => name [partition_responses]
+     name => STRING
+     partition_responses => index error_code base_offset log_append_time_ms
+       index => INT32
+       error_code => INT16
+       base_offset => INT64
+       log_append_time_ms => INT64
+   throttle_time_ms => INT32
+ */
+ const PRODUCE_V3 = (0, api_js_1.createApi)({
+     apiKey: 0,
+     apiVersion: 3,
+     requestHeaderVersion: 1,
+     responseHeaderVersion: 0,
+     request: (encoder, data) => encoder
+         .writeString(data.transactionalId)
+         .writeInt16(data.acks)
+         .writeInt32(data.timeoutMs)
+         .writeArray(data.topicData, (encoder, topic) => encoder.writeString(topic.name).writeArray(topic.partitionData, (encoder, partition) => {
+             const batch = createBatch(partition);
+             return encoder.writeInt32(partition.index).writeInt32(batch.getBufferLength()).writeEncoder(batch);
+         })),
+     response: (decoder) => {
+         const result = {
+             responses: decoder.readArray((response) => ({
+                 name: response.readString(),
+                 partitionResponses: response.readArray((partitionResponse) => ({
+                     index: partitionResponse.readInt32(),
+                     errorCode: partitionResponse.readInt16(),
+                     baseOffset: partitionResponse.readInt64(),
+                     logAppendTime: partitionResponse.readInt64(),
+                     logStartOffset: BigInt(0), // Not present in v3 response
+                     recordErrors: [],
+                     errorMessage: null,
+                     tags: {},
+                 })),
+                 tags: {},
+             })),
+             throttleTimeMs: decoder.readInt32(),
+             tags: {},
+         };
+         result.responses.forEach((topic) => {
+             topic.partitionResponses.forEach((partition) => {
+                 if (partition.errorCode !== 0) {
+                     throw new error_js_1.KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
+                 }
+             });
+         });
+         return result;
+     },
+ });
+ /*
+ Produce Request (Version: 9) => transactional_id acks timeout_ms [topic_data] _tagged_fields
+   transactional_id => COMPACT_NULLABLE_STRING
+   acks => INT16
+   timeout_ms => INT32
+   topic_data => name [partition_data] _tagged_fields
+     name => COMPACT_STRING
+     partition_data => index records _tagged_fields
+       index => INT32
+       records => COMPACT_RECORDS
+
+ Produce Response (Version: 9) => [responses] throttle_time_ms _tagged_fields
+   responses => name [partition_responses] _tagged_fields
+     name => COMPACT_STRING
+     partition_responses => index error_code base_offset log_append_time_ms log_start_offset [record_errors] error_message _tagged_fields
+       index => INT32
+       error_code => INT16
+       base_offset => INT64
+       log_append_time_ms => INT64
+       log_start_offset => INT64
+       record_errors => batch_index batch_index_error_message _tagged_fields
+         batch_index => INT32
+         batch_index_error_message => COMPACT_NULLABLE_STRING
+       error_message => COMPACT_NULLABLE_STRING
+   throttle_time_ms => INT32
+ */
  exports.PRODUCE = (0, api_js_1.createApi)({
      apiKey: 0,
      apiVersion: 9,
+     requestHeaderVersion: 2,
+     responseHeaderVersion: 1,
+     fallback: PRODUCE_V3,
      request: (encoder, data) => encoder
-         .writeUVarInt(0)
          .writeCompactString(data.transactionalId)
          .writeInt16(data.acks)
          .writeInt32(data.timeoutMs)
          .writeCompactArray(data.topicData, (encoder, topic) => encoder
              .writeCompactString(topic.name)
              .writeCompactArray(topic.partitionData, (encoder, partition) => {
-                 const batchBody = new encoder_js_1.Encoder()
-                     .writeInt16(partition.attributes)
-                     .writeInt32(partition.lastOffsetDelta)
-                     .writeInt64(partition.baseTimestamp)
-                     .writeInt64(partition.maxTimestamp)
-                     .writeInt64(partition.producerId)
-                     .writeInt16(partition.producerEpoch)
-                     .writeInt32(partition.baseSequence)
-                     .writeArray(partition.records, (encoder, record) => {
-                         const recordBody = new encoder_js_1.Encoder()
-                             .writeInt8(record.attributes)
-                             .writeVarLong(record.timestampDelta)
-                             .writeVarInt(record.offsetDelta)
-                             .writeVarIntString(record.key)
-                             .writeVarIntString(record.value)
-                             .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntString(header.key).writeVarIntString(header.value));
-                         return encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody);
-                     })
-                     .value();
-                 const batchHeader = new encoder_js_1.Encoder()
-                     .writeInt32(partition.partitionLeaderEpoch)
-                     .writeInt8(2) // magic byte
-                     .writeUInt32(unsigned(crc32C(batchBody)))
-                     .write(batchBody);
-                 const batch = new encoder_js_1.Encoder()
-                     .writeInt64(partition.baseOffset)
-                     .writeInt32(batchHeader.getBufferLength())
-                     .writeEncoder(batchHeader);
+                 const batch = createBatch(partition);
                  return encoder
                      .writeInt32(partition.index)
                      .writeUVarInt(batch.getBufferLength() + 1)
                      .writeEncoder(batch)
-                     .writeUVarInt(0);
+                     .writeTagBuffer();
              })
-             .writeUVarInt(0))
-         .writeUVarInt(0),
+             .writeTagBuffer())
+         .writeTagBuffer(),
      response: (decoder) => {
          const result = {
-             _tag: decoder.readTagBuffer(),
              responses: decoder.readCompactArray((response) => ({
                  name: response.readCompactString(),
                  partitionResponses: response.readCompactArray((partitionResponse) => ({
@@ -64,16 +158,16 @@ exports.PRODUCE = (0, api_js_1.createApi)({
                      logStartOffset: partitionResponse.readInt64(),
                      recordErrors: partitionResponse.readCompactArray((recordError) => ({
                          batchIndex: recordError.readInt32(),
-                         batchIndexError: recordError.readInt16(),
-                         _tag: recordError.readTagBuffer(),
+                         batchIndexErrorMessage: recordError.readCompactString(),
+                         tags: recordError.readTagBuffer(),
                      })),
                      errorMessage: partitionResponse.readCompactString(),
-                     _tag: partitionResponse.readTagBuffer(),
+                     tags: partitionResponse.readTagBuffer(),
                  })),
-                 _tag: response.readTagBuffer(),
+                 tags: response.readTagBuffer(),
              })),
              throttleTimeMs: decoder.readInt32(),
-             _tag2: decoder.readTagBuffer(),
+             tags: decoder.readTagBuffer(),
          };
          result.responses.forEach((topic) => {
              topic.partitionResponses.forEach((partition) => {
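
For context on the `writeUInt32(unsigned(crc32C(batchBody)))` line that moved into `createBatch`: v2 record batches (magic byte 2) are checksummed with CRC-32C (Castagnoli) over everything after the crc field, and JavaScript bitwise operators yield signed 32-bit values, so the checksum must be coerced back to unsigned before writing. A rough sketch of what such helpers can look like, assumed implementations rather than kafka-ts source:

```typescript
// Bitwise (table-free) CRC-32C; 0x82f63b78 is the reflected Castagnoli polynomial.
const crc32C = (buf: Buffer): number => {
    let crc = ~0; // initial value 0xffffffff
    for (const byte of buf) {
        crc ^= byte;
        for (let i = 0; i < 8; i++) {
            crc = crc & 1 ? (crc >>> 1) ^ 0x82f63b78 : crc >>> 1;
        }
    }
    return ~crc; // final XOR; still a signed 32-bit int in JS
};

// Reinterpret the signed result as an unsigned 32-bit integer.
const unsigned = (n: number): number => n >>> 0;

// CRC-32C's standard check value for "123456789" is 0xe3069283.
console.log(unsigned(crc32C(Buffer.from('123456789'))).toString(16)); // "e3069283"
```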
package/dist/api/sasl-authenticate.d.ts CHANGED
@@ -1,10 +1,12 @@
- export declare const SASL_AUTHENTICATE: import("../utils/api").Api<{
+ type SaslAuthenticateRequest = {
      authBytes: Buffer;
- }, {
-     _tag: void;
+ };
+ type SaslAuthenticateResponse = {
      errorCode: number;
      errorMessage: string | null;
-     authBytes: Buffer<ArrayBufferLike> | null;
+     authBytes: Buffer;
      sessionLifetimeMs: bigint;
-     _tag2: void;
- }>;
+     tags: Record<number, Buffer>;
+ };
+ export declare const SASL_AUTHENTICATE: import("../utils/api").Api<SaslAuthenticateRequest, SaslAuthenticateResponse>;
+ export {};
package/dist/api/sasl-authenticate.js CHANGED
@@ -3,18 +3,58 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.SASL_AUTHENTICATE = void 0;
  const api_1 = require("../utils/api");
  const error_1 = require("../utils/error");
+ /*
+ SaslAuthenticate Request (Version: 0) => auth_bytes
+   auth_bytes => BYTES
+
+ SaslAuthenticate Response (Version: 0) => error_code error_message auth_bytes
+   error_code => INT16
+   error_message => NULLABLE_STRING
+   auth_bytes => BYTES
+ */
+ const SASL_AUTHENTICATE_V0 = (0, api_1.createApi)({
+     apiKey: 36,
+     apiVersion: 0,
+     requestHeaderVersion: 1,
+     responseHeaderVersion: 0,
+     request: (encoder, data) => encoder.writeBytes(data.authBytes),
+     response: (decoder) => {
+         const result = {
+             errorCode: decoder.readInt16(),
+             errorMessage: decoder.readString(),
+             authBytes: decoder.readBytes(),
+             sessionLifetimeMs: BigInt(0),
+             tags: {},
+         };
+         if (result.errorCode)
+             throw new error_1.KafkaTSApiError(result.errorCode, result.errorMessage, result);
+         return result;
+     },
+ });
+ /*
+ SaslAuthenticate Request (Version: 2) => auth_bytes _tagged_fields
+   auth_bytes => COMPACT_BYTES
+
+ SaslAuthenticate Response (Version: 2) => error_code error_message auth_bytes session_lifetime_ms _tagged_fields
+   error_code => INT16
+   error_message => COMPACT_NULLABLE_STRING
+   auth_bytes => COMPACT_BYTES
+   session_lifetime_ms => INT64
+ */
  exports.SASL_AUTHENTICATE = (0, api_1.createApi)({
      apiKey: 36,
      apiVersion: 2,
-     request: (encoder, data) => encoder.writeUVarInt(0).writeCompactBytes(data.authBytes).writeUVarInt(0),
+     fallback: SASL_AUTHENTICATE_V0,
+     requestHeaderVersion: 2,
+     responseHeaderVersion: 1,
+     request: (encoder, data) => encoder.writeCompactBytes(data.authBytes).writeTagBuffer(),
      response: (decoder) => {
          const result = {
-             _tag: decoder.readTagBuffer(),
              errorCode: decoder.readInt16(),
              errorMessage: decoder.readCompactString(),
              authBytes: decoder.readCompactBytes(),
              sessionLifetimeMs: decoder.readInt64(),
-             _tag2: decoder.readTagBuffer(),
+             tags: decoder.readTagBuffer(),
          };
          if (result.errorCode)
              throw new error_1.KafkaTSApiError(result.errorCode, result.errorMessage, result);
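
The `authBytes` payload is opaque at the protocol level; for the common PLAIN mechanism (negotiated through SASL_HANDSHAKE, next file) it is just the RFC 4616 NUL-separated triple of authorization id, username, and password. A minimal standalone sketch, not kafka-ts source:

```typescript
// RFC 4616 PLAIN: [authzid] NUL authcid NUL passwd (empty authzid here).
const saslPlain = (username: string, password: string): Buffer =>
    Buffer.from(`\u0000${username}\u0000${password}`, 'utf8');

console.log(saslPlain('admin', 'secret'));
// <Buffer 00 61 64 6d 69 6e 00 73 65 63 72 65 74>
```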
package/dist/api/sasl-handshake.d.ts CHANGED
@@ -1,6 +1,9 @@
- export declare const SASL_HANDSHAKE: import("../utils/api").Api<{
+ type SaslHandshakeRequest = {
      mechanism: string;
- }, {
+ };
+ type SaslHandshakeResponse = {
      errorCode: number;
-     mechanisms: (string | null)[];
- }>;
+     mechanisms: string[];
+ };
+ export declare const SASL_HANDSHAKE: import("../utils/api").Api<SaslHandshakeRequest, SaslHandshakeResponse>;
+ export {};
package/dist/api/sasl-handshake.js CHANGED
@@ -3,9 +3,19 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.SASL_HANDSHAKE = void 0;
  const api_1 = require("../utils/api");
  const error_1 = require("../utils/error");
+ /*
+ SaslHandshake Request (Version: 1) => mechanism
+   mechanism => STRING
+
+ SaslHandshake Response (Version: 1) => error_code [mechanisms]
+   error_code => INT16
+   mechanisms => STRING
+ */
  exports.SASL_HANDSHAKE = (0, api_1.createApi)({
      apiKey: 17,
      apiVersion: 1,
+     requestHeaderVersion: 1,
+     responseHeaderVersion: 0,
      request: (encoder, data) => encoder.writeString(data.mechanism),
      response: (decoder) => {
          const result = {
package/dist/api/sync-group.d.ts CHANGED
@@ -5,7 +5,7 @@ export type MemberAssignment = {
      memberId: string;
      assignment: Assignment;
  };
- export declare const SYNC_GROUP: import("../utils/api").Api<{
+ type SyncGroupRequest = {
      groupId: string;
      generationId: number;
      memberId: string;
@@ -13,12 +13,14 @@ export declare const SYNC_GROUP: import("../utils/api").Api<{
      protocolType: string | null;
      protocolName: string | null;
      assignments: MemberAssignment[];
- }, {
-     _tag: void;
+ };
+ type SyncGroupResponse = {
      throttleTimeMs: number;
      errorCode: number;
      protocolType: string | null;
      protocolName: string | null;
      assignments: Assignment;
-     _tag2: void;
- }>;
+     tags: Record<number, Buffer>;
+ };
+ export declare const SYNC_GROUP: import("../utils/api").Api<SyncGroupRequest, SyncGroupResponse>;
+ export {};
package/dist/api/sync-group.js CHANGED
@@ -5,11 +5,69 @@ const api_1 = require("../utils/api");
  const decoder_1 = require("../utils/decoder");
  const encoder_1 = require("../utils/encoder");
  const error_1 = require("../utils/error");
+ /*
+ SyncGroup Request (Version: 0) => group_id generation_id member_id [assignments]
+   group_id => STRING
+   generation_id => INT32
+   member_id => STRING
+   assignments => member_id assignment
+     member_id => STRING
+     assignment => BYTES
+
+ SyncGroup Response (Version: 0) => error_code assignment
+   error_code => INT16
+   assignment => BYTES
+ */
+ const SYNC_GROUP_V0 = (0, api_1.createApi)({
+     apiKey: 14,
+     apiVersion: 0,
+     requestHeaderVersion: 1,
+     responseHeaderVersion: 0,
+     request: (encoder, data) => encoder
+         .writeString(data.groupId)
+         .writeInt32(data.generationId)
+         .writeString(data.memberId)
+         .writeArray(data.assignments, (encoder, assignment) => encoder.writeString(assignment.memberId).writeBytes(encodeAssignment(assignment.assignment))),
+     response: (decoder) => {
+         const result = {
+             throttleTimeMs: 0,
+             errorCode: decoder.readInt16(),
+             protocolType: null,
+             protocolName: null,
+             assignments: decodeAssignment(decoder.readBytes()),
+             tags: {},
+         };
+         if (result.errorCode)
+             throw new error_1.KafkaTSApiError(result.errorCode, null, result);
+         return result;
+     },
+ });
+ /*
+ SyncGroup Request (Version: 5) => group_id generation_id member_id group_instance_id protocol_type protocol_name [assignments] _tagged_fields
+   group_id => COMPACT_STRING
+   generation_id => INT32
+   member_id => COMPACT_STRING
+   group_instance_id => COMPACT_NULLABLE_STRING
+   protocol_type => COMPACT_NULLABLE_STRING
+   protocol_name => COMPACT_NULLABLE_STRING
+   assignments => member_id assignment _tagged_fields
+     member_id => COMPACT_STRING
+     assignment => COMPACT_BYTES
+
+ SyncGroup Response (Version: 5) => throttle_time_ms error_code protocol_type protocol_name assignment _tagged_fields
+   throttle_time_ms => INT32
+   error_code => INT16
+   protocol_type => COMPACT_NULLABLE_STRING
+   protocol_name => COMPACT_NULLABLE_STRING
+   assignment => COMPACT_BYTES
+ */
  exports.SYNC_GROUP = (0, api_1.createApi)({
      apiKey: 14,
      apiVersion: 5,
+     fallback: SYNC_GROUP_V0,
+     requestHeaderVersion: 2,
+     responseHeaderVersion: 1,
      request: (encoder, data) => encoder
-         .writeUVarInt(0)
          .writeCompactString(data.groupId)
          .writeInt32(data.generationId)
          .writeCompactString(data.memberId)
@@ -19,17 +77,16 @@ exports.SYNC_GROUP = (0, api_1.createApi)({
          .writeCompactArray(data.assignments, (encoder, assignment) => encoder
              .writeCompactString(assignment.memberId)
              .writeCompactBytes(encodeAssignment(assignment.assignment))
-             .writeUVarInt(0))
-         .writeUVarInt(0),
+             .writeTagBuffer())
+         .writeTagBuffer(),
      response: (decoder) => {
          const result = {
-             _tag: decoder.readTagBuffer(),
              throttleTimeMs: decoder.readInt32(),
              errorCode: decoder.readInt16(),
              protocolType: decoder.readCompactString(),
              protocolName: decoder.readCompactString(),
              assignments: decodeAssignment(decoder.readCompactBytes()),
-             _tag2: decoder.readTagBuffer(),
+             tags: decoder.readTagBuffer(),
          };
          if (result.errorCode)
              throw new error_1.KafkaTSApiError(result.errorCode, null, result);
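
`encodeAssignment`/`decodeAssignment` (defined elsewhere in this file) serialize the member assignment into the opaque `assignment` BYTES/COMPACT_BYTES field above. Kafka's classic consumer protocol lays that blob out as an INT16 schema version, an array of topic/partition lists, and trailing user data; the sketch below assumes `Assignment` maps topic names to partition arrays, which may not match kafka-ts's actual shape (illustrative only, not kafka-ts source):

```typescript
// ConsumerProtocolAssignment v0: version INT16, [topic STRING, [partition INT32]], user_data BYTES.
type Assignment = Record<string, number[]>; // assumed: topic -> partition indexes

const encodeAssignment = (assignment: Assignment): Buffer => {
    const chunks: Buffer[] = [];
    const int16 = (n: number) => { const b = Buffer.alloc(2); b.writeInt16BE(n); chunks.push(b); };
    const int32 = (n: number) => { const b = Buffer.alloc(4); b.writeInt32BE(n); chunks.push(b); };
    const str = (s: string) => { int16(Buffer.byteLength(s)); chunks.push(Buffer.from(s)); };

    int16(0); // schema version
    const topics = Object.entries(assignment);
    int32(topics.length);
    for (const [topic, partitions] of topics) {
        str(topic);
        int32(partitions.length);
        partitions.forEach((p) => int32(p));
    }
    int32(0); // empty user_data
    return Buffer.concat(chunks);
};

console.log(encodeAssignment({ 'my-topic': [0, 1, 2] }));
```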
package/dist/broker.js CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.Broker = void 0;
  const api_1 = require("./api");
  const connection_1 = require("./connection");
- const error_1 = require("./utils/error");
+ const logger_1 = require("./utils/logger");
  class Broker {
      options;
      connection;
@@ -34,14 +34,15 @@ class Broker {
          const { versions } = await this.sendRequest(api_1.API.API_VERSIONS, {});
          const apiByKey = Object.fromEntries(Object.values(api_1.API).map((api) => [api.apiKey, api]));
          versions.forEach(({ apiKey, minVersion, maxVersion }) => {
-             if (!apiByKey[apiKey]) {
+             const api = apiByKey[apiKey];
+             if (!api) {
                  return;
              }
-             const { apiVersion } = apiByKey[apiKey];
-             if (apiVersion < minVersion || apiVersion > maxVersion) {
-                 throw new error_1.KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`);
+             if (api.apiVersion < minVersion || api.apiVersion > maxVersion) {
+                 logger_1.log.warn(`Broker does not support API ${(0, api_1.getApiName)(api)} version ${api.apiVersion} (minVersion=${minVersion}, maxVersion=${maxVersion})`);
              }
          });
+         this.connection.setVersions(versions);
      }
      async saslHandshake() {
          if (!this.options.sasl) {
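
Together with the `fallback` fields added to the API definitions above, this hunk turns version negotiation from fail-fast into downgrade: an unsupported version now only logs a warning, and the broker's advertised ranges are handed to the connection via `setVersions`. One plausible shape for the resolution step, a hypothetical helper rather than kafka-ts source:

```typescript
// Walk an API definition's fallback chain until the broker's advertised
// [minVersion, maxVersion] range accepts one of the versions.
type Api = { apiKey: number; apiVersion: number; fallback?: Api };
type VersionRange = { apiKey: number; minVersion: number; maxVersion: number };

const resolveApi = (api: Api, ranges: VersionRange[]): Api => {
    const range = ranges.find((r) => r.apiKey === api.apiKey);
    let candidate: Api | undefined = api;
    while (candidate) {
        if (!range || (candidate.apiVersion >= range.minVersion && candidate.apiVersion <= range.maxVersion)) {
            return candidate; // newest definition the broker accepts
        }
        candidate = candidate.fallback;
    }
    return api; // no version matched; let the broker reject the request explicitly
};

// e.g. PRODUCE v9 falling back to v3 against a broker capped at maxVersion 8:
const PRODUCE_V3: Api = { apiKey: 0, apiVersion: 3 };
const PRODUCE: Api = { apiKey: 0, apiVersion: 9, fallback: PRODUCE_V3 };
console.log(resolveApi(PRODUCE, [{ apiKey: 0, minVersion: 0, maxVersion: 8 }]).apiVersion); // 3
```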