kafka-ts 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/dist/client.d.ts +1 -2
- package/package.json +1 -1
- package/.prettierrc +0 -8
- package/src/__snapshots__/cluster.test.ts.snap +0 -1281
- package/src/api/api-versions.ts +0 -21
- package/src/api/create-topics.ts +0 -78
- package/src/api/delete-topics.ts +0 -42
- package/src/api/fetch.ts +0 -198
- package/src/api/find-coordinator.ts +0 -39
- package/src/api/heartbeat.ts +0 -33
- package/src/api/index.ts +0 -166
- package/src/api/init-producer-id.ts +0 -35
- package/src/api/join-group.ts +0 -67
- package/src/api/leave-group.ts +0 -48
- package/src/api/list-offsets.ts +0 -65
- package/src/api/metadata.ts +0 -66
- package/src/api/offset-commit.ts +0 -67
- package/src/api/offset-fetch.ts +0 -70
- package/src/api/produce.ts +0 -170
- package/src/api/sasl-authenticate.ts +0 -21
- package/src/api/sasl-handshake.ts +0 -16
- package/src/api/sync-group.ts +0 -54
- package/src/auth/index.ts +0 -2
- package/src/auth/plain.ts +0 -10
- package/src/auth/scram.ts +0 -52
- package/src/broker.ts +0 -72
- package/src/client.ts +0 -47
- package/src/cluster.test.ts +0 -371
- package/src/cluster.ts +0 -85
- package/src/codecs/gzip.ts +0 -9
- package/src/codecs/index.ts +0 -16
- package/src/codecs/none.ts +0 -6
- package/src/codecs/types.ts +0 -4
- package/src/connection.ts +0 -157
- package/src/consumer/consumer-group.ts +0 -229
- package/src/consumer/consumer-metadata.ts +0 -14
- package/src/consumer/consumer.ts +0 -252
- package/src/consumer/fetch-manager.ts +0 -169
- package/src/consumer/fetcher.ts +0 -64
- package/src/consumer/offset-manager.ts +0 -104
- package/src/consumer/processor.ts +0 -53
- package/src/distributors/assignments-to-replicas.test.ts +0 -43
- package/src/distributors/assignments-to-replicas.ts +0 -83
- package/src/distributors/messages-to-topic-partition-leaders.test.ts +0 -32
- package/src/distributors/messages-to-topic-partition-leaders.ts +0 -19
- package/src/distributors/partitioner.ts +0 -27
- package/src/index.ts +0 -9
- package/src/metadata.ts +0 -126
- package/src/producer/producer.ts +0 -142
- package/src/types.ts +0 -11
- package/src/utils/api.ts +0 -11
- package/src/utils/crypto.ts +0 -15
- package/src/utils/decoder.ts +0 -174
- package/src/utils/delay.ts +0 -1
- package/src/utils/encoder.ts +0 -148
- package/src/utils/error.ts +0 -21
- package/src/utils/logger.ts +0 -37
- package/src/utils/memo.ts +0 -12
- package/src/utils/murmur2.ts +0 -44
- package/src/utils/retrier.ts +0 -39
- package/src/utils/tracer.ts +0 -49
- package/tsconfig.json +0 -17
package/src/api/api-versions.ts
DELETED
@@ -1,21 +0,0 @@
-import { createApi } from '../utils/api.js';
-import { KafkaTSApiError } from '../utils/error.js';
-
-export const API_VERSIONS = createApi({
-    apiKey: 18,
-    apiVersion: 2,
-    request: (encoder) => encoder,
-    response: (decoder) => {
-        const result = {
-            errorCode: decoder.readInt16(),
-            versions: decoder.readArray((version) => ({
-                apiKey: version.readInt16(),
-                minVersion: version.readInt16(),
-                maxVersion: version.readInt16(),
-            })),
-            throttleTimeMs: decoder.readInt32(),
-        };
-        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-        return result;
-    },
-});
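Every deleted API module here and below follows the same `createApi` pattern: an `apiKey`/`apiVersion` pair plus a request encoder and a response decoder. The helper itself lived in `package/src/utils/api.ts` (also deleted, 11 lines, body not shown in this diff), so the following is only a plausible sketch of its shape, inferred from the call sites:

```ts
// Hypothetical reconstruction -- utils/api.ts is deleted in this release
// but its body is not shown; this sketch is inferred from call sites only.
import { Encoder } from './encoder';
import { Decoder } from './decoder';

export type Api<Request, Response> = {
    apiKey: number;
    apiVersion: number;
    request: (encoder: Encoder, data: Request) => Encoder;
    // FETCH's response is async, so Response may be a Promise.
    response: (decoder: Decoder) => Response | Promise<Response>;
};

// Presumably an identity helper that pins the generic parameters, so types
// like FetchResponse can be inferred from the `response` return type.
export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
```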
package/src/api/create-topics.ts
DELETED
@@ -1,78 +0,0 @@
-import { createApi } from '../utils/api';
-import { KafkaTSApiError } from '../utils/error';
-
-export const CREATE_TOPICS = createApi({
-    apiKey: 19,
-    apiVersion: 7,
-    request: (
-        encoder,
-        data: {
-            topics: {
-                name: string;
-                numPartitions: number;
-                replicationFactor: number;
-                assignments: {
-                    partitionIndex: number;
-                    brokerIds: number[];
-                }[];
-                configs: {
-                    name: string;
-                    value: string | null;
-                }[];
-            }[];
-            timeoutMs: number;
-            validateOnly: boolean;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeCompactArray(data.topics, (encoder, topic) =>
-                encoder
-                    .writeCompactString(topic.name)
-                    .writeInt32(topic.numPartitions)
-                    .writeInt16(topic.replicationFactor)
-                    .writeCompactArray(topic.assignments, (encoder, assignment) =>
-                        encoder
-                            .writeInt32(assignment.partitionIndex)
-                            .writeCompactArray(assignment.brokerIds, (encoder, brokerId) =>
-                                encoder.writeInt32(brokerId),
-                            )
-                            .writeUVarInt(0),
-                    )
-                    .writeCompactArray(topic.configs, (encoder, config) =>
-                        encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0),
-                    )
-                    .writeUVarInt(0),
-            )
-            .writeInt32(data.timeoutMs)
-            .writeBoolean(data.validateOnly)
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            topics: decoder.readCompactArray((topic) => ({
-                name: topic.readCompactString(),
-                topicId: topic.readUUID(),
-                errorCode: topic.readInt16(),
-                errorMessage: topic.readCompactString(),
-                numPartitions: topic.readInt32(),
-                replicationFactor: topic.readInt16(),
-                configs: topic.readCompactArray((config) => ({
-                    name: config.readCompactString(),
-                    value: config.readCompactString(),
-                    readOnly: config.readBoolean(),
-                    configSource: config.readInt8(),
-                    isSensitive: config.readBoolean(),
-                    _tag: config.readTagBuffer(),
-                })),
-                _tag: topic.readTagBuffer(),
-            })),
-            _tag2: decoder.readTagBuffer(),
-        };
-        result.topics.forEach((topic) => {
-            if (topic.errorCode) throw new KafkaTSApiError(topic.errorCode, topic.errorMessage, result);
-        });
-        return result;
-    },
-});
package/src/api/delete-topics.ts
DELETED
@@ -1,42 +0,0 @@
-import { createApi } from '../utils/api';
-import { KafkaTSApiError } from '../utils/error';
-
-export const DELETE_TOPICS = createApi({
-    apiKey: 20,
-    apiVersion: 6,
-    request: (
-        encoder,
-        data: {
-            topics: {
-                name: string | null;
-                topicId: string | null;
-            }[];
-            timeoutMs: number;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeCompactArray(data.topics, (encoder, topic) =>
-                encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0),
-            )
-            .writeInt32(data.timeoutMs)
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            responses: decoder.readCompactArray((decoder) => ({
-                name: decoder.readCompactString(),
-                topicId: decoder.readUUID(),
-                errorCode: decoder.readInt16(),
-                errorMessage: decoder.readCompactString(),
-                _tag: decoder.readTagBuffer(),
-            })),
-            _tag2: decoder.readTagBuffer(),
-        };
-        result.responses.forEach((response) => {
-            if (response.errorCode) throw new KafkaTSApiError(response.errorCode, response.errorMessage, result);
-        });
-        return result;
-    },
-});
package/src/api/fetch.ts
DELETED
@@ -1,198 +0,0 @@
-import { findCodec } from '../codecs';
-import { createApi } from '../utils/api';
-import { Decoder } from '../utils/decoder';
-import { KafkaTSApiError } from '../utils/error';
-
-export const enum IsolationLevel {
-    READ_UNCOMMITTED = 0,
-    READ_COMMITTED = 1,
-}
-
-export type FetchResponse = Awaited<ReturnType<(typeof FETCH)['response']>>;
-
-export const FETCH = createApi({
-    apiKey: 1,
-    apiVersion: 15,
-    request: (
-        encoder,
-        data: {
-            maxWaitMs: number;
-            minBytes: number;
-            maxBytes: number;
-            isolationLevel: IsolationLevel;
-            sessionId: number;
-            sessionEpoch: number;
-            topics: {
-                topicId: string;
-                partitions: {
-                    partition: number;
-                    currentLeaderEpoch: number;
-                    fetchOffset: bigint;
-                    lastFetchedEpoch: number;
-                    logStartOffset: bigint;
-                    partitionMaxBytes: number;
-                }[];
-            }[];
-            forgottenTopicsData: {
-                topicId: string;
-                partitions: number[];
-            }[];
-            rackId: string;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeInt32(data.maxWaitMs)
-            .writeInt32(data.minBytes)
-            .writeInt32(data.maxBytes)
-            .writeInt8(data.isolationLevel)
-            .writeInt32(data.sessionId)
-            .writeInt32(data.sessionEpoch)
-            .writeCompactArray(data.topics, (encoder, topic) =>
-                encoder
-                    .writeUUID(topic.topicId)
-                    .writeCompactArray(topic.partitions, (encoder, partition) =>
-                        encoder
-                            .writeInt32(partition.partition)
-                            .writeInt32(partition.currentLeaderEpoch)
-                            .writeInt64(partition.fetchOffset)
-                            .writeInt32(partition.lastFetchedEpoch)
-                            .writeInt64(partition.logStartOffset)
-                            .writeInt32(partition.partitionMaxBytes)
-                            .writeUVarInt(0),
-                    )
-                    .writeUVarInt(0),
-            )
-            .writeCompactArray(data.forgottenTopicsData, (encoder, forgottenTopic) =>
-                encoder
-                    .writeUUID(forgottenTopic.topicId)
-                    .writeCompactArray(forgottenTopic.partitions, (encoder, partition) => encoder.writeInt32(partition))
-                    .writeUVarInt(0),
-            )
-            .writeCompactString(data.rackId)
-            .writeUVarInt(0),
-    response: async (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            errorCode: decoder.readInt16(),
-            sessionId: decoder.readInt32(),
-            responses: decoder.readCompactArray((response) => ({
-                topicId: response.readUUID(),
-                partitions: response.readCompactArray((partition) => ({
-                    partitionIndex: partition.readInt32(),
-                    errorCode: partition.readInt16(),
-                    highWatermark: partition.readInt64(),
-                    lastStableOffset: partition.readInt64(),
-                    logStartOffset: partition.readInt64(),
-                    abortedTransactions: partition.readCompactArray((abortedTransaction) => ({
-                        producerId: abortedTransaction.readInt64(),
-                        firstOffset: abortedTransaction.readInt64(),
-                        _tag: abortedTransaction.readTagBuffer(),
-                    })),
-                    preferredReadReplica: partition.readInt32(),
-                    records: decodeRecordBatch(partition),
-                    _tag: partition.readTagBuffer(),
-                })),
-                _tag: response.readTagBuffer(),
-            })),
-            _tag2: decoder.readTagBuffer(),
-        };
-
-        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-        result.responses.forEach((response) => {
-            response.partitions.forEach((partition) => {
-                if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
-            });
-        });
-
-        const decompressedResponses = await Promise.all(
-            result.responses.map(async (response) => ({
-                ...response,
-                partitions: await Promise.all(
-                    response.partitions.map(async (partition) => ({
-                        ...partition,
-                        records: await Promise.all(
-                            partition.records.map(async ({ recordsLength, compressedRecords, ...record }) => {
-                                const { decompress } = findCodec(record.compression);
-                                const decompressedRecords = await decompress(compressedRecords);
-                                const decompressedDecoder = new Decoder(
-                                    Buffer.concat([recordsLength, decompressedRecords]),
-                                );
-                                return { ...record, records: decodeRecord(decompressedDecoder) };
-                            }),
-                        ),
-                    })),
-                ),
-            })),
-        );
-
-        return { ...result, responses: decompressedResponses };
-    },
-});
-
-const decodeRecordBatch = (decoder: Decoder) => {
-    const size = decoder.readUVarInt() - 1;
-    if (size <= 0) {
-        return [];
-    }
-
-    const recordBatchDecoder = new Decoder(decoder.read(size));
-
-    const results = [];
-    while (recordBatchDecoder.getBufferLength() > recordBatchDecoder.getOffset() + 12) {
-        const baseOffset = recordBatchDecoder.readInt64();
-        const batchLength = recordBatchDecoder.readInt32();
-        if (!batchLength) {
-            continue;
-        }
-
-        const batchDecoder = new Decoder(recordBatchDecoder.read(batchLength));
-
-        const result = {
-            baseOffset,
-            batchLength,
-            partitionLeaderEpoch: batchDecoder.readInt32(),
-            magic: batchDecoder.readInt8(),
-            crc: batchDecoder.readUInt32(),
-            attributes: batchDecoder.readInt16(),
-            lastOffsetDelta: batchDecoder.readInt32(),
-            baseTimestamp: batchDecoder.readInt64(),
-            maxTimestamp: batchDecoder.readInt64(),
-            producerId: batchDecoder.readInt64(),
-            producerEpoch: batchDecoder.readInt16(),
-            baseSequence: batchDecoder.readInt32(),
-            recordsLength: batchDecoder.read(4),
-            compressedRecords: batchDecoder.read(),
-        };
-
-        const compression = result.attributes & 0x07;
-        const timestampType = (result.attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime';
-        const isTransactional = !!((result.attributes & 0x10) >> 4);
-        const isControlBatch = !!((result.attributes & 0x20) >> 5);
-        const hasDeleteHorizonMs = !!((result.attributes & 0x40) >> 6);
-
-        results.push({
-            ...result,
-            compression,
-            timestampType,
-            isTransactional,
-            isControlBatch,
-            hasDeleteHorizonMs,
-        });
-    }
-    return results;
-};
-
-const decodeRecord = (decoder: Decoder) =>
-    decoder.readRecords((record) => ({
-        attributes: record.readInt8(),
-        timestampDelta: record.readVarLong(),
-        offsetDelta: record.readVarInt(),
-        key: record.readVarIntBuffer(),
-        value: record.readVarIntBuffer(),
-        headers: record.readVarIntArray((header) => ({
-            key: header.readVarIntBuffer(),
-            value: header.readVarIntBuffer(),
-        })),
-    }));
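For reference, the `attributes` int16 decoded in `decodeRecordBatch` above is a bitfield; a short worked example of the same masking arithmetic used by the deleted code:

```ts
// Worked example of the record-batch attributes bitfield, using the
// same masks as decodeRecordBatch above.
const attributes = 0x19; // 0b0001_1001
const compression = attributes & 0x07;                                           // 1 (codec id, resolved via findCodec)
const timestampType = (attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime'; // 'LogAppendTime'
const isTransactional = !!((attributes & 0x10) >> 4);                            // true
const isControlBatch = !!((attributes & 0x20) >> 5);                             // false
const hasDeleteHorizonMs = !!((attributes & 0x40) >> 6);                         // false
```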
package/src/api/find-coordinator.ts
DELETED
@@ -1,39 +0,0 @@
-import { createApi } from '../utils/api';
-import { KafkaTSApiError } from '../utils/error';
-
-export const KEY_TYPE = {
-    GROUP: 0,
-    TRANSACTION: 1,
-};
-
-export const FIND_COORDINATOR = createApi({
-    apiKey: 10,
-    apiVersion: 4,
-    request: (encoder, data: { keyType: number; keys: string[] }) =>
-        encoder
-            .writeUVarInt(0)
-            .writeInt8(data.keyType)
-            .writeCompactArray(data.keys, (encoder, key) => encoder.writeCompactString(key))
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            coordinators: decoder.readCompactArray((decoder) => ({
-                key: decoder.readCompactString(),
-                nodeId: decoder.readInt32(),
-                host: decoder.readCompactString()!,
-                port: decoder.readInt32(),
-                errorCode: decoder.readInt16(),
-                errorMessage: decoder.readCompactString(),
-                _tag: decoder.readTagBuffer(),
-            })),
-            _tag2: decoder.readTagBuffer(),
-        };
-        result.coordinators.forEach((coordinator) => {
-            if (coordinator.errorCode)
-                throw new KafkaTSApiError(coordinator.errorCode, coordinator.errorMessage, result);
-        });
-        return result;
-    },
-});
package/src/api/heartbeat.ts
DELETED
@@ -1,33 +0,0 @@
-import { createApi } from '../utils/api';
-import { KafkaTSApiError } from '../utils/error';
-
-export const HEARTBEAT = createApi({
-    apiKey: 12,
-    apiVersion: 4,
-    request: (
-        encoder,
-        data: {
-            groupId: string;
-            generationId: number;
-            memberId: string;
-            groupInstanceId: string | null;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeCompactString(data.groupId)
-            .writeInt32(data.generationId)
-            .writeCompactString(data.memberId)
-            .writeCompactString(data.groupInstanceId)
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            errorCode: decoder.readInt16(),
-            _tag2: decoder.readTagBuffer(),
-        };
-        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-        return result;
-    },
-});
package/src/api/index.ts
DELETED
@@ -1,166 +0,0 @@
-import { Api } from '../utils/api';
-import { API_VERSIONS } from './api-versions';
-import { CREATE_TOPICS } from './create-topics';
-import { DELETE_TOPICS } from './delete-topics';
-import { FETCH } from './fetch';
-import { FIND_COORDINATOR } from './find-coordinator';
-import { HEARTBEAT } from './heartbeat';
-import { INIT_PRODUCER_ID } from './init-producer-id';
-import { JOIN_GROUP } from './join-group';
-import { LEAVE_GROUP } from './leave-group';
-import { LIST_OFFSETS } from './list-offsets';
-import { METADATA } from './metadata';
-import { OFFSET_COMMIT } from './offset-commit';
-import { OFFSET_FETCH } from './offset-fetch';
-import { PRODUCE } from './produce';
-import { SASL_AUTHENTICATE } from './sasl-authenticate';
-import { SASL_HANDSHAKE } from './sasl-handshake';
-import { SYNC_GROUP } from './sync-group';
-
-export const API = {
-    API_VERSIONS,
-    CREATE_TOPICS,
-    DELETE_TOPICS,
-    FETCH,
-    FIND_COORDINATOR,
-    HEARTBEAT,
-    INIT_PRODUCER_ID,
-    JOIN_GROUP,
-    LEAVE_GROUP,
-    LIST_OFFSETS,
-    METADATA,
-    OFFSET_COMMIT,
-    OFFSET_FETCH,
-    PRODUCE,
-    SASL_AUTHENTICATE,
-    SASL_HANDSHAKE,
-    SYNC_GROUP,
-};
-
-const apiNameByKey = Object.fromEntries(Object.entries(API).map(([k, v]) => [v.apiKey, k]));
-
-export const getApiName = <Request, Response>(api: Api<Request, Response>) => apiNameByKey[api.apiKey];
-
-export const API_ERROR = {
-    UNKNOWN_SERVER_ERROR: -1,
-    OFFSET_OUT_OF_RANGE: 1,
-    CORRUPT_MESSAGE: 2,
-    UNKNOWN_TOPIC_OR_PARTITION: 3,
-    INVALID_FETCH_SIZE: 4,
-    LEADER_NOT_AVAILABLE: 5,
-    NOT_LEADER_OR_FOLLOWER: 6,
-    REQUEST_TIMED_OUT: 7,
-    BROKER_NOT_AVAILABLE: 8,
-    REPLICA_NOT_AVAILABLE: 9,
-    MESSAGE_TOO_LARGE: 10,
-    STALE_CONTROLLER_EPOCH: 11,
-    OFFSET_METADATA_TOO_LARGE: 12,
-    NETWORK_EXCEPTION: 13,
-    COORDINATOR_LOAD_IN_PROGRESS: 14,
-    COORDINATOR_NOT_AVAILABLE: 15,
-    NOT_COORDINATOR: 16,
-    INVALID_TOPIC_EXCEPTION: 17,
-    RECORD_LIST_TOO_LARGE: 18,
-    NOT_ENOUGH_REPLICAS: 19,
-    NOT_ENOUGH_REPLICAS_AFTER_APPEND: 20,
-    INVALID_REQUIRED_ACKS: 21,
-    ILLEGAL_GENERATION: 22,
-    INCONSISTENT_GROUP_PROTOCOL: 23,
-    INVALID_GROUP_ID: 24,
-    UNKNOWN_MEMBER_ID: 25,
-    INVALID_SESSION_TIMEOUT: 26,
-    REBALANCE_IN_PROGRESS: 27,
-    INVALID_COMMIT_OFFSET_SIZE: 28,
-    TOPIC_AUTHORIZATION_FAILED: 29,
-    GROUP_AUTHORIZATION_FAILED: 30,
-    CLUSTER_AUTHORIZATION_FAILED: 31,
-    INVALID_TIMESTAMP: 32,
-    UNSUPPORTED_SASL_MECHANISM: 33,
-    ILLEGAL_SASL_STATE: 34,
-    UNSUPPORTED_VERSION: 35,
-    TOPIC_ALREADY_EXISTS: 36,
-    INVALID_PARTITIONS: 37,
-    INVALID_REPLICATION_FACTOR: 38,
-    INVALID_REPLICA_ASSIGNMENT: 39,
-    INVALID_CONFIG: 40,
-    NOT_CONTROLLER: 41,
-    INVALID_REQUEST: 42,
-    UNSUPPORTED_FOR_MESSAGE_FORMAT: 43,
-    POLICY_VIOLATION: 44,
-    OUT_OF_ORDER_SEQUENCE_NUMBER: 45,
-    DUPLICATE_SEQUENCE_NUMBER: 46,
-    INVALID_PRODUCER_EPOCH: 47,
-    INVALID_TXN_STATE: 48,
-    INVALID_PRODUCER_ID_MAPPING: 49,
-    INVALID_TRANSACTION_TIMEOUT: 50,
-    CONCURRENT_TRANSACTIONS: 51,
-    TRANSACTION_COORDINATOR_FENCED: 52,
-    TRANSACTIONAL_ID_AUTHORIZATION_FAILED: 53,
-    SECURITY_DISABLED: 54,
-    OPERATION_NOT_ATTEMPTED: 55,
-    KAFKA_STORAGE_ERROR: 56,
-    LOG_DIR_NOT_FOUND: 57,
-    SASL_AUTHENTICATION_FAILED: 58,
-    UNKNOWN_PRODUCER_ID: 59,
-    REASSIGNMENT_IN_PROGRESS: 60,
-    DELEGATION_TOKEN_AUTH_DISABLED: 61,
-    DELEGATION_TOKEN_NOT_FOUND: 62,
-    DELEGATION_TOKEN_OWNER_MISMATCH: 63,
-    DELEGATION_TOKEN_REQUEST_NOT_ALLOWED: 64,
-    DELEGATION_TOKEN_AUTHORIZATION_FAILED: 65,
-    DELEGATION_TOKEN_EXPIRED: 66,
-    INVALID_PRINCIPAL_TYPE: 67,
-    NON_EMPTY_GROUP: 68,
-    GROUP_ID_NOT_FOUND: 69,
-    FETCH_SESSION_ID_NOT_FOUND: 70,
-    INVALID_FETCH_SESSION_EPOCH: 71,
-    LISTENER_NOT_FOUND: 72,
-    TOPIC_DELETION_DISABLED: 73,
-    FENCED_LEADER_EPOCH: 74,
-    UNKNOWN_LEADER_EPOCH: 75,
-    UNSUPPORTED_COMPRESSION_TYPE: 76,
-    STALE_BROKER_EPOCH: 77,
-    OFFSET_NOT_AVAILABLE: 78,
-    MEMBER_ID_REQUIRED: 79,
-    PREFERRED_LEADER_NOT_AVAILABLE: 80,
-    GROUP_MAX_SIZE_REACHED: 81,
-    FENCED_INSTANCE_ID: 82,
-    ELIGIBLE_LEADERS_NOT_AVAILABLE: 83,
-    ELECTION_NOT_NEEDED: 84,
-    NO_REASSIGNMENT_IN_PROGRESS: 85,
-    GROUP_SUBSCRIBED_TO_TOPIC: 86,
-    INVALID_RECORD: 87,
-    UNSTABLE_OFFSET_COMMIT: 88,
-    THROTTLING_QUOTA_EXCEEDED: 89,
-    PRODUCER_FENCED: 90,
-    RESOURCE_NOT_FOUND: 91,
-    DUPLICATE_RESOURCE: 92,
-    UNACCEPTABLE_CREDENTIAL: 93,
-    INCONSISTENT_VOTER_SET: 94,
-    INVALID_UPDATE_VERSION: 95,
-    FEATURE_UPDATE_FAILED: 96,
-    PRINCIPAL_DESERIALIZATION_FAILURE: 97,
-    SNAPSHOT_NOT_FOUND: 98,
-    POSITION_OUT_OF_RANGE: 99,
-    UNKNOWN_TOPIC_ID: 100,
-    DUPLICATE_BROKER_REGISTRATION: 101,
-    BROKER_ID_NOT_REGISTERED: 102,
-    INCONSISTENT_TOPIC_ID: 103,
-    INCONSISTENT_CLUSTER_ID: 104,
-    TRANSACTIONAL_ID_NOT_FOUND: 105,
-    FETCH_SESSION_TOPIC_ID_ERROR: 106,
-    INELIGIBLE_REPLICA: 107,
-    NEW_LEADER_ELECTED: 108,
-    OFFSET_MOVED_TO_TIERED_STORAGE: 109,
-    FENCED_MEMBER_EPOCH: 110,
-    UNRELEASED_INSTANCE_ID: 111,
-    UNSUPPORTED_ASSIGNOR: 112,
-    STALE_MEMBER_EPOCH: 113,
-    MISMATCHED_ENDPOINT_TYPE: 114,
-    UNSUPPORTED_ENDPOINT_TYPE: 115,
-    UNKNOWN_CONTROLLER_ID: 116,
-    UNKNOWN_SUBSCRIPTION_ID: 117,
-    TELEMETRY_TOO_LARGE: 118,
-    INVALID_REGISTRATION: 119,
-    TRANSACTION_ABORTABLE: 120,
-};
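A hedged sketch of how these codes might be consumed downstream. `KafkaTSApiError` is assumed here to expose the `errorCode` it is constructed with, matching the `(errorCode, errorMessage, response)` constructor calls seen throughout the deleted modules; the class body in the deleted `utils/error.ts` is not shown in this diff:

```ts
// Sketch only: assumes KafkaTSApiError carries an `errorCode` property.
// The chosen set of retriable codes is illustrative, not the library's.
import { API_ERROR } from './api';
import { KafkaTSApiError } from './utils/error';

const RETRIABLE = new Set<number>([
    API_ERROR.NOT_LEADER_OR_FOLLOWER,
    API_ERROR.REQUEST_TIMED_OUT,
    API_ERROR.COORDINATOR_LOAD_IN_PROGRESS,
    API_ERROR.REBALANCE_IN_PROGRESS,
]);

export const isRetriableError = (error: unknown): boolean =>
    error instanceof KafkaTSApiError && RETRIABLE.has(error.errorCode);
```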
package/src/api/init-producer-id.ts
DELETED
@@ -1,35 +0,0 @@
-import { createApi } from '../utils/api';
-import { KafkaTSApiError } from '../utils/error';
-
-export const INIT_PRODUCER_ID = createApi({
-    apiKey: 22,
-    apiVersion: 4,
-    request: (
-        encoder,
-        body: {
-            transactionalId: string | null;
-            transactionTimeoutMs: number;
-            producerId: bigint;
-            producerEpoch: number;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeCompactString(body.transactionalId)
-            .writeInt32(body.transactionTimeoutMs)
-            .writeInt64(body.producerId)
-            .writeInt16(body.producerEpoch)
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            errorCode: decoder.readInt16(),
-            producerId: decoder.readInt64(),
-            producerEpoch: decoder.readInt16(),
-            _tag2: decoder.readTagBuffer(),
-        };
-        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-        return result;
-    },
-});
package/src/api/join-group.ts
DELETED
@@ -1,67 +0,0 @@
-import { createApi } from '../utils/api';
-import { Encoder } from '../utils/encoder';
-import { KafkaTSApiError } from '../utils/error';
-
-export const JOIN_GROUP = createApi({
-    apiKey: 11,
-    apiVersion: 9,
-    request: (
-        encoder,
-        data: {
-            groupId: string;
-            sessionTimeoutMs: number;
-            rebalanceTimeoutMs: number;
-            memberId: string;
-            groupInstanceId: string | null;
-            protocolType: string;
-            protocols: {
-                name: string;
-                metadata: {
-                    version: number;
-                    topics: string[];
-                };
-            }[];
-            reason: string | null;
-        },
-    ) =>
-        encoder
-            .writeUVarInt(0)
-            .writeCompactString(data.groupId)
-            .writeInt32(data.sessionTimeoutMs)
-            .writeInt32(data.rebalanceTimeoutMs)
-            .writeCompactString(data.memberId)
-            .writeCompactString(data.groupInstanceId)
-            .writeCompactString(data.protocolType)
-            .writeCompactArray(data.protocols, (encoder, protocol) => {
-                const metadata = new Encoder()
-                    .writeInt16(protocol.metadata.version)
-                    .writeArray(protocol.metadata.topics, (encoder, topic) => encoder.writeString(topic))
-                    .writeBytes(Buffer.alloc(0))
-                    .value();
-                return encoder.writeCompactString(protocol.name).writeCompactBytes(metadata).writeUVarInt(0);
-            })
-            .writeCompactString(data.reason)
-            .writeUVarInt(0),
-    response: (decoder) => {
-        const result = {
-            _tag: decoder.readTagBuffer(),
-            throttleTimeMs: decoder.readInt32(),
-            errorCode: decoder.readInt16(),
-            generationId: decoder.readInt32(),
-            protocolType: decoder.readCompactString(),
-            protocolName: decoder.readCompactString(),
-            leader: decoder.readCompactString()!,
-            skipAssignment: decoder.readBoolean(),
-            memberId: decoder.readCompactString()!,
-            members: decoder.readCompactArray((decoder) => ({
-                memberId: decoder.readCompactString()!,
-                groupInstanceId: decoder.readCompactString(),
-                metadata: decoder.readCompactBytes()!,
-                _tag: decoder.readTagBuffer(),
-            })),
-            _tag2: decoder.readTagBuffer(),
-        };
-        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-        return result;
-    },
-});