kafka-ts 0.0.1-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc +7 -0
- package/LICENSE +24 -0
- package/README.md +88 -0
- package/certs/ca.crt +29 -0
- package/certs/ca.key +52 -0
- package/certs/ca.srl +1 -0
- package/certs/kafka.crt +29 -0
- package/certs/kafka.csr +26 -0
- package/certs/kafka.key +52 -0
- package/certs/kafka.keystore.jks +0 -0
- package/certs/kafka.truststore.jks +0 -0
- package/dist/api/api-versions.d.ts +9 -0
- package/dist/api/api-versions.js +24 -0
- package/dist/api/create-topics.d.ts +38 -0
- package/dist/api/create-topics.js +53 -0
- package/dist/api/delete-topics.d.ts +18 -0
- package/dist/api/delete-topics.js +33 -0
- package/dist/api/fetch.d.ts +77 -0
- package/dist/api/fetch.js +106 -0
- package/dist/api/find-coordinator.d.ts +21 -0
- package/dist/api/find-coordinator.js +39 -0
- package/dist/api/heartbeat.d.ts +11 -0
- package/dist/api/heartbeat.js +27 -0
- package/dist/api/index.d.ts +573 -0
- package/dist/api/index.js +164 -0
- package/dist/api/init-producer-id.d.ts +13 -0
- package/dist/api/init-producer-id.js +29 -0
- package/dist/api/join-group.d.ts +34 -0
- package/dist/api/join-group.js +51 -0
- package/dist/api/leave-group.d.ts +19 -0
- package/dist/api/leave-group.js +39 -0
- package/dist/api/list-offsets.d.ts +29 -0
- package/dist/api/list-offsets.js +48 -0
- package/dist/api/metadata.d.ts +40 -0
- package/dist/api/metadata.js +58 -0
- package/dist/api/offset-commit.d.ts +28 -0
- package/dist/api/offset-commit.js +48 -0
- package/dist/api/offset-fetch.d.ts +33 -0
- package/dist/api/offset-fetch.js +57 -0
- package/dist/api/produce.d.ts +53 -0
- package/dist/api/produce.js +129 -0
- package/dist/api/sasl-authenticate.d.ts +11 -0
- package/dist/api/sasl-authenticate.js +23 -0
- package/dist/api/sasl-handshake.d.ts +6 -0
- package/dist/api/sasl-handshake.js +19 -0
- package/dist/api/sync-group.d.ts +24 -0
- package/dist/api/sync-group.js +36 -0
- package/dist/broker.d.ts +29 -0
- package/dist/broker.js +60 -0
- package/dist/client.d.ts +23 -0
- package/dist/client.js +36 -0
- package/dist/cluster.d.ts +24 -0
- package/dist/cluster.js +72 -0
- package/dist/connection.d.ts +25 -0
- package/dist/connection.js +155 -0
- package/dist/consumer/consumer-group.d.ts +36 -0
- package/dist/consumer/consumer-group.js +182 -0
- package/dist/consumer/consumer-metadata.d.ts +7 -0
- package/dist/consumer/consumer-metadata.js +14 -0
- package/dist/consumer/consumer.d.ts +37 -0
- package/dist/consumer/consumer.js +178 -0
- package/dist/consumer/metadata.d.ts +24 -0
- package/dist/consumer/metadata.js +64 -0
- package/dist/consumer/offset-manager.d.ts +22 -0
- package/dist/consumer/offset-manager.js +56 -0
- package/dist/distributors/assignments-to-replicas.d.ts +17 -0
- package/dist/distributors/assignments-to-replicas.js +60 -0
- package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
- package/dist/distributors/assignments-to-replicas.test.js +40 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/examples/src/replicator.js +34 -0
- package/dist/examples/src/utils/json.js +5 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +19 -0
- package/dist/metadata.d.ts +24 -0
- package/dist/metadata.js +89 -0
- package/dist/producer/producer.d.ts +19 -0
- package/dist/producer/producer.js +111 -0
- package/dist/request-handler.d.ts +16 -0
- package/dist/request-handler.js +67 -0
- package/dist/request-handler.test.d.ts +1 -0
- package/dist/request-handler.test.js +340 -0
- package/dist/src/api/api-versions.js +18 -0
- package/dist/src/api/create-topics.js +46 -0
- package/dist/src/api/delete-topics.js +26 -0
- package/dist/src/api/fetch.js +95 -0
- package/dist/src/api/find-coordinator.js +34 -0
- package/dist/src/api/heartbeat.js +22 -0
- package/dist/src/api/index.js +38 -0
- package/dist/src/api/init-producer-id.js +24 -0
- package/dist/src/api/join-group.js +48 -0
- package/dist/src/api/leave-group.js +30 -0
- package/dist/src/api/list-offsets.js +39 -0
- package/dist/src/api/metadata.js +47 -0
- package/dist/src/api/offset-commit.js +39 -0
- package/dist/src/api/offset-fetch.js +44 -0
- package/dist/src/api/produce.js +119 -0
- package/dist/src/api/sync-group.js +31 -0
- package/dist/src/broker.js +35 -0
- package/dist/src/connection.js +21 -0
- package/dist/src/consumer/consumer-group.js +131 -0
- package/dist/src/consumer/consumer.js +103 -0
- package/dist/src/consumer/metadata.js +52 -0
- package/dist/src/consumer/offset-manager.js +23 -0
- package/dist/src/index.js +19 -0
- package/dist/src/producer/producer.js +84 -0
- package/dist/src/request-handler.js +57 -0
- package/dist/src/request-handler.test.js +321 -0
- package/dist/src/types.js +2 -0
- package/dist/src/utils/api.js +5 -0
- package/dist/src/utils/decoder.js +161 -0
- package/dist/src/utils/encoder.js +137 -0
- package/dist/src/utils/error.js +10 -0
- package/dist/types.d.ts +9 -0
- package/dist/types.js +2 -0
- package/dist/utils/api.d.ts +9 -0
- package/dist/utils/api.js +5 -0
- package/dist/utils/debug.d.ts +2 -0
- package/dist/utils/debug.js +11 -0
- package/dist/utils/decoder.d.ts +29 -0
- package/dist/utils/decoder.js +147 -0
- package/dist/utils/delay.d.ts +1 -0
- package/dist/utils/delay.js +5 -0
- package/dist/utils/encoder.d.ts +28 -0
- package/dist/utils/encoder.js +122 -0
- package/dist/utils/error.d.ts +11 -0
- package/dist/utils/error.js +27 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/retrier.d.ts +10 -0
- package/dist/utils/retrier.js +22 -0
- package/dist/utils/tracer.d.ts +1 -0
- package/dist/utils/tracer.js +26 -0
- package/docker-compose.yml +104 -0
- package/examples/node_modules/.package-lock.json +22 -0
- package/examples/package-lock.json +30 -0
- package/examples/package.json +14 -0
- package/examples/src/client.ts +9 -0
- package/examples/src/consumer.ts +17 -0
- package/examples/src/create-topic.ts +37 -0
- package/examples/src/producer.ts +24 -0
- package/examples/src/replicator.ts +25 -0
- package/examples/src/utils/json.ts +1 -0
- package/examples/tsconfig.json +7 -0
- package/log4j.properties +95 -0
- package/package.json +17 -0
- package/scripts/generate-certs.sh +24 -0
- package/src/__snapshots__/request-handler.test.ts.snap +1687 -0
- package/src/api/api-versions.ts +21 -0
- package/src/api/create-topics.ts +78 -0
- package/src/api/delete-topics.ts +42 -0
- package/src/api/fetch.ts +143 -0
- package/src/api/find-coordinator.ts +39 -0
- package/src/api/heartbeat.ts +33 -0
- package/src/api/index.ts +164 -0
- package/src/api/init-producer-id.ts +35 -0
- package/src/api/join-group.ts +67 -0
- package/src/api/leave-group.ts +48 -0
- package/src/api/list-offsets.ts +65 -0
- package/src/api/metadata.ts +66 -0
- package/src/api/offset-commit.ts +67 -0
- package/src/api/offset-fetch.ts +74 -0
- package/src/api/produce.ts +173 -0
- package/src/api/sasl-authenticate.ts +21 -0
- package/src/api/sasl-handshake.ts +16 -0
- package/src/api/sync-group.ts +54 -0
- package/src/broker.ts +74 -0
- package/src/client.ts +47 -0
- package/src/cluster.ts +87 -0
- package/src/connection.ts +141 -0
- package/src/consumer/consumer-group.ts +209 -0
- package/src/consumer/consumer-metadata.ts +14 -0
- package/src/consumer/consumer.ts +229 -0
- package/src/consumer/offset-manager.ts +93 -0
- package/src/distributors/assignments-to-replicas.test.ts +43 -0
- package/src/distributors/assignments-to-replicas.ts +85 -0
- package/src/distributors/messages-to-topic-partition-leaders.test.ts +32 -0
- package/src/distributors/messages-to-topic-partition-leaders.ts +19 -0
- package/src/index.ts +3 -0
- package/src/metadata.ts +122 -0
- package/src/producer/producer.ts +132 -0
- package/src/request-handler.test.ts +366 -0
- package/src/types.ts +9 -0
- package/src/utils/api.ts +11 -0
- package/src/utils/debug.ts +9 -0
- package/src/utils/decoder.ts +168 -0
- package/src/utils/delay.ts +1 -0
- package/src/utils/encoder.ts +141 -0
- package/src/utils/error.ts +21 -0
- package/src/utils/memo.ts +12 -0
- package/src/utils/retrier.ts +39 -0
- package/src/utils/tracer.ts +28 -0
- package/tsconfig.json +17 -0

package/src/api/api-versions.ts
ADDED
@@ -0,0 +1,21 @@
+import { createApi } from "../utils/api.js";
+import { KafkaTSApiError } from "../utils/error.js";
+
+export const API_VERSIONS = createApi({
+    apiKey: 18,
+    apiVersion: 2,
+    request: (encoder) => encoder,
+    response: (decoder) => {
+        const result = {
+            errorCode: decoder.readInt16(),
+            versions: decoder.readArray((version) => ({
+                apiKey: version.readInt16(),
+                minVersion: version.readInt16(),
+                maxVersion: version.readInt16(),
+            })),
+            throttleTimeMs: decoder.readInt32(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});
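
For orientation, here is a minimal sketch of the shape createApi appears to expect, inferred only from its call sites in these hunks (the package's actual utils/api.ts is not shown in this section and may differ): it bundles an apiKey and apiVersion with a typed request-encoding and response-decoding callback that the connection layer can use to serialize a request and parse its reply.

// Hedged sketch only - inferred from usage, not the package's real utils/api.ts.
import { Encoder } from "../utils/encoder";
import { Decoder } from "../utils/decoder";

export type Api<Request, Response> = {
    apiKey: number;
    apiVersion: number;
    request: (encoder: Encoder, data: Request) => Encoder;
    response: (decoder: Decoder) => Response;
};

// Identity helper that preserves the generic Request/Response types for callers.
export const createApi = <Request, Response>(api: Api<Request, Response>): Api<Request, Response> => api;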

package/src/api/create-topics.ts
ADDED
@@ -0,0 +1,78 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const CREATE_TOPICS = createApi({
+    apiKey: 19,
+    apiVersion: 7,
+    request: (
+        encoder,
+        data: {
+            topics: {
+                name: string;
+                numPartitions: number;
+                replicationFactor: number;
+                assignments: {
+                    partitionIndex: number;
+                    brokerIds: number[];
+                }[];
+                configs: {
+                    name: string;
+                    value: string | null;
+                }[];
+            }[];
+            timeoutMs: number;
+            validateOnly: boolean;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactArray(data.topics, (encoder, topic) =>
+                encoder
+                    .writeCompactString(topic.name)
+                    .writeInt32(topic.numPartitions)
+                    .writeInt16(topic.replicationFactor)
+                    .writeCompactArray(topic.assignments, (encoder, assignment) =>
+                        encoder
+                            .writeInt32(assignment.partitionIndex)
+                            .writeCompactArray(assignment.brokerIds, (encoder, brokerId) =>
+                                encoder.writeInt32(brokerId),
+                            )
+                            .writeUVarInt(0),
+                    )
+                    .writeCompactArray(topic.configs, (encoder, config) =>
+                        encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0),
+                    )
+                    .writeUVarInt(0),
+            )
+            .writeInt32(data.timeoutMs)
+            .writeBoolean(data.validateOnly)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            topics: decoder.readCompactArray((topic) => ({
+                name: topic.readCompactString(),
+                topicId: topic.readUUID(),
+                errorCode: topic.readInt16(),
+                errorMessage: topic.readCompactString(),
+                numPartitions: topic.readInt32(),
+                replicationFactor: topic.readInt16(),
+                configs: topic.readCompactArray((config) => ({
+                    name: config.readCompactString(),
+                    value: config.readCompactString(),
+                    readOnly: config.readBoolean(),
+                    configSource: config.readInt8(),
+                    isSensitive: config.readBoolean(),
+                    _tag: config.readTagBuffer(),
+                })),
+                _tag: topic.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.topics.forEach((topic) => {
+            if (topic.errorCode) throw new KafkaTSApiError(topic.errorCode, topic.errorMessage, result);
+        });
+        return result;
+    },
+});
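
The writeCompactString / writeCompactArray / writeUVarInt(0) calls above follow the Kafka "flexible versions" wire format (KIP-482): strings and arrays are length-prefixed with an unsigned varint of length + 1 (0 meaning null), and each writeUVarInt(0) emits an empty tagged-field section. A standalone illustration of that encoding using Node's Buffer, not the package's own Encoder:

// Illustrative only - not the package's Encoder implementation.
const writeUVarInt = (value: number): Buffer => {
    const bytes: number[] = [];
    do {
        let byte = value & 0x7f;
        value >>>= 7;
        if (value !== 0) byte |= 0x80; // set continuation bit
        bytes.push(byte);
    } while (value !== 0);
    return Buffer.from(bytes);
};

const writeCompactString = (value: string | null): Buffer => {
    if (value === null) return writeUVarInt(0); // null is encoded as length 0
    const utf8 = Buffer.from(value, "utf-8");
    return Buffer.concat([writeUVarInt(utf8.length + 1), utf8]); // length + 1 prefix
};

// writeCompactString("events") -> 0x07 followed by 6 UTF-8 bytes; writeCompactString(null) -> 0x00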

package/src/api/delete-topics.ts
ADDED
@@ -0,0 +1,42 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const DELETE_TOPICS = createApi({
+    apiKey: 20,
+    apiVersion: 6,
+    request: (
+        encoder,
+        data: {
+            topics: {
+                name: string | null;
+                topicId: string | null;
+            }[];
+            timeoutMs: number;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactArray(data.topics, (encoder, topic) =>
+                encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0),
+            )
+            .writeInt32(data.timeoutMs)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            responses: decoder.readCompactArray((decoder) => ({
+                name: decoder.readCompactString(),
+                topicId: decoder.readUUID(),
+                errorCode: decoder.readInt16(),
+                errorMessage: decoder.readCompactString(),
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.responses.forEach((response) => {
+            if (response.errorCode) throw new KafkaTSApiError(response.errorCode, response.errorMessage, result);
+        });
+        return result;
+    },
+});

package/src/api/fetch.ts
ADDED
@@ -0,0 +1,143 @@
+import { createApi } from "../utils/api";
+import { Decoder } from "../utils/decoder";
+import { KafkaTSApiError } from "../utils/error";
+
+export const enum IsolationLevel {
+    READ_UNCOMMITTED = 0,
+    READ_COMMITTED = 1,
+}
+
+export const FETCH = createApi({
+    apiKey: 1,
+    apiVersion: 16,
+    request: (
+        encoder,
+        data: {
+            maxWaitMs: number;
+            minBytes: number;
+            maxBytes: number;
+            isolationLevel: IsolationLevel;
+            sessionId: number;
+            sessionEpoch: number;
+            topics: {
+                topicId: string;
+                partitions: {
+                    partition: number;
+                    currentLeaderEpoch: number;
+                    fetchOffset: bigint;
+                    lastFetchedEpoch: number;
+                    logStartOffset: bigint;
+                    partitionMaxBytes: number;
+                }[];
+            }[];
+            forgottenTopicsData: {
+                topicId: string;
+                partitions: number[];
+            }[];
+            rackId: string;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeInt32(data.maxWaitMs)
+            .writeInt32(data.minBytes)
+            .writeInt32(data.maxBytes)
+            .writeInt8(data.isolationLevel)
+            .writeInt32(data.sessionId)
+            .writeInt32(data.sessionEpoch)
+            .writeCompactArray(data.topics, (encoder, topic) =>
+                encoder
+                    .writeUUID(topic.topicId)
+                    .writeCompactArray(topic.partitions, (encoder, partition) =>
+                        encoder
+                            .writeInt32(partition.partition)
+                            .writeInt32(partition.currentLeaderEpoch)
+                            .writeInt64(partition.fetchOffset)
+                            .writeInt32(partition.lastFetchedEpoch)
+                            .writeInt64(partition.logStartOffset)
+                            .writeInt32(partition.partitionMaxBytes)
+                            .writeUVarInt(0),
+                    )
+                    .writeUVarInt(0),
+            )
+            .writeCompactArray(data.forgottenTopicsData, (encoder, forgottenTopic) =>
+                encoder
+                    .writeUUID(forgottenTopic.topicId)
+                    .writeCompactArray(forgottenTopic.partitions, (encoder, partition) => encoder.writeInt32(partition))
+                    .writeUVarInt(0),
+            )
+            .writeCompactString(data.rackId)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            sessionId: decoder.readInt32(),
+            responses: decoder.readCompactArray((response) => ({
+                topicId: response.readUUID(),
+                partitions: response.readCompactArray((partition) => ({
+                    partitionIndex: partition.readInt32(),
+                    errorCode: partition.readInt16(),
+                    highWatermark: partition.readInt64(),
+                    lastStableOffset: partition.readInt64(),
+                    logStartOffset: partition.readInt64(),
+                    abortedTransactions: partition.readCompactArray((abortedTransaction) => ({
+                        producerId: abortedTransaction.readInt64(),
+                        firstOffset: abortedTransaction.readInt64(),
+                        _tag: abortedTransaction.readTagBuffer(),
+                    })),
+                    preferredReadReplica: partition.readInt32(),
+                    records: decodeRecords(partition),
+                    _tag: partition.readTagBuffer(),
+                })),
+                _tag: response.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        result.responses.forEach((response) => {
+            response.partitions.forEach((partition) => {
+                if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
+            });
+        });
+        return result;
+    },
+});
+
+const decodeRecords = (decoder: Decoder) => {
+    const size = decoder.readUVarInt() - 1;
+    if (size <= 0) {
+        return [];
+    }
+
+    const results = [];
+    while (decoder.getBufferLength() > decoder.getOffset() + 49) {
+        results.push({
+            baseOffset: decoder.readInt64(),
+            batchLength: decoder.readInt32(),
+            partitionLeaderEpoch: decoder.readInt32(),
+            magic: decoder.readInt8(),
+            crc: decoder.readUInt32(),
+            attributes: decoder.readInt16(),
+            lastOffsetDelta: decoder.readInt32(),
+            baseTimestamp: decoder.readInt64(),
+            maxTimestamp: decoder.readInt64(),
+            producerId: decoder.readInt64(),
+            producerEpoch: decoder.readInt16(),
+            baseSequence: decoder.readInt32(),
+            records: decoder.readRecords((record) => ({
+                attributes: record.readInt8(),
+                timestampDelta: record.readVarLong(),
+                offsetDelta: record.readVarInt(),
+                key: record.readVarIntString(),
+                value: record.readVarIntString(),
+                headers: record.readCompactArray((header) => ({
+                    key: header.readVarIntString(),
+                    value: header.readVarIntString(),
+                })),
+            })),
+        });
+    }
+    return results;
+};
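
Inside decodeRecords, the per-record fields (timestampDelta, offsetDelta, and the length prefixes behind readVarIntString) are signed zigzag varints, as defined by Kafka's record format v2. A hedged, self-contained sketch of that decoding for values in 32-bit range, not the package's Decoder:

// Illustrative zigzag varint decode (32-bit range), not the package's Decoder.readVarInt.
const readVarInt = (buffer: Buffer, offset: number): { value: number; next: number } => {
    let raw = 0;
    let shift = 0;
    let byte: number;
    do {
        byte = buffer[offset++];
        raw |= (byte & 0x7f) << shift; // accumulate 7 bits at a time
        shift += 7;
    } while (byte & 0x80); // continuation bit set -> keep reading
    // zigzag decode: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, ...
    return { value: (raw >>> 1) ^ -(raw & 1), next: offset };
};

// A key/value length prefix of -1 (encoded as the single byte 0x01) marks a null key or value.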

package/src/api/find-coordinator.ts
ADDED
@@ -0,0 +1,39 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const KEY_TYPE = {
+    GROUP: 0,
+    TRANSACTION: 1,
+};
+
+export const FIND_COORDINATOR = createApi({
+    apiKey: 10,
+    apiVersion: 4,
+    request: (encoder, data: { keyType: number; keys: string[] }) =>
+        encoder
+            .writeUVarInt(0)
+            .writeInt8(data.keyType)
+            .writeCompactArray(data.keys, (encoder, key) => encoder.writeCompactString(key))
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            coordinators: decoder.readCompactArray((decoder) => ({
+                key: decoder.readCompactString(),
+                nodeId: decoder.readInt32(),
+                host: decoder.readCompactString()!,
+                port: decoder.readInt32(),
+                errorCode: decoder.readInt16(),
+                errorMessage: decoder.readCompactString(),
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        result.coordinators.forEach((coordinator) => {
+            if (coordinator.errorCode)
+                throw new KafkaTSApiError(coordinator.errorCode, coordinator.errorMessage, result);
+        });
+        return result;
+    },
+});

package/src/api/heartbeat.ts
ADDED
@@ -0,0 +1,33 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const HEARTBEAT = createApi({
+    apiKey: 12,
+    apiVersion: 4,
+    request: (
+        encoder,
+        data: {
+            groupId: string;
+            generationId: number;
+            memberId: string;
+            groupInstanceId: string | null;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactString(data.groupId)
+            .writeInt32(data.generationId)
+            .writeCompactString(data.memberId)
+            .writeCompactString(data.groupInstanceId)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});

package/src/api/index.ts
ADDED
@@ -0,0 +1,164 @@
+import { Api } from "../utils/api";
+import { API_VERSIONS } from "./api-versions";
+import { CREATE_TOPICS } from "./create-topics";
+import { DELETE_TOPICS } from "./delete-topics";
+import { FETCH } from "./fetch";
+import { FIND_COORDINATOR } from "./find-coordinator";
+import { HEARTBEAT } from "./heartbeat";
+import { INIT_PRODUCER_ID } from "./init-producer-id";
+import { JOIN_GROUP } from "./join-group";
+import { LEAVE_GROUP } from "./leave-group";
+import { LIST_OFFSETS } from "./list-offsets";
+import { METADATA } from "./metadata";
+import { OFFSET_COMMIT } from "./offset-commit";
+import { OFFSET_FETCH } from "./offset-fetch";
+import { PRODUCE } from "./produce";
+import { SASL_AUTHENTICATE } from "./sasl-authenticate";
+import { SASL_HANDSHAKE } from "./sasl-handshake";
+import { SYNC_GROUP } from "./sync-group";
+
+export const API = {
+    API_VERSIONS,
+    CREATE_TOPICS,
+    DELETE_TOPICS,
+    FETCH,
+    FIND_COORDINATOR,
+    HEARTBEAT,
+    INIT_PRODUCER_ID,
+    JOIN_GROUP,
+    LEAVE_GROUP,
+    LIST_OFFSETS,
+    METADATA,
+    OFFSET_COMMIT,
+    OFFSET_FETCH,
+    PRODUCE,
+    SASL_AUTHENTICATE,
+    SASL_HANDSHAKE,
+    SYNC_GROUP,
+};
+
+export const getApiName = (api: Api<unknown, unknown>) => Object.entries(API).find(([, v]) => v === api)?.[0];
+
+export const API_ERROR = {
+    UNKNOWN_SERVER_ERROR: -1,
+    OFFSET_OUT_OF_RANGE: 1,
+    CORRUPT_MESSAGE: 2,
+    UNKNOWN_TOPIC_OR_PARTITION: 3,
+    INVALID_FETCH_SIZE: 4,
+    LEADER_NOT_AVAILABLE: 5,
+    NOT_LEADER_OR_FOLLOWER: 6,
+    REQUEST_TIMED_OUT: 7,
+    BROKER_NOT_AVAILABLE: 8,
+    REPLICA_NOT_AVAILABLE: 9,
+    MESSAGE_TOO_LARGE: 10,
+    STALE_CONTROLLER_EPOCH: 11,
+    OFFSET_METADATA_TOO_LARGE: 12,
+    NETWORK_EXCEPTION: 13,
+    COORDINATOR_LOAD_IN_PROGRESS: 14,
+    COORDINATOR_NOT_AVAILABLE: 15,
+    NOT_COORDINATOR: 16,
+    INVALID_TOPIC_EXCEPTION: 17,
+    RECORD_LIST_TOO_LARGE: 18,
+    NOT_ENOUGH_REPLICAS: 19,
+    NOT_ENOUGH_REPLICAS_AFTER_APPEND: 20,
+    INVALID_REQUIRED_ACKS: 21,
+    ILLEGAL_GENERATION: 22,
+    INCONSISTENT_GROUP_PROTOCOL: 23,
+    INVALID_GROUP_ID: 24,
+    UNKNOWN_MEMBER_ID: 25,
+    INVALID_SESSION_TIMEOUT: 26,
+    REBALANCE_IN_PROGRESS: 27,
+    INVALID_COMMIT_OFFSET_SIZE: 28,
+    TOPIC_AUTHORIZATION_FAILED: 29,
+    GROUP_AUTHORIZATION_FAILED: 30,
+    CLUSTER_AUTHORIZATION_FAILED: 31,
+    INVALID_TIMESTAMP: 32,
+    UNSUPPORTED_SASL_MECHANISM: 33,
+    ILLEGAL_SASL_STATE: 34,
+    UNSUPPORTED_VERSION: 35,
+    TOPIC_ALREADY_EXISTS: 36,
+    INVALID_PARTITIONS: 37,
+    INVALID_REPLICATION_FACTOR: 38,
+    INVALID_REPLICA_ASSIGNMENT: 39,
+    INVALID_CONFIG: 40,
+    NOT_CONTROLLER: 41,
+    INVALID_REQUEST: 42,
+    UNSUPPORTED_FOR_MESSAGE_FORMAT: 43,
+    POLICY_VIOLATION: 44,
+    OUT_OF_ORDER_SEQUENCE_NUMBER: 45,
+    DUPLICATE_SEQUENCE_NUMBER: 46,
+    INVALID_PRODUCER_EPOCH: 47,
+    INVALID_TXN_STATE: 48,
+    INVALID_PRODUCER_ID_MAPPING: 49,
+    INVALID_TRANSACTION_TIMEOUT: 50,
+    CONCURRENT_TRANSACTIONS: 51,
+    TRANSACTION_COORDINATOR_FENCED: 52,
+    TRANSACTIONAL_ID_AUTHORIZATION_FAILED: 53,
+    SECURITY_DISABLED: 54,
+    OPERATION_NOT_ATTEMPTED: 55,
+    KAFKA_STORAGE_ERROR: 56,
+    LOG_DIR_NOT_FOUND: 57,
+    SASL_AUTHENTICATION_FAILED: 58,
+    UNKNOWN_PRODUCER_ID: 59,
+    REASSIGNMENT_IN_PROGRESS: 60,
+    DELEGATION_TOKEN_AUTH_DISABLED: 61,
+    DELEGATION_TOKEN_NOT_FOUND: 62,
+    DELEGATION_TOKEN_OWNER_MISMATCH: 63,
+    DELEGATION_TOKEN_REQUEST_NOT_ALLOWED: 64,
+    DELEGATION_TOKEN_AUTHORIZATION_FAILED: 65,
+    DELEGATION_TOKEN_EXPIRED: 66,
+    INVALID_PRINCIPAL_TYPE: 67,
+    NON_EMPTY_GROUP: 68,
+    GROUP_ID_NOT_FOUND: 69,
+    FETCH_SESSION_ID_NOT_FOUND: 70,
+    INVALID_FETCH_SESSION_EPOCH: 71,
+    LISTENER_NOT_FOUND: 72,
+    TOPIC_DELETION_DISABLED: 73,
+    FENCED_LEADER_EPOCH: 74,
+    UNKNOWN_LEADER_EPOCH: 75,
+    UNSUPPORTED_COMPRESSION_TYPE: 76,
+    STALE_BROKER_EPOCH: 77,
+    OFFSET_NOT_AVAILABLE: 78,
+    MEMBER_ID_REQUIRED: 79,
+    PREFERRED_LEADER_NOT_AVAILABLE: 80,
+    GROUP_MAX_SIZE_REACHED: 81,
+    FENCED_INSTANCE_ID: 82,
+    ELIGIBLE_LEADERS_NOT_AVAILABLE: 83,
+    ELECTION_NOT_NEEDED: 84,
+    NO_REASSIGNMENT_IN_PROGRESS: 85,
+    GROUP_SUBSCRIBED_TO_TOPIC: 86,
+    INVALID_RECORD: 87,
+    UNSTABLE_OFFSET_COMMIT: 88,
+    THROTTLING_QUOTA_EXCEEDED: 89,
+    PRODUCER_FENCED: 90,
+    RESOURCE_NOT_FOUND: 91,
+    DUPLICATE_RESOURCE: 92,
+    UNACCEPTABLE_CREDENTIAL: 93,
+    INCONSISTENT_VOTER_SET: 94,
+    INVALID_UPDATE_VERSION: 95,
+    FEATURE_UPDATE_FAILED: 96,
+    PRINCIPAL_DESERIALIZATION_FAILURE: 97,
+    SNAPSHOT_NOT_FOUND: 98,
+    POSITION_OUT_OF_RANGE: 99,
+    UNKNOWN_TOPIC_ID: 100,
+    DUPLICATE_BROKER_REGISTRATION: 101,
+    BROKER_ID_NOT_REGISTERED: 102,
+    INCONSISTENT_TOPIC_ID: 103,
+    INCONSISTENT_CLUSTER_ID: 104,
+    TRANSACTIONAL_ID_NOT_FOUND: 105,
+    FETCH_SESSION_TOPIC_ID_ERROR: 106,
+    INELIGIBLE_REPLICA: 107,
+    NEW_LEADER_ELECTED: 108,
+    OFFSET_MOVED_TO_TIERED_STORAGE: 109,
+    FENCED_MEMBER_EPOCH: 110,
+    UNRELEASED_INSTANCE_ID: 111,
+    UNSUPPORTED_ASSIGNOR: 112,
+    STALE_MEMBER_EPOCH: 113,
+    MISMATCHED_ENDPOINT_TYPE: 114,
+    UNSUPPORTED_ENDPOINT_TYPE: 115,
+    UNKNOWN_CONTROLLER_ID: 116,
+    UNKNOWN_SUBSCRIPTION_ID: 117,
+    TELEMETRY_TOO_LARGE: 118,
+    INVALID_REGISTRATION: 119,
+    TRANSACTION_ABORTABLE: 120,
+};
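
A small convenience that is not part of the package but mirrors getApiName above: resolving the numeric errorCode carried by a KafkaTSApiError back to its symbolic name via the API_ERROR table.

// Hypothetical helper, shown for illustration only.
const getApiErrorName = (code: number): string | undefined =>
    Object.entries(API_ERROR).find(([, value]) => value === code)?.[0];

// getApiErrorName(3) === "UNKNOWN_TOPIC_OR_PARTITION"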

package/src/api/init-producer-id.ts
ADDED
@@ -0,0 +1,35 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const INIT_PRODUCER_ID = createApi({
+    apiKey: 22,
+    apiVersion: 4,
+    request: (
+        encoder,
+        body: {
+            transactionalId: string | null;
+            transactionTimeoutMs: number;
+            producerId: bigint;
+            producerEpoch: number;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactString(body.transactionalId)
+            .writeInt32(body.transactionTimeoutMs)
+            .writeInt64(body.producerId)
+            .writeInt16(body.producerEpoch)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            producerId: decoder.readInt64(),
+            producerEpoch: decoder.readInt16(),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});
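
As a usage note (an assumption about how the surrounding client code calls this API, not something shown in this diff): a fresh idempotent producer conventionally requests a new id by sending a null transactionalId together with producerId -1 and producerEpoch -1, and the broker replies with the assigned producerId/producerEpoch decoded above.

// Hypothetical request body for a brand-new idempotent producer (values are
// protocol conventions; the surrounding call is an assumption).
const initProducerIdBody = {
    transactionalId: null,
    transactionTimeoutMs: 60_000,
    producerId: -1n, // "no existing producer id"
    producerEpoch: -1,
};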

package/src/api/join-group.ts
ADDED
@@ -0,0 +1,67 @@
+import { createApi } from "../utils/api";
+import { Encoder } from "../utils/encoder";
+import { KafkaTSApiError } from "../utils/error";
+
+export const JOIN_GROUP = createApi({
+    apiKey: 11,
+    apiVersion: 9,
+    request: (
+        encoder,
+        data: {
+            groupId: string;
+            sessionTimeoutMs: number;
+            rebalanceTimeoutMs: number;
+            memberId: string;
+            groupInstanceId: string | null;
+            protocolType: string;
+            protocols: {
+                name: string;
+                metadata: {
+                    version: number;
+                    topics: string[];
+                };
+            }[];
+            reason: string | null;
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactString(data.groupId)
+            .writeInt32(data.sessionTimeoutMs)
+            .writeInt32(data.rebalanceTimeoutMs)
+            .writeCompactString(data.memberId)
+            .writeCompactString(data.groupInstanceId)
+            .writeCompactString(data.protocolType)
+            .writeCompactArray(data.protocols, (encoder, protocol) => {
+                const metadata = new Encoder()
+                    .writeInt16(protocol.metadata.version)
+                    .writeArray(protocol.metadata.topics, (encoder, topic) => encoder.writeString(topic))
+                    .writeBytes(Buffer.alloc(0))
+                    .value();
+                return encoder.writeCompactString(protocol.name).writeCompactBytes(metadata).writeUVarInt(0);
+            })
+            .writeCompactString(data.reason)
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            generationId: decoder.readInt32(),
+            protocolType: decoder.readCompactString(),
+            protocolName: decoder.readCompactString(),
+            leader: decoder.readCompactString()!,
+            skipAssignment: decoder.readBoolean(),
+            memberId: decoder.readCompactString()!,
+            members: decoder.readCompactArray((decoder) => ({
+                memberId: decoder.readCompactString()!,
+                groupInstanceId: decoder.readCompactString(),
+                metadata: decoder.readCompactBytes()!,
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});
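
The member metadata built with new Encoder() above appears to follow the classic consumer-protocol subscription layout: version (int16), topics (an int32-count array of int16-length strings), then a userData byte block. A hedged sketch of how a group leader could read those bytes back with plain Buffer calls (not the package's Decoder), ignoring the trailing userData:

// Illustrative decode of the subscription metadata bytes, assuming the layout above.
const decodeSubscription = (metadata: Buffer): { version: number; topics: string[] } => {
    let offset = 0;
    const version = metadata.readInt16BE(offset);
    offset += 2;
    const topicCount = metadata.readInt32BE(offset);
    offset += 4;
    const topics: string[] = [];
    for (let i = 0; i < topicCount; i++) {
        const length = metadata.readInt16BE(offset);
        offset += 2;
        topics.push(metadata.toString("utf-8", offset, offset + length));
        offset += length;
    }
    return { version, topics };
};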

package/src/api/leave-group.ts
ADDED
@@ -0,0 +1,48 @@
+import { createApi } from "../utils/api";
+import { KafkaTSApiError } from "../utils/error";
+
+export const LEAVE_GROUP = createApi({
+    apiKey: 13,
+    apiVersion: 5,
+    request: (
+        encoder,
+        body: {
+            groupId: string;
+            members: {
+                memberId: string;
+                groupInstanceId: string | null;
+                reason: string | null;
+            }[];
+        },
+    ) =>
+        encoder
+            .writeUVarInt(0)
+            .writeCompactString(body.groupId)
+            .writeCompactArray(body.members, (encoder, member) =>
+                encoder
+                    .writeCompactString(member.memberId)
+                    .writeCompactString(member.groupInstanceId)
+                    .writeCompactString(member.reason)
+                    .writeUVarInt(0),
+            )
+            .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            members: decoder.readCompactArray((decoder) => ({
+                memberId: decoder.readCompactString()!,
+                groupInstanceId: decoder.readCompactString(),
+                errorCode: decoder.readInt16(),
+                _tag: decoder.readTagBuffer(),
+            })),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
+        result.members.forEach((member) => {
+            if (member.errorCode) throw new KafkaTSApiError(member.errorCode, null, result);
+        });
+        return result;
+    },
+});