kafka-ts 0.0.3-beta → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +68 -8
- package/dist/api/api-versions.d.ts +9 -0
- package/dist/api/api-versions.js +24 -0
- package/dist/api/create-topics.d.ts +38 -0
- package/dist/api/create-topics.js +53 -0
- package/dist/api/delete-topics.d.ts +18 -0
- package/dist/api/delete-topics.js +33 -0
- package/dist/api/fetch.d.ts +84 -0
- package/dist/api/fetch.js +142 -0
- package/dist/api/find-coordinator.d.ts +21 -0
- package/dist/api/find-coordinator.js +39 -0
- package/dist/api/heartbeat.d.ts +11 -0
- package/dist/api/heartbeat.js +27 -0
- package/dist/api/index.d.ts +576 -0
- package/dist/api/index.js +165 -0
- package/dist/api/init-producer-id.d.ts +13 -0
- package/dist/api/init-producer-id.js +29 -0
- package/dist/api/join-group.d.ts +34 -0
- package/dist/api/join-group.js +51 -0
- package/dist/api/leave-group.d.ts +19 -0
- package/dist/api/leave-group.js +39 -0
- package/dist/api/list-offsets.d.ts +29 -0
- package/dist/api/list-offsets.js +48 -0
- package/dist/api/metadata.d.ts +40 -0
- package/dist/api/metadata.js +58 -0
- package/dist/api/offset-commit.d.ts +28 -0
- package/dist/api/offset-commit.js +48 -0
- package/dist/api/offset-fetch.d.ts +31 -0
- package/dist/api/offset-fetch.js +55 -0
- package/dist/api/produce.d.ts +54 -0
- package/dist/api/produce.js +126 -0
- package/dist/api/sasl-authenticate.d.ts +11 -0
- package/dist/api/sasl-authenticate.js +23 -0
- package/dist/api/sasl-handshake.d.ts +6 -0
- package/dist/api/sasl-handshake.js +19 -0
- package/dist/api/sync-group.d.ts +24 -0
- package/dist/api/sync-group.js +36 -0
- package/dist/auth/index.d.ts +2 -0
- package/dist/auth/index.js +8 -0
- package/dist/auth/plain.d.ts +5 -0
- package/dist/auth/plain.js +12 -0
- package/dist/auth/scram.d.ts +9 -0
- package/dist/auth/scram.js +40 -0
- package/dist/broker.d.ts +30 -0
- package/dist/broker.js +55 -0
- package/dist/client.d.ts +23 -0
- package/dist/client.js +36 -0
- package/dist/cluster.d.ts +27 -0
- package/dist/cluster.js +70 -0
- package/dist/cluster.test.d.ts +1 -0
- package/dist/cluster.test.js +343 -0
- package/dist/codecs/gzip.d.ts +2 -0
- package/dist/codecs/gzip.js +8 -0
- package/dist/codecs/index.d.ts +2 -0
- package/dist/codecs/index.js +17 -0
- package/dist/codecs/none.d.ts +2 -0
- package/dist/codecs/none.js +7 -0
- package/dist/codecs/types.d.ts +5 -0
- package/dist/codecs/types.js +2 -0
- package/dist/connection.d.ts +26 -0
- package/dist/connection.js +175 -0
- package/dist/consumer/consumer-group.d.ts +41 -0
- package/dist/consumer/consumer-group.js +215 -0
- package/dist/consumer/consumer-metadata.d.ts +7 -0
- package/dist/consumer/consumer-metadata.js +14 -0
- package/dist/consumer/consumer.d.ts +44 -0
- package/dist/consumer/consumer.js +225 -0
- package/dist/consumer/fetch-manager.d.ts +33 -0
- package/dist/consumer/fetch-manager.js +140 -0
- package/dist/consumer/fetcher.d.ts +25 -0
- package/dist/consumer/fetcher.js +64 -0
- package/dist/consumer/offset-manager.d.ts +22 -0
- package/dist/consumer/offset-manager.js +66 -0
- package/dist/consumer/processor.d.ts +19 -0
- package/dist/consumer/processor.js +59 -0
- package/dist/distributors/assignments-to-replicas.d.ts +16 -0
- package/dist/distributors/assignments-to-replicas.js +59 -0
- package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
- package/dist/distributors/assignments-to-replicas.test.js +40 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/distributors/partitioner.d.ts +7 -0
- package/dist/distributors/partitioner.js +23 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +26 -0
- package/dist/metadata.d.ts +24 -0
- package/dist/metadata.js +106 -0
- package/dist/producer/producer.d.ts +24 -0
- package/dist/producer/producer.js +131 -0
- package/dist/types.d.ts +11 -0
- package/dist/types.js +2 -0
- package/dist/utils/api.d.ts +9 -0
- package/dist/utils/api.js +5 -0
- package/dist/utils/crypto.d.ts +8 -0
- package/dist/utils/crypto.js +18 -0
- package/dist/utils/decoder.d.ts +30 -0
- package/dist/utils/decoder.js +152 -0
- package/dist/utils/delay.d.ts +1 -0
- package/dist/utils/delay.js +5 -0
- package/dist/utils/encoder.d.ts +28 -0
- package/dist/utils/encoder.js +125 -0
- package/dist/utils/error.d.ts +11 -0
- package/dist/utils/error.js +27 -0
- package/dist/utils/logger.d.ts +9 -0
- package/dist/utils/logger.js +32 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/murmur2.d.ts +3 -0
- package/dist/utils/murmur2.js +40 -0
- package/dist/utils/retrier.d.ts +10 -0
- package/dist/utils/retrier.js +22 -0
- package/dist/utils/tracer.d.ts +5 -0
- package/dist/utils/tracer.js +39 -0
- package/package.json +11 -2
- package/src/__snapshots__/{request-handler.test.ts.snap → cluster.test.ts.snap} +329 -26
- package/src/api/fetch.ts +84 -29
- package/src/api/index.ts +3 -1
- package/src/api/metadata.ts +1 -1
- package/src/api/offset-commit.ts +1 -1
- package/src/api/offset-fetch.ts +1 -5
- package/src/api/produce.ts +15 -18
- package/src/auth/index.ts +2 -0
- package/src/auth/plain.ts +10 -0
- package/src/auth/scram.ts +52 -0
- package/src/broker.ts +7 -9
- package/src/client.ts +2 -2
- package/src/cluster.test.ts +16 -14
- package/src/cluster.ts +38 -40
- package/src/codecs/gzip.ts +9 -0
- package/src/codecs/index.ts +16 -0
- package/src/codecs/none.ts +6 -0
- package/src/codecs/types.ts +4 -0
- package/src/connection.ts +31 -17
- package/src/consumer/consumer-group.ts +43 -23
- package/src/consumer/consumer.ts +64 -43
- package/src/consumer/fetch-manager.ts +43 -53
- package/src/consumer/fetcher.ts +20 -13
- package/src/consumer/offset-manager.ts +18 -7
- package/src/consumer/processor.ts +14 -8
- package/src/distributors/assignments-to-replicas.ts +1 -3
- package/src/distributors/partitioner.ts +27 -0
- package/src/index.ts +7 -2
- package/src/metadata.ts +4 -0
- package/src/producer/producer.ts +22 -12
- package/src/types.ts +3 -3
- package/src/utils/api.ts +1 -1
- package/src/utils/crypto.ts +15 -0
- package/src/utils/decoder.ts +11 -5
- package/src/utils/encoder.ts +29 -22
- package/src/utils/logger.ts +37 -0
- package/src/utils/murmur2.ts +44 -0
- package/src/utils/tracer.ts +40 -22
- package/.github/workflows/release.yml +0 -17
- package/certs/ca.crt +0 -29
- package/certs/ca.key +0 -52
- package/certs/ca.srl +0 -1
- package/certs/kafka.crt +0 -29
- package/certs/kafka.csr +0 -26
- package/certs/kafka.key +0 -52
- package/certs/kafka.keystore.jks +0 -0
- package/certs/kafka.truststore.jks +0 -0
- package/docker-compose.yml +0 -104
- package/examples/package-lock.json +0 -31
- package/examples/package.json +0 -14
- package/examples/src/client.ts +0 -9
- package/examples/src/consumer.ts +0 -18
- package/examples/src/create-topic.ts +0 -44
- package/examples/src/producer.ts +0 -24
- package/examples/src/replicator.ts +0 -25
- package/examples/src/utils/delay.ts +0 -1
- package/examples/src/utils/json.ts +0 -1
- package/examples/tsconfig.json +0 -7
- package/log4j.properties +0 -95
- package/scripts/generate-certs.sh +0 -24
- package/src/utils/debug.ts +0 -9
|
"use strict";
// OffsetFetch (apiKey 9) v8: fetches the last committed offsets for the given
// consumer groups/topics/partitions. v8 is a flexible protocol version, so
// strings/arrays use the compact encoding and every entity carries a
// tagged-field section (the writeUVarInt(0) / readTagBuffer calls below).
Object.defineProperty(exports, "__esModule", { value: true });
exports.OFFSET_FETCH = void 0;
const api_1 = require("../utils/api");
const error_1 = require("../utils/error");
exports.OFFSET_FETCH = (0, api_1.createApi)({
    apiKey: 9,
    apiVersion: 8,
    // Request: groups -> topics -> partitionIndexes, plus the requireStable flag.
    // Each writeUVarInt(0) emits an empty tagged-field section for its entity.
    request: (encoder, data) => encoder
        .writeUVarInt(0)
        .writeCompactArray(data.groups, (encoder, group) => encoder
        .writeCompactString(group.groupId)
        .writeCompactArray(group.topics, (encoder, topic) => encoder
        .writeCompactString(topic.name)
        .writeCompactArray(topic.partitionIndexes, (encoder, partitionIndex) => encoder.writeInt32(partitionIndex))
        .writeUVarInt(0))
        .writeUVarInt(0))
        .writeBoolean(data.requireStable)
        .writeUVarInt(0),
    // Response decoder. NOTE: object-literal property values are evaluated
    // top-to-bottom and every read consumes bytes from the decoder, so the
    // field order here must match the wire format exactly — do not reorder.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(), // response-header tagged fields
            throttleTimeMs: decoder.readInt32(),
            groups: decoder.readCompactArray((decoder) => ({
                groupId: decoder.readCompactString(),
                topics: decoder.readCompactArray((decoder) => ({
                    name: decoder.readCompactString(),
                    partitions: decoder.readCompactArray((decoder) => ({
                        partitionIndex: decoder.readInt32(),
                        committedOffset: decoder.readInt64(),
                        committedLeaderEpoch: decoder.readInt32(),
                        committedMetadata: decoder.readCompactString(),
                        errorCode: decoder.readInt16(),
                        _tag: decoder.readTagBuffer(),
                    })),
                    _tag: decoder.readTagBuffer(),
                })),
                errorCode: decoder.readInt16(),
                _tag: decoder.readTagBuffer(),
            })),
            _tag2: decoder.readTagBuffer(),
        };
        // Surface the first non-zero error code (group-level first, then
        // partition-level) as a KafkaTSApiError carrying the full response.
        result.groups.forEach((group) => {
            if (group.errorCode)
                throw new error_1.KafkaTSApiError(group.errorCode, null, result);
            group.topics.forEach((topic) => {
                topic.partitions.forEach((partition) => {
                    if (partition.errorCode)
                        throw new error_1.KafkaTSApiError(partition.errorCode, null, result);
                });
            });
        });
        return result;
    },
});
/// <reference types="node" />
/**
 * Type declaration for the Produce request (apiKey 0) as built in produce.js.
 *
 * Request: one record batch per partition; the batch fields (baseOffset,
 * partitionLeaderEpoch, attributes, producerId/producerEpoch, baseSequence,
 * records with varint delta fields) mirror the Kafka record-batch
 * (message format v2) layout.
 * Response: per-topic/per-partition append results plus throttling info.
 * `_tag` fields are the tagged-field sections consumed by readTagBuffer()
 * in the decoder — they carry no data here, hence `void`.
 */
export declare const PRODUCE: import("../utils/api.js").Api<{
    transactionalId: string | null;
    acks: number;
    timeoutMs: number;
    topicData: {
        name: string;
        partitionData: {
            index: number;
            baseOffset: bigint;
            partitionLeaderEpoch: number;
            attributes: number;
            lastOffsetDelta: number;
            baseTimestamp: bigint;
            maxTimestamp: bigint;
            producerId: bigint;
            producerEpoch: number;
            baseSequence: number;
            records: {
                attributes: number;
                timestampDelta: bigint;
                offsetDelta: number;
                key: Buffer | null;
                value: Buffer | null;
                headers: {
                    key: Buffer;
                    value: Buffer;
                }[];
            }[];
        }[];
    }[];
}, {
    _tag: void;
    responses: {
        name: string | null;
        partitionResponses: {
            index: number;
            errorCode: number;
            baseOffset: bigint;
            logAppendTime: bigint;
            logStartOffset: bigint;
            recordErrors: {
                batchIndex: number;
                batchIndexError: number;
                _tag: void;
            }[];
            errorMessage: string | null;
            _tag: void;
        }[];
        _tag: void;
    }[];
    throttleTimeMs: number;
    _tag2: void;
}>;
"use strict";
// Produce (apiKey 0) v9: appends record batches to partition leaders.
// The record-batch layout written below follows Kafka message format v2
// (magic byte 2): baseOffset + batchLength + partitionLeaderEpoch + magic +
// crc + attributes .. records — see batchBody/batchHeader/batch.
Object.defineProperty(exports, "__esModule", { value: true });
exports.PRODUCE = void 0;
const api_js_1 = require("../utils/api.js");
const encoder_js_1 = require("../utils/encoder.js");
const error_js_1 = require("../utils/error.js");
exports.PRODUCE = (0, api_js_1.createApi)({
    apiKey: 0,
    apiVersion: 9,
    request: (encoder, data) => encoder
        .writeUVarInt(0)
        .writeCompactString(data.transactionalId)
        .writeInt16(data.acks)
        .writeInt32(data.timeoutMs)
        .writeCompactArray(data.topicData, (encoder, topic) => encoder
        .writeCompactString(topic.name)
        .writeCompactArray(topic.partitionData, (encoder, partition) => {
        // Batch body: exactly the byte range the CRC covers (attributes .. records).
        const batchBody = new encoder_js_1.Encoder()
            .writeInt16(partition.attributes)
            .writeInt32(partition.lastOffsetDelta)
            .writeInt64(partition.baseTimestamp)
            .writeInt64(partition.maxTimestamp)
            .writeInt64(partition.producerId)
            .writeInt16(partition.producerEpoch)
            .writeInt32(partition.baseSequence)
            .writeArray(partition.records, (encoder, record) => {
            // Each record is varint-length-prefixed; delta fields use varint/varlong.
            const recordBody = new encoder_js_1.Encoder()
                .writeInt8(record.attributes)
                .writeVarLong(record.timestampDelta)
                .writeVarInt(record.offsetDelta)
                .writeVarIntBuffer(record.key)
                .writeVarIntBuffer(record.value)
                .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value));
            return encoder.writeVarInt(recordBody.getByteLength()).writeEncoder(recordBody);
        })
            .value();
        // Header: partitionLeaderEpoch + magic + CRC over the body bytes.
        const batchHeader = new encoder_js_1.Encoder()
            .writeInt32(partition.partitionLeaderEpoch)
            .writeInt8(2) // magic byte (message format v2)
            .writeUInt32(unsigned(crc32C(batchBody)))
            .write(batchBody);
        const batch = new encoder_js_1.Encoder()
            .writeInt64(partition.baseOffset)
            .writeInt32(batchHeader.getByteLength())
            .writeEncoder(batchHeader);
        return encoder
            .writeInt32(partition.index)
            .writeUVarInt(batch.getByteLength() + 1) // batch size — compact length is encoded as N + 1 (0 = null)
            .writeEncoder(batch)
            .writeUVarInt(0);
    })
        .writeUVarInt(0))
        .writeUVarInt(0),
    // Reads consume decoder bytes in order — field order must match the wire format.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(), // response-header tagged fields
            responses: decoder.readCompactArray((response) => ({
                name: response.readCompactString(),
                partitionResponses: response.readCompactArray((partitionResponse) => ({
                    index: partitionResponse.readInt32(),
                    errorCode: partitionResponse.readInt16(),
                    baseOffset: partitionResponse.readInt64(),
                    logAppendTime: partitionResponse.readInt64(),
                    logStartOffset: partitionResponse.readInt64(),
                    recordErrors: partitionResponse.readCompactArray((recordError) => ({
                        batchIndex: recordError.readInt32(),
                        batchIndexError: recordError.readInt16(),
                        _tag: recordError.readTagBuffer(),
                    })),
                    errorMessage: partitionResponse.readCompactString(),
                    _tag: partitionResponse.readTagBuffer(),
                })),
                _tag: response.readTagBuffer(),
            })),
            throttleTimeMs: decoder.readInt32(),
            _tag2: decoder.readTagBuffer(),
        };
        // Fail fast on the first partition-level error, keeping the full
        // response attached for diagnostics.
        result.responses.forEach((topic) => {
            topic.partitionResponses.forEach((partition) => {
                if (partition.errorCode !== 0) {
                    throw new error_js_1.KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
                }
            });
        });
        return result;
    },
});
88
|
+
const unsigned = (value) => Uint32Array.from([value])[0];
|
|
89
|
+
const crc32C = (buffer) => {
|
|
90
|
+
let crc = 0 ^ -1;
|
|
91
|
+
for (let i = 0; i < buffer.length; i++) {
|
|
92
|
+
crc = T[(crc ^ buffer[i]) & 0xff] ^ (crc >>> 8);
|
|
93
|
+
}
|
|
94
|
+
return (crc ^ -1) >>> 0;
|
|
95
|
+
};
|
|
96
|
+
const T = new Int32Array([
|
|
97
|
+
0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf,
|
|
98
|
+
0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
|
|
99
|
+
0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57,
|
|
100
|
+
0x89d76c54, 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
|
|
101
|
+
0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e,
|
|
102
|
+
0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,
|
|
103
|
+
0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 0x7da08661, 0x8fcb0562, 0x9c9bf696,
|
|
104
|
+
0x6ef07595, 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
|
|
105
|
+
0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198, 0x5125dad3,
|
|
106
|
+
0xa34e59d0, 0xb01eaa24, 0x42752927, 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,
|
|
107
|
+
0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, 0x61c69362, 0x93ad1061, 0x80fde395,
|
|
108
|
+
0x72966096, 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
|
|
109
|
+
0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 0xb602c312,
|
|
110
|
+
0x44694011, 0x5739b3e5, 0xa55230e6, 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
|
|
111
|
+
0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 0x456cac67, 0xb7072f64, 0xa457dc90,
|
|
112
|
+
0x563c5f93, 0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
|
|
113
|
+
0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 0x1871a4d8,
|
|
114
|
+
0xea1a27db, 0xf94ad42f, 0x0b21572c, 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,
|
|
115
|
+
0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e,
|
|
116
|
+
0x3bc21e9d, 0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
|
|
117
|
+
0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19,
|
|
118
|
+
0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,
|
|
119
|
+
0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, 0x8ecee914, 0x7ca56a17, 0x6ff599e3,
|
|
120
|
+
0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
|
|
121
|
+
0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a,
|
|
122
|
+
0x115b2b19, 0x020bd8ed, 0xf0605bee, 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,
|
|
123
|
+
0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, 0xf36e6f75, 0x0105ec76, 0x12551f82,
|
|
124
|
+
0xe03e9c81, 0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
|
|
125
|
+
0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,
|
|
126
|
+
]);
|
|
/// <reference types="node" />
/**
 * Type declaration for SaslAuthenticate (apiKey 36): exchanges opaque,
 * mechanism-specific auth payloads with the broker. The response `authBytes`
 * carries the server's challenge/result (e.g. the SCRAM server-first and
 * server-final messages — see auth/scram.js). `_tag`/`_tag2` are consumed
 * tagged-field sections and carry no data.
 */
export declare const SASL_AUTHENTICATE: import("../utils/api").Api<{
    authBytes: Buffer;
}, {
    _tag: void;
    errorCode: number;
    errorMessage: string | null;
    authBytes: Buffer | null;
    sessionLifetimeMs: bigint;
    _tag2: void;
}>;
"use strict";
// SaslAuthenticate (apiKey 36) v2: sends mechanism-specific auth bytes and
// returns the server's response bytes. v2 is a flexible version (compact
// encodings plus tagged-field sections).
Object.defineProperty(exports, "__esModule", { value: true });
exports.SASL_AUTHENTICATE = void 0;
const api_1 = require("../utils/api");
const error_1 = require("../utils/error");
exports.SASL_AUTHENTICATE = (0, api_1.createApi)({
    apiKey: 36,
    apiVersion: 2,
    request: (encoder, data) => encoder.writeUVarInt(0).writeCompactBytes(data.authBytes).writeUVarInt(0),
    // Reads consume decoder bytes in order; do not reorder the fields below.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(), // response-header tagged fields
            errorCode: decoder.readInt16(),
            errorMessage: decoder.readCompactString(),
            authBytes: decoder.readCompactBytes(),
            sessionLifetimeMs: decoder.readInt64(),
            _tag2: decoder.readTagBuffer(),
        };
        // Non-zero errorCode means the authentication step was rejected.
        if (result.errorCode)
            throw new error_1.KafkaTSApiError(result.errorCode, result.errorMessage, result);
        return result;
    },
});
"use strict";
// SaslHandshake (apiKey 17) v1: announces the SASL mechanism the client wants
// to use; the broker answers with the list of mechanisms it supports. v1 is a
// non-flexible version, hence the plain (non-compact) string/array encodings.
Object.defineProperty(exports, "__esModule", { value: true });
exports.SASL_HANDSHAKE = void 0;
const api_1 = require("../utils/api");
const error_1 = require("../utils/error");
exports.SASL_HANDSHAKE = (0, api_1.createApi)({
    apiKey: 17,
    apiVersion: 1,
    request: (encoder, data) => encoder.writeString(data.mechanism),
    response: (decoder) => {
        const result = {
            errorCode: decoder.readInt16(),
            // Mechanisms the broker is configured to accept.
            mechanisms: decoder.readArray((mechanism) => mechanism.readString()),
        };
        // Non-zero errorCode aborts the handshake.
        if (result.errorCode)
            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
        return result;
    },
});
/** Mapping of topic name -> partition numbers assigned to one member. */
export type Assignment = {
    [topic: string]: number[];
};
/** One group member's assignment, as distributed by the group leader. */
export type MemberAssignment = {
    memberId: string;
    assignment: Assignment;
};
/**
 * Type declaration for SyncGroup (apiKey 14): the elected leader submits the
 * per-member assignments; every member receives its own assignment back in
 * the response `assignments` field (serialized as a JSON string — see
 * sync-group.js). `_tag`/`_tag2` are consumed tagged-field sections.
 */
export declare const SYNC_GROUP: import("../utils/api").Api<{
    groupId: string;
    generationId: number;
    memberId: string;
    groupInstanceId: string | null;
    protocolType: string | null;
    protocolName: string | null;
    assignments: MemberAssignment[];
}, {
    _tag: void;
    throttleTimeMs: number;
    errorCode: number;
    protocolType: string | null;
    protocolName: string | null;
    assignments: string;
    _tag2: void;
}>;
"use strict";
// SyncGroup (apiKey 14) v5: the elected group leader distributes partition
// assignments to the group; every member receives its own assignment back.
// Flexible version: compact encodings + tagged-field sections (writeUVarInt(0)).
Object.defineProperty(exports, "__esModule", { value: true });
exports.SYNC_GROUP = void 0;
const api_1 = require("../utils/api");
const error_1 = require("../utils/error");
exports.SYNC_GROUP = (0, api_1.createApi)({
    apiKey: 14,
    apiVersion: 5,
    request: (encoder, data) => encoder
        .writeUVarInt(0)
        .writeCompactString(data.groupId)
        .writeInt32(data.generationId)
        .writeCompactString(data.memberId)
        .writeCompactString(data.groupInstanceId)
        .writeCompactString(data.protocolType)
        .writeCompactString(data.protocolName)
        .writeCompactArray(data.assignments, (encoder, assignment) => encoder
        .writeCompactString(assignment.memberId)
        // Assignments travel as an opaque JSON string; the response decoder
        // below returns the raw string and leaves parsing to the caller.
        .writeCompactString(JSON.stringify(assignment.assignment))
        .writeUVarInt(0))
        .writeUVarInt(0),
    // Reads consume decoder bytes in order; field order must match the wire format.
    response: (decoder) => {
        const result = {
            _tag: decoder.readTagBuffer(), // response-header tagged fields
            throttleTimeMs: decoder.readInt32(),
            errorCode: decoder.readInt16(),
            protocolType: decoder.readCompactString(),
            protocolName: decoder.readCompactString(),
            assignments: decoder.readCompactString(),
            _tag2: decoder.readTagBuffer(),
        };
        if (result.errorCode)
            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
        return result;
    },
});
"use strict";
// Barrel module for the bundled SASL providers: PLAIN and SCRAM-SHA-256/512.
// (This is the compiled re-export pattern TypeScript emits for
// `export { ... } from './plain'` / `'./scram'`.)
Object.defineProperty(exports, "__esModule", { value: true });
exports.saslScramSha512 = exports.saslScramSha256 = exports.saslPlain = void 0;
var plain_1 = require("./plain");
Object.defineProperty(exports, "saslPlain", { enumerable: true, get: function () { return plain_1.saslPlain; } });
var scram_1 = require("./scram");
Object.defineProperty(exports, "saslScramSha256", { enumerable: true, get: function () { return scram_1.saslScramSha256; } });
Object.defineProperty(exports, "saslScramSha512", { enumerable: true, get: function () { return scram_1.saslScramSha512; } });
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.saslPlain = void 0;
const api_1 = require("../api");
/**
 * SASL/PLAIN provider: authenticates with a single message of the form
 * "<authzid>NUL<username>NUL<password>" with an empty authzid (RFC 4616).
 */
const saslPlain = (credentials) => {
    const { username, password } = credentials;
    return {
        mechanism: 'PLAIN',
        authenticate: async (context) => {
            // join('\u0000') renders the leading null authzid as an empty string,
            // producing "\0<username>\0<password>".
            const token = [null, username, password].join('\u0000');
            await context.sendRequest(api_1.API.SASL_AUTHENTICATE, { authBytes: Buffer.from(token) });
        },
    };
};
exports.saslPlain = saslPlain;
import { SASLProvider } from '../broker';
/**
 * SASL/SCRAM provider factories (SHA-256 and SHA-512 variants). Each returns
 * a SASLProvider that drives the SCRAM challenge/response exchange over
 * SaslAuthenticate requests using the given credentials.
 */
export declare const saslScramSha256: ({ username, password }: {
    username: string;
    password: string;
}) => SASLProvider;
export declare const saslScramSha512: ({ username, password }: {
    username: string;
    password: string;
}) => SASLProvider;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.saslScramSha512 = exports.saslScramSha256 = void 0;
const api_1 = require("../api");
const crypto_1 = require("../utils/crypto");
const error_1 = require("../utils/error");
// RFC 5802 "saslname": '=' and ',' are reserved inside SCRAM messages and must
// be escaped before the username is embedded in the client-first-message.
const escapeSaslName = (name) => name.replace(/=/g, '=3D').replace(/,/g, '=2C');
// Parse a SCRAM attribute list ("r=...,s=...,i=...") into an object. Attribute
// values may themselves contain '=' (e.g. base64 padding of the salt), so each
// pair is split only on its first '='.
const parseScramMessage = (message) => Object.fromEntries(message.split(',').map((pair) => {
    const separator = pair.indexOf('=');
    return [pair.slice(0, separator), pair.slice(separator + 1)];
}));
/**
 * Generic SCRAM flow (RFC 5802), parameterized by mechanism name, key length
 * and hash digest:
 *  1. send client-first-message (gs2 header "n,," + "n=<user>,r=<nonce>"),
 *  2. derive the salted password from the server's salt/iteration count,
 *  3. send client-final-message carrying the client proof.
 * Throws KafkaTSError on a missing or malformed server challenge and
 * propagates KafkaTSApiError from the underlying SaslAuthenticate requests.
 */
const saslScram = ({ mechanism, keyLength, digest }) => ({ username, password }) => ({
    mechanism,
    authenticate: async ({ sendRequest }) => {
        const nonce = (0, crypto_1.generateNonce)();
        const firstMessage = `n=${escapeSaslName(username)},r=${nonce}`;
        const { authBytes } = await sendRequest(api_1.API.SASL_AUTHENTICATE, {
            authBytes: Buffer.from(`n,,${firstMessage}`),
        });
        if (!authBytes) {
            throw new error_1.KafkaTSError('No auth response');
        }
        const response = parseScramMessage(authBytes.toString());
        const rnonce = response.r;
        // The server nonce must extend the client nonce (replay protection).
        if (!rnonce || !rnonce.startsWith(nonce)) {
            throw new error_1.KafkaTSError('Invalid nonce');
        }
        const iterations = parseInt(response.i, 10);
        if (!response.s || !Number.isFinite(iterations)) {
            throw new error_1.KafkaTSError('Malformed server-first message');
        }
        const salt = (0, crypto_1.base64Decode)(response.s);
        const saltedPassword = await (0, crypto_1.saltPassword)(password, salt, iterations, keyLength, digest);
        const clientKey = (0, crypto_1.hmac)(saltedPassword, 'Client Key', digest);
        const clientKeyHash = (0, crypto_1.hash)(clientKey, digest);
        // client-final-message-without-proof; c= is the base64-encoded gs2 header.
        let finalMessage = `c=${(0, crypto_1.base64Encode)('n,,')},r=${rnonce}`;
        // AuthMessage = client-first-bare + "," + server-first + "," + client-final-without-proof
        const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
        const clientSignature = (0, crypto_1.hmac)(clientKeyHash, fullMessage, digest);
        const clientProof = (0, crypto_1.base64Encode)((0, crypto_1.xor)(clientKey, clientSignature));
        finalMessage += `,p=${clientProof}`;
        await sendRequest(api_1.API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
    },
});
exports.saslScramSha256 = saslScram({ mechanism: 'SCRAM-SHA-256', keyLength: 32, digest: 'sha256' });
exports.saslScramSha512 = saslScram({ mechanism: 'SCRAM-SHA-512', keyLength: 64, digest: 'sha512' });
package/dist/broker.d.ts
ADDED
|
/// <reference types="node" />
/// <reference types="node" />
import { TcpSocketConnectOpts } from 'net';
import { TLSSocketOptions } from 'tls';
import { SendRequest } from './connection';
/**
 * Pluggable SASL mechanism: `mechanism` is announced in the SaslHandshake
 * request and `authenticate` drives the mechanism-specific SaslAuthenticate
 * exchange over the provided sendRequest.
 */
export type SASLProvider = {
    mechanism: string;
    authenticate: (context: {
        sendRequest: SendRequest;
    }) => Promise<void>;
};
type BrokerOptions = {
    clientId: string | null;
    options: TcpSocketConnectOpts;
    sasl: SASLProvider | null;
    ssl: TLSSocketOptions | null;
};
/**
 * A single Kafka broker connection: wraps one Connection and performs the
 * connect-time sequence (API version validation, then optional SASL
 * handshake + authentication).
 */
export declare class Broker {
    private options;
    private connection;
    sendRequest: SendRequest;
    constructor(options: BrokerOptions);
    connect(): Promise<this>;
    /** Memoized connect — concurrent callers share a single connect attempt. */
    ensureConnected: () => Promise<this>;
    disconnect(): Promise<void>;
    private validateApiVersions;
    private saslHandshake;
    private saslAuthenticate;
}
export {};
package/dist/broker.js
ADDED
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Broker = void 0;
const api_1 = require("./api");
const connection_1 = require("./connection");
const error_1 = require("./utils/error");
const memo_1 = require("./utils/memo");
/**
 * A single Kafka broker: owns one Connection and runs the connect-time
 * sequence (API version validation, then optional SASL handshake + auth).
 */
class Broker {
    options;
    connection;
    sendRequest;
    constructor(options) {
        this.options = options;
        this.connection = new connection_1.Connection({
            clientId: options.clientId,
            connection: options.options,
            ssl: options.ssl,
        });
        // Expose the connection's request method directly, bound once.
        this.sendRequest = this.connection.sendRequest.bind(this.connection);
    }
    async connect() {
        await this.connection.connect();
        await this.validateApiVersions();
        await this.saslHandshake();
        await this.saslAuthenticate();
        return this;
    }
    // Memoized so concurrent callers share a single in-flight connect attempt.
    ensureConnected = (0, memo_1.memo)(() => this.connect());
    async disconnect() {
        await this.connection.disconnect();
    }
    // Verify that every API version this client speaks falls inside the
    // broker's advertised [minVersion, maxVersion] range.
    async validateApiVersions() {
        const { versions } = await this.sendRequest(api_1.API.API_VERSIONS, {});
        const apiByKey = Object.fromEntries(Object.values(api_1.API).map((api) => [api.apiKey, api]));
        for (const { apiKey, minVersion, maxVersion } of versions) {
            const api = apiByKey[apiKey];
            if (!api) {
                continue; // broker supports an API this client never calls
            }
            if (api.apiVersion < minVersion || api.apiVersion > maxVersion) {
                throw new error_1.KafkaTSError(`API ${apiKey} version ${api.apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`);
            }
        }
    }
    async saslHandshake() {
        if (!this.options.sasl) {
            return; // SASL disabled
        }
        await this.sendRequest(api_1.API.SASL_HANDSHAKE, { mechanism: this.options.sasl.mechanism });
    }
    async saslAuthenticate() {
        await this.options.sasl?.authenticate({ sendRequest: this.sendRequest });
    }
}
exports.Broker = Broker;
package/dist/client.d.ts
ADDED
|
/// <reference types="node" />
/// <reference types="node" />
import { TcpSocketConnectOpts } from 'net';
import { TLSSocketOptions } from 'tls';
import { SASLProvider } from './broker';
import { Cluster } from './cluster';
import { Consumer, ConsumerOptions } from './consumer/consumer';
import { Producer, ProducerOptions } from './producer/producer';
/** Client configuration; clientId/sasl/ssl default to null when omitted. */
type ClientOptions = {
    clientId?: string | null;
    bootstrapServers: TcpSocketConnectOpts[];
    sasl?: SASLProvider | null;
    ssl?: TLSSocketOptions | null;
};
/**
 * Top-level entry point. Each consumer/producer is given its own Cluster
 * built from these options (see createCluster).
 */
export declare class Client {
    private options;
    constructor(options: ClientOptions);
    /** Creates a consumer, starts it, and resolves once started. */
    startConsumer(options: ConsumerOptions): Promise<Consumer>;
    createProducer(options: ProducerOptions): Producer;
    createCluster(): Cluster;
}
/** Convenience factory, equivalent to `new Client(options)`. */
export declare const createKafkaClient: (options: ClientOptions) => Client;
export {};
package/dist/client.js
ADDED
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createKafkaClient = exports.Client = void 0;
const cluster_1 = require("./cluster");
const consumer_1 = require("./consumer/consumer");
const producer_1 = require("./producer/producer");
/**
 * Top-level entry point: normalizes the client options (clientId/sasl/ssl
 * default to null) and spins up consumers/producers, each on its own Cluster.
 */
class Client {
    options;
    constructor(options) {
        const { clientId = null, sasl = null, ssl = null } = options;
        this.options = { ...options, clientId, sasl, ssl };
    }
    /** Creates a consumer, starts it, and resolves once it is running. */
    async startConsumer(options) {
        const consumer = new consumer_1.Consumer(this.createCluster(), options);
        await consumer.start();
        return consumer;
    }
    /** Creates a producer bound to its own Cluster. */
    createProducer(options) {
        return new producer_1.Producer(this.createCluster(), options);
    }
    /** Builds a fresh Cluster from the normalized options. */
    createCluster() {
        const { clientId, bootstrapServers, sasl, ssl } = this.options;
        return new cluster_1.Cluster({ clientId, bootstrapServers, sasl, ssl });
    }
}
exports.Client = Client;
const createKafkaClient = (options) => new Client(options);
exports.createKafkaClient = createKafkaClient;
|
/// <reference types="node" />
/// <reference types="node" />
import { TcpSocketConnectOpts } from 'net';
import { TLSSocketOptions } from 'tls';
import { Broker, SASLProvider } from './broker';
import { SendRequest } from './connection';
type ClusterOptions = {
    clientId: string | null;
    bootstrapServers: TcpSocketConnectOpts[];
    sasl: SASLProvider | null;
    ssl: TLSSocketOptions | null;
};
/**
 * Broker pool for one Kafka cluster: keeps a seed broker plus per-nodeId
 * Broker instances. `sendRequest` targets the seed broker; use
 * `sendRequestToNode(nodeId)` to address a specific broker.
 */
export declare class Cluster {
    private options;
    private seedBroker;
    private brokerById;
    private brokerMetadata;
    constructor(options: ClusterOptions);
    connect(): Promise<void>;
    disconnect(): Promise<void>;
    /** Re-points the seed broker at the given nodeId. */
    setSeedBroker: (nodeId: number) => Promise<void>;
    sendRequest: SendRequest;
    sendRequestToNode: (nodeId: number) => SendRequest;
    /** Returns (connecting if needed) the Broker for the given nodeId. */
    acquireBroker(nodeId: number): Promise<Broker>;
    private findSeedBroker;
}
export {};