kafka-ts 0.0.3-beta → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -8
- package/dist/api/api-versions.d.ts +9 -0
- package/{src/api/api-versions.ts → dist/api/api-versions.js} +8 -5
- package/dist/api/create-topics.d.ts +38 -0
- package/dist/api/create-topics.js +53 -0
- package/dist/api/delete-topics.d.ts +18 -0
- package/dist/api/delete-topics.js +33 -0
- package/dist/api/fetch.d.ts +84 -0
- package/dist/api/fetch.js +142 -0
- package/dist/api/find-coordinator.d.ts +21 -0
- package/{src/api/find-coordinator.ts → dist/api/find-coordinator.js} +14 -14
- package/dist/api/heartbeat.d.ts +11 -0
- package/dist/api/heartbeat.js +27 -0
- package/dist/api/index.d.ts +576 -0
- package/{src/api/index.ts → dist/api/index.js} +42 -41
- package/dist/api/init-producer-id.d.ts +13 -0
- package/dist/api/init-producer-id.js +29 -0
- package/dist/api/join-group.d.ts +34 -0
- package/dist/api/join-group.js +51 -0
- package/dist/api/leave-group.d.ts +19 -0
- package/dist/api/leave-group.js +39 -0
- package/dist/api/list-offsets.d.ts +29 -0
- package/dist/api/list-offsets.js +48 -0
- package/dist/api/metadata.d.ts +40 -0
- package/{src/api/metadata.ts → dist/api/metadata.js} +18 -26
- package/dist/api/offset-commit.d.ts +28 -0
- package/dist/api/offset-commit.js +48 -0
- package/dist/api/offset-fetch.d.ts +31 -0
- package/dist/api/offset-fetch.js +55 -0
- package/dist/api/produce.d.ts +54 -0
- package/{src/api/produce.ts → dist/api/produce.js} +55 -102
- package/dist/api/sasl-authenticate.d.ts +11 -0
- package/dist/api/sasl-authenticate.js +23 -0
- package/dist/api/sasl-handshake.d.ts +6 -0
- package/dist/api/sasl-handshake.js +19 -0
- package/dist/api/sync-group.d.ts +24 -0
- package/dist/api/sync-group.js +36 -0
- package/dist/auth/index.d.ts +2 -0
- package/dist/auth/index.js +8 -0
- package/dist/auth/plain.d.ts +5 -0
- package/dist/auth/plain.js +12 -0
- package/dist/auth/scram.d.ts +9 -0
- package/dist/auth/scram.js +40 -0
- package/dist/broker.d.ts +30 -0
- package/dist/broker.js +55 -0
- package/dist/client.d.ts +22 -0
- package/dist/client.js +36 -0
- package/dist/cluster.d.ts +27 -0
- package/dist/cluster.js +70 -0
- package/dist/cluster.test.d.ts +1 -0
- package/{src/cluster.test.ts → dist/cluster.test.js} +87 -113
- package/dist/codecs/gzip.d.ts +2 -0
- package/dist/codecs/gzip.js +8 -0
- package/dist/codecs/index.d.ts +2 -0
- package/dist/codecs/index.js +17 -0
- package/dist/codecs/none.d.ts +2 -0
- package/dist/codecs/none.js +7 -0
- package/dist/codecs/types.d.ts +5 -0
- package/dist/codecs/types.js +2 -0
- package/dist/connection.d.ts +26 -0
- package/dist/connection.js +175 -0
- package/dist/consumer/consumer-group.d.ts +41 -0
- package/dist/consumer/consumer-group.js +215 -0
- package/dist/consumer/consumer-metadata.d.ts +7 -0
- package/dist/consumer/consumer-metadata.js +14 -0
- package/dist/consumer/consumer.d.ts +44 -0
- package/dist/consumer/consumer.js +225 -0
- package/dist/consumer/fetch-manager.d.ts +33 -0
- package/dist/consumer/fetch-manager.js +140 -0
- package/dist/consumer/fetcher.d.ts +25 -0
- package/dist/consumer/fetcher.js +64 -0
- package/dist/consumer/offset-manager.d.ts +22 -0
- package/dist/consumer/offset-manager.js +66 -0
- package/dist/consumer/processor.d.ts +19 -0
- package/dist/consumer/processor.js +59 -0
- package/dist/distributors/assignments-to-replicas.d.ts +16 -0
- package/{src/distributors/assignments-to-replicas.ts → dist/distributors/assignments-to-replicas.js} +15 -41
- package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
- package/dist/distributors/assignments-to-replicas.test.js +40 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/distributors/partitioner.d.ts +7 -0
- package/dist/distributors/partitioner.js +23 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +26 -0
- package/dist/metadata.d.ts +24 -0
- package/dist/metadata.js +106 -0
- package/dist/producer/producer.d.ts +24 -0
- package/dist/producer/producer.js +131 -0
- package/{src/types.ts → dist/types.d.ts} +4 -4
- package/dist/types.js +2 -0
- package/{src/utils/api.ts → dist/utils/api.d.ts} +2 -4
- package/dist/utils/api.js +5 -0
- package/dist/utils/crypto.d.ts +8 -0
- package/dist/utils/crypto.js +18 -0
- package/dist/utils/decoder.d.ts +30 -0
- package/{src/utils/decoder.ts → dist/utils/decoder.js} +41 -57
- package/dist/utils/delay.d.ts +1 -0
- package/dist/utils/delay.js +5 -0
- package/dist/utils/encoder.d.ts +28 -0
- package/{src/utils/encoder.ts → dist/utils/encoder.js} +50 -66
- package/dist/utils/error.d.ts +11 -0
- package/dist/utils/error.js +27 -0
- package/dist/utils/logger.d.ts +9 -0
- package/dist/utils/logger.js +32 -0
- package/dist/utils/memo.d.ts +1 -0
- package/{src/utils/memo.ts → dist/utils/memo.js} +7 -3
- package/dist/utils/murmur2.d.ts +3 -0
- package/dist/utils/murmur2.js +40 -0
- package/dist/utils/retrier.d.ts +10 -0
- package/dist/utils/retrier.js +22 -0
- package/dist/utils/tracer.d.ts +5 -0
- package/dist/utils/tracer.js +39 -0
- package/package.json +11 -2
- package/.github/workflows/release.yml +0 -17
- package/.prettierrc +0 -8
- package/certs/ca.crt +0 -29
- package/certs/ca.key +0 -52
- package/certs/ca.srl +0 -1
- package/certs/kafka.crt +0 -29
- package/certs/kafka.csr +0 -26
- package/certs/kafka.key +0 -52
- package/certs/kafka.keystore.jks +0 -0
- package/certs/kafka.truststore.jks +0 -0
- package/docker-compose.yml +0 -104
- package/examples/package-lock.json +0 -31
- package/examples/package.json +0 -14
- package/examples/src/client.ts +0 -9
- package/examples/src/consumer.ts +0 -18
- package/examples/src/create-topic.ts +0 -44
- package/examples/src/producer.ts +0 -24
- package/examples/src/replicator.ts +0 -25
- package/examples/src/utils/delay.ts +0 -1
- package/examples/src/utils/json.ts +0 -1
- package/examples/tsconfig.json +0 -7
- package/log4j.properties +0 -95
- package/scripts/generate-certs.sh +0 -24
- package/src/__snapshots__/request-handler.test.ts.snap +0 -978
- package/src/api/create-topics.ts +0 -78
- package/src/api/delete-topics.ts +0 -42
- package/src/api/fetch.ts +0 -143
- package/src/api/heartbeat.ts +0 -33
- package/src/api/init-producer-id.ts +0 -35
- package/src/api/join-group.ts +0 -67
- package/src/api/leave-group.ts +0 -48
- package/src/api/list-offsets.ts +0 -65
- package/src/api/offset-commit.ts +0 -67
- package/src/api/offset-fetch.ts +0 -74
- package/src/api/sasl-authenticate.ts +0 -21
- package/src/api/sasl-handshake.ts +0 -16
- package/src/api/sync-group.ts +0 -54
- package/src/broker.ts +0 -74
- package/src/client.ts +0 -47
- package/src/cluster.ts +0 -87
- package/src/connection.ts +0 -143
- package/src/consumer/consumer-group.ts +0 -209
- package/src/consumer/consumer-metadata.ts +0 -14
- package/src/consumer/consumer.ts +0 -231
- package/src/consumer/fetch-manager.ts +0 -179
- package/src/consumer/fetcher.ts +0 -57
- package/src/consumer/offset-manager.ts +0 -93
- package/src/consumer/processor.ts +0 -47
- package/src/distributors/assignments-to-replicas.test.ts +0 -43
- package/src/distributors/messages-to-topic-partition-leaders.test.ts +0 -32
- package/src/distributors/messages-to-topic-partition-leaders.ts +0 -19
- package/src/index.ts +0 -4
- package/src/metadata.ts +0 -122
- package/src/producer/producer.ts +0 -132
- package/src/utils/debug.ts +0 -9
- package/src/utils/delay.ts +0 -1
- package/src/utils/error.ts +0 -21
- package/src/utils/retrier.ts +0 -39
- package/src/utils/tracer.ts +0 -31
- package/tsconfig.json +0 -17

package/{src/api/produce.ts → dist/api/produce.js}

@@ -1,99 +1,56 @@
-…
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PRODUCE = void 0;
+const api_js_1 = require("../utils/api.js");
+const encoder_js_1 = require("../utils/encoder.js");
+const error_js_1 = require("../utils/error.js");
+exports.PRODUCE = (0, api_js_1.createApi)({
     apiKey: 0,
-    apiVersion: …
-    request: (…
-        data…
-…
-            .writeInt16(partition.attributes)
-            .writeInt32(partition.lastOffsetDelta)
-            .writeInt64(partition.baseTimestamp)
-            .writeInt64(partition.maxTimestamp)
-            .writeInt64(partition.producerId)
-            .writeInt16(partition.producerEpoch)
-            .writeInt32(partition.baseSequence)
-            .writeArray(partition.records, (encoder, record) => {
-                const recordBody = new Encoder()
-                    .writeInt8(record.attributes)
-                    .writeVarLong(record.timestampDelta)
-                    .writeVarInt(record.offsetDelta)
-                    .writeVarIntString(record.key)
-                    .writeVarIntString(record.value)
-                    .writeVarIntArray(record.headers, (encoder, header) =>
-                        encoder.writeVarIntString(header.key).writeVarIntString(header.value),
-                    )
-                    .value();
-
-                return encoder.writeVarInt(recordBody.length).write(recordBody);
-            })
-            .value();
-
-        const batchHeader = new Encoder()
-            .writeInt32(partition.partitionLeaderEpoch)
-            .writeInt8(2) // magic byte
-            .writeUInt32(unsigned(crc32C(batchBody)))
-            .write(batchBody)
-            .value();
-
-        const batch = new Encoder()
-            .writeInt64(partition.baseOffset)
-            .writeInt32(batchHeader.length)
-            .write(batchHeader)
-            .value();
-
-        return encoder
-            .writeInt32(partition.index)
-            .writeUVarInt(batch.length + 1) // batch size
-            .write(batch)
-            .writeUVarInt(0);
-        })
-        .writeUVarInt(0),
-    )
-    .writeUVarInt(0),
+    apiVersion: 9,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeCompactString(data.transactionalId)
+        .writeInt16(data.acks)
+        .writeInt32(data.timeoutMs)
+        .writeCompactArray(data.topicData, (encoder, topic) => encoder
+            .writeCompactString(topic.name)
+            .writeCompactArray(topic.partitionData, (encoder, partition) => {
+                const batchBody = new encoder_js_1.Encoder()
+                    .writeInt16(partition.attributes)
+                    .writeInt32(partition.lastOffsetDelta)
+                    .writeInt64(partition.baseTimestamp)
+                    .writeInt64(partition.maxTimestamp)
+                    .writeInt64(partition.producerId)
+                    .writeInt16(partition.producerEpoch)
+                    .writeInt32(partition.baseSequence)
+                    .writeArray(partition.records, (encoder, record) => {
+                        const recordBody = new encoder_js_1.Encoder()
+                            .writeInt8(record.attributes)
+                            .writeVarLong(record.timestampDelta)
+                            .writeVarInt(record.offsetDelta)
+                            .writeVarIntBuffer(record.key)
+                            .writeVarIntBuffer(record.value)
+                            .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value));
+                        return encoder.writeVarInt(recordBody.getByteLength()).writeEncoder(recordBody);
+                    })
+                    .value();
+                const batchHeader = new encoder_js_1.Encoder()
+                    .writeInt32(partition.partitionLeaderEpoch)
+                    .writeInt8(2) // magic byte
+                    .writeUInt32(unsigned(crc32C(batchBody)))
+                    .write(batchBody);
+                const batch = new encoder_js_1.Encoder()
+                    .writeInt64(partition.baseOffset)
+                    .writeInt32(batchHeader.getByteLength())
+                    .writeEncoder(batchHeader);
+                return encoder
+                    .writeInt32(partition.index)
+                    .writeUVarInt(batch.getByteLength() + 1) // batch size
+                    .writeEncoder(batch)
+                    .writeUVarInt(0);
+            })
+            .writeUVarInt(0))
+        .writeUVarInt(0),
     response: (decoder) => {
         const result = {
             _tag: decoder.readTagBuffer(),

@@ -121,25 +78,21 @@ export const PRODUCE = createApi({
         result.responses.forEach((topic) => {
             topic.partitionResponses.forEach((partition) => {
                 if (partition.errorCode !== 0) {
-                    throw new KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
+                    throw new error_js_1.KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
                 }
             });
         });
         return result;
     },
 });
-
-const …
-
-const crc32C = (buffer: Buffer) => {
+const unsigned = (value) => Uint32Array.from([value])[0];
+const crc32C = (buffer) => {
     let crc = 0 ^ -1;
     for (let i = 0; i < buffer.length; i++) {
         crc = T[(crc ^ buffer[i]) & 0xff] ^ (crc >>> 8);
     }
-
     return (crc ^ -1) >>> 0;
 };
-
 const T = new Int32Array([
     0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf,
     0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
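
Beyond the TypeScript → compiled-JavaScript move, the produce request encoding itself changed: record keys, values, and headers are now written with `writeVarIntBuffer` instead of `writeVarIntString`, and nested structures are appended via `getByteLength()`/`writeEncoder()` rather than being flattened to a `Buffer` with `.value()` and re-written. A minimal sketch of the new pattern, assuming the `Encoder` API shown in this diff (the import path and sample values are illustrative):

```ts
import { Encoder } from 'kafka-ts/dist/utils/encoder'; // hypothetical import path

// Build a record body once, then length-prefix and append it without the
// intermediate Buffer copy the old code made via .value() + .write(buffer).
const recordBody = new Encoder()
    .writeInt8(0) // record attributes
    .writeVarIntBuffer(Buffer.from('my-key'))    // was writeVarIntString
    .writeVarIntBuffer(Buffer.from('my-value'));

const record = new Encoder()
    .writeVarInt(recordBody.getByteLength()) // varint length prefix
    .writeEncoder(recordBody);               // append the nested encoder directly
```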

package/dist/api/sasl-authenticate.d.ts
ADDED

@@ -0,0 +1,11 @@
+/// <reference types="node" />
+export declare const SASL_AUTHENTICATE: import("../utils/api").Api<{
+    authBytes: Buffer;
+}, {
+    _tag: void;
+    errorCode: number;
+    errorMessage: string | null;
+    authBytes: Buffer | null;
+    sessionLifetimeMs: bigint;
+    _tag2: void;
+}>;

package/dist/api/sasl-authenticate.js
ADDED

@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SASL_AUTHENTICATE = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.SASL_AUTHENTICATE = (0, api_1.createApi)({
+    apiKey: 36,
+    apiVersion: 2,
+    request: (encoder, data) => encoder.writeUVarInt(0).writeCompactBytes(data.authBytes).writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            errorCode: decoder.readInt16(),
+            errorMessage: decoder.readCompactString(),
+            authBytes: decoder.readCompactBytes(),
+            sessionLifetimeMs: decoder.readInt64(),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode)
+            throw new error_1.KafkaTSApiError(result.errorCode, result.errorMessage, result);
+        return result;
+    },
+});

package/dist/api/sasl-handshake.js
ADDED

@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SASL_HANDSHAKE = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.SASL_HANDSHAKE = (0, api_1.createApi)({
+    apiKey: 17,
+    apiVersion: 1,
+    request: (encoder, data) => encoder.writeString(data.mechanism),
+    response: (decoder) => {
+        const result = {
+            errorCode: decoder.readInt16(),
+            mechanisms: decoder.readArray((mechanism) => mechanism.readString()),
+        };
+        if (result.errorCode)
+            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});

package/dist/api/sync-group.d.ts
ADDED

@@ -0,0 +1,24 @@
+export type Assignment = {
+    [topic: string]: number[];
+};
+export type MemberAssignment = {
+    memberId: string;
+    assignment: Assignment;
+};
+export declare const SYNC_GROUP: import("../utils/api").Api<{
+    groupId: string;
+    generationId: number;
+    memberId: string;
+    groupInstanceId: string | null;
+    protocolType: string | null;
+    protocolName: string | null;
+    assignments: MemberAssignment[];
+}, {
+    _tag: void;
+    throttleTimeMs: number;
+    errorCode: number;
+    protocolType: string | null;
+    protocolName: string | null;
+    assignments: string;
+    _tag2: void;
+}>;

package/dist/api/sync-group.js
ADDED

@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SYNC_GROUP = void 0;
+const api_1 = require("../utils/api");
+const error_1 = require("../utils/error");
+exports.SYNC_GROUP = (0, api_1.createApi)({
+    apiKey: 14,
+    apiVersion: 5,
+    request: (encoder, data) => encoder
+        .writeUVarInt(0)
+        .writeCompactString(data.groupId)
+        .writeInt32(data.generationId)
+        .writeCompactString(data.memberId)
+        .writeCompactString(data.groupInstanceId)
+        .writeCompactString(data.protocolType)
+        .writeCompactString(data.protocolName)
+        .writeCompactArray(data.assignments, (encoder, assignment) => encoder
+            .writeCompactString(assignment.memberId)
+            .writeCompactString(JSON.stringify(assignment.assignment))
+            .writeUVarInt(0))
+        .writeUVarInt(0),
+    response: (decoder) => {
+        const result = {
+            _tag: decoder.readTagBuffer(),
+            throttleTimeMs: decoder.readInt32(),
+            errorCode: decoder.readInt16(),
+            protocolType: decoder.readCompactString(),
+            protocolName: decoder.readCompactString(),
+            assignments: decoder.readCompactString(),
+            _tag2: decoder.readTagBuffer(),
+        };
+        if (result.errorCode)
+            throw new error_1.KafkaTSApiError(result.errorCode, null, result);
+        return result;
+    },
+});
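
Note the protocol choice visible above: member assignments go over the wire as JSON (`writeCompactString(JSON.stringify(assignment.assignment))`) rather than Kafka's binary MemberAssignment encoding, and the response hands them back as a plain string. Per the `Assignment` type in sync-group.d.ts, a payload might look like this (topic names and the import path are illustrative):

```ts
import { Assignment } from 'kafka-ts/dist/api/sync-group'; // hypothetical import path

// Maps each topic to the partition numbers assigned to one group member.
const assignment: Assignment = {
    'orders': [0, 1, 2],
    'payments': [3],
};

const wirePayload = JSON.stringify(assignment); // what writeCompactString() receives
```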

package/dist/auth/index.js
ADDED

@@ -0,0 +1,8 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.saslScramSha512 = exports.saslScramSha256 = exports.saslPlain = void 0;
+var plain_1 = require("./plain");
+Object.defineProperty(exports, "saslPlain", { enumerable: true, get: function () { return plain_1.saslPlain; } });
+var scram_1 = require("./scram");
+Object.defineProperty(exports, "saslScramSha256", { enumerable: true, get: function () { return scram_1.saslScramSha256; } });
+Object.defineProperty(exports, "saslScramSha512", { enumerable: true, get: function () { return scram_1.saslScramSha512; } });

package/dist/auth/plain.js
ADDED

@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.saslPlain = void 0;
+const api_1 = require("../api");
+const saslPlain = ({ username, password }) => ({
+    mechanism: 'PLAIN',
+    authenticate: async ({ sendRequest }) => {
+        const authBytes = [null, username, password].join('\u0000');
+        await sendRequest(api_1.API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
+    },
+});
+exports.saslPlain = saslPlain;
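
The `[null, username, password].join('\u0000')` above produces the standard RFC 4616 PLAIN message: an empty authorization identity followed by NUL-separated authcid and password, since `Array.prototype.join` renders the leading `null` as an empty string. For example (credentials are placeholders):

```ts
// [authzid] NUL authcid NUL passwd, with authzid left empty here
const authBytes = [null, 'alice', 's3cret'].join('\u0000');
console.log(JSON.stringify(authBytes)); // "\u0000alice\u0000s3cret"
```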

package/dist/auth/scram.d.ts
ADDED

@@ -0,0 +1,9 @@
+import { SASLProvider } from '../broker';
+export declare const saslScramSha256: ({ username, password }: {
+    username: string;
+    password: string;
+}) => SASLProvider;
+export declare const saslScramSha512: ({ username, password }: {
+    username: string;
+    password: string;
+}) => SASLProvider;

package/dist/auth/scram.js
ADDED

@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.saslScramSha512 = exports.saslScramSha256 = void 0;
+const api_1 = require("../api");
+const crypto_1 = require("../utils/crypto");
+const error_1 = require("../utils/error");
+const saslScram = ({ mechanism, keyLength, digest }) => ({ username, password }) => ({
+    mechanism,
+    authenticate: async ({ sendRequest }) => {
+        const nonce = (0, crypto_1.generateNonce)();
+        const firstMessage = `n=${username},r=${nonce}`;
+        const { authBytes } = await sendRequest(api_1.API.SASL_AUTHENTICATE, {
+            authBytes: Buffer.from(`n,,${firstMessage}`),
+        });
+        if (!authBytes) {
+            throw new error_1.KafkaTSError('No auth response');
+        }
+        const response = Object.fromEntries(authBytes
+            .toString()
+            .split(',')
+            .map((pair) => pair.split('=')));
+        const rnonce = response.r;
+        if (!rnonce.startsWith(nonce)) {
+            throw new error_1.KafkaTSError('Invalid nonce');
+        }
+        const iterations = parseInt(response.i);
+        const salt = (0, crypto_1.base64Decode)(response.s);
+        const saltedPassword = await (0, crypto_1.saltPassword)(password, salt, iterations, keyLength, digest);
+        const clientKey = (0, crypto_1.hmac)(saltedPassword, 'Client Key', digest);
+        const clientKeyHash = (0, crypto_1.hash)(clientKey, digest);
+        let finalMessage = `c=${(0, crypto_1.base64Encode)('n,,')},r=${rnonce}`;
+        const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
+        const clientSignature = (0, crypto_1.hmac)(clientKeyHash, fullMessage, digest);
+        const clientProof = (0, crypto_1.base64Encode)((0, crypto_1.xor)(clientKey, clientSignature));
+        finalMessage += `,p=${clientProof}`;
+        await sendRequest(api_1.API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
+    },
+});
+exports.saslScramSha256 = saslScram({ mechanism: 'SCRAM-SHA-256', keyLength: 32, digest: 'sha256' });
+exports.saslScramSha512 = saslScram({ mechanism: 'SCRAM-SHA-512', keyLength: 64, digest: 'sha512' });
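
For readers cross-checking the SCRAM implementation above against RFC 5802, the proof derivation maps onto the `utils/crypto` helpers like so. Helper signatures are assumed from how this diff calls them; `authMessage` stands for the concatenated client-first, server-first, and client-final-without-proof messages built inline above:

```ts
import { saltPassword, hmac, hash, xor, base64Encode } from 'kafka-ts/dist/utils/crypto'; // hypothetical import path

async function deriveClientProof(
    password: string, salt: Buffer, iterations: number,
    keyLength: number, digest: string, authMessage: string,
): Promise<string> {
    const saltedPassword = await saltPassword(password, salt, iterations, keyLength, digest); // SaltedPassword := Hi(password, salt, i)
    const clientKey = hmac(saltedPassword, 'Client Key', digest);  // ClientKey := HMAC(SaltedPassword, "Client Key")
    const storedKey = hash(clientKey, digest);                     // StoredKey := H(ClientKey), clientKeyHash above
    const clientSignature = hmac(storedKey, authMessage, digest);  // ClientSignature := HMAC(StoredKey, AuthMessage)
    return base64Encode(xor(clientKey, clientSignature));          // ClientProof := ClientKey XOR ClientSignature, sent as p=...
}
```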

package/dist/broker.d.ts
ADDED

@@ -0,0 +1,30 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { TcpSocketConnectOpts } from 'net';
+import { TLSSocketOptions } from 'tls';
+import { SendRequest } from './connection';
+export type SASLProvider = {
+    mechanism: string;
+    authenticate: (context: {
+        sendRequest: SendRequest;
+    }) => Promise<void>;
+};
+type BrokerOptions = {
+    clientId: string | null;
+    options: TcpSocketConnectOpts;
+    sasl: SASLProvider | null;
+    ssl: TLSSocketOptions | null;
+};
+export declare class Broker {
+    private options;
+    private connection;
+    sendRequest: SendRequest;
+    constructor(options: BrokerOptions);
+    connect(): Promise<this>;
+    ensureConnected: () => Promise<this>;
+    disconnect(): Promise<void>;
+    private validateApiVersions;
+    private saslHandshake;
+    private saslAuthenticate;
+}
+export {};

package/dist/broker.js
ADDED

@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Broker = void 0;
+const api_1 = require("./api");
+const connection_1 = require("./connection");
+const error_1 = require("./utils/error");
+const memo_1 = require("./utils/memo");
+class Broker {
+    options;
+    connection;
+    sendRequest;
+    constructor(options) {
+        this.options = options;
+        this.connection = new connection_1.Connection({
+            clientId: this.options.clientId,
+            connection: this.options.options,
+            ssl: this.options.ssl,
+        });
+        this.sendRequest = this.connection.sendRequest.bind(this.connection);
+    }
+    async connect() {
+        await this.connection.connect();
+        await this.validateApiVersions();
+        await this.saslHandshake();
+        await this.saslAuthenticate();
+        return this;
+    }
+    ensureConnected = (0, memo_1.memo)(() => this.connect());
+    async disconnect() {
+        await this.connection.disconnect();
+    }
+    async validateApiVersions() {
+        const { versions } = await this.sendRequest(api_1.API.API_VERSIONS, {});
+        const apiByKey = Object.fromEntries(Object.values(api_1.API).map((api) => [api.apiKey, api]));
+        versions.forEach(({ apiKey, minVersion, maxVersion }) => {
+            if (!apiByKey[apiKey]) {
+                return;
+            }
+            const { apiVersion } = apiByKey[apiKey];
+            if (apiVersion < minVersion || apiVersion > maxVersion) {
+                throw new error_1.KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`);
+            }
+        });
+    }
+    async saslHandshake() {
+        if (!this.options.sasl) {
+            return;
+        }
+        await this.sendRequest(api_1.API.SASL_HANDSHAKE, { mechanism: this.options.sasl.mechanism });
+    }
+    async saslAuthenticate() {
+        await this.options.sasl?.authenticate({ sendRequest: this.sendRequest });
+    }
+}
+exports.Broker = Broker;
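
`connect()` above runs a fixed sequence (TCP/TLS connect, `API_VERSIONS` validation, then the optional SASL handshake and authenticate), and `validateApiVersions` fails fast when any version this client hardcodes falls outside the broker's advertised window. A worked instance of that range check, with an illustrative broker response entry:

```ts
// One entry from the broker's API_VERSIONS response (illustrative values)
const advertised = { apiKey: 0, minVersion: 3, maxVersion: 9 };
// What this client hardcodes for the same API (PRODUCE, dist/api/produce.js)
const hardcoded = { apiKey: 0, apiVersion: 9 };

// The per-API check performed in validateApiVersions():
const unsupported =
    hardcoded.apiVersion < advertised.minVersion || hardcoded.apiVersion > advertised.maxVersion;
console.log(unsupported); // false: version 9 sits inside [3, 9]
```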

package/dist/client.d.ts
ADDED

@@ -0,0 +1,22 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { TcpSocketConnectOpts } from 'net';
+import { TLSSocketOptions } from 'tls';
+import { SASLProvider } from './broker';
+import { Cluster } from './cluster';
+import { Consumer, ConsumerOptions } from './consumer/consumer';
+import { Producer, ProducerOptions } from './producer/producer';
+export type ClientOptions = {
+    clientId?: string | null;
+    bootstrapServers: TcpSocketConnectOpts[];
+    sasl?: SASLProvider | null;
+    ssl?: TLSSocketOptions | null;
+};
+export declare class Client {
+    private options;
+    constructor(options: ClientOptions);
+    startConsumer(options: ConsumerOptions): Promise<Consumer>;
+    createProducer(options: ProducerOptions): Producer;
+    createCluster(): Cluster;
+}
+export declare const createKafkaClient: (options: ClientOptions) => Client;

package/dist/client.js
ADDED

@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createKafkaClient = exports.Client = void 0;
+const cluster_1 = require("./cluster");
+const consumer_1 = require("./consumer/consumer");
+const producer_1 = require("./producer/producer");
+class Client {
+    options;
+    constructor(options) {
+        this.options = {
+            ...options,
+            clientId: options.clientId ?? null,
+            sasl: options.sasl ?? null,
+            ssl: options.ssl ?? null,
+        };
+    }
+    async startConsumer(options) {
+        const consumer = new consumer_1.Consumer(this.createCluster(), options);
+        await consumer.start();
+        return consumer;
+    }
+    createProducer(options) {
+        return new producer_1.Producer(this.createCluster(), options);
+    }
+    createCluster() {
+        return new cluster_1.Cluster({
+            clientId: this.options.clientId,
+            bootstrapServers: this.options.bootstrapServers,
+            sasl: this.options.sasl,
+            ssl: this.options.ssl,
+        });
+    }
+}
+exports.Client = Client;
+const createKafkaClient = (options) => new Client(options);
+exports.createKafkaClient = createKafkaClient;
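
With the new `auth/` providers and the `Client` entry point above, SASL and TLS are wired at construction time. A usage sketch based on the `ClientOptions` shape in client.d.ts; the root-level re-exports, host names, and credentials are assumptions, not verified against dist/index.js:

```ts
import { createKafkaClient, saslScramSha512 } from 'kafka-ts'; // assumed root re-exports

const client = createKafkaClient({
    clientId: 'my-app',
    bootstrapServers: [{ host: 'broker-1.internal', port: 9092 }], // TcpSocketConnectOpts[]
    sasl: saslScramSha512({ username: 'alice', password: 's3cret' }),
    ssl: {}, // TLSSocketOptions; omit (or pass null) for plaintext
});

const cluster = client.createCluster();
await cluster.connect(); // finds a seed broker and loads broker metadata
```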

package/dist/cluster.d.ts
ADDED

@@ -0,0 +1,27 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { TcpSocketConnectOpts } from 'net';
+import { TLSSocketOptions } from 'tls';
+import { Broker, SASLProvider } from './broker';
+import { SendRequest } from './connection';
+type ClusterOptions = {
+    clientId: string | null;
+    bootstrapServers: TcpSocketConnectOpts[];
+    sasl: SASLProvider | null;
+    ssl: TLSSocketOptions | null;
+};
+export declare class Cluster {
+    private options;
+    private seedBroker;
+    private brokerById;
+    private brokerMetadata;
+    constructor(options: ClusterOptions);
+    connect(): Promise<void>;
+    disconnect(): Promise<void>;
+    setSeedBroker: (nodeId: number) => Promise<void>;
+    sendRequest: SendRequest;
+    sendRequestToNode: (nodeId: number) => SendRequest;
+    acquireBroker(nodeId: number): Promise<Broker>;
+    private findSeedBroker;
+}
+export {};

package/dist/cluster.js
ADDED

@@ -0,0 +1,70 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Cluster = void 0;
+const api_1 = require("./api");
+const broker_1 = require("./broker");
+const error_1 = require("./utils/error");
+const logger_1 = require("./utils/logger");
+class Cluster {
+    options;
+    seedBroker = new broker_1.Broker({ clientId: null, sasl: null, ssl: null, options: { port: 9092 } });
+    brokerById = {};
+    brokerMetadata = {};
+    constructor(options) {
+        this.options = options;
+    }
+    async connect() {
+        this.seedBroker = await this.findSeedBroker();
+        this.brokerById = {};
+        const metadata = await this.sendRequest(api_1.API.METADATA, {
+            allowTopicAutoCreation: false,
+            includeTopicAuthorizedOperations: false,
+            topics: [],
+        });
+        this.brokerMetadata = Object.fromEntries(metadata.brokers.map((options) => [options.nodeId, options]));
+    }
+    async disconnect() {
+        await Promise.all([this.seedBroker.disconnect(), ...Object.values(this.brokerById).map((x) => x.disconnect())]);
+    }
+    setSeedBroker = async (nodeId) => {
+        await this.seedBroker.disconnect();
+        this.seedBroker = await this.acquireBroker(nodeId);
+    };
+    sendRequest = (...args) => this.seedBroker.sendRequest(...args);
+    sendRequestToNode = (nodeId) => async (...args) => {
+        if (!this.brokerById[nodeId]) {
+            this.brokerById[nodeId] = await this.acquireBroker(nodeId);
+        }
+        return this.brokerById[nodeId].sendRequest(...args);
+    };
+    async acquireBroker(nodeId) {
+        const broker = new broker_1.Broker({
+            clientId: this.options.clientId,
+            sasl: this.options.sasl,
+            ssl: this.options.ssl,
+            options: this.brokerMetadata[nodeId],
+        });
+        await broker.connect();
+        return broker;
+    }
+    async findSeedBroker() {
+        const randomizedBrokers = this.options.bootstrapServers.toSorted(() => Math.random() - 0.5);
+        for (const options of randomizedBrokers) {
+            try {
+                const broker = await new broker_1.Broker({
+                    clientId: this.options.clientId,
+                    sasl: this.options.sasl,
+                    ssl: this.options.ssl,
+                    options,
+                });
+                await broker.connect();
+                return broker;
+            }
+            catch (error) {
+                logger_1.log.warn(`Failed to connect to seed broker ${options.host}:${options.port}`, error);
+            }
+        }
+        throw new error_1.KafkaTSError('No seed brokers found');
+    }
+}
+exports.Cluster = Cluster;
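
A note on `findSeedBroker` above: it shuffles `bootstrapServers` via `toSorted(() => Math.random() - 0.5)`, which relies on `Array.prototype.toSorted` (available in Node.js 20+) and yields a rough shuffle rather than a uniform one. Once connected, requests route either through the seed broker or to a specific node, as sketched below (continuing the client sketch above; assumes `API` is re-exported from the package root):

```ts
import { API } from 'kafka-ts'; // assumed root re-export

// Default path: requests go through the seed broker (arguments as in connect() above).
const metadata = await cluster.sendRequest(API.METADATA, {
    topics: [],
    allowTopicAutoCreation: false,
    includeTopicAuthorizedOperations: false,
});

// Targeted path: lazily connects to the node on first use and caches it in brokerById.
const sendToNode = cluster.sendRequestToNode(metadata.brokers[0].nodeId);
```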

package/dist/cluster.test.d.ts
ADDED

@@ -0,0 +1 @@
+export {};