kafka-ts 1.3.1-beta.1 → 1.3.1-beta.2
- package/README.md +7 -7
- package/dist/api/alter-configs.d.ts +26 -0
- package/dist/api/alter-configs.js +33 -0
- package/dist/api/api-versions.d.ts +5 -2
- package/dist/api/api-versions.js +13 -0
- package/dist/api/create-topics.d.ts +14 -12
- package/dist/api/create-topics.js +104 -12
- package/dist/api/delete-topics.d.ts +10 -8
- package/dist/api/delete-topics.js +61 -7
- package/dist/api/fetch.d.ts +15 -12
- package/dist/api/fetch.js +131 -13
- package/dist/api/find-coordinator.d.ts +9 -7
- package/dist/api/find-coordinator.js +63 -5
- package/dist/api/heartbeat.d.ts +7 -5
- package/dist/api/heartbeat.js +42 -4
- package/dist/api/index.d.ts +47 -118
- package/dist/api/init-producer-id.d.ts +7 -5
- package/dist/api/init-producer-id.js +53 -9
- package/dist/api/join-group.d.ts +9 -7
- package/dist/api/join-group.js +95 -6
- package/dist/api/leave-group.d.ts +8 -6
- package/dist/api/leave-group.js +49 -6
- package/dist/api/list-offsets.d.ts +9 -7
- package/dist/api/list-offsets.js +85 -8
- package/dist/api/metadata.d.ts +10 -9
- package/dist/api/metadata.js +109 -8
- package/dist/api/offset-commit.d.ts +10 -8
- package/dist/api/offset-commit.js +88 -8
- package/dist/api/offset-fetch.d.ts +11 -9
- package/dist/api/offset-fetch.js +94 -9
- package/dist/api/produce.d.ts +8 -10
- package/dist/api/produce.js +132 -38
- package/dist/api/sasl-authenticate.d.ts +8 -6
- package/dist/api/sasl-authenticate.js +43 -3
- package/dist/api/sasl-handshake.d.ts +7 -4
- package/dist/api/sasl-handshake.js +10 -0
- package/dist/api/sync-group.d.ts +7 -5
- package/dist/api/sync-group.js +62 -5
- package/dist/broker.js +6 -5
- package/dist/cluster.test.js +17 -14
- package/dist/connection.d.ts +11 -1
- package/dist/connection.js +27 -2
- package/dist/consumer/consumer.js +13 -9
- package/dist/consumer/metadata.d.ts +24 -0
- package/dist/consumer/metadata.js +64 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/examples/src/replicator.js +34 -0
- package/dist/examples/src/utils/json.js +5 -0
- package/dist/request-handler.d.ts +16 -0
- package/dist/request-handler.js +67 -0
- package/dist/request-handler.test.d.ts +1 -0
- package/dist/request-handler.test.js +340 -0
- package/dist/src/api/api-versions.js +18 -0
- package/dist/src/api/create-topics.js +46 -0
- package/dist/src/api/delete-topics.js +26 -0
- package/dist/src/api/fetch.js +95 -0
- package/dist/src/api/find-coordinator.js +34 -0
- package/dist/src/api/heartbeat.js +22 -0
- package/dist/src/api/index.js +38 -0
- package/dist/src/api/init-producer-id.js +24 -0
- package/dist/src/api/join-group.js +48 -0
- package/dist/src/api/leave-group.js +30 -0
- package/dist/src/api/list-offsets.js +39 -0
- package/dist/src/api/metadata.js +47 -0
- package/dist/src/api/offset-commit.js +39 -0
- package/dist/src/api/offset-fetch.js +44 -0
- package/dist/src/api/produce.js +119 -0
- package/dist/src/api/sync-group.js +31 -0
- package/dist/src/broker.js +35 -0
- package/dist/src/connection.js +21 -0
- package/dist/src/consumer/consumer-group.js +131 -0
- package/dist/src/consumer/consumer.js +103 -0
- package/dist/src/consumer/metadata.js +52 -0
- package/dist/src/consumer/offset-manager.js +23 -0
- package/dist/src/index.js +19 -0
- package/dist/src/producer/producer.js +84 -0
- package/dist/src/request-handler.js +57 -0
- package/dist/src/request-handler.test.js +321 -0
- package/dist/src/types.js +2 -0
- package/dist/src/utils/api.js +5 -0
- package/dist/src/utils/decoder.js +161 -0
- package/dist/src/utils/encoder.js +137 -0
- package/dist/src/utils/error.js +10 -0
- package/dist/utils/api.d.ts +4 -1
- package/dist/utils/cached.d.ts +3 -0
- package/dist/utils/cached.js +19 -0
- package/dist/utils/debug.d.ts +2 -0
- package/dist/utils/debug.js +11 -0
- package/dist/utils/decoder.d.ts +2 -2
- package/dist/utils/decoder.js +14 -1
- package/dist/utils/encoder.d.ts +1 -0
- package/dist/utils/encoder.js +14 -0
- package/dist/utils/lock.d.ts +8 -0
- package/dist/utils/lock.js +44 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/mutex.d.ts +3 -0
- package/dist/utils/mutex.js +32 -0
- package/package.json +1 -1
package/dist/cluster.test.js
CHANGED

@@ -15,6 +15,7 @@ const kafka = (0, client_1.createKafkaClient)({
 });
 vitest_1.describe.sequential('Low-level API', () => {
     const groupId = (0, crypto_1.randomBytes)(16).toString('hex');
+    const topicName = 'kafka-ts-test-topic';
     let cluster;
     (0, vitest_1.beforeAll)(async () => {
         cluster = await kafka.createCluster();
@@ -24,9 +25,9 @@ vitest_1.describe.sequential('Low-level API', () => {
             allowTopicAutoCreation: false,
             includeTopicAuthorizedOperations: false,
         });
-        if (metadataResult.topics.some((topic) => topic.name ===
+        if (metadataResult.topics.some((topic) => topic.name === topicName)) {
             await cluster.sendRequest(api_1.API.DELETE_TOPICS, {
-                topics: [{ name:
+                topics: [{ name: topicName, topicId: null }],
                 timeoutMs: 10000,
             });
         }
@@ -43,7 +44,7 @@ vitest_1.describe.sequential('Low-level API', () => {
         const result = await cluster.sendRequest(api_1.API.CREATE_TOPICS, {
             topics: [
                 {
-                    name:
+                    name: topicName,
                     numPartitions: 10,
                     replicationFactor: 3,
                     assignments: [],
@@ -53,9 +54,9 @@ vitest_1.describe.sequential('Low-level API', () => {
             timeoutMs: 10000,
             validateOnly: false,
         });
-        topicId = result.topics[0].
+        topicId = result.topics[0]._topicId;
         result.topics.forEach((topic) => {
-            topic.
+            topic._topicId = 'Any<UUID>';
         });
         (0, vitest_1.expect)(result).toMatchSnapshot();
         await new Promise((resolve) => setTimeout(resolve, 1000));
@@ -82,7 +83,7 @@ vitest_1.describe.sequential('Low-level API', () => {
     let leaderId = 0;
     (0, vitest_1.it)('should request metadata for a topic', async () => {
         const result = await cluster.sendRequest(api_1.API.METADATA, {
-            topics: [{ id: topicId, name:
+            topics: [{ id: topicId, name: topicName }],
             allowTopicAutoCreation: false,
             includeTopicAuthorizedOperations: false,
         });
@@ -118,7 +119,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             acks: 1,
             topicData: [
                 {
-                    name:
+                    name: topicName,
                     partitionData: [
                         {
                             index: partitionIndex,
@@ -164,6 +165,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             topics: [
                 {
                     topicId,
+                    topicName,
                     partitions: [
                         {
                             partition: partitionIndex,
@@ -180,7 +182,8 @@ vitest_1.describe.sequential('Low-level API', () => {
             rackId: '',
         });
         result.responses.forEach((response) => {
-
+            if ('topicId' in response)
+                response.topicId = 'Any<UUID>';
             response.partitions.forEach((partition) => {
                 partition.records.forEach((record) => {
                     (0, vitest_1.expect)(record.baseTimestamp).toBeGreaterThan(1721926744730n);
@@ -220,7 +223,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             protocols: [
                 {
                     name: 'RoundRobinAssigner',
-                    metadata: { version: 0, topics: [
+                    metadata: { version: 0, topics: [topicName] },
                 },
             ],
             reason: null,
@@ -245,7 +248,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             protocols: [
                 {
                     name: 'RoundRobinAssigner',
-                    metadata: { version: 0, topics: [
+                    metadata: { version: 0, topics: [topicName] },
                 },
             ],
             reason: null,
@@ -282,7 +285,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             groupInstanceId: null,
             topics: [
                 {
-                    name:
+                    name: topicName,
                     partitions: [
                         { partitionIndex: 0, committedOffset: 1n, committedLeaderEpoch: 0, committedMetadata: null },
                     ],
@@ -298,7 +301,7 @@ vitest_1.describe.sequential('Low-level API', () => {
             groupId,
             topics: [
                 {
-                    name:
+                    name: topicName,
                     partitionIndexes: [0],
                 },
             ],
@@ -332,11 +335,11 @@ vitest_1.describe.sequential('Low-level API', () => {
     });
     (0, vitest_1.it)('should delete topics', async () => {
         const result = await cluster.sendRequest(api_1.API.DELETE_TOPICS, {
-            topics: [{ name:
+            topics: [{ name: topicName, topicId: null }],
             timeoutMs: 10000,
        });
         result.responses.forEach((response) => {
-            response.
+            response._topicId = 'Any<UUID>';
         });
         (0, vitest_1.expect)(result).toMatchSnapshot();
     });
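The recurring `'Any<UUID>'` assignments in this test exist because topic IDs are broker-generated UUIDs that differ on every run, so they are overwritten with a stable placeholder before `toMatchSnapshot()`. A minimal sketch of that pattern in isolation (the `normalize` helper is illustrative, not part of kafka-ts):

```ts
// Illustrative only: replace run-specific UUIDs with a stable token so that
// vitest snapshots stay deterministic across test runs.
const normalize = <T extends { _topicId?: string }>(items: T[]): T[] =>
    items.map((item) => ('_topicId' in item ? { ...item, _topicId: 'Any<UUID>' } : item));
```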
package/dist/connection.d.ts
CHANGED

@@ -7,17 +7,27 @@ type ConnectionOptions = {
     ssl: TLSSocketOptions | null;
     requestTimeout: number;
 };
+type Versions = {
+    [apiKey: number]: {
+        minVersion: number;
+        maxVersion: number;
+    };
+};
 export declare class Connection {
     private options;
     private socket;
     private queue;
     private lastCorrelationId;
     private chunks;
+    private versions;
     constructor(options: ConnectionOptions);
     isConnected(): boolean;
     connect(): Promise<void>;
     disconnect(): Promise<void>;
-
+    setVersions(versions: Versions): void;
+    private validateVersion;
+    private validateVersionCached;
+    sendRequest<Request, Response>(apiLatest: Api<Request, Response>, body: Request): Promise<Response>;
     private write;
     private handleData;
     private nextCorrelationId;
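The new `Versions` map is keyed by Kafka API key and records the version range the broker advertises for each API; it is presumably populated from an `API_VERSIONS` response once the connection is up (note `api-versions.js` in the file list above). A hedged sketch of that construction, with the response shape assumed for illustration rather than taken from kafka-ts:

```ts
// Sketch: build the Versions map declared above from an ApiVersions-style
// response. The `apiKeys` input shape is an assumption, not kafka-ts's exact type.
type Versions = { [apiKey: number]: { minVersion: number; maxVersion: number } };

const toVersions = (
    apiKeys: { apiKey: number; minVersion: number; maxVersion: number }[],
): Versions =>
    Object.fromEntries(apiKeys.map(({ apiKey, ...range }) => [apiKey, range]));
```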
package/dist/connection.js
CHANGED

@@ -50,6 +50,7 @@ const assert_1 = __importDefault(require("assert"));
 const net_1 = __importStar(require("net"));
 const tls_1 = __importDefault(require("tls"));
 const api_1 = require("./api");
+const cached_1 = require("./utils/cached");
 const decoder_1 = require("./utils/decoder");
 const encoder_1 = require("./utils/encoder");
 const error_1 = require("./utils/error");
@@ -62,6 +63,7 @@ class Connection {
     queue = {};
     lastCorrelationId = 0;
     chunks = [];
+    versions;
     constructor(options) {
         this.options = options;
     }
@@ -105,7 +107,26 @@
             this.socket.end(resolve);
         });
     }
-
+    setVersions(versions) {
+        this.versions = versions;
+        this.validateVersionCached.clear();
+    }
+    validateVersion(api) {
+        if (!this.versions)
+            return api;
+        const versionInfo = this.versions[api.apiKey];
+        if (!versionInfo)
+            throw new Error(`Broker does not support API ${(0, api_1.getApiName)(api)}`);
+        if (api.apiVersion < versionInfo.minVersion || api.apiVersion > versionInfo.maxVersion) {
+            if (api.fallback)
+                return this.validateVersion(api.fallback);
+            throw new Error(`Broker does not support API ${(0, api_1.getApiName)(api)} version ${api.apiVersion} (minVersion=${versionInfo.minVersion}, maxVersion=${versionInfo.maxVersion})`);
+        }
+        return api;
+    }
+    validateVersionCached = (0, cached_1.cached)(this.validateVersion, (api) => api.apiKey.toString());
+    async sendRequest(apiLatest, body) {
+        const api = this.validateVersionCached(apiLatest);
         const correlationId = this.nextCorrelationId();
         const apiName = (0, api_1.getApiName)(api);
         const encoder = new encoder_1.Encoder()
@@ -113,6 +134,8 @@
             .writeInt16(api.apiVersion)
             .writeInt32(correlationId)
             .writeString(this.options.clientId);
+        if (api.requestHeaderVersion === 2)
+            encoder.writeTagBuffer();
         const request = api.request(encoder, body);
         const requestEncoder = new encoder_1.Encoder().writeInt32(request.getBufferLength()).writeEncoder(request);
         const { stack } = new Error();
@@ -123,7 +146,7 @@
                 reject(new error_1.ConnectionError(`${apiName} timed out`, stack));
             }, this.options.requestTimeout);
             try {
-                this.queue[correlationId] = { resolve, reject };
+                this.queue[correlationId] = { api, resolve, reject };
                 await this.write(requestEncoder.value());
             }
             catch (error) {
@@ -160,6 +183,8 @@
             const responseDecoder = new decoder_1.Decoder(decoder.read(responseSize));
             const correlationId = responseDecoder.readInt32();
             const context = this.queue[correlationId];
+            if (context?.api.responseHeaderVersion === 1)
+                responseDecoder.readTagBuffer();
             if (context) {
                 delete this.queue[correlationId];
                 context.resolve({ responseDecoder, responseSize });
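`validateVersion` gives each API a fallback chain: if the requested version falls outside the broker's advertised range, it retries with `api.fallback` (an older encoding of the same request) before failing, and the result is memoized per API key via `cached`. A self-contained sketch of that recursion, using a simplified `Api` shape assumed for illustration:

```ts
// Simplified model of the fallback walk in Connection.validateVersion.
type MiniApi = { apiKey: number; apiVersion: number; fallback?: MiniApi };
type Range = { minVersion: number; maxVersion: number };

const pickSupported = (api: MiniApi, versions: Record<number, Range>): MiniApi => {
    const range = versions[api.apiKey];
    if (!range) throw new Error(`API ${api.apiKey} unsupported by broker`);
    if (api.apiVersion < range.minVersion || api.apiVersion > range.maxVersion) {
        if (api.fallback) return pickSupported(api.fallback, versions); // try the older encoding
        throw new Error(`No supported version for API ${api.apiKey}`);
    }
    return api;
};

// e.g. a v15 fetch falls back to its v11 encoding on a broker advertising 0..11:
const fetchV15: MiniApi = { apiKey: 1, apiVersion: 15, fallback: { apiKey: 1, apiVersion: 11 } };
pickSupported(fetchV15, { 1: { minVersion: 0, maxVersion: 11 } }); // -> the v11 api
```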
package/dist/consumer/consumer.js
CHANGED

@@ -173,10 +173,10 @@ class Consumer extends events_1.default {
         const { retrier } = options;
         this.consumerGroup?.handleLastHeartbeat();
         const topicPartitions = {};
-        const messages = response.responses.flatMap((
-            const topic = this.metadata.getTopicNameById(topicId);
+        const messages = response.responses.flatMap((response) => {
+            const topic = 'topicName' in response ? response.topicName : this.metadata.getTopicNameById(response.topicId);
             topicPartitions[topic] ??= new Set();
-            return partitions.flatMap(({ partitionIndex, records }) => {
+            return response.partitions.flatMap(({ partitionIndex, records }) => {
                 topicPartitions[topic].add(partitionIndex);
                 return records.flatMap(({ baseTimestamp, baseOffset, records }) => records.flatMap((message) => ({
                     topic,
@@ -211,10 +211,13 @@
             this.off('rebalanceInProgress', onRebalance);
         }
         if (!abortController.signal.aborted) {
-            response.responses.forEach((
-                partitions.forEach(({ partitionIndex, records }) => {
+            response.responses.forEach((response) => {
+                response.partitions.forEach(({ partitionIndex, records }) => {
                     records.forEach(({ baseOffset, lastOffsetDelta }) => {
-
+                        const topic = 'topicName' in response
+                            ? response.topicName
+                            : this.metadata.getTopicNameById(response.topicId);
+                        this.offsetManager.resolve(topic, partitionIndex, baseOffset + BigInt(lastOffsetDelta) + 1n);
                     });
                 });
             });
@@ -232,12 +235,13 @@
             isolationLevel,
             sessionId: 0,
             sessionEpoch: -1,
-            topics: Object.entries(assignment).map(([
-                topicId: this.metadata.getTopicIdByName(
+            topics: Object.entries(assignment).map(([topicName, partitions]) => ({
+                topicId: this.metadata.getTopicIdByName(topicName),
+                topicName,
                 partitions: partitions.map((partition) => ({
                     partition,
                     currentLeaderEpoch: -1,
-                    fetchOffset: this.offsetManager.getCurrentOffset(
+                    fetchOffset: this.offsetManager.getCurrentOffset(topicName, partition),
                     lastFetchedEpoch: -1,
                     logStartOffset: -1n,
                     partitionMaxBytes,
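The repeated `'topicName' in response` checks handle the two shapes a fetch response can now take: newer fetch versions identify topics by UUID only (resolved through the consumer's metadata cache), while older fallback versions still carry the topic name inline. A hedged sketch of that narrowing (the union type is an approximation, not kafka-ts's exact typing):

```ts
// Sketch of the union the consumer now handles; `partitions` payloads elided.
type FetchTopicResponse =
    | { topicId: string; partitions: unknown[] }    // newer fetch versions: UUID only
    | { topicName: string; partitions: unknown[] }; // fallback versions: name inline

const resolveTopicName = (
    response: FetchTopicResponse,
    getTopicNameById: (id: string) => string,
): string =>
    'topicName' in response ? response.topicName : getTopicNameById(response.topicId);
```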
package/dist/consumer/metadata.d.ts
ADDED

@@ -0,0 +1,24 @@
+import { IsolationLevel } from "../api/fetch";
+import { Assignment } from "../api/sync-group";
+import { Cluster } from "../cluster";
+import { OffsetManager } from "./offset-manager";
+export type Metadata = ReturnType<typeof createMetadata>;
+type MetadataOptions = {
+    cluster: Cluster;
+    topics?: string[];
+    isolationLevel?: IsolationLevel;
+    allowTopicAutoCreation?: boolean;
+    fromBeginning?: boolean;
+    offsetManager?: OffsetManager;
+};
+export declare const createMetadata: ({ cluster, topics, isolationLevel, allowTopicAutoCreation, fromBeginning, offsetManager, }: MetadataOptions) => {
+    init: () => Promise<void>;
+    getTopicPartitions: () => Record<string, number[]>;
+    getTopicIdByName: (name: string) => string;
+    getTopicNameById: (id: string) => string;
+    getAssignment: () => Assignment;
+    setAssignment: (newAssignment: Assignment) => void;
+    getLeaderIdByTopicPartition: (topic: string, partition: number) => number;
+    getIsrNodeIdsByTopicPartition: (topic: string, partition: number) => number[];
+};
+export {};
package/dist/consumer/metadata.js
ADDED

@@ -0,0 +1,64 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createMetadata = void 0;
+const api_1 = require("../api");
+const createMetadata = ({ cluster, topics, isolationLevel = 0 /* IsolationLevel.READ_UNCOMMITTED */, allowTopicAutoCreation = true, fromBeginning = false, offsetManager, }) => {
+    let topicPartitions = {};
+    let topicNameById = {};
+    let topicIdByName = {};
+    let leaderIdByTopicPartition = {};
+    let isrNodesByTopicPartition;
+    let assignment = {};
+    const fetchMetadata = async () => {
+        const response = await cluster.sendRequest(api_1.API.METADATA, {
+            allowTopicAutoCreation,
+            includeTopicAuthorizedOperations: false,
+            topics: topics?.map((name) => ({ id: null, name })) ?? null,
+        });
+        topicPartitions = Object.fromEntries(response.topics.map((topic) => [topic.name, topic.partitions.map((partition) => partition.partitionIndex)]));
+        topicNameById = Object.fromEntries(response.topics.map((topic) => [topic.topicId, topic.name]));
+        topicIdByName = Object.fromEntries(response.topics.map((topic) => [topic.name, topic.topicId]));
+        leaderIdByTopicPartition = Object.fromEntries(response.topics.map((topic) => [
+            topic.name,
+            Object.fromEntries(topic.partitions.map((partition) => [partition.partitionIndex, partition.leaderId])),
+        ]));
+        isrNodesByTopicPartition = Object.fromEntries(response.topics.map((topic) => [
+            topic.name,
+            Object.fromEntries(topic.partitions.map((partition) => [partition.partitionIndex, partition.isrNodes])),
+        ]));
+        assignment = topicPartitions;
+    };
+    const listOffsets = async () => {
+        const offsets = await cluster.sendRequest(api_1.API.LIST_OFFSETS, {
+            replicaId: -1,
+            isolationLevel,
+            topics: Object.entries(assignment)
+                .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
+                .map(({ topic, partition }) => ({
+                name: topic,
+                partitions: [{ partitionIndex: partition, currentLeaderEpoch: -1, timestamp: -1n }],
+            })),
+        });
+        offsets.topics.forEach(({ name, partitions }) => {
+            partitions.forEach(({ partitionIndex, offset }) => {
+                offsetManager?.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
+            });
+        });
+    };
+    return {
+        init: async () => {
+            await fetchMetadata();
+            await listOffsets();
+        },
+        getTopicPartitions: () => topicPartitions,
+        getTopicIdByName: (name) => topicIdByName[name],
+        getTopicNameById: (id) => topicNameById[id],
+        getAssignment: () => assignment,
+        setAssignment: (newAssignment) => {
+            assignment = newAssignment;
+        },
+        getLeaderIdByTopicPartition: (topic, partition) => leaderIdByTopicPartition[topic][partition],
+        getIsrNodeIdsByTopicPartition: (topic, partition) => isrNodesByTopicPartition[topic][partition],
+    };
+};
+exports.createMetadata = createMetadata;
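Read together, the new metadata module initializes in two steps: a METADATA request learns partition lists, topic-id mappings, and partition leaders, then a LIST_OFFSETS request seeds the offset manager (with `0n` when `fromBeginning` is set). A hedged usage sketch; the import path and the `cluster` value are assumptions based on this diff, as `createMetadata`'s real call sites live inside the consumer:

```ts
// Sketch only: how the new consumer metadata module appears intended to be used.
import { createMetadata } from './consumer/metadata'; // path as shipped under dist/

declare const cluster: import('./cluster').Cluster; // a connected cluster, construction omitted

const bootstrap = async () => {
    const metadata = createMetadata({ cluster, topics: ['my-topic'], fromBeginning: true });
    await metadata.init(); // fetchMetadata() then listOffsets()
    const leaderId = metadata.getLeaderIdByTopicPartition('my-topic', 0);
    const topicId = metadata.getTopicIdByName('my-topic');
    return { leaderId, topicId };
};
```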
package/dist/distributors/messages-to-topic-partition-leaders.d.ts
ADDED

@@ -0,0 +1,17 @@
+type TopicPartitionLeader = {
+    [topicName: string]: {
+        [partitionId: number]: number;
+    };
+};
+type MessagesByNodeTopicPartition<T> = {
+    [nodeId: number]: {
+        [topicName: string]: {
+            [partitionId: number]: T[];
+        };
+    };
+};
+export declare const distributeMessagesToTopicPartitionLeaders: <T extends {
+    topic: string;
+    partition: number;
+}>(messages: T[], topicPartitionLeader: TopicPartitionLeader) => MessagesByNodeTopicPartition<T>;
+export {};
package/dist/distributors/messages-to-topic-partition-leaders.js
ADDED

@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.distributeMessagesToTopicPartitionLeaders = void 0;
+const distributeMessagesToTopicPartitionLeaders = (messages, topicPartitionLeader) => {
+    const result = {};
+    messages.forEach((message) => {
+        const leaderId = topicPartitionLeader[message.topic][message.partition];
+        result[leaderId] ??= {};
+        result[leaderId][message.topic] ??= {};
+        result[leaderId][message.topic][message.partition] ??= [];
+        result[leaderId][message.topic][message.partition].push(message);
+    });
+    return result;
+};
+exports.distributeMessagesToTopicPartitionLeaders = distributeMessagesToTopicPartitionLeaders;
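Per its declaration, the distributor groups a flat message list into `nodeId → topic → partition → message[]`, so a producer can issue one request per partition leader. A small usage sketch with illustrative topic and leader values (two partitions led by different nodes), complementing the single-message inline-snapshot test below:

```ts
import { distributeMessagesToTopicPartitionLeaders } from './distributors/messages-to-topic-partition-leaders';

const byLeader = distributeMessagesToTopicPartitionLeaders(
    [
        { topic: 'logs', partition: 0, value: 'a' },
        { topic: 'logs', partition: 1, value: 'b' },
        { topic: 'logs', partition: 0, value: 'c' },
    ],
    { logs: { 0: 1, 1: 2 } }, // partition 0 led by node 1, partition 1 by node 2
);
// byLeader: { 1: { logs: { 0: [a, c] } }, 2: { logs: { 1: [b] } } }
```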
package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts
ADDED

@@ -0,0 +1 @@
+export {};
package/dist/distributors/messages-to-topic-partition-leaders.test.js
ADDED

@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const vitest_1 = require("vitest");
+const messages_to_topic_partition_leaders_1 = require("./messages-to-topic-partition-leaders");
+(0, vitest_1.describe)('Distribute messages to partition leader ids', () => {
+    (0, vitest_1.describe)('distributeMessagesToTopicPartitionLeaders', () => {
+        (0, vitest_1.it)('snoke', () => {
+            const result = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)([{ topic: 'topic', partition: 0, key: null, value: null, offset: 0n, timestamp: 0n, headers: {} }], { topic: { 0: 1 } });
+            (0, vitest_1.expect)(result).toMatchInlineSnapshot(`
+              {
+                "1": {
+                  "topic": {
+                    "0": [
+                      {
+                        "headers": {},
+                        "key": null,
+                        "offset": 0n,
+                        "partition": 0,
+                        "timestamp": 0n,
+                        "topic": "topic",
+                        "value": null,
+                      },
+                    ],
+                  },
+                },
+              }
+            `);
+        });
+    });
+});
package/dist/examples/src/replicator.js
ADDED

@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const kafkats_1 = require("kafkats");
+const json_1 = require("./utils/json");
+(async () => {
+    const brokers = [{ host: "localhost", port: 9092 }];
+    const topic = "playground-topic";
+    // const producer = createProducer({ brokers });
+    // const producerInterval = setInterval(async () => {
+    //     await producer.send([
+    //         {
+    //             topic,
+    //             partition: 0,
+    //             offset: 1n,
+    //             timestamp: BigInt(Date.now()),
+    //             key: null,
+    //             value: `PING ${Math.random()}`,
+    //             headers: { timestamp: Date.now().toString() }
+    //         }
+    //     ])
+    // }, 5000);
+    const consumer = await (0, kafkats_1.startConsumer)({
+        topics: [topic],
+        brokers,
+        onBatch: (messages) => {
+            console.log(JSON.stringify(messages, json_1.serializer, 2));
+        },
+    });
+    process.on("SIGINT", async () => {
+        await consumer.close();
+        // clearInterval(producerInterval);
+        // await producer.close();
+    });
+})();
package/dist/request-handler.d.ts
ADDED

@@ -0,0 +1,16 @@
+import { Connection } from "./connection";
+import { Api } from "./utils/api";
+type RequestHandlerOptions = {
+    clientId: string | null;
+};
+export declare class RequestHandler {
+    private connection;
+    private options;
+    private queue;
+    private currentBuffer;
+    constructor(connection: Connection, options: RequestHandlerOptions);
+    private handleData;
+    sendRequest<Request, Response>(api: Api<Request, Response>, args: Request): Promise<Response>;
+}
+export type SendRequest = typeof RequestHandler.prototype.sendRequest;
+export {};
package/dist/request-handler.js
ADDED

@@ -0,0 +1,67 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RequestHandler = void 0;
+const node_assert_1 = __importDefault(require("node:assert"));
+const decoder_1 = require("./utils/decoder");
+const encoder_1 = require("./utils/encoder");
+class RequestHandler {
+    connection;
+    options;
+    queue = {};
+    currentBuffer = null;
+    constructor(connection, options) {
+        this.connection = connection;
+        this.options = options;
+        this.connection.on("data", this.handleData);
+    }
+    handleData(buffer) {
+        this.currentBuffer = this.currentBuffer ? Buffer.concat([this.currentBuffer, buffer]) : buffer;
+        if (this.currentBuffer.length < 4) {
+            return;
+        }
+        const decoder = (0, decoder_1.createDecoder)({ buffer: this.currentBuffer });
+        const size = decoder.readInt32();
+        if (size > decoder.buffer.length) {
+            return;
+        }
+        const correlationId = decoder.readInt32();
+        const request = this.queue[correlationId];
+        delete this.queue[correlationId];
+        request.callback(decoder);
+        // debug(handleData.name, 'Response offsets', {
+        //     offset: decoder.offset,
+        //     length: decoder.buffer.length,
+        //     rest: decoder.buffer.subarray(decoder.offset, decoder.buffer.length)?.toString(),
+        // });
+        (0, node_assert_1.default)(decoder.offset - 4 === size, `Buffer not correctly consumed: ${decoder.offset - 4} !== ${buffer.length}`);
+        this.currentBuffer = null;
+    }
+    async sendRequest(api, args) {
+        const correlationId = Math.floor(Math.random() * 1000000);
+        const encoder = (0, encoder_1.createEncoder)()
+            .writeInt16(api.apiKey)
+            .writeInt16(api.apiVersion)
+            .writeInt32(correlationId)
+            .writeString(this.options.clientId);
+        const request = api.request(encoder, args).value();
+        const buffer = (0, encoder_1.createEncoder)().writeInt32(request.length).write(request).value();
+        return new Promise(async (resolve, reject) => {
+            await this.connection.write(buffer);
+            this.queue[correlationId] = {
+                callback: (decoder) => {
+                    try {
+                        const response = api.response(decoder);
+                        resolve(response);
+                    }
+                    catch (error) {
+                        reject(error);
+                    }
+                },
+            };
+        });
+    }
+}
+exports.RequestHandler = RequestHandler;
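`RequestHandler.sendRequest` writes the standard Kafka frame by hand: a 4-byte big-endian size prefix, then a v1 request header (`apiKey`, `apiVersion`, `correlationId`, nullable `clientId` string), then the encoded body; `handleData` matches responses back to callers by the echoed correlation id. A standalone sketch of that framing, independent of kafka-ts's Encoder (the helper name is illustrative):

```ts
import { Buffer } from 'node:buffer';

// Frame layout written by sendRequest (Kafka request header v1):
// [int32 size][int16 apiKey][int16 apiVersion][int32 correlationId][int16 clientId length][clientId bytes][body]
const frameRequest = (
    apiKey: number,
    apiVersion: number,
    correlationId: number,
    clientId: string,
    body: Buffer,
): Buffer => {
    const clientIdBytes = Buffer.from(clientId, 'utf8');
    const header = Buffer.alloc(10 + clientIdBytes.length);
    header.writeInt16BE(apiKey, 0);
    header.writeInt16BE(apiVersion, 2);
    header.writeInt32BE(correlationId, 4);
    header.writeInt16BE(clientIdBytes.length, 8);
    clientIdBytes.copy(header, 10);
    const payload = Buffer.concat([header, body]);
    const size = Buffer.alloc(4);
    size.writeInt32BE(payload.length, 0);
    return Buffer.concat([size, payload]);
};
```

Note also that the class picks correlation ids at random rather than monotonically; any scheme works as long as the ids of in-flight requests on one connection do not collide.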
package/dist/request-handler.test.d.ts
ADDED

@@ -0,0 +1 @@
+export declare const kafka: import("./client").Client;