kafka-ts 1.1.9 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/consumer/consumer.js +7 -7
- package/dist/consumer/offset-manager.js +6 -5
- package/dist/distributors/group-by-leader-id.d.ts +10 -0
- package/dist/distributors/group-by-leader-id.js +13 -0
- package/dist/distributors/group-partitions-by-topic.d.ts +6 -0
- package/dist/distributors/group-partitions-by-topic.js +12 -0
- package/dist/metadata.d.ts +1 -1
- package/dist/metadata.js +1 -1
- package/dist/producer/producer-buffer.d.ts +20 -0
- package/dist/producer/producer-buffer.js +118 -0
- package/dist/producer/producer-state.d.ts +15 -0
- package/dist/producer/producer-state.js +33 -0
- package/dist/producer/producer.d.ts +6 -11
- package/dist/producer/producer.js +32 -101
- package/dist/utils/decoder.js +16 -9
- package/dist/utils/encoder.d.ts +10 -8
- package/dist/utils/encoder.js +95 -58
- package/dist/utils/promise-chain.d.ts +5 -0
- package/dist/utils/promise-chain.js +39 -0
- package/dist/utils/shared.d.ts +1 -1
- package/dist/utils/shared.js +8 -7
- package/package.json +1 -1
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +0 -17
- package/dist/distributors/messages-to-topic-partition-leaders.js +0 -15
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +0 -1
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +0 -30
- package/dist/utils/lock.d.ts +0 -8
- package/dist/utils/lock.js +0 -44
package/README.md
CHANGED
@@ -177,7 +177,7 @@ Custom SASL mechanisms can be implemented following the `SASLProvider` interface
 | allowTopicAutoCreation | boolean | false | false | Allow kafka to auto-create topic when it doesn't exist |
 | partitioner | Partitioner | false | defaultPartitioner | Custom partitioner function. By default, it uses a default java-compatible partitioner. |
 
-### `producer.send(messages: Message[]
+### `producer.send(messages: Message[])`
 
 <!-- export type Message = {
     topic: string;
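The fixed heading matches the new signature: the per-call options argument carrying `acks` is gone (see the producer.d.ts and producer.js hunks below). A hedged call sketch against 1.2.1; the producer wiring and the exact exported type name are assumptions, not copied from the README:

```ts
import type { Producer } from 'kafka-ts'; // type-only; export name assumed

declare const producer: Producer; // client/producer setup omitted

// 1.1.9 accepted a second argument: producer.send(messages, { acks: -1 }).
// 1.2.1 takes only the message array:
await producer.send([{ topic: 'my-topic', value: Buffer.from('hello') }]);
```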
package/dist/consumer/consumer.js
CHANGED

@@ -15,7 +15,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Consumer = void 0;
 const events_1 = __importDefault(require("events"));
 const api_1 = require("../api");
-const
+const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
+const group_partitions_by_topic_1 = require("../distributors/group-partitions-by-topic");
 const delay_1 = require("../utils/delay");
 const error_1 = require("../utils/error");
 const logger_1 = require("../utils/logger");

@@ -112,12 +113,11 @@ class Consumer extends events_1.default {
         try {
             await this.consumerGroup?.join();
             // TODO: If leader is not available, find another read replica
-            const
-
-
-
-
-            ])),
+            const topicPartitions = Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })));
+            const topicPartitionsByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(topicPartitions, this.metadata.getTopicPartitionLeaderIds());
+            const nodeAssignments = Object.entries(topicPartitionsByLeaderId).map(([leaderId, topicPartitions]) => ({
+                nodeId: parseInt(leaderId),
+                assignment: (0, group_partitions_by_topic_1.groupPartitionsByTopic)(topicPartitions),
             }));
             this.fetchManager = new fetch_manager_1.FetchManager({
                 fetch: this.fetch.bind(this),
package/dist/consumer/offset-manager.js
CHANGED

@@ -2,7 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.OffsetManager = void 0;
 const api_1 = require("../api");
-const
+const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
+const group_partitions_by_topic_1 = require("../distributors/group-partitions-by-topic");
 const tracer_1 = require("../utils/tracer");
 const trace = (0, tracer_1.createTracer)('OffsetManager');
 class OffsetManager {

@@ -40,11 +41,11 @@ class OffsetManager {
     async fetchOffsets(options) {
         const { metadata } = this.options;
         const topicPartitions = Object.entries(metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })));
-        const
-        await Promise.all(Object.entries(
+        const topicPartitionsByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(topicPartitions, metadata.getTopicPartitionLeaderIds());
+        await Promise.all(Object.entries(topicPartitionsByLeaderId).map(([leaderId, topicPartitions]) => this.listOffsets({
             ...options,
-            nodeId: parseInt(
-            nodeAssignment:
+            nodeId: parseInt(leaderId),
+            nodeAssignment: (0, group_partitions_by_topic_1.groupPartitionsByTopic)(topicPartitions),
         })));
     }
     async listOffsets({ nodeId, nodeAssignment, fromTimestamp, }) {
package/dist/distributors/group-by-leader-id.js
ADDED

@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.groupByLeaderId = void 0;
+const groupByLeaderId = (items, leaderIdByTopicPartition) => {
+    const result = {};
+    items.forEach((item) => {
+        const leaderId = leaderIdByTopicPartition[item.topic][item.partition];
+        result[leaderId] ??= [];
+        result[leaderId].push(item);
+    });
+    return result;
+};
+exports.groupByLeaderId = groupByLeaderId;
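A quick usage sketch of the new distributor (module path as in the diff; the module ships as plain JS, so types are loose, and the sample leader map is illustrative):

```ts
import { groupByLeaderId } from './group-by-leader-id';

// Leader map shape: topic -> partition -> leader node id.
const leaderIds = { orders: { 0: 101, 1: 102 } };

const grouped = groupByLeaderId(
    [
        { topic: 'orders', partition: 0 },
        { topic: 'orders', partition: 1 },
        { topic: 'orders', partition: 0 },
    ],
    leaderIds,
);
// grouped => { 101: [orders/0, orders/0], 102: [orders/1] }
```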
package/dist/distributors/group-partitions-by-topic.js
ADDED

@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.groupPartitionsByTopic = void 0;
+const groupPartitionsByTopic = (items) => {
+    const result = {};
+    items.forEach((item) => {
+        result[item.topic] ??= [];
+        result[item.topic].push(item.partition);
+    });
+    return result;
+};
+exports.groupPartitionsByTopic = groupPartitionsByTopic;
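Together the two distributors replace the old `distributeMessagesToTopicPartitionLeaders` (deleted at the bottom of this diff). An end-to-end sketch of the consumer/offset-manager pipeline from the hunks above, with illustrative assignment and leader maps:

```ts
import { groupByLeaderId } from './group-by-leader-id';
import { groupPartitionsByTopic } from './group-partitions-by-topic';

// Shapes as returned by metadata.getAssignment() / getTopicPartitionLeaderIds().
const assignment: Record<string, number[]> = { orders: [0, 1, 2] };
const leaderIds = { orders: { 0: 101, 1: 101, 2: 102 } };

// Flatten the assignment, group by leader, then regroup partitions per topic.
const topicPartitions = Object.entries(assignment).flatMap(([topic, partitions]) =>
    partitions.map((partition) => ({ topic, partition })),
);
const byLeader = groupByLeaderId(topicPartitions, leaderIds);
const nodeAssignments = Object.entries(byLeader).map(([leaderId, tps]) => ({
    nodeId: parseInt(leaderId),
    assignment: groupPartitionsByTopic(tps),
}));
// => [{ nodeId: 101, assignment: { orders: [0, 1] } }, { nodeId: 102, assignment: { orders: [2] } }]
```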
package/dist/metadata.d.ts
CHANGED

@@ -16,7 +16,7 @@ export declare class Metadata {
     getTopicIdByName(name: string): string;
     getTopicNameById(id: string): string;
     fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }: {
-        topics: string[]
+        topics: string[];
         allowTopicAutoCreation: boolean;
     }): Promise<void>;
     fetchMetadata({ topics, allowTopicAutoCreation, }: {
package/dist/metadata.js
CHANGED

@@ -41,7 +41,7 @@ class Metadata {
         return this.topicNameById[id];
     }
     async fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }) {
-        const missingTopics =
+        const missingTopics = topics.filter((topic) => !this.topicPartitions[topic]);
         if (!missingTopics.length) {
             return;
         }
package/dist/producer/producer-buffer.d.ts
ADDED

@@ -0,0 +1,20 @@
+import { Cluster } from '../cluster';
+import { Message } from '../types';
+import { ProducerState } from './producer-state';
+type ProducerBufferOptions = {
+    nodeId: number;
+    maxBatchSize: number;
+    cluster: Cluster;
+    state: ProducerState;
+};
+export declare class ProducerBuffer {
+    private options;
+    private buffer;
+    private head;
+    private isFlushing;
+    constructor(options: ProducerBufferOptions);
+    enqueue(messages: Message[]): Promise<void>;
+    private flush;
+    private compactBuffer;
+}
+export {};
package/dist/producer/producer-buffer.js
ADDED

@@ -0,0 +1,118 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ProducerBuffer = void 0;
+const api_1 = require("../api");
+class ProducerBuffer {
+    options;
+    buffer = [];
+    head = 0;
+    isFlushing = false;
+    constructor(options) {
+        this.options = options;
+    }
+    enqueue(messages) {
+        return new Promise((resolve, reject) => {
+            this.buffer.push({ messages, resolve, reject });
+            this.flush();
+        });
+    }
+    async flush() {
+        if (this.isFlushing)
+            return;
+        this.isFlushing = true;
+        const { cluster, state, nodeId, maxBatchSize } = this.options;
+        while (true) {
+            const batch = [];
+            const resolvers = [];
+            const rejecters = [];
+            while (this.head < this.buffer.length) {
+                const entry = this.buffer[this.head++];
+                batch.push(...entry.messages);
+                resolvers.push(entry.resolve);
+                rejecters.push(entry.reject);
+                const nextLength = this.buffer[this.head]?.messages.length ?? 0;
+                if (batch.length + nextLength > maxBatchSize) {
+                    break;
+                }
+            }
+            if (!batch.length)
+                break;
+            this.compactBuffer();
+            const topicPartitionMessages = {};
+            batch.forEach((message) => {
+                topicPartitionMessages[message.topic] ??= {};
+                topicPartitionMessages[message.topic][message.partition] ??= [];
+                topicPartitionMessages[message.topic][message.partition].push(message);
+            });
+            const defaultTimestamp = BigInt(Date.now());
+            const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
+                name: topic,
+                partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+                    const partitionIndex = parseInt(partition);
+                    let baseTimestamp;
+                    let maxTimestamp;
+                    messages.forEach(({ timestamp = defaultTimestamp }) => {
+                        if (!baseTimestamp || timestamp < baseTimestamp) {
+                            baseTimestamp = timestamp;
+                        }
+                        if (!maxTimestamp || timestamp > maxTimestamp) {
+                            maxTimestamp = timestamp;
+                        }
+                    });
+                    return {
+                        index: partitionIndex,
+                        baseOffset: 0n,
+                        partitionLeaderEpoch: -1,
+                        attributes: 0,
+                        lastOffsetDelta: messages.length - 1,
+                        baseTimestamp: baseTimestamp ?? 0n,
+                        maxTimestamp: maxTimestamp ?? 0n,
+                        producerId: state.producerId,
+                        producerEpoch: 0,
+                        baseSequence: state.getSequence(topic, partitionIndex),
+                        records: messages.map((message, index) => ({
+                            attributes: 0,
+                            timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
+                            offsetDelta: index,
+                            key: message.key ?? null,
+                            value: message.value,
+                            headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
+                                key,
+                                value,
+                            })),
+                        })),
+                    };
+                }),
+            }));
+            try {
+                await cluster.sendRequestToNode(nodeId)(api_1.API.PRODUCE, {
+                    transactionalId: null,
+                    acks: -1,
+                    timeoutMs: 30000,
+                    topicData,
+                });
+                topicData.forEach(({ name, partitionData }) => {
+                    partitionData.forEach(({ index, records }) => {
+                        state.updateSequence(name, index, records.length);
+                    });
+                });
+                resolvers.forEach((resolve) => resolve());
+            }
+            catch (error) {
+                rejecters.forEach((reject) => reject(error));
+            }
+        }
+        this.isFlushing = false;
+    }
+    compactBuffer() {
+        if (this.head >= this.buffer.length) {
+            this.buffer = [];
+            this.head = 0;
+        }
+        else if (this.head > 1000 && this.head > this.buffer.length / 2) {
+            this.buffer = this.buffer.slice(this.head);
+            this.head = 0;
+        }
+    }
+}
+exports.ProducerBuffer = ProducerBuffer;
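`enqueue()` resolves only after its messages are acked by the broker, and `flush()` drains queued entries greedily: a batch is closed just before adding an entry that would push it past `maxBatchSize`, so a single oversized entry still ships as one batch. A standalone sketch of that packing rule (names and the string payloads are illustrative):

```ts
type Entry = { messages: string[] };

// Mirrors the inner drain loop of flush(): stop before the entry that would
// overflow the batch, but never split an individual entry.
const packBatches = (entries: Entry[], maxBatchSize: number): string[][] => {
    const batches: string[][] = [];
    let head = 0;
    while (head < entries.length) {
        const batch: string[] = [];
        while (head < entries.length) {
            batch.push(...entries[head++].messages);
            const nextLength = entries[head]?.messages.length ?? 0;
            if (batch.length + nextLength > maxBatchSize) break;
        }
        batches.push(batch);
    }
    return batches;
};

console.log(packBatches([{ messages: ['a', 'b'] }, { messages: ['c'] }, { messages: ['d'] }], 3));
// => [['a', 'b', 'c'], ['d']]
```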
package/dist/producer/producer-state.d.ts
ADDED

@@ -0,0 +1,15 @@
+import { Cluster } from "../cluster";
+type ProducerStateOptions = {
+    cluster: Cluster;
+};
+export declare class ProducerState {
+    private options;
+    producerId: bigint;
+    private producerEpoch;
+    private sequences;
+    constructor(options: ProducerStateOptions);
+    initProducerId(): Promise<void>;
+    getSequence(topic: string, partition: number): number;
+    updateSequence(topic: string, partition: number, messagesCount: number): void;
+}
+export {};
package/dist/producer/producer-state.js
ADDED

@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ProducerState = void 0;
+const api_1 = require("../api");
+class ProducerState {
+    options;
+    producerId = 0n;
+    producerEpoch = 0;
+    sequences = {};
+    constructor(options) {
+        this.options = options;
+    }
+    async initProducerId() {
+        const result = await this.options.cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
+            transactionalId: null,
+            transactionTimeoutMs: 0,
+            producerId: this.producerId,
+            producerEpoch: this.producerEpoch,
+        });
+        this.producerId = result.producerId;
+        this.producerEpoch = result.producerEpoch;
+        this.sequences = {};
+    }
+    getSequence(topic, partition) {
+        return this.sequences[topic]?.[partition] ?? 0;
+    }
+    updateSequence(topic, partition, messagesCount) {
+        this.sequences[topic] ??= {};
+        this.sequences[topic][partition] ??= 0;
+        this.sequences[topic][partition] += messagesCount;
+    }
+}
+exports.ProducerState = ProducerState;
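`ProducerState` centralizes the idempotent-producer bookkeeping that previously lived on `Producer` itself: the producer id/epoch from INIT_PRODUCER_ID, plus a per topic-partition sequence counter that backs `baseSequence` in each batch. A small sketch of the sequence bookkeeping (cluster wiring omitted; no request is sent until `initProducerId()`):

```ts
import { ProducerState } from './producer-state';

declare const cluster: any; // kafka-ts Cluster instance; setup omitted

const state = new ProducerState({ cluster });
// Sequences start at 0 per topic-partition and advance by the record count of
// each acked batch, keeping baseSequence monotonic.
state.getSequence('events', 0);       // 0
state.updateSequence('events', 0, 3); // a 3-record batch was acked
state.getSequence('events', 0);       // 3
```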
package/dist/producer/producer.d.ts
CHANGED

@@ -4,25 +4,20 @@ import { Message } from '../types';
 export type ProducerOptions = {
     allowTopicAutoCreation?: boolean;
     partitioner?: Partitioner;
+    maxBatchSize?: number;
 };
 export declare class Producer {
     private cluster;
     private options;
     private metadata;
-    private
-    private producerEpoch;
-    private sequences;
+    private state;
     private partition;
-    private
+    private chain;
+    private bufferByNodeId;
     constructor(cluster: Cluster, options: ProducerOptions);
-    send(messages: Message[]
-        acks?: -1 | 1;
-    }): Promise<void>;
+    send(messages: Message[]): Promise<void>;
     close(): Promise<void>;
     private ensureProducerInitialized;
-    private
-    private getSequence;
-    private updateSequence;
-    private fetchMetadata;
+    private fetchMetadataForTopics;
     private handleError;
 }
package/dist/producer/producer.js
CHANGED

@@ -11,107 +11,60 @@ var __metadata = (this && this.__metadata) || function (k, v) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Producer = void 0;
 const api_1 = require("../api");
-const
+const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
 const partitioner_1 = require("../distributors/partitioner");
 const metadata_1 = require("../metadata");
 const error_1 = require("../utils/error");
-const lock_1 = require("../utils/lock");
 const logger_1 = require("../utils/logger");
-const
+const promise_chain_1 = require("../utils/promise-chain");
 const shared_1 = require("../utils/shared");
 const tracer_1 = require("../utils/tracer");
+const producer_buffer_1 = require("./producer-buffer");
+const producer_state_1 = require("./producer-state");
 const trace = (0, tracer_1.createTracer)('Producer');
 class Producer {
     cluster;
     options;
     metadata;
-
-    producerEpoch = 0;
-    sequences = {};
+    state;
     partition;
-
+    chain = new promise_chain_1.PromiseChain();
+    bufferByNodeId = {};
     constructor(cluster, options) {
         this.cluster = cluster;
         this.options = {
             ...options,
             allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
             partitioner: options.partitioner ?? partitioner_1.defaultPartitioner,
+            maxBatchSize: options.maxBatchSize ?? 500,
         };
         this.metadata = new metadata_1.Metadata({ cluster });
+        this.state = new producer_state_1.ProducerState({ cluster });
         this.partition = this.options.partitioner({ metadata: this.metadata });
     }
-    async send(messages
+    async send(messages) {
         await this.ensureProducerInitialized();
-        const
-
-        const topics = new Set(messages.map((message) => message.topic));
-        await this.lock.acquire([...topics].map((topic) => `metadata:${topic}`), () => this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation }));
+        const topics = [...new Set(messages.map((message) => message.topic))];
+        await this.fetchMetadataForTopics(topics);
         const partitionedMessages = messages.map((message) => {
             message.partition = this.partition(message);
             return message;
         });
-        const
-        await Promise.all(Object.entries(
+        const messagesByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(partitionedMessages, this.metadata.getTopicPartitionLeaderIds());
+        await Promise.all(Object.entries(messagesByLeaderId).map(async ([leaderId, messages]) => {
+            const nodeId = parseInt(leaderId);
+            const buffer = (this.bufferByNodeId[nodeId] ??= new producer_buffer_1.ProducerBuffer({
+                nodeId,
+                maxBatchSize: this.options.maxBatchSize,
+                cluster: this.cluster,
+                state: this.state,
+            }));
             try {
-                await
-                const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
-                    name: topic,
-                    partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
-                        const partitionIndex = parseInt(partition);
-                        let baseTimestamp;
-                        let maxTimestamp;
-                        messages.forEach(({ timestamp = defaultTimestamp }) => {
-                            if (!baseTimestamp || timestamp < baseTimestamp) {
-                                baseTimestamp = timestamp;
-                            }
-                            if (!maxTimestamp || timestamp > maxTimestamp) {
-                                maxTimestamp = timestamp;
-                            }
-                        });
-                        return {
-                            index: partitionIndex,
-                            baseOffset: 0n,
-                            partitionLeaderEpoch: -1,
-                            attributes: 0,
-                            lastOffsetDelta: messages.length - 1,
-                            baseTimestamp: baseTimestamp ?? 0n,
-                            maxTimestamp: maxTimestamp ?? 0n,
-                            producerId: this.producerId,
-                            producerEpoch: 0,
-                            baseSequence: this.getSequence(topic, partitionIndex),
-                            records: messages.map((message, index) => ({
-                                attributes: 0,
-                                timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
-                                offsetDelta: index,
-                                key: message.key ?? null,
-                                value: message.value,
-                                headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
-                                    key,
-                                    value,
-                                })),
-                            })),
-                        };
-                    }),
-                }));
-                await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
-                    transactionalId: null,
-                    acks,
-                    timeoutMs: 30000,
-                    topicData,
-                });
-                topicData.forEach(({ name, partitionData }) => {
-                    partitionData.forEach(({ index, records }) => {
-                        this.updateSequence(name, index, records.length);
-                    });
-                });
-                });
+                await buffer.enqueue(messages);
             }
             catch (error) {
                 await this.handleError(error);
-
-                    .flatMap((partitionMessages) => Object.values(partitionMessages).flat())
-                    .map(({ partition, ...message }) => message);
-                return this.send(messages, { acks });
+                return this.send(messages);
             }
         }));
     }

@@ -120,47 +73,25 @@ class Producer {
     }
     ensureProducerInitialized = (0, shared_1.shared)(async () => {
        await this.cluster.ensureConnected();
-        if (!this.producerId) {
-            await this.initProducerId();
+        if (!this.state.producerId) {
+            await this.state.initProducerId();
         }
     });
-    async
-
-
-
-            transactionTimeoutMs: 0,
-            producerId: this.producerId,
-            producerEpoch: this.producerEpoch,
-        });
-        this.producerId = result.producerId;
-        this.producerEpoch = result.producerEpoch;
-        this.sequences = {};
-    });
-    }
-    getSequence(topic, partition) {
-        return this.sequences[topic]?.[partition] ?? 0;
-    }
-    updateSequence(topic, partition, messagesCount) {
-        this.sequences[topic] ??= {};
-        this.sequences[topic][partition] ??= 0;
-        this.sequences[topic][partition] += messagesCount;
-    }
-    async fetchMetadata(topics, allowTopicAutoCreation) {
-        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
-            await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation });
-        });
-    }
+    fetchMetadataForTopics = (0, shared_1.shared)(async (topics) => {
+        const { allowTopicAutoCreation } = this.options;
+        await this.chain.run(topics.map((topic) => `metadata:${topic}`), () => this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation }));
+    });
     async handleError(error) {
         await (0, api_1.handleApiError)(error).catch(async (error) => {
             if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.NOT_LEADER_OR_FOLLOWER) {
                 logger_1.log.debug('Refreshing metadata', { reason: error.message });
                 const topics = Object.keys(this.metadata.getTopicPartitions());
-                await this.fetchMetadata(topics, false);
+                await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation: false });
                 return;
             }
             if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.OUT_OF_ORDER_SEQUENCE_NUMBER) {
                 logger_1.log.debug('Out of order sequence number. Reinitializing producer ID');
-                await this.initProducerId();
+                await this.state.initProducerId();
                 return;
             }
             throw error;

@@ -171,6 +102,6 @@ exports.Producer = Producer;
 __decorate([
     trace(() => ({ root: true })),
     __metadata("design:type", Function),
-    __metadata("design:paramtypes", [Array
+    __metadata("design:paramtypes", [Array]),
     __metadata("design:returntype", Promise)
 ], Producer.prototype, "send", null);
package/dist/utils/decoder.js
CHANGED

@@ -110,30 +110,37 @@ class Decoder {
     }
     readArray(callback) {
         const length = this.readInt32();
-        const results = Array.
+        const results = new Array(Math.max(length, 0));
+        for (let i = 0; i < length; i++)
+            results[i] = callback(this);
         return results;
     }
     readCompactArray(callback) {
         const length = this.readUVarInt() - 1;
-        const results = Array.
+        const results = new Array(Math.max(length, 0));
+        for (let i = 0; i < length; i++)
+            results[i] = callback(this);
         return results;
     }
     readVarIntArray(callback) {
         const length = this.readVarInt();
-        const results = Array.
+        const results = new Array(Math.max(length, 0));
+        for (let i = 0; i < length; i++)
+            results[i] = callback(this);
         return results;
     }
     readRecords(callback) {
         const length = this.readInt32();
-
+        const results = [];
+        for (let i = 0; i < length; i++) {
             const size = this.readVarInt();
-            if (!size)
-
-            }
+            if (!size)
+                continue;
             const child = new Decoder(this.buffer.subarray(this.offset, this.offset + size));
             this.offset += size;
-
-            }
+            results.push(callback(child));
+        }
+        return results;
     }
     read(length) {
         const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
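The rewritten array readers preallocate the result array and assign by index instead of rebuilding it per element (`Math.max(length, 0)` guards against the -1 "null array" length), and `readRecords` now skips zero-size records while still collecting results. A standalone sketch of the preallocation pattern:

```ts
// Read `length` items with a caller-supplied reader; mirrors readArray above.
const readN = <T>(length: number, readOne: () => T): T[] => {
    const results = new Array<T>(Math.max(length, 0)); // length may be -1 for "null"
    for (let i = 0; i < length; i++) results[i] = readOne();
    return results;
};

let n = 0;
console.log(readN(3, () => n++)); // [0, 1, 2]
console.log(readN(-1, () => n++)); // [] (null array decodes to empty)
```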
package/dist/utils/encoder.d.ts
CHANGED

@@ -1,9 +1,11 @@
 export declare class Encoder {
-    private
-
+    private buffer;
+    private offset;
+    constructor(initialCapacity?: number);
+    private ensure;
     getBufferLength(): number;
-    write(
-    writeEncoder(
+    write(src: Buffer): this;
+    writeEncoder(other: Encoder): this;
     writeInt8(value: number): this;
     writeInt16(value: number): this;
     writeInt32(value: number): this;

@@ -18,10 +20,10 @@ export declare class Encoder {
     writeVarIntString(value: string | null): this;
     writeUUID(value: string | null): this;
     writeBoolean(value: boolean): this;
-    writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) =>
-    writeCompactArray<T>(arr: T[] | null, callback: (encoder: Encoder, item: T) =>
-    writeVarIntArray<T>(arr: T[], callback: (encoder: Encoder, item: T) =>
+    writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => void): this;
+    writeCompactArray<T>(arr: T[] | null, callback: (encoder: Encoder, item: T) => void): this;
+    writeVarIntArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => void): this;
     writeBytes(value: Buffer): this;
     writeCompactBytes(value: Buffer): this;
-    value(): Buffer<
+    value(): Buffer<ArrayBufferLike>;
 }
package/dist/utils/encoder.js
CHANGED

@@ -2,121 +2,158 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Encoder = void 0;
 class Encoder {
-
-
-
+    buffer;
+    offset = 0;
+    constructor(initialCapacity = 512) {
+        this.buffer = Buffer.allocUnsafe(initialCapacity);
+    }
+    ensure(extra) {
+        const need = this.offset + extra;
+        if (need <= this.buffer.length)
+            return;
+        let cap = this.buffer.length;
+        while (cap < need)
+            cap <<= 1;
+        const n = Buffer.allocUnsafe(cap);
+        this.buffer.copy(n, 0, 0, this.offset);
+        this.buffer = n;
     }
     getBufferLength() {
-        return this.
+        return this.offset;
     }
-    write(
-        this.
+    write(src) {
+        this.ensure(src.length);
+        src.copy(this.buffer, this.offset);
+        this.offset += src.length;
         return this;
     }
-    writeEncoder(
-
+    writeEncoder(other) {
+        this.write(other.buffer.subarray(0, other.offset));
+        return this;
     }
     writeInt8(value) {
-
-        buffer.writeInt8(value);
-
+        this.ensure(1);
+        this.buffer.writeInt8(value, this.offset);
+        this.offset += 1;
+        return this;
     }
     writeInt16(value) {
-
-        buffer.writeInt16BE(value);
-
+        this.ensure(2);
+        this.buffer.writeInt16BE(value, this.offset);
+        this.offset += 2;
+        return this;
     }
     writeInt32(value) {
-
-        buffer.writeInt32BE(value);
-
+        this.ensure(4);
+        this.buffer.writeInt32BE(value, this.offset);
+        this.offset += 4;
+        return this;
     }
     writeUInt32(value) {
-
-        buffer.writeUInt32BE(value);
-
+        this.ensure(4);
+        this.buffer.writeUInt32BE(value, this.offset);
+        this.offset += 4;
+        return this;
     }
     writeInt64(value) {
-
-        buffer.writeBigInt64BE(value);
-
+        this.ensure(8);
+        this.buffer.writeBigInt64BE(value, this.offset);
+        this.offset += 8;
+        return this;
     }
     writeUVarInt(value) {
-
-        while (
-
+        this.ensure(5);
+        while (value & 0xffffff80) {
+            this.buffer[this.offset++] = (value & 0x7f) | 0x80;
             value >>>= 7;
         }
-
-        return this
+        this.buffer[this.offset++] = value & 0x7f;
+        return this;
     }
     writeVarInt(value) {
-
-        return this.writeUVarInt(encodedValue);
+        return this.writeUVarInt((value << 1) ^ (value >> 31));
     }
     writeUVarLong(value) {
-
-        while (
-
+        this.ensure(10);
+        while (value >= 0x80n) {
+            this.buffer[this.offset++] = Number((value & 0x7fn) | 0x80n);
             value >>= 7n;
         }
-
-        return this
+        this.buffer[this.offset++] = Number(value);
+        return this;
    }
     writeVarLong(value) {
-
-        return this.writeUVarLong(encodedValue);
+        return this.writeUVarLong((value << 1n) ^ (value >> 63n));
     }
     writeString(value) {
-        if (value === null)
+        if (value === null)
             return this.writeInt16(-1);
-        }
         const buffer = Buffer.from(value, 'utf-8');
-
+        this.writeInt16(buffer.length);
+        this.write(buffer);
+        return this;
     }
     writeCompactString(value) {
-        if (value === null)
+        if (value === null)
             return this.writeUVarInt(0);
-
-
-
+        const b = Buffer.from(value, 'utf-8');
+        this.writeUVarInt(b.length + 1);
+        this.write(b);
+        return this;
     }
     writeVarIntString(value) {
-        if (value === null)
+        if (value === null)
             return this.writeVarInt(-1);
-
-
-
+        const b = Buffer.from(value, 'utf-8');
+        this.writeVarInt(b.length);
+        this.write(b);
+        return this;
     }
     writeUUID(value) {
         if (value === null) {
-
+            this.ensure(16);
+            this.buffer.fill(0, this.offset, this.offset + 16);
+            this.offset += 16;
+            return this;
         }
-
+        this.write(Buffer.from(value, 'hex'));
+        return this;
     }
     writeBoolean(value) {
         return this.writeInt8(value ? 1 : 0);
     }
     writeArray(arr, callback) {
-
+        this.writeInt32(arr.length);
+        for (const it of arr)
+            callback(this, it);
+        return this;
     }
     writeCompactArray(arr, callback) {
-        if (arr === null)
+        if (arr === null)
             return this.writeUVarInt(0);
-
-
+        this.writeUVarInt(arr.length + 1);
+        for (const it of arr)
+            callback(this, it);
+        return this;
     }
     writeVarIntArray(arr, callback) {
-
+        this.writeVarInt(arr.length);
+        for (const it of arr)
+            callback(this, it);
+        return this;
     }
     writeBytes(value) {
-
+        this.writeInt32(value.length);
+        this.write(value);
+        return this;
    }
     writeCompactBytes(value) {
-
+        this.writeUVarInt(value.length + 1);
+        this.write(value);
+        return this;
     }
     value() {
-        return
+        return this.buffer.subarray(0, this.offset);
     }
 }
 exports.Encoder = Encoder;
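The encoder now appends into one growable buffer: `ensure()` doubles capacity until the pending write fits, so writes are amortized O(1) instead of allocating a fresh Buffer per call. The varint writers also inline ZigZag encoding, which the old code computed into an intermediate `encodedValue`. A standalone sketch of the 32-bit mapping:

```ts
// ZigZag: small negative numbers become small unsigned numbers, so they
// encode in few varint bytes. Same expression as writeVarInt above.
const zigzag32 = (value: number): number => ((value << 1) ^ (value >> 31)) >>> 0;

console.log(zigzag32(0));  // 0
console.log(zigzag32(-1)); // 1
console.log(zigzag32(1));  // 2
console.log(zigzag32(-2)); // 3
```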
package/dist/utils/promise-chain.js
ADDED

@@ -0,0 +1,39 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PromiseChain = void 0;
+class PromiseChain {
+    locks = new Map();
+    async run(keys, callback) {
+        const orderedKeys = [...new Set(keys)].sort();
+        const releases = [];
+        for (const key of orderedKeys) {
+            const release = await this.acquire(key);
+            releases.push(release);
+        }
+        try {
+            await callback();
+        }
+        finally {
+            releases.reverse().forEach((release) => release());
+        }
+    }
+    async acquire(key) {
+        const previousTail = this.locks.get(key);
+        let release;
+        const currentTail = new Promise((resolve) => (release = resolve));
+        if (previousTail) {
+            this.locks.set(key, previousTail.then(() => currentTail));
+            await previousTail;
+        }
+        else {
+            this.locks.set(key, currentTail);
+        }
+        return () => {
+            release();
+            if (this.locks.get(key) === currentTail) {
+                this.locks.delete(key);
+            }
+        };
+    }
+}
+exports.PromiseChain = PromiseChain;
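`PromiseChain` replaces the event-emitter `Lock` (deleted at the bottom of this diff) with per-key promise tails; `run()` deduplicates and sorts keys before acquiring them, which prevents lock-order deadlocks between callers locking overlapping key sets. A usage sketch (loose types, since the module ships as plain JS):

```ts
import { PromiseChain } from './promise-chain';

const chain = new PromiseChain();
const delay = (ms: number) => new Promise<void>((r) => setTimeout(r, ms));

// Each key admits one callback at a time; because keys are sorted before
// acquisition, these two runs with reversed key order cannot deadlock.
await Promise.all([
    chain.run(['metadata:a', 'metadata:b'], async () => delay(10)),
    chain.run(['metadata:b', 'metadata:a'], async () => delay(10)),
]);
```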
package/dist/utils/shared.d.ts
CHANGED

@@ -1 +1 @@
-export declare const shared: <F extends () => Promise<any>>(func: F) => () => ReturnType<F>;
+export declare const shared: <F extends (...args: any[]) => Promise<any>>(func: F) => (...args: Parameters<F>) => ReturnType<F>;
package/dist/utils/shared.js
CHANGED

@@ -2,15 +2,16 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.shared = void 0;
 const shared = (func) => {
-    let
-    return () => {
-
-
-
-
+    let promises = {};
+    return (...args) => {
+        const key = JSON.stringify(args);
+        if (!promises[key]) {
+            promises[key] = func(...args);
+            promises[key].finally(() => {
+                delete promises[key];
             });
         }
-        return
+        return promises[key];
     };
 };
 exports.shared = shared;
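`shared()` now deduplicates in-flight calls per argument list (keyed on `JSON.stringify(args)`) rather than supporting only zero-argument functions, and the cache entry is dropped when the promise settles. A sketch with a hypothetical `fetchTopics` helper:

```ts
import { shared } from './shared';

let calls = 0;
const fetchTopics = shared(async (topic: string) => {
    calls++;
    return topic.toUpperCase();
});

// Concurrent calls with equal arguments share one in-flight promise ...
await Promise.all([fetchTopics('a'), fetchTopics('a')]); // calls === 1
// ... and once it settles, the entry is evicted, so later calls run again.
await fetchTopics('a'); // calls === 2
```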
package/package.json
CHANGED

-    "version": "1.1.9",
+    "version": "1.2.1",

package/dist/distributors/messages-to-topic-partition-leaders.d.ts
DELETED

@@ -1,17 +0,0 @@
-type TopicPartitionLeader = {
-    [topicName: string]: {
-        [partitionId: number]: number;
-    };
-};
-type MessagesByNodeTopicPartition<T> = {
-    [nodeId: number]: {
-        [topicName: string]: {
-            [partitionId: number]: T[];
-        };
-    };
-};
-export declare const distributeMessagesToTopicPartitionLeaders: <T extends {
-    topic: string;
-    partition: number;
-}>(messages: T[], topicPartitionLeader: TopicPartitionLeader) => MessagesByNodeTopicPartition<T>;
-export {};

package/dist/distributors/messages-to-topic-partition-leaders.js
DELETED

@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.distributeMessagesToTopicPartitionLeaders = void 0;
-const distributeMessagesToTopicPartitionLeaders = (messages, topicPartitionLeader) => {
-    const result = {};
-    messages.forEach((message) => {
-        const leaderId = topicPartitionLeader[message.topic][message.partition];
-        result[leaderId] ??= {};
-        result[leaderId][message.topic] ??= {};
-        result[leaderId][message.topic][message.partition] ??= [];
-        result[leaderId][message.topic][message.partition].push(message);
-    });
-    return result;
-};
-exports.distributeMessagesToTopicPartitionLeaders = distributeMessagesToTopicPartitionLeaders;

package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts
DELETED

@@ -1 +0,0 @@
-export {};

package/dist/distributors/messages-to-topic-partition-leaders.test.js
DELETED

@@ -1,30 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const vitest_1 = require("vitest");
-const messages_to_topic_partition_leaders_1 = require("./messages-to-topic-partition-leaders");
-(0, vitest_1.describe)('Distribute messages to partition leader ids', () => {
-    (0, vitest_1.describe)('distributeMessagesToTopicPartitionLeaders', () => {
-        (0, vitest_1.it)('snoke', () => {
-            const result = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)([{ topic: 'topic', partition: 0, key: null, value: null, offset: 0n, timestamp: 0n, headers: {} }], { topic: { 0: 1 } });
-            (0, vitest_1.expect)(result).toMatchInlineSnapshot(`
-              {
-                "1": {
-                  "topic": {
-                    "0": [
-                      {
-                        "headers": {},
-                        "key": null,
-                        "offset": 0n,
-                        "partition": 0,
-                        "timestamp": 0n,
-                        "topic": "topic",
-                        "value": null,
-                      },
-                    ],
-                  },
-                },
-              }
-            `);
-        });
-    });
-});
package/dist/utils/lock.d.ts
DELETED

package/dist/utils/lock.js
DELETED

@@ -1,44 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Lock = void 0;
-const events_1 = __importDefault(require("events"));
-const logger_1 = require("./logger");
-class Lock extends events_1.default {
-    locks = {};
-    constructor() {
-        super();
-        this.setMaxListeners(Infinity);
-    }
-    async acquire(keys, callback) {
-        await Promise.all(keys.map((key) => this.acquireKey(key)));
-        try {
-            await callback();
-        }
-        finally {
-            keys.forEach((key) => this.releaseKey(key));
-        }
-    }
-    async acquireKey(key) {
-        while (this.locks[key]) {
-            await new Promise((resolve) => {
-                const timeout = setTimeout(() => {
-                    logger_1.log.warn(`Lock timed out`, { key });
-                    this.releaseKey(key);
-                }, 60_000);
-                this.once(`release:${key}`, () => {
-                    clearTimeout(timeout);
-                    resolve();
-                });
-            });
-        }
-        this.locks[key] = true;
-    }
-    releaseKey(key) {
-        this.locks[key] = false;
-        this.emit(`release:${key}`);
-    }
-}
-exports.Lock = Lock;