kafka-ts 1.1.9 → 1.2.0

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/README.md CHANGED
@@ -177,7 +177,7 @@ Custom SASL mechanisms can be implemented following the `SASLProvider` interface
  | allowTopicAutoCreation | boolean | false | false | Allow kafka to auto-create topic when it doesn't exist |
  | partitioner | Partitioner | false | defaultPartitioner | Custom partitioner function. By default, it uses a default java-compatible partitioner. |

- ### `producer.send(messages: Message[], options?: { acks?: -1 | 1 })`
+ ### `producer.send(messages: Message[])`

  <!-- export type Message = {
  topic: string;
@@ -15,7 +15,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.Consumer = void 0;
  const events_1 = __importDefault(require("events"));
  const api_1 = require("../api");
- const messages_to_topic_partition_leaders_1 = require("../distributors/messages-to-topic-partition-leaders");
+ const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
+ const group_partitions_by_topic_1 = require("../distributors/group-partitions-by-topic");
  const delay_1 = require("../utils/delay");
  const error_1 = require("../utils/error");
  const logger_1 = require("../utils/logger");
@@ -112,12 +113,11 @@ class Consumer extends events_1.default {
  try {
  await this.consumerGroup?.join();
  // TODO: If leader is not available, find another read replica
- const nodeAssignments = Object.entries((0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition }))), this.metadata.getTopicPartitionLeaderIds())).map(([nodeId, assignment]) => ({
- nodeId: parseInt(nodeId),
- assignment: Object.fromEntries(Object.entries(assignment).map(([topic, partitions]) => [
- topic,
- Object.keys(partitions).map(Number),
- ])),
+ const topicPartitions = Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })));
+ const topicPartitionsByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(topicPartitions, this.metadata.getTopicPartitionLeaderIds());
+ const nodeAssignments = Object.entries(topicPartitionsByLeaderId).map(([leaderId, topicPartitions]) => ({
+ nodeId: parseInt(leaderId),
+ assignment: (0, group_partitions_by_topic_1.groupPartitionsByTopic)(topicPartitions),
  }));
  this.fetchManager = new fetch_manager_1.FetchManager({
  fetch: this.fetch.bind(this),
@@ -2,7 +2,8 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.OffsetManager = void 0;
  const api_1 = require("../api");
- const messages_to_topic_partition_leaders_1 = require("../distributors/messages-to-topic-partition-leaders");
+ const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
+ const group_partitions_by_topic_1 = require("../distributors/group-partitions-by-topic");
  const tracer_1 = require("../utils/tracer");
  const trace = (0, tracer_1.createTracer)('OffsetManager');
  class OffsetManager {
@@ -40,11 +41,11 @@ class OffsetManager {
  async fetchOffsets(options) {
  const { metadata } = this.options;
  const topicPartitions = Object.entries(metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })));
- const nodeTopicPartitions = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(topicPartitions, metadata.getTopicPartitionLeaderIds());
- await Promise.all(Object.entries(nodeTopicPartitions).map(([nodeId, topicPartitions]) => this.listOffsets({
+ const topicPartitionsByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(topicPartitions, metadata.getTopicPartitionLeaderIds());
+ await Promise.all(Object.entries(topicPartitionsByLeaderId).map(([leaderId, topicPartitions]) => this.listOffsets({
  ...options,
- nodeId: parseInt(nodeId),
- nodeAssignment: Object.fromEntries(Object.entries(topicPartitions).map(([topicName, partitions]) => [topicName, Object.keys(partitions).map(Number)])),
+ nodeId: parseInt(leaderId),
+ nodeAssignment: (0, group_partitions_by_topic_1.groupPartitionsByTopic)(topicPartitions),
  })));
  }
  async listOffsets({ nodeId, nodeAssignment, fromTimestamp, }) {
@@ -0,0 +1,10 @@
+ export declare const groupByLeaderId: <T extends {
+ topic: string;
+ partition: number;
+ }>(items: T[], leaderIdByTopicPartition: {
+ [topic: string]: {
+ [partition: number]: number;
+ };
+ }) => {
+ [nodeId: number]: T[];
+ };
@@ -0,0 +1,13 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.groupByLeaderId = void 0;
+ const groupByLeaderId = (items, leaderIdByTopicPartition) => {
+ const result = {};
+ items.forEach((item) => {
+ const leaderId = leaderIdByTopicPartition[item.topic][item.partition];
+ result[leaderId] ??= [];
+ result[leaderId].push(item);
+ });
+ return result;
+ };
+ exports.groupByLeaderId = groupByLeaderId;
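
Together with `groupPartitionsByTopic` below, this new distributor replaces the nested map built by the removed `distributeMessagesToTopicPartitionLeaders` (deleted at the end of this diff) with a flat per-leader grouping. A minimal sketch of what `groupByLeaderId` returns; the import path is an assumption about the compiled layout:

```ts
// Illustrative only; the import path assumes the compiled module layout.
import { groupByLeaderId } from 'kafka-ts/dist/distributors/group-by-leader-id';

// Leader node per topic-partition, shaped like metadata.getTopicPartitionLeaderIds().
const leaderIds = { orders: { 0: 1, 1: 2 } };

const grouped = groupByLeaderId(
    [
        { topic: 'orders', partition: 0 },
        { topic: 'orders', partition: 1 },
    ],
    leaderIds,
);
// grouped => { 1: [{ topic: 'orders', partition: 0 }], 2: [{ topic: 'orders', partition: 1 }] }
```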
@@ -0,0 +1,6 @@
+ export declare const groupPartitionsByTopic: <T extends {
+ topic: string;
+ partition: number;
+ }>(items: T[]) => {
+ [topic: string]: number[];
+ };
@@ -0,0 +1,12 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.groupPartitionsByTopic = void 0;
+ const groupPartitionsByTopic = (items) => {
+ const result = {};
+ items.forEach((item) => {
+ result[item.topic] ??= [];
+ result[item.topic].push(item.partition);
+ });
+ return result;
+ };
+ exports.groupPartitionsByTopic = groupPartitionsByTopic;
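
`groupPartitionsByTopic` covers the second half of the old distributor's job: collapsing one leader's topic-partition list into the `{ topic: partition[] }` shape that the fetch and list-offsets requests expect. Illustrative input and output:

```ts
// Illustrative only; same assumed module layout as above.
import { groupPartitionsByTopic } from 'kafka-ts/dist/distributors/group-partitions-by-topic';

groupPartitionsByTopic([
    { topic: 'orders', partition: 0 },
    { topic: 'orders', partition: 3 },
    { topic: 'logs', partition: 1 },
]);
// => { orders: [0, 3], logs: [1] }
```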
@@ -16,7 +16,7 @@ export declare class Metadata {
  getTopicIdByName(name: string): string;
  getTopicNameById(id: string): string;
  fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }: {
- topics: string[] | Set<string>;
+ topics: string[];
  allowTopicAutoCreation: boolean;
  }): Promise<void>;
  fetchMetadata({ topics, allowTopicAutoCreation, }: {
package/dist/metadata.js CHANGED
@@ -41,7 +41,7 @@ class Metadata {
  return this.topicNameById[id];
  }
  async fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }) {
- const missingTopics = Array.from(topics).filter((topic) => !this.topicPartitions[topic]);
+ const missingTopics = topics.filter((topic) => !this.topicPartitions[topic]);
  if (!missingTopics.length) {
  return;
  }
@@ -0,0 +1,20 @@
+ import { Cluster } from '../cluster';
+ import { Message } from '../types';
+ import { ProducerState } from './producer-state';
+ type ProducerBufferOptions = {
+ nodeId: number;
+ maxBatchSize: number;
+ cluster: Cluster;
+ state: ProducerState;
+ };
+ export declare class ProducerBuffer {
+ private options;
+ private buffer;
+ private head;
+ private isFlushing;
+ constructor(options: ProducerBufferOptions);
+ enqueue(messages: Message[]): Promise<void>;
+ private flush;
+ private compactBuffer;
+ }
+ export {};
@@ -0,0 +1,118 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ProducerBuffer = void 0;
+ const api_1 = require("../api");
+ class ProducerBuffer {
+ options;
+ buffer = [];
+ head = 0;
+ isFlushing = false;
+ constructor(options) {
+ this.options = options;
+ }
+ enqueue(messages) {
+ return new Promise((resolve, reject) => {
+ this.buffer.push({ messages, resolve, reject });
+ this.flush();
+ });
+ }
+ async flush() {
+ if (this.isFlushing)
+ return;
+ this.isFlushing = true;
+ const { cluster, state, nodeId, maxBatchSize } = this.options;
+ while (true) {
+ const batch = [];
+ const resolvers = [];
+ const rejecters = [];
+ while (this.head < this.buffer.length) {
+ const entry = this.buffer[this.head++];
+ batch.push(...entry.messages);
+ resolvers.push(entry.resolve);
+ rejecters.push(entry.reject);
+ const nextLength = this.buffer[this.head]?.messages.length ?? 0;
+ if (batch.length + nextLength > maxBatchSize) {
+ break;
+ }
+ }
+ if (!batch.length)
+ break;
+ this.compactBuffer();
+ const topicPartitionMessages = {};
+ batch.forEach((message) => {
+ topicPartitionMessages[message.topic] ??= {};
+ topicPartitionMessages[message.topic][message.partition] ??= [];
+ topicPartitionMessages[message.topic][message.partition].push(message);
+ });
+ const defaultTimestamp = BigInt(Date.now());
+ const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
+ name: topic,
+ partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+ const partitionIndex = parseInt(partition);
+ let baseTimestamp;
+ let maxTimestamp;
+ messages.forEach(({ timestamp = defaultTimestamp }) => {
+ if (!baseTimestamp || timestamp < baseTimestamp) {
+ baseTimestamp = timestamp;
+ }
+ if (!maxTimestamp || timestamp > maxTimestamp) {
+ maxTimestamp = timestamp;
+ }
+ });
+ return {
+ index: partitionIndex,
+ baseOffset: 0n,
+ partitionLeaderEpoch: -1,
+ attributes: 0,
+ lastOffsetDelta: messages.length - 1,
+ baseTimestamp: baseTimestamp ?? 0n,
+ maxTimestamp: maxTimestamp ?? 0n,
+ producerId: state.producerId,
+ producerEpoch: 0,
+ baseSequence: state.getSequence(topic, partitionIndex),
+ records: messages.map((message, index) => ({
+ attributes: 0,
+ timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
+ offsetDelta: index,
+ key: message.key ?? null,
+ value: message.value,
+ headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
+ key,
+ value,
+ })),
+ })),
+ };
+ }),
+ }));
+ try {
+ await cluster.sendRequestToNode(nodeId)(api_1.API.PRODUCE, {
+ transactionalId: null,
+ acks: -1,
+ timeoutMs: 30000,
+ topicData,
+ });
+ topicData.forEach(({ name, partitionData }) => {
+ partitionData.forEach(({ index, records }) => {
+ state.updateSequence(name, index, records.length);
+ });
+ });
+ resolvers.forEach((resolve) => resolve());
+ }
+ catch (error) {
+ rejecters.forEach((reject) => reject(error));
+ }
+ }
+ this.isFlushing = false;
+ }
+ compactBuffer() {
+ if (this.head >= this.buffer.length) {
+ this.buffer = [];
+ this.head = 0;
+ }
+ else if (this.head > 1000 && this.head > this.buffer.length / 2) {
+ this.buffer = this.buffer.slice(this.head);
+ this.head = 0;
+ }
+ }
+ }
+ exports.ProducerBuffer = ProducerBuffer;
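
The new `ProducerBuffer` gives each broker its own queue. `enqueue` resolves only once its messages have been produced, while a single `flush` loop greedily merges whole enqueue calls into one produce request, cutting the batch before the call that would push it past `maxBatchSize`, so one caller's messages are never split across requests. A reduced sketch of that coalescing pattern (all names here are illustrative, not package API):

```ts
// A reduced sketch of the coalescing pattern ProducerBuffer implements.
class BatchQueue<T> {
    private pending: { items: T[]; resolve: () => void; reject: (e: unknown) => void }[] = [];
    private flushing = false;

    constructor(
        private maxBatchSize: number,
        private send: (batch: T[]) => Promise<void>,
    ) {}

    enqueue(items: T[]): Promise<void> {
        return new Promise((resolve, reject) => {
            this.pending.push({ items, resolve, reject });
            void this.flush(); // a single flush loop drains the queue
        });
    }

    private async flush(): Promise<void> {
        if (this.flushing) return; // already draining
        this.flushing = true;
        while (this.pending.length) {
            const batch: T[] = [];
            const settlers: { resolve: () => void; reject: (e: unknown) => void }[] = [];
            // Merge whole enqueue calls until the next one would overflow the batch.
            while (this.pending.length) {
                const entry = this.pending.shift()!;
                batch.push(...entry.items);
                settlers.push(entry);
                const next = this.pending[0]?.items.length ?? 0;
                if (batch.length + next > this.maxBatchSize) break;
            }
            try {
                await this.send(batch);
                settlers.forEach((s) => s.resolve());
            } catch (error) {
                settlers.forEach((s) => s.reject(error));
            }
        }
        this.flushing = false;
    }
}
```

One consequence of this rule: a single enqueue larger than `maxBatchSize` still goes out as one request, since the cut only happens between enqueue calls.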
@@ -0,0 +1,15 @@
+ import { Cluster } from "../cluster";
+ type ProducerStateOptions = {
+ cluster: Cluster;
+ };
+ export declare class ProducerState {
+ private options;
+ producerId: bigint;
+ private producerEpoch;
+ private sequences;
+ constructor(options: ProducerStateOptions);
+ initProducerId(): Promise<void>;
+ getSequence(topic: string, partition: number): number;
+ updateSequence(topic: string, partition: number, messagesCount: number): void;
+ }
+ export {};
@@ -0,0 +1,33 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ProducerState = void 0;
+ const api_1 = require("../api");
+ class ProducerState {
+ options;
+ producerId = 0n;
+ producerEpoch = 0;
+ sequences = {};
+ constructor(options) {
+ this.options = options;
+ }
+ async initProducerId() {
+ const result = await this.options.cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
+ transactionalId: null,
+ transactionTimeoutMs: 0,
+ producerId: this.producerId,
+ producerEpoch: this.producerEpoch,
+ });
+ this.producerId = result.producerId;
+ this.producerEpoch = result.producerEpoch;
+ this.sequences = {};
+ }
+ getSequence(topic, partition) {
+ return this.sequences[topic]?.[partition] ?? 0;
+ }
+ updateSequence(topic, partition, messagesCount) {
+ this.sequences[topic] ??= {};
+ this.sequences[topic][partition] ??= 0;
+ this.sequences[topic][partition] += messagesCount;
+ }
+ }
+ exports.ProducerState = ProducerState;
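
`ProducerState` extracts the idempotent-producer bookkeeping that previously lived on `Producer`: the InitProducerId handshake plus the per-topic-partition sequence counters that become each batch's `baseSequence`. A rough usage sketch, assuming an already-connected `cluster`:

```ts
// Rough sketch; `cluster` is assumed to be an already-connected Cluster instance.
const state = new ProducerState({ cluster });
await state.initProducerId();           // InitProducerId request; resets all sequences
state.getSequence('orders', 0);         // 0 (first batch for this partition)
state.updateSequence('orders', 0, 42);  // 42 records acknowledged
state.getSequence('orders', 0);         // 42 (baseSequence of the next batch)
```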
@@ -4,25 +4,20 @@ import { Message } from '../types';
  export type ProducerOptions = {
  allowTopicAutoCreation?: boolean;
  partitioner?: Partitioner;
+ maxBatchSize?: number;
  };
  export declare class Producer {
  private cluster;
  private options;
  private metadata;
- private producerId;
- private producerEpoch;
- private sequences;
+ private state;
  private partition;
- private lock;
+ private chain;
+ private bufferByNodeId;
  constructor(cluster: Cluster, options: ProducerOptions);
- send(messages: Message[], { acks }?: {
- acks?: -1 | 1;
- }): Promise<void>;
+ send(messages: Message[]): Promise<void>;
  close(): Promise<void>;
  private ensureProducerInitialized;
- private initProducerId;
- private getSequence;
- private updateSequence;
- private fetchMetadata;
+ private fetchMetadataForTopics;
  private handleError;
  }
@@ -11,107 +11,60 @@ var __metadata = (this && this.__metadata) || function (k, v) {
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Producer = void 0;
  const api_1 = require("../api");
- const messages_to_topic_partition_leaders_1 = require("../distributors/messages-to-topic-partition-leaders");
+ const group_by_leader_id_1 = require("../distributors/group-by-leader-id");
  const partitioner_1 = require("../distributors/partitioner");
  const metadata_1 = require("../metadata");
  const error_1 = require("../utils/error");
- const lock_1 = require("../utils/lock");
  const logger_1 = require("../utils/logger");
- const retry_1 = require("../utils/retry");
+ const promise_chain_1 = require("../utils/promise-chain");
  const shared_1 = require("../utils/shared");
  const tracer_1 = require("../utils/tracer");
+ const producer_buffer_1 = require("./producer-buffer");
+ const producer_state_1 = require("./producer-state");
  const trace = (0, tracer_1.createTracer)('Producer');
  class Producer {
  cluster;
  options;
  metadata;
- producerId = 0n;
- producerEpoch = 0;
- sequences = {};
+ state;
  partition;
- lock = new lock_1.Lock();
+ chain = new promise_chain_1.PromiseChain();
+ bufferByNodeId = {};
  constructor(cluster, options) {
  this.cluster = cluster;
  this.options = {
  ...options,
  allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
  partitioner: options.partitioner ?? partitioner_1.defaultPartitioner,
+ maxBatchSize: options.maxBatchSize ?? 500,
  };
  this.metadata = new metadata_1.Metadata({ cluster });
+ this.state = new producer_state_1.ProducerState({ cluster });
  this.partition = this.options.partitioner({ metadata: this.metadata });
  }
- async send(messages, { acks = -1 } = {}) {
+ async send(messages) {
  await this.ensureProducerInitialized();
- const { allowTopicAutoCreation } = this.options;
- const defaultTimestamp = BigInt(Date.now());
- const topics = new Set(messages.map((message) => message.topic));
- await this.lock.acquire([...topics].map((topic) => `metadata:${topic}`), () => this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation }));
+ const topics = [...new Set(messages.map((message) => message.topic))];
+ await this.fetchMetadataForTopics(topics);
  const partitionedMessages = messages.map((message) => {
  message.partition = this.partition(message);
  return message;
  });
- const nodeTopicPartitionMessages = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(partitionedMessages, this.metadata.getTopicPartitionLeaderIds());
- await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
+ const messagesByLeaderId = (0, group_by_leader_id_1.groupByLeaderId)(partitionedMessages, this.metadata.getTopicPartitionLeaderIds());
+ await Promise.all(Object.entries(messagesByLeaderId).map(async ([leaderId, messages]) => {
+ const nodeId = parseInt(leaderId);
+ const buffer = (this.bufferByNodeId[nodeId] ??= new producer_buffer_1.ProducerBuffer({
+ nodeId,
+ maxBatchSize: this.options.maxBatchSize,
+ cluster: this.cluster,
+ state: this.state,
+ }));
  try {
- await this.lock.acquire([`node:${nodeId}`], async () => {
- const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
- name: topic,
- partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
- const partitionIndex = parseInt(partition);
- let baseTimestamp;
- let maxTimestamp;
- messages.forEach(({ timestamp = defaultTimestamp }) => {
- if (!baseTimestamp || timestamp < baseTimestamp) {
- baseTimestamp = timestamp;
- }
- if (!maxTimestamp || timestamp > maxTimestamp) {
- maxTimestamp = timestamp;
- }
- });
- return {
- index: partitionIndex,
- baseOffset: 0n,
- partitionLeaderEpoch: -1,
- attributes: 0,
- lastOffsetDelta: messages.length - 1,
- baseTimestamp: baseTimestamp ?? 0n,
- maxTimestamp: maxTimestamp ?? 0n,
- producerId: this.producerId,
- producerEpoch: 0,
- baseSequence: this.getSequence(topic, partitionIndex),
- records: messages.map((message, index) => ({
- attributes: 0,
- timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
- offsetDelta: index,
- key: message.key ?? null,
- value: message.value,
- headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
- key,
- value,
- })),
- })),
- };
- }),
- }));
- await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
- transactionalId: null,
- acks,
- timeoutMs: 30000,
- topicData,
- });
- topicData.forEach(({ name, partitionData }) => {
- partitionData.forEach(({ index, records }) => {
- this.updateSequence(name, index, records.length);
- });
- });
- });
+ await buffer.enqueue(messages);
  }
  catch (error) {
  await this.handleError(error);
- const messages = Object.values(topicPartitionMessages)
- .flatMap((partitionMessages) => Object.values(partitionMessages).flat())
- .map(({ partition, ...message }) => message);
- return this.send(messages, { acks });
+ return this.send(messages);
  }
  }));
  }
@@ -120,47 +73,25 @@ class Producer {
  }
  ensureProducerInitialized = (0, shared_1.shared)(async () => {
  await this.cluster.ensureConnected();
- if (!this.producerId) {
- await this.initProducerId();
+ if (!this.state.producerId) {
+ await this.state.initProducerId();
  }
  });
- async initProducerId() {
- return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
- const result = await this.cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
- transactionalId: null,
- transactionTimeoutMs: 0,
- producerId: this.producerId,
- producerEpoch: this.producerEpoch,
- });
- this.producerId = result.producerId;
- this.producerEpoch = result.producerEpoch;
- this.sequences = {};
- });
- }
- getSequence(topic, partition) {
- return this.sequences[topic]?.[partition] ?? 0;
- }
- updateSequence(topic, partition, messagesCount) {
- this.sequences[topic] ??= {};
- this.sequences[topic][partition] ??= 0;
- this.sequences[topic][partition] += messagesCount;
- }
- async fetchMetadata(topics, allowTopicAutoCreation) {
- return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
- await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation });
- });
- }
+ fetchMetadataForTopics = (0, shared_1.shared)(async (topics) => {
+ const { allowTopicAutoCreation } = this.options;
+ await this.chain.run(topics.map((topic) => `metadata:${topic}`), () => this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation }));
+ });
  async handleError(error) {
  await (0, api_1.handleApiError)(error).catch(async (error) => {
  if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.NOT_LEADER_OR_FOLLOWER) {
  logger_1.log.debug('Refreshing metadata', { reason: error.message });
  const topics = Object.keys(this.metadata.getTopicPartitions());
- await this.fetchMetadata(topics, false);
+ await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation: false });
  return;
  }
  if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.OUT_OF_ORDER_SEQUENCE_NUMBER) {
  logger_1.log.debug('Out of order sequence number. Reinitializing producer ID');
- await this.initProducerId();
+ await this.state.initProducerId();
  return;
  }
  throw error;
@@ -171,6 +102,6 @@ exports.Producer = Producer;
  __decorate([
  trace(() => ({ root: true })),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [Array, Object]),
+ __metadata("design:paramtypes", [Array]),
  __metadata("design:returntype", Promise)
  ], Producer.prototype, "send", null);
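
The net effect on the public API is the simplified signature shown in the README hunk above: `acks` is no longer a per-call option (produce requests are always sent with `acks: -1`), and concurrent `send()` calls targeting the same broker are now coalesced through the per-node buffers instead of being serialized by a lock. A typical call, assuming a constructed producer and Buffer payloads:

```ts
// Client construction omitted; message fields assumed from the Message type above.
await producer.send([
    { topic: 'orders', key: Buffer.from('order-1'), value: Buffer.from('payload') },
]);
```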
@@ -110,12 +110,16 @@ class Decoder {
  }
  readArray(callback) {
  const length = this.readInt32();
- const results = Array.from({ length }).map(() => callback(this));
+ const results = new Array(length);
+ for (let i = 0; i < length; i++)
+ results[i] = callback(this);
  return results;
  }
  readCompactArray(callback) {
  const length = this.readUVarInt() - 1;
- const results = Array.from({ length }).map(() => callback(this));
+ const results = new Array(length);
+ for (let i = 0; i < length; i++)
+ results[i] = callback(this);
  return results;
  }
  readVarIntArray(callback) {
@@ -125,15 +129,16 @@ class Decoder {
  }
  readRecords(callback) {
  const length = this.readInt32();
- return Array.from({ length }).map(() => {
+ const results = [];
+ for (let i = 0; i < length; i++) {
  const size = this.readVarInt();
- if (!size) {
- return null;
- }
+ if (!size)
+ continue;
  const child = new Decoder(this.buffer.subarray(this.offset, this.offset + size));
  this.offset += size;
- return callback(child);
- }).filter(x => x !== null);
+ results.push(callback(child));
+ }
+ return results;
  }
  read(length) {
  const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
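
The decoder changes are hot-path tuning: `Array.from({ length }).map(...)` allocates a throwaway array and runs a callback per element, while the new form preallocates the result and fills it in place; `readRecords` likewise skips empty records with `continue` instead of mapping them to `null` and filtering afterwards. The before/after shape in isolation:

```ts
declare function read(): unknown; // stands in for callback(this)
const n = 1000;
// Before: a temporary array of holes, then a map over it.
const before = Array.from({ length: n }).map(() => read());
// After: a preallocated result filled by an index loop.
const after = new Array<unknown>(n);
for (let i = 0; i < n; i++) after[i] = read();
```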
@@ -1,9 +1,11 @@
  export declare class Encoder {
- private chunks;
- getChunks(): Buffer<ArrayBufferLike>[];
+ private buffer;
+ private offset;
+ constructor(initialCapacity?: number);
+ private ensure;
  getBufferLength(): number;
- write(...buffers: Buffer[]): this;
- writeEncoder(encoder: Encoder): this;
+ write(src: Buffer): this;
+ writeEncoder(other: Encoder): this;
  writeInt8(value: number): this;
  writeInt16(value: number): this;
  writeInt32(value: number): this;
@@ -18,10 +20,10 @@ export declare class Encoder {
  writeVarIntString(value: string | null): this;
  writeUUID(value: string | null): this;
  writeBoolean(value: boolean): this;
- writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder): this;
- writeCompactArray<T>(arr: T[] | null, callback: (encoder: Encoder, item: T) => Encoder): this;
- writeVarIntArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder): this;
+ writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => void): this;
+ writeCompactArray<T>(arr: T[] | null, callback: (encoder: Encoder, item: T) => void): this;
+ writeVarIntArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => void): this;
  writeBytes(value: Buffer): this;
  writeCompactBytes(value: Buffer): this;
- value(): Buffer<ArrayBuffer>;
+ value(): Buffer<ArrayBufferLike>;
  }
@@ -2,121 +2,158 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Encoder = void 0;
  class Encoder {
- chunks = [];
- getChunks() {
- return this.chunks;
+ buffer;
+ offset = 0;
+ constructor(initialCapacity = 512) {
+ this.buffer = Buffer.allocUnsafe(initialCapacity);
+ }
+ ensure(extra) {
+ const need = this.offset + extra;
+ if (need <= this.buffer.length)
+ return;
+ let cap = this.buffer.length;
+ while (cap < need)
+ cap <<= 1;
+ const n = Buffer.allocUnsafe(cap);
+ this.buffer.copy(n, 0, 0, this.offset);
+ this.buffer = n;
  }
  getBufferLength() {
- return this.chunks.reduce((acc, chunk) => acc + chunk.length, 0);
+ return this.offset;
  }
- write(...buffers) {
- this.chunks.push(...buffers);
+ write(src) {
+ this.ensure(src.length);
+ src.copy(this.buffer, this.offset);
+ this.offset += src.length;
  return this;
  }
- writeEncoder(encoder) {
- return this.write(...encoder.getChunks());
+ writeEncoder(other) {
+ this.write(other.buffer.subarray(0, other.offset));
+ return this;
  }
  writeInt8(value) {
- const buffer = Buffer.allocUnsafe(1);
- buffer.writeInt8(value);
- return this.write(buffer);
+ this.ensure(1);
+ this.buffer.writeInt8(value, this.offset);
+ this.offset += 1;
+ return this;
  }
  writeInt16(value) {
- const buffer = Buffer.allocUnsafe(2);
- buffer.writeInt16BE(value);
- return this.write(buffer);
+ this.ensure(2);
+ this.buffer.writeInt16BE(value, this.offset);
+ this.offset += 2;
+ return this;
  }
  writeInt32(value) {
- const buffer = Buffer.allocUnsafe(4);
- buffer.writeInt32BE(value);
- return this.write(buffer);
+ this.ensure(4);
+ this.buffer.writeInt32BE(value, this.offset);
+ this.offset += 4;
+ return this;
  }
  writeUInt32(value) {
- const buffer = Buffer.allocUnsafe(4);
- buffer.writeUInt32BE(value);
- return this.write(buffer);
+ this.ensure(4);
+ this.buffer.writeUInt32BE(value, this.offset);
+ this.offset += 4;
+ return this;
  }
  writeInt64(value) {
- const buffer = Buffer.allocUnsafe(8);
- buffer.writeBigInt64BE(value);
- return this.write(buffer);
+ this.ensure(8);
+ this.buffer.writeBigInt64BE(value, this.offset);
+ this.offset += 8;
+ return this;
  }
  writeUVarInt(value) {
- const byteArray = [];
- while ((value & 0xffffff80) !== 0) {
- byteArray.push((value & 0x7f) | 0x80);
+ this.ensure(5);
+ while (value & 0xffffff80) {
+ this.buffer[this.offset++] = (value & 0x7f) | 0x80;
  value >>>= 7;
  }
- byteArray.push(value & 0x7f);
- return this.write(Buffer.from(byteArray));
+ this.buffer[this.offset++] = value & 0x7f;
+ return this;
  }
  writeVarInt(value) {
- const encodedValue = (value << 1) ^ (value >> 31);
- return this.writeUVarInt(encodedValue);
+ return this.writeUVarInt((value << 1) ^ (value >> 31));
  }
  writeUVarLong(value) {
- const byteArray = [];
- while ((value & 0xffffffffffffff80n) !== 0n) {
- byteArray.push(Number((value & BigInt(0x7f)) | BigInt(0x80)));
+ this.ensure(10);
+ while (value >= 0x80n) {
+ this.buffer[this.offset++] = Number((value & 0x7fn) | 0x80n);
  value >>= 7n;
  }
- byteArray.push(Number(value & BigInt(0x7f)));
- return this.write(Buffer.from(byteArray));
+ this.buffer[this.offset++] = Number(value);
+ return this;
  }
  writeVarLong(value) {
- const encodedValue = (value << BigInt(1)) ^ (value >> BigInt(63));
- return this.writeUVarLong(encodedValue);
+ return this.writeUVarLong((value << 1n) ^ (value >> 63n));
  }
  writeString(value) {
- if (value === null) {
+ if (value === null)
  return this.writeInt16(-1);
- }
  const buffer = Buffer.from(value, 'utf-8');
- return this.writeInt16(buffer.length).write(buffer);
+ this.writeInt16(buffer.length);
+ this.write(buffer);
+ return this;
  }
  writeCompactString(value) {
- if (value === null) {
+ if (value === null)
  return this.writeUVarInt(0);
- }
- const buffer = Buffer.from(value, 'utf-8');
- return this.writeUVarInt(buffer.length + 1).write(buffer);
+ const b = Buffer.from(value, 'utf-8');
+ this.writeUVarInt(b.length + 1);
+ this.write(b);
+ return this;
  }
  writeVarIntString(value) {
- if (value === null) {
+ if (value === null)
  return this.writeVarInt(-1);
- }
- const buffer = Buffer.from(value, 'utf-8');
- return this.writeVarInt(buffer.length).write(buffer);
+ const b = Buffer.from(value, 'utf-8');
+ this.writeVarInt(b.length);
+ this.write(b);
+ return this;
  }
  writeUUID(value) {
  if (value === null) {
- return this.write(Buffer.alloc(16));
+ this.ensure(16);
+ this.buffer.fill(0, this.offset, this.offset + 16);
+ this.offset += 16;
+ return this;
  }
- return this.write(Buffer.from(value, 'hex'));
+ this.write(Buffer.from(value, 'hex'));
+ return this;
  }
  writeBoolean(value) {
  return this.writeInt8(value ? 1 : 0);
  }
  writeArray(arr, callback) {
- return this.writeInt32(arr.length).write(...arr.flatMap((item) => callback(new Encoder(), item).getChunks()));
+ this.writeInt32(arr.length);
+ for (const it of arr)
+ callback(this, it);
+ return this;
  }
  writeCompactArray(arr, callback) {
- if (arr === null) {
+ if (arr === null)
  return this.writeUVarInt(0);
- }
- return this.writeUVarInt(arr.length + 1).write(...arr.flatMap((item) => callback(new Encoder(), item).getChunks()));
+ this.writeUVarInt(arr.length + 1);
+ for (const it of arr)
+ callback(this, it);
+ return this;
  }
  writeVarIntArray(arr, callback) {
- return this.writeVarInt(arr.length).write(...arr.flatMap((item) => callback(new Encoder(), item).getChunks()));
+ this.writeVarInt(arr.length);
+ for (const it of arr)
+ callback(this, it);
+ return this;
  }
  writeBytes(value) {
- return this.writeInt32(value.length).write(value);
+ this.writeInt32(value.length);
+ this.write(value);
+ return this;
  }
  writeCompactBytes(value) {
- return this.writeUVarInt(value.length + 1).write(value);
+ this.writeUVarInt(value.length + 1);
+ this.write(value);
+ return this;
  }
  value() {
- return Buffer.concat(this.chunks);
+ return this.buffer.subarray(0, this.offset);
  }
  }
  exports.Encoder = Encoder;
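
The rewritten `Encoder` is the largest mechanical change: rather than collecting one small `Buffer` per write and `Buffer.concat`-ing them in `value()`, it writes into a single `allocUnsafe` buffer that doubles in capacity whenever a write would overflow. Array writers no longer allocate a nested encoder per item, which is why the declaration file changes the item callback's return type from `Encoder` to `void`. The growth rule from `ensure()`, isolated:

```ts
// Doubling growth as in ensure(): the smallest power-of-two multiple of the
// current capacity that holds `need` bytes.
function nextCapacity(current: number, need: number): number {
    let cap = current;
    while (cap < need) cap <<= 1;
    return cap;
}
nextCapacity(512, 513);  // 1024
nextCapacity(512, 4000); // 4096
```

Note that `value()` now returns a subarray view over the internal buffer rather than a freshly concatenated copy, matching the `Buffer<ArrayBufferLike>` return type in the updated declaration.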
@@ -0,0 +1,5 @@
+ export declare class PromiseChain {
+ private locks;
+ run(keys: string[], callback: () => Promise<void>): Promise<void>;
+ private acquire;
+ }
@@ -0,0 +1,39 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.PromiseChain = void 0;
+ class PromiseChain {
+ locks = new Map();
+ async run(keys, callback) {
+ const orderedKeys = [...new Set(keys)].sort();
+ const releases = [];
+ for (const key of orderedKeys) {
+ const release = await this.acquire(key);
+ releases.push(release);
+ }
+ try {
+ await callback();
+ }
+ finally {
+ releases.reverse().forEach((release) => release());
+ }
+ }
+ async acquire(key) {
+ const previousTail = this.locks.get(key);
+ let release;
+ const currentTail = new Promise((resolve) => (release = resolve));
+ if (previousTail) {
+ this.locks.set(key, previousTail.then(() => currentTail));
+ await previousTail;
+ }
+ else {
+ this.locks.set(key, currentTail);
+ }
+ return () => {
+ release();
+ if (this.locks.get(key) === currentTail) {
+ this.locks.delete(key);
+ }
+ };
+ }
+ }
+ exports.PromiseChain = PromiseChain;
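
`PromiseChain` replaces the event-emitter `Lock` (deleted at the bottom of this diff). Each key stores a promise tail and `acquire` chains onto it, so work per key runs strictly in order; `run()` sorts and deduplicates its keys first, so callers locking overlapping key sets always acquire in the same order, the standard guard against lock-ordering deadlocks. A usage sketch with hypothetical callbacks:

```ts
declare function refreshBoth(): Promise<void>;   // hypothetical callbacks
declare function refreshOrders(): Promise<void>;

const chain = new PromiseChain();
// Both runs touch 'metadata:orders'; sorted, deduplicated acquisition means one
// caller holds all of its keys before the other can take the shared one.
void chain.run(['metadata:orders', 'metadata:logs'], refreshBoth);
void chain.run(['metadata:orders'], refreshOrders);
```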
@@ -1 +1 @@
- export declare const shared: <F extends () => Promise<any>>(func: F) => () => ReturnType<F>;
+ export declare const shared: <F extends (...args: any[]) => Promise<any>>(func: F) => (...args: Parameters<F>) => ReturnType<F>;
@@ -2,15 +2,16 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.shared = void 0;
  const shared = (func) => {
- let promise;
- return () => {
- if (!promise) {
- promise = func();
- promise.finally(() => {
- promise = undefined;
+ let promises = {};
+ return (...args) => {
+ const key = JSON.stringify(args);
+ if (!promises[key]) {
+ promises[key] = func(...args);
+ promises[key].finally(() => {
+ delete promises[key];
  });
  }
- return promise;
+ return promises[key];
  };
  };
  exports.shared = shared;
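
`shared()` previously deduplicated only zero-argument calls; it now keys in-flight promises by `JSON.stringify(args)`, which is what allows the producer's new `fetchMetadataForTopics` to be wrapped in it. A sketch with a hypothetical function:

```ts
declare function fetchTopics(topics: string[]): Promise<void>; // hypothetical
const fetchOnce = shared(fetchTopics);
// Identical argument lists share one in-flight promise;
await Promise.all([fetchOnce(['orders']), fetchOnce(['orders'])]);
// different arguments run independently.
await fetchOnce(['logs']);
```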
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "kafka-ts",
- "version": "1.1.9",
+ "version": "1.2.0",
  "main": "dist/index.js",
  "author": "Priit Käärd",
  "license": "MIT",
@@ -1,17 +0,0 @@
- type TopicPartitionLeader = {
- [topicName: string]: {
- [partitionId: number]: number;
- };
- };
- type MessagesByNodeTopicPartition<T> = {
- [nodeId: number]: {
- [topicName: string]: {
- [partitionId: number]: T[];
- };
- };
- };
- export declare const distributeMessagesToTopicPartitionLeaders: <T extends {
- topic: string;
- partition: number;
- }>(messages: T[], topicPartitionLeader: TopicPartitionLeader) => MessagesByNodeTopicPartition<T>;
- export {};
@@ -1,15 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.distributeMessagesToTopicPartitionLeaders = void 0;
- const distributeMessagesToTopicPartitionLeaders = (messages, topicPartitionLeader) => {
- const result = {};
- messages.forEach((message) => {
- const leaderId = topicPartitionLeader[message.topic][message.partition];
- result[leaderId] ??= {};
- result[leaderId][message.topic] ??= {};
- result[leaderId][message.topic][message.partition] ??= [];
- result[leaderId][message.topic][message.partition].push(message);
- });
- return result;
- };
- exports.distributeMessagesToTopicPartitionLeaders = distributeMessagesToTopicPartitionLeaders;
@@ -1,30 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const vitest_1 = require("vitest");
- const messages_to_topic_partition_leaders_1 = require("./messages-to-topic-partition-leaders");
- (0, vitest_1.describe)('Distribute messages to partition leader ids', () => {
- (0, vitest_1.describe)('distributeMessagesToTopicPartitionLeaders', () => {
- (0, vitest_1.it)('snoke', () => {
- const result = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)([{ topic: 'topic', partition: 0, key: null, value: null, offset: 0n, timestamp: 0n, headers: {} }], { topic: { 0: 1 } });
- (0, vitest_1.expect)(result).toMatchInlineSnapshot(`
- {
- "1": {
- "topic": {
- "0": [
- {
- "headers": {},
- "key": null,
- "offset": 0n,
- "partition": 0,
- "timestamp": 0n,
- "topic": "topic",
- "value": null,
- },
- ],
- },
- },
- }
- `);
- });
- });
- });
@@ -1,8 +0,0 @@
- import EventEmitter from 'events';
- export declare class Lock extends EventEmitter {
- private locks;
- constructor();
- acquire(keys: string[], callback: () => Promise<void>): Promise<void>;
- private acquireKey;
- private releaseKey;
- }
@@ -1,44 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.Lock = void 0;
- const events_1 = __importDefault(require("events"));
- const logger_1 = require("./logger");
- class Lock extends events_1.default {
- locks = {};
- constructor() {
- super();
- this.setMaxListeners(Infinity);
- }
- async acquire(keys, callback) {
- await Promise.all(keys.map((key) => this.acquireKey(key)));
- try {
- await callback();
- }
- finally {
- keys.forEach((key) => this.releaseKey(key));
- }
- }
- async acquireKey(key) {
- while (this.locks[key]) {
- await new Promise((resolve) => {
- const timeout = setTimeout(() => {
- logger_1.log.warn(`Lock timed out`, { key });
- this.releaseKey(key);
- }, 60_000);
- this.once(`release:${key}`, () => {
- clearTimeout(timeout);
- resolve();
- });
- });
- }
- this.locks[key] = true;
- }
- releaseKey(key) {
- this.locks[key] = false;
- this.emit(`release:${key}`);
- }
- }
- exports.Lock = Lock;