kafka-ts 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -167,7 +167,6 @@ Custom SASL mechanisms can be implemented following the `SASLProvider` interface
  | partitionMaxBytes | number | false | 1_048_576 | Maximum number of bytes to return per partition in the fetch response |
  | allowTopicAutoCreation | boolean | false | false | Allow kafka to auto-create topic when it doesn't exist |
  | fromTimestamp | bigint | false | -1 | Start consuming messages from timestamp (-1 = latest offsets, -2 = earliest offsets) |
- | batchSize | number | false | null | Maximum number of records called `onBatch` |
  | onBatch | (batch: Message[]) => Promise<unknown> | true | | Callback executed when a batch of messages is received |
 
  ### `kafka.createProducer()`
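For reference, the option table above documents the consumer entry point of this README. A minimal usage sketch follows; the `kafka` client, the group and topic names, and the method name `startConsumer` are placeholders/assumptions, not part of this diff. In 1.0.2 the removed `batchSize` option is no longer accepted, and `onBatch` receives whatever a single fetch returns.

```ts
// Sketch only: `kafka`, the group id, topic name, and the entry-point name are illustrative.
const consumer = await kafka.startConsumer({
    groupId: 'example-group',
    topics: ['example-topic'],
    allowTopicAutoCreation: false, // default per the table above
    fromTimestamp: -1n,            // -1n = latest offsets, -2n = earliest offsets
    partitionMaxBytes: 1_048_576,  // max bytes per partition per fetch response
    // batchSize: 100,             // removed in 1.0.2
    onBatch: async (messages) => {
        for (const { topic, partition, offset, value } of messages) {
            console.log(`${topic}/${partition}@${offset}:`, value?.toString());
        }
    },
});
```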
@@ -157,9 +157,13 @@ class ConsumerGroup {
  groupInstanceId,
  memberId: this.memberId,
  generationIdOrMemberEpoch: this.generationId,
- topics: Object.entries(topicPartitions).map(([topic, partitions]) => ({
+ topics: Object.entries(topicPartitions)
+ .filter(([topic]) => topic in offsetManager.pendingOffsets)
+ .map(([topic, partitions]) => ({
  name: topic,
- partitions: [...partitions].map((partitionIndex) => ({
+ partitions: [...partitions]
+ .filter((partition) => partition in offsetManager.pendingOffsets[topic])
+ .map((partitionIndex) => ({
  partitionIndex,
  committedOffset: offsetManager.pendingOffsets[topic][partitionIndex],
  committedLeaderEpoch: -1,
@@ -19,7 +19,6 @@ export type ConsumerOptions = {
  allowTopicAutoCreation?: boolean;
  fromBeginning?: boolean;
  fromTimestamp?: bigint;
- batchSize?: number | null;
  retrier?: Retrier;
  onBatch: (messages: Required<Message>[]) => unknown;
  };
@@ -52,7 +52,6 @@ class Consumer extends events_1.default {
  allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
  fromBeginning: options.fromBeginning ?? false,
  fromTimestamp: options.fromTimestamp ?? (options.fromBeginning ? -2n : -1n),
- batchSize: options.batchSize ?? null,
  retrier: options.retrier ?? retrier_1.defaultRetrier,
  };
  this.metadata = new consumer_metadata_1.ConsumerMetadata({ cluster: this.cluster });
@@ -106,7 +105,7 @@ class Consumer extends events_1.default {
  await this.cluster.disconnect().catch((error) => logger_1.log.debug(`Failed to disconnect: ${error.message}`));
  }
  async startFetchManager() {
- const { groupId, batchSize } = this.options;
+ const { groupId } = this.options;
  while (!this.stopHook) {
  try {
  await this.consumerGroup?.join();
@@ -121,9 +120,6 @@ class Consumer extends events_1.default {
  this.fetchManager = new fetch_manager_1.FetchManager({
  fetch: this.fetch.bind(this),
  process: this.process.bind(this),
- metadata: this.metadata,
- consumerGroup: this.consumerGroup,
- batchSize,
  nodeAssignments,
  });
  await this.fetchManager.start();
@@ -165,13 +161,29 @@ class Consumer extends events_1.default {
  this.consumerGroup?.handleLastHeartbeat();
  }
  }
- async process(messages) {
+ async process(response) {
  const { options } = this;
  const { retrier } = options;
+ this.consumerGroup?.handleLastHeartbeat();
  const topicPartitions = {};
- for (const { topic, partition } of messages) {
+ const messages = response.responses.flatMap(({ topicId, partitions }) => {
+ const topic = this.metadata.getTopicNameById(topicId);
  topicPartitions[topic] ??= new Set();
- topicPartitions[topic].add(partition);
+ return partitions.flatMap(({ partitionIndex, records }) => {
+ topicPartitions[topic].add(partitionIndex);
+ return records.flatMap(({ baseTimestamp, baseOffset, records }) => records.flatMap((message) => ({
+ topic,
+ partition: partitionIndex,
+ key: message.key ?? null,
+ value: message.value ?? null,
+ headers: Object.fromEntries(message.headers.map(({ key, value }) => [key, value])),
+ timestamp: baseTimestamp + BigInt(message.timestampDelta),
+ offset: baseOffset + BigInt(message.offsetDelta),
+ })));
+ });
+ });
+ if (!messages.length) {
+ return;
  }
  await retrier(() => options.onBatch(messages));
  messages.forEach(({ topic, partition, offset }) => this.offsetManager.resolve(topic, partition, offset + 1n));
@@ -180,6 +192,7 @@ class Consumer extends events_1.default {
  }
  fetch(nodeId, assignment) {
  const { rackId, maxWaitMs, minBytes, maxBytes, partitionMaxBytes, isolationLevel } = this.options;
+ this.consumerGroup?.handleLastHeartbeat();
  return this.cluster.sendRequestToNode(nodeId)(api_1.API.FETCH, {
  maxWaitMs,
  minBytes,
@@ -217,8 +230,8 @@ __decorate([
  __metadata("design:returntype", Promise)
  ], Consumer.prototype, "close", null);
  __decorate([
- trace((messages) => ({ count: messages.length })),
+ trace(),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [Array]),
+ __metadata("design:paramtypes", [Object]),
  __metadata("design:returntype", Promise)
  ], Consumer.prototype, "process", null);
@@ -1,14 +1,8 @@
  import { FetchResponse } from '../api/fetch';
  import { Assignment } from '../api/sync-group';
- import { Metadata } from '../metadata';
- import { Batch, Message } from '../types';
- import { ConsumerGroup } from './consumer-group';
  type FetchManagerOptions = {
  fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
- process: (batch: Batch) => Promise<void>;
- batchSize?: number | null;
- metadata: Metadata;
- consumerGroup?: ConsumerGroup;
+ process: (response: FetchResponse) => Promise<void>;
  nodeAssignments: {
  nodeId: number;
  assignment: Assignment;
@@ -16,16 +10,9 @@ type FetchManagerOptions = {
  };
  export declare class FetchManager {
  private options;
- private queue;
- private isRunning;
  private fetchers;
- private processor;
- private pollCallback;
- private fetcherCallbacks;
  constructor(options: FetchManagerOptions);
  start(): Promise<void>;
- stop(): Promise<void>;
- poll(): Promise<Required<Message>[]>;
- private onResponse;
+ stop(): Promise<void[]>;
  }
  export {};
@@ -12,85 +12,25 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.FetchManager = void 0;
  const tracer_1 = require("../utils/tracer");
  const fetcher_1 = require("./fetcher");
- const processor_1 = require("./processor");
  const trace = (0, tracer_1.createTracer)('FetchManager');
  class FetchManager {
  options;
- queue = [];
- isRunning = false;
  fetchers;
- processor;
- pollCallback;
- fetcherCallbacks = {};
  constructor(options) {
  this.options = options;
  const { fetch, process, nodeAssignments } = this.options;
- this.fetchers = nodeAssignments.map(({ nodeId, assignment }, index) => new fetcher_1.Fetcher(index, {
- nodeId,
- assignment,
- fetch,
- onResponse: this.onResponse.bind(this),
- }));
- this.processor = new processor_1.Processor({ process, poll: this.poll.bind(this) });
+ this.fetchers = nodeAssignments.map(({ nodeId, assignment }) => new fetcher_1.Fetcher({ nodeId, assignment, fetch, process }));
  }
  async start() {
- this.queue = [];
- this.isRunning = true;
  try {
- await Promise.all([...this.fetchers.map((fetcher) => fetcher.loop()), this.processor.loop()]);
+ await Promise.all(this.fetchers.map((fetcher) => fetcher.loop()));
  }
  finally {
  await this.stop();
  }
  }
  async stop() {
- this.isRunning = false;
- const stopPromise = Promise.all([
- ...this.fetchers.map((fetcher) => fetcher.stop()),
- this.processor.stop(),
- ]);
- this.pollCallback?.();
- Object.values(this.fetcherCallbacks).forEach((callback) => callback());
- this.fetcherCallbacks = {};
- await stopPromise;
- }
- async poll() {
- if (!this.isRunning) {
- return [];
- }
- const { consumerGroup, batchSize } = this.options;
- consumerGroup?.handleLastHeartbeat();
- const batch = this.queue.splice(0, batchSize ?? undefined);
- if (!batch.length) {
- await new Promise((resolve) => (this.pollCallback = resolve));
- return this.poll();
- }
- const [checkpoints, messages] = partition(batch, (entry) => 'kind' in entry && entry.kind === 'checkpoint');
- checkpoints.forEach(({ fetcherId }) => this.fetcherCallbacks[fetcherId]?.());
- return messages;
- }
- async onResponse(fetcherId, response) {
- const { metadata, consumerGroup } = this.options;
- consumerGroup?.handleLastHeartbeat();
- const messages = response.responses.flatMap(({ topicId, partitions }) => partitions.flatMap(({ partitionIndex, records }) => records.flatMap(({ baseTimestamp, baseOffset, records }) => records.flatMap((message) => ({
- topic: metadata.getTopicNameById(topicId),
- partition: partitionIndex,
- key: message.key ?? null,
- value: message.value ?? null,
- headers: Object.fromEntries(message.headers.map(({ key, value }) => [key, value])),
- timestamp: baseTimestamp + BigInt(message.timestampDelta),
- offset: baseOffset + BigInt(message.offsetDelta),
- })))));
- if (!messages.length) {
- return;
- }
- // wait until all broker batches have been processed or fetch manager is requested to stop
- await new Promise((resolve) => {
- this.fetcherCallbacks[fetcherId] = resolve;
- this.queue.push(...messages, { kind: 'checkpoint', fetcherId });
- this.pollCallback?.();
- });
- consumerGroup?.handleLastHeartbeat();
+ return Promise.all(this.fetchers.map((fetcher) => fetcher.stop()));
  }
  }
  exports.FetchManager = FetchManager;
@@ -100,28 +40,3 @@ __decorate([
  __metadata("design:paramtypes", []),
  __metadata("design:returntype", Promise)
  ], FetchManager.prototype, "start", null);
- __decorate([
- trace(),
- __metadata("design:type", Function),
- __metadata("design:paramtypes", []),
- __metadata("design:returntype", Promise)
- ], FetchManager.prototype, "poll", null);
- __decorate([
- trace(),
- __metadata("design:type", Function),
- __metadata("design:paramtypes", [Number, Object]),
- __metadata("design:returntype", Promise)
- ], FetchManager.prototype, "onResponse", null);
- const partition = (batch, predicate) => {
- const checkpoints = [];
- const messages = [];
- for (const entry of batch) {
- if (predicate(entry)) {
- checkpoints.push(entry);
- }
- else {
- messages.push(entry);
- }
- }
- return [checkpoints, messages];
- };
@@ -7,15 +7,14 @@ type FetcherOptions = {
  nodeId: number;
  assignment: Assignment;
  fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
- onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
+ process: (response: FetchResponse) => Promise<void>;
  };
  export declare class Fetcher extends EventEmitter<{
  stopped: [];
  }> {
- private fetcherId;
  private options;
  private isRunning;
- constructor(fetcherId: number, options: FetcherOptions);
+ constructor(options: FetcherOptions);
  loop(): Promise<void>;
  private step;
  stop(): Promise<void>;
@@ -14,12 +14,10 @@ const stream_1 = require("stream");
  const tracer_1 = require("../utils/tracer");
  const trace = (0, tracer_1.createTracer)('Fetcher');
  class Fetcher extends stream_1.EventEmitter {
- fetcherId;
  options;
  isRunning = false;
- constructor(fetcherId, options) {
+ constructor(options) {
  super();
- this.fetcherId = fetcherId;
  this.options = options;
  }
  async loop() {
@@ -35,12 +33,11 @@ class Fetcher extends stream_1.EventEmitter {
  }
  }
  async step() {
- const { nodeId, assignment, fetch, onResponse } = this.options;
+ const { nodeId, assignment, fetch, process } = this.options;
  const response = await fetch(nodeId, assignment);
- if (!this.isRunning) {
+ if (!this.isRunning)
  return;
- }
- await onResponse(this.fetcherId, response);
+ await process(response);
  }
  async stop() {
  if (!this.isRunning) {
@@ -16,11 +16,11 @@ export declare class Metadata {
  getTopicIdByName(name: string): string;
  getTopicNameById(id: string): string;
  fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }: {
- topics: string[];
+ topics: string[] | Set<string>;
  allowTopicAutoCreation: boolean;
  }): Promise<void>;
  fetchMetadata({ topics, allowTopicAutoCreation, }: {
- topics: string[] | null;
+ topics: string[] | Set<string> | null;
  allowTopicAutoCreation: boolean;
  }): Promise<void>;
  }
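The widened `topics` parameter above means callers can pass a `Set<string>` directly instead of converting to an array first. A minimal sketch against the 1.0.2 declaration shown; the `metadata` instance and topic name are assumptions for illustration:

```ts
// Sketch only: `metadata` stands in for a Metadata instance from the library internals.
// A plain string[] continues to work; the Set form avoids an intermediate array.
await metadata.fetchMetadataIfNecessary({
    topics: new Set(['example-topic']),
    allowTopicAutoCreation: false,
});
```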
package/dist/metadata.js CHANGED
@@ -41,7 +41,7 @@ class Metadata {
  return this.topicNameById[id];
  }
  async fetchMetadataIfNecessary({ topics, allowTopicAutoCreation, }) {
- const missingTopics = topics.filter((topic) => !this.topicPartitions[topic]);
+ const missingTopics = Array.from(topics).filter((topic) => !this.topicPartitions[topic]);
  if (!missingTopics.length) {
  return;
  }
@@ -63,7 +63,7 @@ class Metadata {
  const { cluster } = this.options;
  const response = await cluster.sendRequest(api_1.API.METADATA, {
  allowTopicAutoCreation,
- topics: topics?.map((name) => ({ id: null, name })) ?? null,
+ topics: topics ? Array.from(topics).map((name) => ({ id: null, name })) : null,
  });
  this.topicPartitions = {
  ...this.topicPartitions,
@@ -40,9 +40,13 @@ class Producer {
  await this.ensureConnected();
  const { allowTopicAutoCreation } = this.options;
  const defaultTimestamp = BigInt(Date.now());
- const topics = Array.from(new Set(messages.map((message) => message.topic)));
+ const topics = new Set(messages.map((message) => message.topic));
  await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
- const nodeTopicPartitionMessages = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(messages.map((message) => ({ ...message, partition: this.partition(message) })), this.metadata.getTopicPartitionLeaderIds());
+ const partitionedMessages = messages.map((message) => {
+ message.partition = this.partition(message);
+ return message;
+ });
+ const nodeTopicPartitionMessages = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(partitionedMessages, this.metadata.getTopicPartitionLeaderIds());
  try {
  await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
  const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "kafka-ts",
- "version": "1.0.0",
+ "version": "1.0.2",
  "main": "dist/index.js",
  "author": "Priit Käärd",
  "license": "MIT",