kafka-ts 0.0.1-beta.4 → 0.0.1-beta.6

This diff compares the publicly available contents of the two package versions as published to their public registry. It is provided for informational purposes only.
Files changed (156)
  1. package/.github/workflows/release.yml +19 -6
  2. package/README.md +15 -21
  3. package/dist/api/api-versions.d.ts +9 -0
  4. package/dist/api/api-versions.js +24 -0
  5. package/dist/api/create-topics.d.ts +38 -0
  6. package/dist/api/create-topics.js +53 -0
  7. package/dist/api/delete-topics.d.ts +18 -0
  8. package/dist/api/delete-topics.js +33 -0
  9. package/dist/api/fetch.d.ts +84 -0
  10. package/dist/api/fetch.js +142 -0
  11. package/dist/api/find-coordinator.d.ts +21 -0
  12. package/dist/api/find-coordinator.js +39 -0
  13. package/dist/api/heartbeat.d.ts +11 -0
  14. package/dist/api/heartbeat.js +27 -0
  15. package/dist/api/index.d.ts +578 -0
  16. package/dist/api/index.js +165 -0
  17. package/dist/api/init-producer-id.d.ts +13 -0
  18. package/dist/api/init-producer-id.js +29 -0
  19. package/dist/api/join-group.d.ts +34 -0
  20. package/dist/api/join-group.js +51 -0
  21. package/dist/api/leave-group.d.ts +19 -0
  22. package/dist/api/leave-group.js +39 -0
  23. package/dist/api/list-offsets.d.ts +29 -0
  24. package/dist/api/list-offsets.js +48 -0
  25. package/dist/api/metadata.d.ts +40 -0
  26. package/dist/api/metadata.js +58 -0
  27. package/dist/api/offset-commit.d.ts +28 -0
  28. package/dist/api/offset-commit.js +48 -0
  29. package/dist/api/offset-fetch.d.ts +33 -0
  30. package/dist/api/offset-fetch.js +57 -0
  31. package/dist/api/produce.d.ts +54 -0
  32. package/dist/api/produce.js +126 -0
  33. package/dist/api/sasl-authenticate.d.ts +11 -0
  34. package/dist/api/sasl-authenticate.js +23 -0
  35. package/dist/api/sasl-handshake.d.ts +6 -0
  36. package/dist/api/sasl-handshake.js +19 -0
  37. package/dist/api/sync-group.d.ts +24 -0
  38. package/dist/api/sync-group.js +36 -0
  39. package/dist/auth/index.d.ts +2 -0
  40. package/dist/auth/index.js +8 -0
  41. package/dist/auth/plain.d.ts +5 -0
  42. package/dist/auth/plain.js +12 -0
  43. package/dist/auth/scram.d.ts +9 -0
  44. package/dist/auth/scram.js +40 -0
  45. package/dist/broker.d.ts +30 -0
  46. package/dist/broker.js +55 -0
  47. package/dist/client.d.ts +23 -0
  48. package/dist/client.js +36 -0
  49. package/dist/cluster.d.ts +27 -0
  50. package/dist/cluster.js +70 -0
  51. package/dist/cluster.test.d.ts +1 -0
  52. package/dist/cluster.test.js +345 -0
  53. package/dist/codecs/gzip.d.ts +2 -0
  54. package/dist/codecs/gzip.js +8 -0
  55. package/dist/codecs/index.d.ts +2 -0
  56. package/dist/codecs/index.js +17 -0
  57. package/dist/codecs/none.d.ts +2 -0
  58. package/dist/codecs/none.js +7 -0
  59. package/dist/codecs/types.d.ts +5 -0
  60. package/dist/codecs/types.js +2 -0
  61. package/dist/connection.d.ts +26 -0
  62. package/dist/connection.js +175 -0
  63. package/dist/consumer/consumer-group.d.ts +41 -0
  64. package/dist/consumer/consumer-group.js +217 -0
  65. package/dist/consumer/consumer-metadata.d.ts +7 -0
  66. package/dist/consumer/consumer-metadata.js +14 -0
  67. package/dist/consumer/consumer.d.ts +44 -0
  68. package/dist/consumer/consumer.js +225 -0
  69. package/dist/consumer/fetch-manager.d.ts +33 -0
  70. package/dist/consumer/fetch-manager.js +140 -0
  71. package/dist/consumer/fetcher.d.ts +25 -0
  72. package/dist/consumer/fetcher.js +64 -0
  73. package/dist/consumer/offset-manager.d.ts +22 -0
  74. package/dist/consumer/offset-manager.js +66 -0
  75. package/dist/consumer/processor.d.ts +19 -0
  76. package/dist/consumer/processor.js +59 -0
  77. package/dist/distributors/assignments-to-replicas.d.ts +16 -0
  78. package/dist/distributors/assignments-to-replicas.js +59 -0
  79. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  80. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  81. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  82. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  83. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  84. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  85. package/dist/distributors/partitioner.d.ts +7 -0
  86. package/dist/distributors/partitioner.js +23 -0
  87. package/dist/index.d.ts +9 -0
  88. package/dist/index.js +26 -0
  89. package/dist/metadata.d.ts +24 -0
  90. package/dist/metadata.js +106 -0
  91. package/dist/producer/producer.d.ts +24 -0
  92. package/dist/producer/producer.js +131 -0
  93. package/dist/types.d.ts +11 -0
  94. package/dist/types.js +2 -0
  95. package/dist/utils/api.d.ts +9 -0
  96. package/dist/utils/api.js +5 -0
  97. package/dist/utils/crypto.d.ts +8 -0
  98. package/dist/utils/crypto.js +18 -0
  99. package/dist/utils/decoder.d.ts +30 -0
  100. package/dist/utils/decoder.js +152 -0
  101. package/dist/utils/delay.d.ts +1 -0
  102. package/dist/utils/delay.js +5 -0
  103. package/dist/utils/encoder.d.ts +28 -0
  104. package/dist/utils/encoder.js +125 -0
  105. package/dist/utils/error.d.ts +11 -0
  106. package/dist/utils/error.js +27 -0
  107. package/dist/utils/logger.d.ts +9 -0
  108. package/dist/utils/logger.js +32 -0
  109. package/dist/utils/memo.d.ts +1 -0
  110. package/dist/utils/memo.js +16 -0
  111. package/dist/utils/murmur2.d.ts +3 -0
  112. package/dist/utils/murmur2.js +40 -0
  113. package/dist/utils/retrier.d.ts +10 -0
  114. package/dist/utils/retrier.js +22 -0
  115. package/dist/utils/tracer.d.ts +5 -0
  116. package/dist/utils/tracer.js +39 -0
  117. package/docker-compose.yml +3 -3
  118. package/examples/package-lock.json +3501 -3
  119. package/examples/package.json +8 -1
  120. package/examples/src/benchmark/common.ts +98 -0
  121. package/examples/src/benchmark/kafka-ts.ts +67 -0
  122. package/examples/src/benchmark/kafkajs.ts +51 -0
  123. package/examples/src/client.ts +4 -1
  124. package/examples/src/opentelemetry.ts +46 -0
  125. package/examples/src/producer.ts +11 -11
  126. package/package.json +4 -2
  127. package/scripts/create-scram-user.sh +4 -2
  128. package/scripts/generate-certs.sh +2 -0
  129. package/src/__snapshots__/cluster.test.ts.snap +35 -185
  130. package/src/api/fetch.ts +6 -1
  131. package/src/api/index.ts +3 -1
  132. package/src/api/metadata.ts +1 -1
  133. package/src/api/produce.ts +7 -10
  134. package/src/cluster.test.ts +2 -2
  135. package/src/cluster.ts +9 -16
  136. package/src/connection.ts +28 -15
  137. package/src/consumer/consumer-group.ts +35 -15
  138. package/src/consumer/consumer.ts +28 -18
  139. package/src/consumer/fetch-manager.ts +29 -45
  140. package/src/consumer/fetcher.ts +21 -14
  141. package/src/consumer/offset-manager.ts +18 -7
  142. package/src/consumer/processor.ts +14 -10
  143. package/src/distributors/assignments-to-replicas.ts +1 -3
  144. package/src/index.ts +1 -1
  145. package/src/metadata.ts +4 -0
  146. package/src/producer/producer.ts +11 -6
  147. package/src/utils/decoder.ts +0 -4
  148. package/src/utils/encoder.ts +26 -19
  149. package/src/utils/logger.ts +4 -4
  150. package/src/utils/tracer.ts +39 -23
  151. package/certs/ca.key +0 -52
  152. package/certs/ca.srl +0 -1
  153. package/certs/kafka.crt +0 -29
  154. package/certs/kafka.csr +0 -26
  155. package/certs/kafka.key +0 -52
  156. package/src/utils/mutex.ts +0 -31
package/src/connection.ts CHANGED
@@ -6,11 +6,12 @@ import { Api } from './utils/api';
 import { Decoder } from './utils/decoder';
 import { Encoder } from './utils/encoder';
 import { ConnectionError } from './utils/error';
+import { log } from './utils/logger';
 import { createTracer } from './utils/tracer';
 
 const trace = createTracer('Connection');
 
-export type ConnectionOptions = {
+type ConnectionOptions = {
     clientId: string | null;
     connection: TcpSocketConnectOpts;
     ssl: TLSSocketOptions | null;
@@ -24,14 +25,14 @@ export class Connection {
         [correlationId: number]: { resolve: (response: RawResonse) => void; reject: (error: Error) => void };
     } = {};
     private lastCorrelationId = 0;
-    private buffer: Buffer | null = null;
+    private chunks: Buffer[] = [];
 
     constructor(private options: ConnectionOptions) {}
 
     @trace()
     public async connect() {
         this.queue = {};
-        this.buffer = null;
+        this.chunks = [];
 
         await new Promise<void>((resolve, reject) => {
             const { ssl, connection } = this.options;
@@ -73,6 +74,7 @@ export class Connection {
     @trace((api, body) => ({ message: getApiName(api), body }))
     public async sendRequest<Request, Response>(api: Api<Request, Response>, body: Request): Promise<Response> {
         const correlationId = this.nextCorrelationId();
+        const apiName = getApiName(api);
 
         const encoder = new Encoder()
             .writeInt16(api.apiKey)
@@ -80,17 +82,24 @@
             .writeInt32(correlationId)
             .writeString(this.options.clientId);
 
-        const request = api.request(encoder, body).value();
-        const requestEncoder = new Encoder().writeInt32(request.length).write(request);
+        const request = api.request(encoder, body);
+        const requestEncoder = new Encoder().writeInt32(request.getByteLength()).writeEncoder(request);
 
+        let timeout: NodeJS.Timeout | undefined;
         const { responseDecoder, responseSize } = await new Promise<RawResonse>(async (resolve, reject) => {
+            timeout = setTimeout(() => {
+                delete this.queue[correlationId];
+                reject(new ConnectionError(`${apiName} timed out`));
+            }, 30_000);
+
             try {
-                await this.write(requestEncoder.value());
                 this.queue[correlationId] = { resolve, reject };
+                await this.write(requestEncoder.value());
             } catch (error) {
                 reject(error);
             }
         });
+        clearTimeout(timeout);
         const response = await api.response(responseDecoder);
 
         assert(
@@ -116,12 +125,13 @@
     }
 
     private handleData(buffer: Buffer) {
-        this.buffer = this.buffer ? Buffer.concat([this.buffer, buffer]) : buffer;
-        if (this.buffer.length < 4) {
+        this.chunks.push(buffer);
+
+        const decoder = new Decoder(Buffer.concat(this.chunks));
+        if (decoder.getBufferLength() < 4) {
             return;
         }
 
-        const decoder = new Decoder(this.buffer);
         const size = decoder.readInt32();
         if (size !== decoder.getBufferLength() - 4) {
             return;
@@ -129,15 +139,18 @@
 
         const correlationId = decoder.readInt32();
 
-        const { resolve } = this.queue[correlationId];
-        delete this.queue[correlationId];
-
-        resolve({ responseDecoder: decoder, responseSize: size });
-        this.buffer = null;
+        const context = this.queue[correlationId];
+        if (context) {
+            delete this.queue[correlationId];
+            context.resolve({ responseDecoder: decoder, responseSize: size });
+        } else {
+            log.debug('Could not find pending request for correlationId', { correlationId });
+        }
+        this.chunks = [];
     }
 
     private nextCorrelationId() {
-        return (this.lastCorrelationId = (this.lastCorrelationId + 1) % 2 ** 31);
+        return this.lastCorrelationId++;
     }
 }
 
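Note on the handleData rework above: instead of a single rolling buffer, raw TCP chunks are now accumulated and only decoded once the 4-byte length prefix and the full payload have arrived, after which the chunk list is reset. A minimal, self-contained sketch of that framing pattern (illustrative only, not kafka-ts code; FrameAccumulator is a made-up name):

import assert from 'assert';

// Sketch of the size-prefixed framing used above: accumulate chunks until the
// int32 length prefix plus the complete payload are available, then reset.
class FrameAccumulator {
    private chunks: Buffer[] = [];

    // Returns the complete frame (without its length prefix) once it has fully arrived.
    public push(chunk: Buffer): Buffer | null {
        this.chunks.push(chunk);
        const buffer = Buffer.concat(this.chunks);
        if (buffer.length < 4) return null; // length prefix not complete yet
        const size = buffer.readInt32BE(0); // payload size announced by the prefix
        if (buffer.length - 4 !== size) return null; // payload still incomplete
        this.chunks = []; // reset, mirroring `this.chunks = []` above
        return buffer.subarray(4);
    }
}

// Example: one response delivered in two TCP chunks.
const acc = new FrameAccumulator();
const payload = Buffer.from('hello');
const prefix = Buffer.alloc(4);
prefix.writeInt32BE(payload.length, 0);
assert(acc.push(prefix) === null);
assert(acc.push(payload)?.equals(payload));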
package/src/consumer/consumer-group.ts CHANGED
@@ -1,11 +1,15 @@
+import EventEmitter from 'events';
 import { API, API_ERROR } from '../api';
 import { KEY_TYPE } from '../api/find-coordinator';
 import { Assignment, MemberAssignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
 import { KafkaTSApiError, KafkaTSError } from '../utils/error';
+import { createTracer } from '../utils/tracer';
 import { ConsumerMetadata } from './consumer-metadata';
 import { OffsetManager } from './offset-manager';
 
+const trace = createTracer('ConsumerGroup');
+
 type ConsumerGroupOptions = {
     cluster: Cluster;
     topics: string[];
@@ -17,7 +21,7 @@ type ConsumerGroupOptions = {
     offsetManager: OffsetManager;
 };
 
-export class ConsumerGroup {
+export class ConsumerGroup extends EventEmitter<{ offsetCommit: [] }> {
     private coordinatorId = -1;
     private memberId = '';
     private generationId = -1;
@@ -26,12 +30,16 @@ export class ConsumerGroup {
     private heartbeatInterval: NodeJS.Timeout | null = null;
     private heartbeatError: KafkaTSError | null = null;
 
-    constructor(private options: ConsumerGroupOptions) {}
+    constructor(private options: ConsumerGroupOptions) {
+        super();
+    }
 
+    @trace()
     public async join() {
         await this.findCoordinator();
         await this.options.cluster.setSeedBroker(this.coordinatorId);
 
+        this.memberId = '';
         await this.joinGroup();
         await this.syncGroup();
         await this.offsetFetch();
@@ -55,12 +63,16 @@
         }
     }
 
-    public async handleLastHeartbeat() {
+    public handleLastHeartbeat() {
         if (this.heartbeatError) {
             throw this.heartbeatError;
         }
     }
 
+    public resetHeartbeat() {
+        this.heartbeatError = null;
+    }
+
     private async findCoordinator() {
         const { coordinators } = await this.options.cluster.sendRequest(API.FIND_COORDINATOR, {
             keyType: KEY_TYPE.GROUP,
@@ -147,30 +159,34 @@
         if (!request.groups.length) return;
 
         const response = await cluster.sendRequest(API.OFFSET_FETCH, request);
+
+        const topicPartitions: Record<string, Set<number>> = {};
         response.groups.forEach((group) => {
             group.topics.forEach((topic) => {
-                topic.partitions
-                    .filter(({ committedOffset }) => committedOffset >= 0)
-                    .forEach(({ partitionIndex, committedOffset }) =>
-                        offsetManager.resolve(topic.name, partitionIndex, committedOffset),
-                    );
+                topicPartitions[topic.name] ??= new Set();
+                topic.partitions.forEach(({ partitionIndex, committedOffset }) => {
+                    if (committedOffset >= 0) {
+                        topicPartitions[topic.name].add(partitionIndex);
+                        offsetManager.resolve(topic.name, partitionIndex, committedOffset);
+                    }
+                });
             });
         });
-        offsetManager.flush();
+        offsetManager.flush(topicPartitions);
     }
 
-    public async offsetCommit() {
+    public async offsetCommit(topicPartitions: Record<string, Set<number>>) {
         const { cluster, groupId, groupInstanceId, offsetManager } = this.options;
         const request = {
             groupId,
             groupInstanceId,
             memberId: this.memberId,
             generationIdOrMemberEpoch: this.generationId,
-            topics: Object.entries(offsetManager.pendingOffsets).map(([topic, partitions]) => ({
+            topics: Object.entries(topicPartitions).map(([topic, partitions]) => ({
                 name: topic,
-                partitions: Object.entries(partitions).map(([partition, offset]) => ({
-                    partitionIndex: parseInt(partition),
-                    committedOffset: offset,
+                partitions: [...partitions].map((partitionIndex) => ({
+                    partitionIndex,
+                    committedOffset: offsetManager.pendingOffsets[topic][partitionIndex],
                     committedLeaderEpoch: -1,
                     committedMetadata: null,
                 })),
@@ -180,7 +196,7 @@
             return;
         }
         await cluster.sendRequest(API.OFFSET_COMMIT, request);
-        offsetManager.flush();
+        this.emit('offsetCommit');
     }
 
     public async heartbeat() {
@@ -194,6 +210,10 @@
     }
 
     public async leaveGroup() {
+        if (this.coordinatorId === -1) {
+            return;
+        }
+
         const { cluster, groupId, groupInstanceId } = this.options;
         this.stopHeartbeater();
         try {
package/src/consumer/consumer.ts CHANGED
@@ -1,3 +1,4 @@
+import EventEmitter from 'events';
 import { API, API_ERROR } from '../api';
 import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
@@ -33,7 +34,7 @@ export type ConsumerOptions = {
     concurrency?: number;
 } & ({ onBatch: (messages: Required<Message>[]) => unknown } | { onMessage: (message: Required<Message>) => unknown });
 
-export class Consumer {
+export class Consumer extends EventEmitter<{ offsetCommit: [] }> {
     private options: Required<ConsumerOptions>;
     private metadata: ConsumerMetadata;
     private consumerGroup: ConsumerGroup | undefined;
@@ -45,6 +46,8 @@ export class Consumer {
         private cluster: Cluster,
         options: ConsumerOptions,
     ) {
+        super();
+
         this.options = {
             ...options,
             groupId: options.groupId ?? null,
@@ -81,8 +84,10 @@
                   offsetManager: this.offsetManager,
               })
             : undefined;
+        this.consumerGroup?.on('offsetCommit', () => this.emit('offsetCommit'));
     }
 
+    @trace()
     public async start(): Promise<void> {
         const { topics, allowTopicAutoCreation, fromBeginning } = this.options;
 
@@ -95,7 +100,7 @@
             await this.offsetManager.fetchOffsets({ fromBeginning });
             await this.consumerGroup?.join();
         } catch (error) {
-            log.error('Failed to start consumer', error);
+            log.warn('Failed to start consumer', error);
             log.debug(`Restarting consumer in 1 second...`);
             await delay(1000);
 
@@ -113,14 +118,16 @@
                 await this.fetchManager?.stop();
             });
         }
-        await this.consumerGroup?.leaveGroup().catch((error) => log.warn(`Failed to leave group: ${error.message}`));
-        await this.cluster.disconnect().catch((error) => log.warn(`Failed to disconnect: ${error.message}`));
+        await this.consumerGroup?.leaveGroup().catch((error) => log.debug(`Failed to leave group: ${error.message}`));
+        await this.cluster.disconnect().catch((error) => log.debug(`Failed to disconnect: ${error.message}`));
     }
 
-    private startFetchManager = async () => {
+    private async startFetchManager() {
         const { batchGranularity, concurrency } = this.options;
 
         while (!this.stopHook) {
+            this.consumerGroup?.resetHeartbeat();
+
             // TODO: If leader is not available, find another read replica
             const nodeAssignments = Object.entries(
                 distributeMessagesToTopicPartitionLeaders(
@@ -158,7 +165,7 @@
                 if (!nodeAssignments.length) {
                     log.debug('No partitions assigned. Waiting for reassignment...');
                     await delay(this.options.maxWaitMs);
-                    await this.consumerGroup?.handleLastHeartbeat();
+                    this.consumerGroup?.handleLastHeartbeat();
                 }
             } catch (error) {
                 await this.fetchManager.stop();
@@ -186,12 +193,18 @@
             }
         }
         this.stopHook?.();
-    };
+    }
 
-    @trace()
+    @trace((messages) => ({ count: messages.length }))
     private async process(messages: Required<Message>[]) {
         const { options } = this;
 
+        const topicPartitions: Record<string, Set<number>> = {};
+        for (const { topic, partition } of messages) {
+            topicPartitions[topic] ??= new Set();
+            topicPartitions[topic].add(partition);
+        }
+
         if ('onBatch' in options) {
             await options.onBatch(messages);
 
@@ -199,19 +212,16 @@
                 this.offsetManager.resolve(topic, partition, offset + 1n),
             );
         } else if ('onMessage' in options) {
-            try {
-                for (const message of messages) {
-                    await options.onMessage(message);
+            for (const message of messages) {
+                await options.onMessage(message);
 
-                    const { topic, partition, offset } = message;
-                    this.offsetManager.resolve(topic, partition, offset + 1n);
-                }
-            } catch (error) {
-                await this.consumerGroup?.offsetCommit().catch(() => {});
-                throw error;
+                const { topic, partition, offset } = message;
+                this.offsetManager.resolve(topic, partition, offset + 1n);
             }
         }
-        await this.consumerGroup?.offsetCommit();
+
+        await this.consumerGroup?.offsetCommit(topicPartitions);
+        this.offsetManager.flush(topicPartitions);
     }
 
     private fetch(nodeId: number, assignment: Assignment) {
package/src/consumer/fetch-manager.ts CHANGED
@@ -1,5 +1,4 @@
-import EventEmitter from 'events';
-import { API } from '../api';
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Metadata } from '../metadata';
 import { Batch, Message } from '../types';
@@ -14,7 +13,7 @@ const trace = createTracer('FetchManager');
 export type BatchGranularity = 'partition' | 'topic' | 'broker';
 
 type FetchManagerOptions = {
-    fetch: (nodeId: number, assignment: Assignment) => Promise<Awaited<ReturnType<(typeof API.FETCH)['response']>>>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
     process: (batch: Batch) => Promise<void>;
     metadata: Metadata;
     consumerGroup?: ConsumerGroup;
@@ -26,15 +25,15 @@ type FetchManagerOptions = {
 type Checkpoint = { kind: 'checkpoint'; fetcherId: number };
 type Entry = Batch | Checkpoint;
 
-export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number]; stop: [] }> {
+export class FetchManager {
     private queue: Entry[] = [];
     private isRunning = false;
    private fetchers: Fetcher[];
     private processors: Processor[];
+    private pollQueue: (() => void)[] = [];
+    private fetcherCallbacks: Record<number, () => void> = {};
 
     constructor(private options: FetchManagerOptions) {
-        super();
-
         const { fetch, process, consumerGroup, nodeAssignments, concurrency } = this.options;
 
         this.fetchers = nodeAssignments.map(
@@ -52,6 +51,7 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
         );
     }
 
+    @trace(() => ({ root: true }))
     public async start() {
         this.queue = [];
         this.isRunning = true;
@@ -62,20 +62,25 @@
                 ...this.processors.map((processor) => processor.loop()),
             ]);
         } finally {
-            this.isRunning = false;
-            this.emit('stop');
+            await this.stop();
         }
     }
 
-    @trace()
     public async stop() {
         this.isRunning = false;
-        this.emit('stop');
 
-        await Promise.all([
+        const stopPromise = Promise.all([
            ...this.fetchers.map((fetcher) => fetcher.stop()),
            ...this.processors.map((processor) => processor.stop()),
        ]);
+
+        this.pollQueue.forEach((resolve) => resolve());
+        this.pollQueue = [];
+
+        Object.values(this.fetcherCallbacks).forEach((callback) => callback());
+        this.fetcherCallbacks = {};
+
+        await stopPromise;
     }
 
     @trace()
@@ -88,29 +93,23 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
         if (!batch) {
             // wait until new data is available or fetch manager is requested to stop
             await new Promise<void>((resolve) => {
-                const onData = () => {
-                    this.removeListener('stop', onStop);
-                    resolve();
-                };
-                const onStop = () => {
-                    this.removeListener('data', onData);
-                    resolve();
-                };
-                this.once('data', onData);
-                this.once('stop', onStop);
+                this.pollQueue.push(resolve);
             });
             return this.poll();
         }
 
         if ('kind' in batch && batch.kind === 'checkpoint') {
-            this.emit('checkpoint', batch.fetcherId);
+            this.fetcherCallbacks[batch.fetcherId]?.();
            return this.poll();
        }
 
+        this.pollQueue?.shift()?.();
+
        return batch as Exclude<Entry, Checkpoint>;
    }
 
-    private async onResponse(fetcherId: number, response: Awaited<ReturnType<(typeof API.FETCH)['response']>>) {
+    @trace()
+    private async onResponse(fetcherId: number, response: FetchResponse) {
        const { metadata, batchGranularity } = this.options;
 
        const batches = fetchResponseToBatches(response, batchGranularity, metadata);
@@ -120,30 +119,15 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
 
        // wait until all broker batches have been processed or fetch manager is requested to stop
        await new Promise<void>((resolve) => {
-            const onCheckpoint = (id: number) => {
-                if (id === fetcherId) {
-                    this.removeListener('checkpoint', onCheckpoint);
-                    this.removeListener('stop', onStop);
-                    resolve();
-                }
-            };
-            const onStop = () => {
-                this.removeListener('checkpoint', onCheckpoint);
-                resolve();
-            };
-            this.on('checkpoint', onCheckpoint);
-            this.once('stop', onStop);
-
-            this.queue.push(...batches);
-            this.queue.push({ kind: 'checkpoint', fetcherId });
-
-            this.emit('data');
+            this.fetcherCallbacks[fetcherId] = resolve;
+            this.queue.push(...batches, { kind: 'checkpoint', fetcherId });
+            this.pollQueue?.shift()?.();
        });
    }
 }
 
 const fetchResponseToBatches = (
-    batch: Awaited<ReturnType<typeof API.FETCH.response>>,
+    batch: FetchResponse,
     batchGranularity: BatchGranularity,
     metadata: Metadata,
 ): Batch[] => {
@@ -176,9 +160,9 @@ const fetchResponseToBatches = (
                 .map((topicPartition) => topicPartition.flatMap((partitionMessages) => partitionMessages))
                 .filter((messages) => messages.length);
         case 'partition':
-            return brokerTopics.flatMap((topicPartition) =>
-                topicPartition.map((partitionMessages) => partitionMessages),
-            );
+            return brokerTopics
+                .flatMap((topicPartition) => topicPartition.map((partitionMessages) => partitionMessages))
+                .filter((messages) => messages.length);
         default:
             throw new KafkaTSError(`Unhandled batch granularity: ${batchGranularity}`);
     }
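The FetchManager changes above drop the EventEmitter base class in favour of plain resolver callbacks: parked poll() calls wait in pollQueue, and each fetcher's checkpoint resolver is stored in fetcherCallbacks. A minimal sketch of that resolver-queue pattern in isolation (illustrative only, not kafka-ts code; AsyncQueue is a made-up name):

// Sketch: a consumer awaiting poll() parks its resolve function; push() wakes one
// parked waiter, stop() wakes all of them so nothing hangs during shutdown.
class AsyncQueue<T> {
    private items: T[] = [];
    private waiters: (() => void)[] = [];

    public push(item: T) {
        this.items.push(item);
        this.waiters.shift()?.(); // wake one parked poll(), if any
    }

    public async poll(): Promise<T | undefined> {
        const item = this.items.shift();
        if (item !== undefined) return item;
        await new Promise<void>((resolve) => this.waiters.push(resolve));
        return this.items.shift(); // undefined if we were woken by stop()
    }

    public stop() {
        this.waiters.forEach((resolve) => resolve()); // release every parked waiter
        this.waiters = [];
    }
}

// Example: poll() resolves once a batch is pushed.
const queue = new AsyncQueue<string>();
queue.poll().then((batch) => console.log(batch)); // logs 'batch-1'
queue.push('batch-1');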
package/src/consumer/fetcher.ts CHANGED
@@ -1,5 +1,5 @@
 import { EventEmitter } from 'stream';
-import { API } from '../api';
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { createTracer } from '../utils/tracer';
 import { ConsumerGroup } from './consumer-group';
@@ -10,11 +10,11 @@ type FetcherOptions = {
     nodeId: number;
     assignment: Assignment;
     consumerGroup?: ConsumerGroup;
-    fetch: (nodeId: number, assignment: Assignment) => Promise<Awaited<ReturnType<(typeof API.FETCH)['response']>>>;
-    onResponse: (fetcherId: number, response: Awaited<ReturnType<(typeof API.FETCH)['response']>>) => Promise<void>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
+    onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
 };
 
-export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; drain: [] }> {
+export class Fetcher extends EventEmitter<{ stopped: [] }> {
     private isRunning = false;
 
     constructor(
@@ -25,17 +25,11 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; dra
     }
 
     public async loop() {
-        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
-
         this.isRunning = true;
-        this.once('stop', () => (this.isRunning = false));
-
+ 
         try {
             while (this.isRunning) {
-                const response = await fetch(nodeId, assignment);
-                await consumerGroup?.handleLastHeartbeat();
-                await onResponse(this.fetcherId, response);
-                await consumerGroup?.handleLastHeartbeat();
+                await this.step();
             }
         } finally {
             this.isRunning = false;
@@ -44,14 +38,27 @@
     }
 
     @trace()
+    private async step() {
+        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
+
+        const response = await fetch(nodeId, assignment);
+        if (!this.isRunning) {
+            return;
+        }
+        consumerGroup?.handleLastHeartbeat();
+        await onResponse(this.fetcherId, response);
+        consumerGroup?.handleLastHeartbeat();
+    }
+
     public async stop() {
         if (!this.isRunning) {
             return;
         }
 
-        this.emit('stop');
-        return new Promise<void>((resolve) => {
+        const stopPromise = new Promise<void>((resolve) => {
            this.once('stopped', resolve);
        });
+        this.isRunning = false;
+        return stopPromise;
    }
 }
package/src/consumer/offset-manager.ts CHANGED
@@ -3,8 +3,11 @@ import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
 import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
+import { createTracer } from '../utils/tracer';
 import { ConsumerMetadata } from './consumer-metadata';
 
+const trace = createTracer('OffsetManager');
+
 type OffsetManagerOptions = {
     cluster: Cluster;
     metadata: ConsumerMetadata;
@@ -24,13 +27,18 @@ export class OffsetManager {
     public resolve(topic: string, partition: number, offset: bigint) {
         this.pendingOffsets[topic] ??= {};
         this.pendingOffsets[topic][partition] = offset;
-
-        this.currentOffsets[topic] ??= {};
-        this.currentOffsets[topic][partition] = offset;
     }
 
-    public flush() {
-        this.pendingOffsets = {};
+    public flush(topicPartitions: Record<string, Set<number>>) {
+        Object.entries(topicPartitions).forEach(([topic, partitions]) => {
+            this.currentOffsets[topic] ??= {};
+            partitions.forEach((partition) => {
+                if (this.pendingOffsets[topic]?.[partition]) {
+                    this.currentOffsets[topic][partition] = this.pendingOffsets[topic][partition];
+                    delete this.pendingOffsets[topic][partition];
+                }
+            });
+        });
     }
 
     public async fetchOffsets(options: { fromBeginning: boolean }) {
@@ -58,7 +66,6 @@
                 }),
             ),
         );
-        this.flush();
     }
 
     private async listOffsets({
@@ -83,11 +90,15 @@
             })),
         });
 
+        const topicPartitions: Record<string, Set<number>> = {};
         offsets.topics.forEach(({ name, partitions }) => {
+            topicPartitions[name] ??= new Set();
             partitions.forEach(({ partitionIndex, offset }) => {
+                topicPartitions[name].add(partitionIndex);
                 this.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
             });
         });
-        this.flush();
+
+        this.flush(topicPartitions);
     }
 }
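Taken together, the ConsumerGroup and OffsetManager changes above move flush() from wiping all pending offsets to promoting only the topic partitions that were actually committed. A simplified sketch of that bookkeeping (illustrative only, not kafka-ts code; OffsetBook is a made-up name):

// Sketch: offsets are staged per topic partition in `pending` via resolve(), and
// flush() promotes to `current` only the partitions named in the committed set.
type TopicPartitions = Record<string, Set<number>>;

class OffsetBook {
    public pending: Record<string, Record<number, bigint>> = {};
    public current: Record<string, Record<number, bigint>> = {};

    public resolve(topic: string, partition: number, offset: bigint) {
        this.pending[topic] ??= {};
        this.pending[topic][partition] = offset;
    }

    public flush(topicPartitions: TopicPartitions) {
        for (const [topic, partitions] of Object.entries(topicPartitions)) {
            this.current[topic] ??= {};
            for (const partition of partitions) {
                const offset = this.pending[topic]?.[partition];
                if (offset !== undefined) {
                    this.current[topic][partition] = offset; // promote committed offset
                    delete this.pending[topic][partition];
                }
            }
        }
    }
}

// Example: only the committed partition is promoted; the other stays pending.
const book = new OffsetBook();
book.resolve('orders', 0, 10n);
book.resolve('orders', 1, 7n);
book.flush({ orders: new Set([0]) });
// book.current -> { orders: { 0: 10n } }, book.pending -> { orders: { 1: 7n } }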