kafka-ts 0.0.1-beta.3 → 0.0.1-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (164)
  1. package/.github/workflows/release.yml +19 -6
  2. package/README.md +15 -21
  3. package/dist/api/api-versions.d.ts +9 -0
  4. package/dist/api/api-versions.js +24 -0
  5. package/dist/api/create-topics.d.ts +38 -0
  6. package/dist/api/create-topics.js +53 -0
  7. package/dist/api/delete-topics.d.ts +18 -0
  8. package/dist/api/delete-topics.js +33 -0
  9. package/dist/api/fetch.d.ts +84 -0
  10. package/dist/api/fetch.js +142 -0
  11. package/dist/api/find-coordinator.d.ts +21 -0
  12. package/dist/api/find-coordinator.js +39 -0
  13. package/dist/api/heartbeat.d.ts +11 -0
  14. package/dist/api/heartbeat.js +27 -0
  15. package/dist/api/index.d.ts +578 -0
  16. package/dist/api/index.js +165 -0
  17. package/dist/api/init-producer-id.d.ts +13 -0
  18. package/dist/api/init-producer-id.js +29 -0
  19. package/dist/api/join-group.d.ts +34 -0
  20. package/dist/api/join-group.js +51 -0
  21. package/dist/api/leave-group.d.ts +19 -0
  22. package/dist/api/leave-group.js +39 -0
  23. package/dist/api/list-offsets.d.ts +29 -0
  24. package/dist/api/list-offsets.js +48 -0
  25. package/dist/api/metadata.d.ts +40 -0
  26. package/dist/api/metadata.js +58 -0
  27. package/dist/api/offset-commit.d.ts +28 -0
  28. package/dist/api/offset-commit.js +48 -0
  29. package/dist/api/offset-fetch.d.ts +33 -0
  30. package/dist/api/offset-fetch.js +57 -0
  31. package/dist/api/produce.d.ts +54 -0
  32. package/dist/api/produce.js +126 -0
  33. package/dist/api/sasl-authenticate.d.ts +11 -0
  34. package/dist/api/sasl-authenticate.js +23 -0
  35. package/dist/api/sasl-handshake.d.ts +6 -0
  36. package/dist/api/sasl-handshake.js +19 -0
  37. package/dist/api/sync-group.d.ts +24 -0
  38. package/dist/api/sync-group.js +36 -0
  39. package/dist/auth/index.d.ts +2 -0
  40. package/dist/auth/index.js +8 -0
  41. package/dist/auth/plain.d.ts +5 -0
  42. package/dist/auth/plain.js +12 -0
  43. package/dist/auth/scram.d.ts +9 -0
  44. package/dist/auth/scram.js +40 -0
  45. package/dist/broker.d.ts +30 -0
  46. package/dist/broker.js +55 -0
  47. package/dist/client.d.ts +23 -0
  48. package/dist/client.js +36 -0
  49. package/dist/cluster.d.ts +27 -0
  50. package/dist/cluster.js +70 -0
  51. package/dist/cluster.test.d.ts +1 -0
  52. package/dist/cluster.test.js +345 -0
  53. package/dist/codecs/gzip.d.ts +2 -0
  54. package/dist/codecs/gzip.js +8 -0
  55. package/dist/codecs/index.d.ts +2 -0
  56. package/dist/codecs/index.js +17 -0
  57. package/dist/codecs/none.d.ts +2 -0
  58. package/dist/codecs/none.js +7 -0
  59. package/dist/codecs/types.d.ts +5 -0
  60. package/dist/codecs/types.js +2 -0
  61. package/dist/connection.d.ts +26 -0
  62. package/dist/connection.js +175 -0
  63. package/dist/consumer/consumer-group.d.ts +41 -0
  64. package/dist/consumer/consumer-group.js +217 -0
  65. package/dist/consumer/consumer-metadata.d.ts +7 -0
  66. package/dist/consumer/consumer-metadata.js +14 -0
  67. package/dist/consumer/consumer.d.ts +44 -0
  68. package/dist/consumer/consumer.js +225 -0
  69. package/dist/consumer/fetch-manager.d.ts +33 -0
  70. package/dist/consumer/fetch-manager.js +140 -0
  71. package/dist/consumer/fetcher.d.ts +25 -0
  72. package/dist/consumer/fetcher.js +64 -0
  73. package/dist/consumer/offset-manager.d.ts +22 -0
  74. package/dist/consumer/offset-manager.js +66 -0
  75. package/dist/consumer/processor.d.ts +19 -0
  76. package/dist/consumer/processor.js +59 -0
  77. package/dist/distributors/assignments-to-replicas.d.ts +16 -0
  78. package/dist/distributors/assignments-to-replicas.js +59 -0
  79. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  80. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  81. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  82. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  83. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  84. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  85. package/dist/distributors/partitioner.d.ts +7 -0
  86. package/dist/distributors/partitioner.js +23 -0
  87. package/dist/index.d.ts +9 -0
  88. package/dist/index.js +26 -0
  89. package/dist/metadata.d.ts +24 -0
  90. package/dist/metadata.js +106 -0
  91. package/dist/producer/producer.d.ts +24 -0
  92. package/dist/producer/producer.js +131 -0
  93. package/dist/types.d.ts +11 -0
  94. package/dist/types.js +2 -0
  95. package/dist/utils/api.d.ts +9 -0
  96. package/dist/utils/api.js +5 -0
  97. package/dist/utils/crypto.d.ts +8 -0
  98. package/dist/utils/crypto.js +18 -0
  99. package/dist/utils/decoder.d.ts +30 -0
  100. package/dist/utils/decoder.js +152 -0
  101. package/dist/utils/delay.d.ts +1 -0
  102. package/dist/utils/delay.js +5 -0
  103. package/dist/utils/encoder.d.ts +28 -0
  104. package/dist/utils/encoder.js +125 -0
  105. package/dist/utils/error.d.ts +11 -0
  106. package/dist/utils/error.js +27 -0
  107. package/dist/utils/logger.d.ts +9 -0
  108. package/dist/utils/logger.js +32 -0
  109. package/dist/utils/memo.d.ts +1 -0
  110. package/dist/utils/memo.js +16 -0
  111. package/dist/utils/murmur2.d.ts +3 -0
  112. package/dist/utils/murmur2.js +40 -0
  113. package/dist/utils/retrier.d.ts +10 -0
  114. package/dist/utils/retrier.js +22 -0
  115. package/dist/utils/tracer.d.ts +5 -0
  116. package/dist/utils/tracer.js +39 -0
  117. package/docker-compose.yml +3 -3
  118. package/examples/package-lock.json +3501 -3
  119. package/examples/package.json +8 -1
  120. package/examples/src/benchmark/common.ts +98 -0
  121. package/examples/src/benchmark/kafka-ts.ts +67 -0
  122. package/examples/src/benchmark/kafkajs.ts +51 -0
  123. package/examples/src/client.ts +4 -1
  124. package/examples/src/consumer.ts +7 -1
  125. package/examples/src/create-topic.ts +3 -3
  126. package/examples/src/opentelemetry.ts +46 -0
  127. package/examples/src/producer.ts +11 -11
  128. package/examples/src/replicator.ts +2 -1
  129. package/package.json +4 -2
  130. package/scripts/create-scram-user.sh +4 -2
  131. package/scripts/generate-certs.sh +2 -0
  132. package/src/__snapshots__/cluster.test.ts.snap +160 -53
  133. package/src/api/fetch.ts +83 -28
  134. package/src/api/index.ts +3 -1
  135. package/src/api/metadata.ts +1 -1
  136. package/src/api/produce.ts +7 -10
  137. package/src/cluster.test.ts +10 -7
  138. package/src/cluster.ts +36 -38
  139. package/src/codecs/gzip.ts +9 -0
  140. package/src/codecs/index.ts +16 -0
  141. package/src/codecs/none.ts +6 -0
  142. package/src/codecs/types.ts +4 -0
  143. package/src/connection.ts +31 -17
  144. package/src/consumer/consumer-group.ts +43 -21
  145. package/src/consumer/consumer.ts +58 -37
  146. package/src/consumer/fetch-manager.ts +36 -46
  147. package/src/consumer/fetcher.ts +20 -13
  148. package/src/consumer/offset-manager.ts +18 -7
  149. package/src/consumer/processor.ts +14 -8
  150. package/src/distributors/assignments-to-replicas.ts +1 -3
  151. package/src/index.ts +2 -0
  152. package/src/metadata.ts +4 -0
  153. package/src/producer/producer.ts +14 -9
  154. package/src/utils/api.ts +1 -1
  155. package/src/utils/decoder.ts +9 -3
  156. package/src/utils/encoder.ts +26 -19
  157. package/src/utils/logger.ts +37 -0
  158. package/src/utils/tracer.ts +40 -22
  159. package/certs/ca.key +0 -52
  160. package/certs/ca.srl +0 -1
  161. package/certs/kafka.crt +0 -29
  162. package/certs/kafka.csr +0 -26
  163. package/certs/kafka.key +0 -52
  164. package/src/utils/debug.ts +0 -9
package/src/consumer/consumer.ts CHANGED
@@ -1,17 +1,21 @@
+import EventEmitter from 'events';
 import { API, API_ERROR } from '../api';
 import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
-import { distributeAssignmentsToNodes } from '../distributors/assignments-to-replicas';
+import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
 import { Message } from '../types';
 import { delay } from '../utils/delay';
 import { ConnectionError, KafkaTSApiError } from '../utils/error';
-import { defaultRetrier, Retrier } from '../utils/retrier';
+import { log } from '../utils/logger';
+import { createTracer } from '../utils/tracer';
 import { ConsumerGroup } from './consumer-group';
 import { ConsumerMetadata } from './consumer-metadata';
-import { FetchManager, BatchGranularity } from './fetch-manager';
+import { BatchGranularity, FetchManager } from './fetch-manager';
 import { OffsetManager } from './offset-manager';
 
+const trace = createTracer('Consumer');
+
 export type ConsumerOptions = {
     topics: string[];
     groupId?: string | null;
@@ -26,12 +30,11 @@ export type ConsumerOptions = {
     partitionMaxBytes?: number;
     allowTopicAutoCreation?: boolean;
     fromBeginning?: boolean;
-    retrier?: Retrier;
     batchGranularity?: BatchGranularity;
     concurrency?: number;
 } & ({ onBatch: (messages: Required<Message>[]) => unknown } | { onMessage: (message: Required<Message>) => unknown });
 
-export class Consumer {
+export class Consumer extends EventEmitter<{ offsetCommit: [] }> {
     private options: Required<ConsumerOptions>;
     private metadata: ConsumerMetadata;
     private consumerGroup: ConsumerGroup | undefined;
@@ -43,6 +46,8 @@ export class Consumer {
         private cluster: Cluster,
         options: ConsumerOptions,
     ) {
+        super();
+
         this.options = {
             ...options,
             groupId: options.groupId ?? null,
@@ -57,7 +62,6 @@ export class Consumer {
             isolationLevel: options.isolationLevel ?? IsolationLevel.READ_UNCOMMITTED,
             allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
             fromBeginning: options.fromBeginning ?? false,
-            retrier: options.retrier ?? defaultRetrier,
             batchGranularity: options.batchGranularity ?? 'partition',
             concurrency: options.concurrency ?? 1,
         };
@@ -80,8 +84,10 @@ export class Consumer {
                   offsetManager: this.offsetManager,
               })
             : undefined;
+        this.consumerGroup?.on('offsetCommit', () => this.emit('offsetCommit'));
     }
 
+    @trace()
     public async start(): Promise<void> {
         const { topics, allowTopicAutoCreation, fromBeginning } = this.options;
 
@@ -94,16 +100,17 @@ export class Consumer {
             await this.offsetManager.fetchOffsets({ fromBeginning });
             await this.consumerGroup?.join();
         } catch (error) {
-            console.error(error);
-            console.debug(`Restarting consumer in 1 second...`);
+            log.warn('Failed to start consumer', error);
+            log.debug(`Restarting consumer in 1 second...`);
             await delay(1000);
 
             if (this.stopHook) return (this.stopHook as () => void)();
             return this.close(true).then(() => this.start());
         }
-        setImmediate(() => this.startFetchManager());
+        this.startFetchManager();
     }
 
+    @trace()
     public async close(force = false): Promise<void> {
         if (!force) {
             await new Promise<void>(async (resolve) => {
@@ -111,22 +118,33 @@ export class Consumer {
                 await this.fetchManager?.stop();
             });
         }
-        await this.consumerGroup
-            ?.leaveGroup()
-            .catch((error) => console.warn(`Failed to leave group: ${error.message}`));
-        await this.cluster.disconnect().catch((error) => console.warn(`Failed to disconnect: ${error.message}`));
+        await this.consumerGroup?.leaveGroup().catch((error) => log.debug(`Failed to leave group: ${error.message}`));
+        await this.cluster.disconnect().catch((error) => log.debug(`Failed to disconnect: ${error.message}`));
     }
 
-    private startFetchManager = async () => {
+    private async startFetchManager() {
         const { batchGranularity, concurrency } = this.options;
 
         while (!this.stopHook) {
+            this.consumerGroup?.resetHeartbeat();
+
+            // TODO: If leader is not available, find another read replica
             const nodeAssignments = Object.entries(
-                distributeAssignmentsToNodes(
-                    this.metadata.getAssignment(),
-                    this.metadata.getTopicPartitionReplicaIds(),
+                distributeMessagesToTopicPartitionLeaders(
+                    Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) =>
+                        partitions.map((partition) => ({ topic, partition })),
+                    ),
+                    this.metadata.getTopicPartitionLeaderIds(),
                 ),
-            ).map(([nodeId, assignment]) => ({ nodeId: parseInt(nodeId), assignment }));
+            ).map(([nodeId, assignment]) => ({
+                nodeId: parseInt(nodeId),
+                assignment: Object.fromEntries(
+                    Object.entries(assignment).map(([topic, partitions]) => [
+                        topic,
+                        Object.keys(partitions).map(Number),
+                    ]),
+                ),
+            }));
 
             const numPartitions = Object.values(this.metadata.getAssignment()).flat().length;
             const numProcessors = Math.min(concurrency, numPartitions);
@@ -145,19 +163,19 @@ export class Consumer {
                 await this.fetchManager.start();
 
                 if (!nodeAssignments.length) {
-                    console.debug('No partitions assigned. Waiting for reassignment...');
+                    log.debug('No partitions assigned. Waiting for reassignment...');
                     await delay(this.options.maxWaitMs);
-                    await this.consumerGroup?.handleLastHeartbeat();
+                    this.consumerGroup?.handleLastHeartbeat();
                 }
             } catch (error) {
                 await this.fetchManager.stop();
 
                 if ((error as KafkaTSApiError).errorCode === API_ERROR.REBALANCE_IN_PROGRESS) {
-                    console.debug('Rebalance in progress...');
+                    log.debug('Rebalance in progress...');
                     continue;
                 }
                 if ((error as KafkaTSApiError).errorCode === API_ERROR.FENCED_INSTANCE_ID) {
-                    console.debug('New consumer with the same groupInstanceId joined. Exiting the consumer...');
+                    log.debug('New consumer with the same groupInstanceId joined. Exiting the consumer...');
                     this.close();
                     break;
                 }
@@ -165,42 +183,45 @@ export class Consumer {
                     error instanceof ConnectionError ||
                     (error instanceof KafkaTSApiError && error.errorCode === API_ERROR.NOT_COORDINATOR)
                 ) {
-                    console.debug(`${error.message}. Restarting consumer...`);
+                    log.debug(`${error.message}. Restarting consumer...`);
                     this.close().then(() => this.start());
                     break;
                 }
-                console.error(error);
+                log.error((error as Error).message, error);
                 this.close();
                 break;
             }
         }
        this.stopHook?.();
-    };
+    }
 
+    @trace((messages) => ({ count: messages.length }))
     private async process(messages: Required<Message>[]) {
         const { options } = this;
-        const { retrier } = options;
+
+        const topicPartitions: Record<string, Set<number>> = {};
+        for (const { topic, partition } of messages) {
+            topicPartitions[topic] ??= new Set();
+            topicPartitions[topic].add(partition);
+        }
 
         if ('onBatch' in options) {
-            await retrier(() => options.onBatch(messages));
+            await options.onBatch(messages);
 
             messages.forEach(({ topic, partition, offset }) =>
                 this.offsetManager.resolve(topic, partition, offset + 1n),
             );
         } else if ('onMessage' in options) {
-            try {
-                for (const message of messages) {
-                    await retrier(() => options.onMessage(message));
+            for (const message of messages) {
+                await options.onMessage(message);
 
-                    const { topic, partition, offset } = message;
-                    this.offsetManager.resolve(topic, partition, offset + 1n);
-                }
-            } catch (error) {
-                await this.consumerGroup?.offsetCommit().catch(() => {});
-                throw error;
+                const { topic, partition, offset } = message;
+                this.offsetManager.resolve(topic, partition, offset + 1n);
             }
         }
-        await this.consumerGroup?.offsetCommit();
+
+        await this.consumerGroup?.offsetCommit(topicPartitions);
+        this.offsetManager.flush(topicPartitions);
     }
 
     private fetch(nodeId: number, assignment: Assignment) {
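
The two consumer-facing changes in this file are easy to miss among the internals: the `retrier` option is gone, so handler retries are now the application's responsibility, and `Consumer` now extends `EventEmitter` and re-emits the consumer group's `offsetCommit` event. A minimal sketch of how calling code might adapt; the retry helper and `handleMessage` are hypothetical application code, and only the option names and the event name come from the hunks above:

import { setTimeout as sleep } from 'timers/promises';

// Hypothetical replacement for the removed `retrier` option: retry the handler
// a few times with a linear backoff before letting the error propagate.
const retry = async <T>(fn: () => T | Promise<T>, attempts = 3): Promise<T> => {
    for (let attempt = 1; ; attempt++) {
        try {
            return await fn();
        } catch (error) {
            if (attempt >= attempts) throw error;
            await sleep(attempt * 1000);
        }
    }
};

// Application handler (hypothetical).
const handleMessage = async (message: { topic: string; partition: number; offset: bigint }) => {
    /* ... */
};

// Options object shaped like ConsumerOptions above; the Consumer itself is
// created by the library's client factory, which is not shown in this diff.
const consumerOptions = {
    topics: ['my-topic'],
    groupId: 'my-group',
    concurrency: 4,
    onMessage: (message: { topic: string; partition: number; offset: bigint }) =>
        retry(() => handleMessage(message)),
};

// Once constructed, the consumer re-emits the group's commit notifications:
// consumer.on('offsetCommit', () => { /* offsets were committed */ });
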
package/src/consumer/fetch-manager.ts CHANGED
@@ -1,5 +1,4 @@
-import EventEmitter from 'events';
-import { API } from '../api';
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Metadata } from '../metadata';
 import { Batch, Message } from '../types';
@@ -14,7 +13,7 @@ const trace = createTracer('FetchManager');
 export type BatchGranularity = 'partition' | 'topic' | 'broker';
 
 type FetchManagerOptions = {
-    fetch: (nodeId: number, assignment: Assignment) => Promise<ReturnType<(typeof API.FETCH)['response']>>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
     process: (batch: Batch) => Promise<void>;
     metadata: Metadata;
     consumerGroup?: ConsumerGroup;
@@ -26,15 +25,15 @@ type FetchManagerOptions = {
 type Checkpoint = { kind: 'checkpoint'; fetcherId: number };
 type Entry = Batch | Checkpoint;
 
-export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number]; stop: [] }> {
+export class FetchManager {
     private queue: Entry[] = [];
     private isRunning = false;
     private fetchers: Fetcher[];
     private processors: Processor[];
+    private pollQueue: (() => void)[] = [];
+    private fetcherCallbacks: Record<number, () => void> = {};
 
     constructor(private options: FetchManagerOptions) {
-        super();
-
         const { fetch, process, consumerGroup, nodeAssignments, concurrency } = this.options;
 
         this.fetchers = nodeAssignments.map(
@@ -52,6 +51,7 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
         );
     }
 
+    @trace(() => ({ root: true }))
     public async start() {
         this.queue = [];
         this.isRunning = true;
@@ -62,20 +62,25 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
                 ...this.processors.map((processor) => processor.loop()),
             ]);
         } finally {
-            this.isRunning = false;
-            this.emit('stop');
+            await this.stop();
         }
     }
 
-    @trace()
     public async stop() {
         this.isRunning = false;
-        this.emit('stop');
 
-        await Promise.all([
+        const stopPromise = Promise.all([
            ...this.fetchers.map((fetcher) => fetcher.stop()),
            ...this.processors.map((processor) => processor.stop()),
        ]);
+
+        this.pollQueue.forEach((resolve) => resolve());
+        this.pollQueue = [];
+
+        Object.values(this.fetcherCallbacks).forEach((callback) => callback());
+        this.fetcherCallbacks = {};
+
+        await stopPromise;
    }
 
    @trace()
@@ -86,58 +91,43 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
 
        const batch = this.queue.shift();
        if (!batch) {
+            // wait until new data is available or fetch manager is requested to stop
            await new Promise<void>((resolve) => {
-                const onData = () => {
-                    this.removeListener('stop', onStop);
-                    resolve();
-                };
-                const onStop = () => {
-                    this.removeListener('data', onData);
-                    resolve();
-                };
-                this.once('data', onData);
-                this.once('stop', onStop);
+                this.pollQueue.push(resolve);
            });
            return this.poll();
        }
 
        if ('kind' in batch && batch.kind === 'checkpoint') {
-            this.emit('checkpoint', batch.fetcherId);
+            this.fetcherCallbacks[batch.fetcherId]?.();
            return this.poll();
        }
 
+        this.pollQueue?.shift()?.();
+
        return batch as Exclude<Entry, Checkpoint>;
    }
 
-    private async onResponse(fetcherId: number, response: ReturnType<(typeof API.FETCH)['response']>) {
+    @trace()
+    private async onResponse(fetcherId: number, response: FetchResponse) {
        const { metadata, batchGranularity } = this.options;
 
        const batches = fetchResponseToBatches(response, batchGranularity, metadata);
-        if (batches.length) {
-            this.queue.push(...batches);
-            this.queue.push({ kind: 'checkpoint', fetcherId });
-
-            this.emit('data');
-            await new Promise<void>((resolve) => {
-                const onCheckpoint = (id: number) => {
-                    if (id === fetcherId) {
-                        this.removeListener('stop', onStop);
-                        resolve();
-                    }
-                };
-                const onStop = () => {
-                    this.removeListener('checkpoint', onCheckpoint);
-                    resolve();
-                };
-                this.once('checkpoint', onCheckpoint);
-                this.once('stop', onStop);
-            });
+        if (!batches.length) {
+            return;
        }
+
+        // wait until all broker batches have been processed or fetch manager is requested to stop
+        await new Promise<void>((resolve) => {
+            this.fetcherCallbacks[fetcherId] = resolve;
+            this.queue.push(...batches, { kind: 'checkpoint', fetcherId });
+            this.pollQueue?.shift()?.();
+        });
    }
 }
 
 const fetchResponseToBatches = (
-    batch: ReturnType<typeof API.FETCH.response>,
+    batch: FetchResponse,
    batchGranularity: BatchGranularity,
    metadata: Metadata,
 ): Batch[] => {
@@ -170,9 +160,9 @@ const fetchResponseToBatches = (
                .map((topicPartition) => topicPartition.flatMap((partitionMessages) => partitionMessages))
                .filter((messages) => messages.length);
        case 'partition':
-            return brokerTopics.flatMap((topicPartition) =>
-                topicPartition.map((partitionMessages) => partitionMessages),
-            );
+            return brokerTopics
+                .flatMap((topicPartition) => topicPartition.map((partitionMessages) => partitionMessages))
+                .filter((messages) => messages.length);
        default:
            throw new KafkaTSError(`Unhandled batch granularity: ${batchGranularity}`);
    }
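
The FetchManager rewrite drops the `data`/`checkpoint`/`stop` EventEmitter wiring in favour of two plain callback lists: `pollQueue` parks blocked `poll()` calls and `fetcherCallbacks` parks fetchers waiting for their checkpoint, and `stop()` simply releases everything. A standalone sketch of that parking pattern, illustrative only and not the library's code:

// Blocked consumers park a resolve callback instead of subscribing to events;
// push() wakes one of them and close() wakes all of them exactly once.
class ParkingQueue<T> {
    private items: T[] = [];
    private waiters: (() => void)[] = [];

    push(item: T) {
        this.items.push(item);
        this.waiters.shift()?.(); // wake one parked poll(), if any
    }

    async poll(): Promise<T | undefined> {
        const item = this.items.shift();
        if (item !== undefined) return item;
        await new Promise<void>((resolve) => this.waiters.push(resolve));
        return this.items.shift(); // undefined means close() released us with no data
    }

    close() {
        this.waiters.forEach((resolve) => resolve());
        this.waiters = [];
    }
}
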
package/src/consumer/fetcher.ts CHANGED
@@ -1,5 +1,5 @@
 import { EventEmitter } from 'stream';
-import { API } from '../api';
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { createTracer } from '../utils/tracer';
 import { ConsumerGroup } from './consumer-group';
@@ -10,11 +10,11 @@ type FetcherOptions = {
     nodeId: number;
     assignment: Assignment;
     consumerGroup?: ConsumerGroup;
-    fetch: (nodeId: number, assignment: Assignment) => Promise<ReturnType<(typeof API.FETCH)['response']>>;
-    onResponse: (fetcherId: number, response: ReturnType<(typeof API.FETCH)['response']>) => Promise<void>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
+    onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
 };
 
-export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; drain: [] }> {
+export class Fetcher extends EventEmitter<{ stopped: [] }> {
     private isRunning = false;
 
     constructor(
@@ -25,17 +25,11 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; dra
     }
 
     public async loop() {
-        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
-
         this.isRunning = true;
-        this.once('stop', () => (this.isRunning = false));
 
         try {
             while (this.isRunning) {
-                const response = await fetch(nodeId, assignment);
-                await consumerGroup?.handleLastHeartbeat();
-                await onResponse(this.fetcherId, response);
-                await consumerGroup?.handleLastHeartbeat();
+                await this.step();
             }
         } finally {
             this.isRunning = false;
@@ -44,14 +38,27 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; dra
     }
 
     @trace()
+    private async step() {
+        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
+
+        const response = await fetch(nodeId, assignment);
+        if (!this.isRunning) {
+            return;
+        }
+        consumerGroup?.handleLastHeartbeat();
+        await onResponse(this.fetcherId, response);
+        consumerGroup?.handleLastHeartbeat();
+    }
+
     public async stop() {
         if (!this.isRunning) {
             return;
         }
 
-        this.emit('stop');
-        return new Promise<void>((resolve) => {
+        const stopPromise = new Promise<void>((resolve) => {
             this.once('stopped', resolve);
         });
+        this.isRunning = false;
+        return stopPromise;
     }
 }
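
Fetcher now stops cooperatively: `stop()` subscribes to `stopped`, flips `isRunning`, and the loop exits after the in-flight `step()` instead of reacting to a `stop` event (Processor gets the same treatment in its hunks further down). A reduced sketch of the pattern, separate from the library's classes:

import { EventEmitter } from 'stream';

// Cooperative shutdown: the loop re-checks a flag between steps and announces
// 'stopped' on exit, so stop() can await completion without extra events.
class Worker extends EventEmitter<{ stopped: [] }> {
    private isRunning = false;

    constructor(private step: () => Promise<void>) {
        super();
    }

    public async loop() {
        this.isRunning = true;
        try {
            while (this.isRunning) {
                await this.step();
            }
        } finally {
            this.isRunning = false;
            this.emit('stopped');
        }
    }

    public async stop() {
        if (!this.isRunning) return;
        const stopped = new Promise<void>((resolve) => this.once('stopped', resolve));
        this.isRunning = false; // loop exits once the current step finishes
        return stopped;
    }
}
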
package/src/consumer/offset-manager.ts CHANGED
@@ -3,8 +3,11 @@ import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
 import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
+import { createTracer } from '../utils/tracer';
 import { ConsumerMetadata } from './consumer-metadata';
 
+const trace = createTracer('OffsetManager');
+
 type OffsetManagerOptions = {
     cluster: Cluster;
     metadata: ConsumerMetadata;
@@ -24,13 +27,18 @@ export class OffsetManager {
     public resolve(topic: string, partition: number, offset: bigint) {
         this.pendingOffsets[topic] ??= {};
         this.pendingOffsets[topic][partition] = offset;
-
-        this.currentOffsets[topic] ??= {};
-        this.currentOffsets[topic][partition] = offset;
     }
 
-    public flush() {
-        this.pendingOffsets = {};
+    public flush(topicPartitions: Record<string, Set<number>>) {
+        Object.entries(topicPartitions).forEach(([topic, partitions]) => {
+            this.currentOffsets[topic] ??= {};
+            partitions.forEach((partition) => {
+                if (this.pendingOffsets[topic]?.[partition]) {
+                    this.currentOffsets[topic][partition] = this.pendingOffsets[topic][partition];
+                    delete this.pendingOffsets[topic][partition];
+                }
+            });
+        });
     }
 
     public async fetchOffsets(options: { fromBeginning: boolean }) {
@@ -58,7 +66,6 @@ export class OffsetManager {
                }),
            ),
        );
-        this.flush();
    }
 
    private async listOffsets({
@@ -83,11 +90,15 @@ export class OffsetManager {
            })),
        });
 
+        const topicPartitions: Record<string, Set<number>> = {};
        offsets.topics.forEach(({ name, partitions }) => {
+            topicPartitions[name] ??= new Set();
            partitions.forEach(({ partitionIndex, offset }) => {
+                topicPartitions[name].add(partitionIndex);
                this.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
            });
        });
-        this.flush();
+
+        this.flush(topicPartitions);
    }
 }
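
`flush()` no longer clears all pending offsets in one go; it now promotes only the pending offsets for the topic partitions that were actually committed, leaving offsets resolved concurrently by other processors pending. A self-contained illustration of the new semantics with sample data (not the library's own state):

const pendingOffsets: Record<string, Record<number, bigint>> = {
    'my-topic': { 0: 10n, 1: 7n },
};
const currentOffsets: Record<string, Record<number, bigint>> = {};

// Mirrors flush(topicPartitions) above: only the listed partitions are promoted.
const flush = (topicPartitions: Record<string, Set<number>>) => {
    Object.entries(topicPartitions).forEach(([topic, partitions]) => {
        currentOffsets[topic] ??= {};
        partitions.forEach((partition) => {
            if (pendingOffsets[topic]?.[partition]) {
                currentOffsets[topic][partition] = pendingOffsets[topic][partition];
                delete pendingOffsets[topic][partition];
            }
        });
    });
};

flush({ 'my-topic': new Set([0]) });
// currentOffsets -> { 'my-topic': { 0: 10n } }
// pendingOffsets -> { 'my-topic': { 1: 7n } }   (partition 1 is still pending)
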
package/src/consumer/processor.ts CHANGED
@@ -9,7 +9,7 @@ type ProcessorOptions = {
     process: (batch: Batch) => Promise<void>;
 };
 
-export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
+export class Processor extends EventEmitter<{ stopped: [] }> {
     private isRunning = false;
 
     constructor(private options: ProcessorOptions) {
@@ -17,15 +17,11 @@ export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
     }
 
     public async loop() {
-        const { poll, process } = this.options;
-
         this.isRunning = true;
-        this.once('stop', () => (this.isRunning = false));
 
         try {
             while (this.isRunning) {
-                const batch = await poll();
-                await process(batch);
+                await this.step();
             }
         } finally {
             this.isRunning = false;
@@ -34,14 +30,24 @@ export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
     }
 
     @trace()
+    private async step() {
+        const { poll, process } = this.options;
+
+        const batch = await poll();
+        if (batch.length) {
+            await process(batch);
+        }
+    }
+
     public async stop() {
         if (!this.isRunning) {
             return;
         }
 
-        return new Promise<void>((resolve) => {
+        const stopPromise = new Promise<void>((resolve) => {
             this.once('stopped', resolve);
-            this.emit('stop');
         });
+        this.isRunning = false;
+        return stopPromise;
     }
 }
package/src/distributors/assignments-to-replicas.ts CHANGED
@@ -1,6 +1,6 @@
 type Assignment = { [topicName: string]: number[] };
 type TopicPartitionReplicaIds = { [topicName: string]: { [partition: number]: number[] } };
-export type NodeAssignment = { [replicaId: number]: Assignment };
+type NodeAssignment = { [replicaId: number]: Assignment };
 
 /** From replica ids pick the one with fewest assignments to balance the load across brokers */
 export const distributeAssignmentsToNodesBalanced = (
@@ -81,5 +81,3 @@ const getPartitionsByReplica = (assignment: Assignment, topicPartitionReplicaIds
     }
     return Object.entries(partitionsByReplicaId);
 };
-
-export const distributeAssignmentsToNodes = distributeAssignmentsToNodesBalanced;
package/src/index.ts CHANGED
@@ -5,3 +5,5 @@ export * from './client';
 export * from './distributors/partitioner';
 export * from './types';
 export * from './utils/error';
+export * from './utils/logger';
+export { Tracer, setTracer } from './utils/tracer';
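
These new exports make the package's logger and tracer hooks part of the public API. A small sketch of what that enables; the import path assumes the published package name, and only the `log` methods that appear elsewhere in this diff are used:

import { log, setTracer } from 'kafka-ts';

// log.debug/log.warn/log.error are the calls the library itself uses above;
// warn and error accept an optional second argument with extra context.
log.debug('Connecting to Kafka...');
log.warn('Fetch failed, retrying', new Error('timeout'));

// setTracer(customTracer) would install a custom Tracer implementation; its
// exact interface lives in src/utils/tracer.ts and is not shown in this diff.
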
package/src/metadata.ts CHANGED
@@ -2,6 +2,9 @@ import { API, API_ERROR } from './api';
 import { Cluster } from './cluster';
 import { delay } from './utils/delay';
 import { KafkaTSApiError } from './utils/error';
+import { createTracer } from './utils/tracer';
+
+const trace = createTracer('Metadata');
 
 type MetadataOptions = {
     cluster: Cluster;
@@ -36,6 +39,7 @@ export class Metadata {
         return this.topicNameById[id];
     }
 
+    @trace()
     public async fetchMetadataIfNecessary({
         topics,
         allowTopicAutoCreation,
package/src/producer/producer.ts CHANGED
@@ -7,6 +7,9 @@ import { Message } from '../types';
 import { delay } from '../utils/delay';
 import { KafkaTSApiError } from '../utils/error';
 import { memo } from '../utils/memo';
+import { createTracer } from '../utils/tracer';
+
+const trace = createTracer('Producer');
 
 export type ProducerOptions = {
     allowTopicAutoCreation?: boolean;
@@ -34,7 +37,8 @@ export class Producer {
         this.partition = this.options.partitioner({ metadata: this.metadata });
     }
 
-    public async send(messages: Message[]) {
+    @trace(() => ({ root: true }))
+    public async send(messages: Message[], { acks = -1 }: { acks?: -1 | 1 } = {}) {
         await this.ensureConnected();
 
         const { allowTopicAutoCreation } = this.options;
@@ -44,19 +48,20 @@ export class Producer {
         await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
 
         const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
-            messages.map(message => ({ ...message, partition: this.partition(message) })),
+            messages.map((message) => ({ ...message, partition: this.partition(message) })),
             this.metadata.getTopicPartitionLeaderIds(),
         );
 
         await Promise.all(
-            Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
-                await this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
+            Object.entries(nodeTopicPartitionMessages).map(([nodeId, topicPartitionMessages]) =>
+                this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
                     transactionalId: null,
-                    acks: 1,
+                    acks,
                     timeoutMs: 5000,
                     topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
                         name: topic,
                         partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+                            const partitionIndex = parseInt(partition);
                             let baseTimestamp: bigint | undefined;
                             let maxTimestamp: bigint | undefined;
 
@@ -69,9 +74,9 @@
                                 }
                             });
 
-                            const baseSequence = this.nextSequence(topic, parseInt(partition), messages.length);
+                            const baseSequence = this.nextSequence(topic, partitionIndex, messages.length);
                             return {
-                                index: parseInt(partition),
+                                index: partitionIndex,
                                 baseOffset: 0n,
                                 partitionLeaderEpoch: -1,
                                 attributes: 0,
@@ -95,8 +100,8 @@
                             };
                         }),
                     })),
-                });
-            }),
+                }),
+            ),
         );
     }
 
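
`send()` now takes an options argument: `acks` defaults to -1 (wait for all in-sync replicas) instead of the previously hard-coded `acks: 1` (leader only). A hedged usage sketch; `producer` stands for whatever Producer instance the client factory returns, and the message fields are illustrative rather than the exact `Message` type:

declare const producer: {
    send(messages: { topic: string; key?: string; value: string }[], options?: { acks?: -1 | 1 }): Promise<void>;
};

// Default: acks -1, i.e. wait for all in-sync replicas before resolving.
await producer.send([{ topic: 'my-topic', key: 'k1', value: 'hello' }]);

// Lower-latency option: acknowledge as soon as the partition leader has the batch.
await producer.send([{ topic: 'my-topic', key: 'k1', value: 'hello' }], { acks: 1 });
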
package/src/utils/api.ts CHANGED
@@ -5,7 +5,7 @@ export type Api<Request, Response> = {
     apiKey: number;
     apiVersion: number;
     request: (encoder: Encoder, body: Request) => Encoder;
-    response: (buffer: Decoder) => Response;
+    response: (buffer: Decoder) => Promise<Response> | Response;
 };
 
 export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
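
The only change here is that `response()` may now return a Promise, which allows response decoding to do asynchronous work such as decompressing record batches with the newly added codecs. A minimal sketch of an Api definition under the widened contract; the apiKey/apiVersion values and the decoded shape are placeholders:

import { createApi } from './utils/api';

// Placeholder request/response pair: the point is only that `response` may be
// async now. Real definitions encode the request body and decode real fields.
const EXAMPLE_API = createApi({
    apiKey: 0,
    apiVersion: 0,
    request: (encoder) => encoder,
    response: async (_decoder) => ({ ok: true }),
});
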