kafka-ts 0.0.1-beta.3 → 0.0.1-beta.6
- package/.github/workflows/release.yml +19 -6
- package/README.md +15 -21
- package/dist/api/api-versions.d.ts +9 -0
- package/dist/api/api-versions.js +24 -0
- package/dist/api/create-topics.d.ts +38 -0
- package/dist/api/create-topics.js +53 -0
- package/dist/api/delete-topics.d.ts +18 -0
- package/dist/api/delete-topics.js +33 -0
- package/dist/api/fetch.d.ts +84 -0
- package/dist/api/fetch.js +142 -0
- package/dist/api/find-coordinator.d.ts +21 -0
- package/dist/api/find-coordinator.js +39 -0
- package/dist/api/heartbeat.d.ts +11 -0
- package/dist/api/heartbeat.js +27 -0
- package/dist/api/index.d.ts +578 -0
- package/dist/api/index.js +165 -0
- package/dist/api/init-producer-id.d.ts +13 -0
- package/dist/api/init-producer-id.js +29 -0
- package/dist/api/join-group.d.ts +34 -0
- package/dist/api/join-group.js +51 -0
- package/dist/api/leave-group.d.ts +19 -0
- package/dist/api/leave-group.js +39 -0
- package/dist/api/list-offsets.d.ts +29 -0
- package/dist/api/list-offsets.js +48 -0
- package/dist/api/metadata.d.ts +40 -0
- package/dist/api/metadata.js +58 -0
- package/dist/api/offset-commit.d.ts +28 -0
- package/dist/api/offset-commit.js +48 -0
- package/dist/api/offset-fetch.d.ts +33 -0
- package/dist/api/offset-fetch.js +57 -0
- package/dist/api/produce.d.ts +54 -0
- package/dist/api/produce.js +126 -0
- package/dist/api/sasl-authenticate.d.ts +11 -0
- package/dist/api/sasl-authenticate.js +23 -0
- package/dist/api/sasl-handshake.d.ts +6 -0
- package/dist/api/sasl-handshake.js +19 -0
- package/dist/api/sync-group.d.ts +24 -0
- package/dist/api/sync-group.js +36 -0
- package/dist/auth/index.d.ts +2 -0
- package/dist/auth/index.js +8 -0
- package/dist/auth/plain.d.ts +5 -0
- package/dist/auth/plain.js +12 -0
- package/dist/auth/scram.d.ts +9 -0
- package/dist/auth/scram.js +40 -0
- package/dist/broker.d.ts +30 -0
- package/dist/broker.js +55 -0
- package/dist/client.d.ts +23 -0
- package/dist/client.js +36 -0
- package/dist/cluster.d.ts +27 -0
- package/dist/cluster.js +70 -0
- package/dist/cluster.test.d.ts +1 -0
- package/dist/cluster.test.js +345 -0
- package/dist/codecs/gzip.d.ts +2 -0
- package/dist/codecs/gzip.js +8 -0
- package/dist/codecs/index.d.ts +2 -0
- package/dist/codecs/index.js +17 -0
- package/dist/codecs/none.d.ts +2 -0
- package/dist/codecs/none.js +7 -0
- package/dist/codecs/types.d.ts +5 -0
- package/dist/codecs/types.js +2 -0
- package/dist/connection.d.ts +26 -0
- package/dist/connection.js +175 -0
- package/dist/consumer/consumer-group.d.ts +41 -0
- package/dist/consumer/consumer-group.js +217 -0
- package/dist/consumer/consumer-metadata.d.ts +7 -0
- package/dist/consumer/consumer-metadata.js +14 -0
- package/dist/consumer/consumer.d.ts +44 -0
- package/dist/consumer/consumer.js +225 -0
- package/dist/consumer/fetch-manager.d.ts +33 -0
- package/dist/consumer/fetch-manager.js +140 -0
- package/dist/consumer/fetcher.d.ts +25 -0
- package/dist/consumer/fetcher.js +64 -0
- package/dist/consumer/offset-manager.d.ts +22 -0
- package/dist/consumer/offset-manager.js +66 -0
- package/dist/consumer/processor.d.ts +19 -0
- package/dist/consumer/processor.js +59 -0
- package/dist/distributors/assignments-to-replicas.d.ts +16 -0
- package/dist/distributors/assignments-to-replicas.js +59 -0
- package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
- package/dist/distributors/assignments-to-replicas.test.js +40 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/distributors/partitioner.d.ts +7 -0
- package/dist/distributors/partitioner.js +23 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +26 -0
- package/dist/metadata.d.ts +24 -0
- package/dist/metadata.js +106 -0
- package/dist/producer/producer.d.ts +24 -0
- package/dist/producer/producer.js +131 -0
- package/dist/types.d.ts +11 -0
- package/dist/types.js +2 -0
- package/dist/utils/api.d.ts +9 -0
- package/dist/utils/api.js +5 -0
- package/dist/utils/crypto.d.ts +8 -0
- package/dist/utils/crypto.js +18 -0
- package/dist/utils/decoder.d.ts +30 -0
- package/dist/utils/decoder.js +152 -0
- package/dist/utils/delay.d.ts +1 -0
- package/dist/utils/delay.js +5 -0
- package/dist/utils/encoder.d.ts +28 -0
- package/dist/utils/encoder.js +125 -0
- package/dist/utils/error.d.ts +11 -0
- package/dist/utils/error.js +27 -0
- package/dist/utils/logger.d.ts +9 -0
- package/dist/utils/logger.js +32 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/murmur2.d.ts +3 -0
- package/dist/utils/murmur2.js +40 -0
- package/dist/utils/retrier.d.ts +10 -0
- package/dist/utils/retrier.js +22 -0
- package/dist/utils/tracer.d.ts +5 -0
- package/dist/utils/tracer.js +39 -0
- package/docker-compose.yml +3 -3
- package/examples/package-lock.json +3501 -3
- package/examples/package.json +8 -1
- package/examples/src/benchmark/common.ts +98 -0
- package/examples/src/benchmark/kafka-ts.ts +67 -0
- package/examples/src/benchmark/kafkajs.ts +51 -0
- package/examples/src/client.ts +4 -1
- package/examples/src/consumer.ts +7 -1
- package/examples/src/create-topic.ts +3 -3
- package/examples/src/opentelemetry.ts +46 -0
- package/examples/src/producer.ts +11 -11
- package/examples/src/replicator.ts +2 -1
- package/package.json +4 -2
- package/scripts/create-scram-user.sh +4 -2
- package/scripts/generate-certs.sh +2 -0
- package/src/__snapshots__/cluster.test.ts.snap +160 -53
- package/src/api/fetch.ts +83 -28
- package/src/api/index.ts +3 -1
- package/src/api/metadata.ts +1 -1
- package/src/api/produce.ts +7 -10
- package/src/cluster.test.ts +10 -7
- package/src/cluster.ts +36 -38
- package/src/codecs/gzip.ts +9 -0
- package/src/codecs/index.ts +16 -0
- package/src/codecs/none.ts +6 -0
- package/src/codecs/types.ts +4 -0
- package/src/connection.ts +31 -17
- package/src/consumer/consumer-group.ts +43 -21
- package/src/consumer/consumer.ts +58 -37
- package/src/consumer/fetch-manager.ts +36 -46
- package/src/consumer/fetcher.ts +20 -13
- package/src/consumer/offset-manager.ts +18 -7
- package/src/consumer/processor.ts +14 -8
- package/src/distributors/assignments-to-replicas.ts +1 -3
- package/src/index.ts +2 -0
- package/src/metadata.ts +4 -0
- package/src/producer/producer.ts +14 -9
- package/src/utils/api.ts +1 -1
- package/src/utils/decoder.ts +9 -3
- package/src/utils/encoder.ts +26 -19
- package/src/utils/logger.ts +37 -0
- package/src/utils/tracer.ts +40 -22
- package/certs/ca.key +0 -52
- package/certs/ca.srl +0 -1
- package/certs/kafka.crt +0 -29
- package/certs/kafka.csr +0 -26
- package/certs/kafka.key +0 -52
- package/src/utils/debug.ts +0 -9
package/src/consumer/consumer.ts
CHANGED
```diff
@@ -1,17 +1,21 @@
+import EventEmitter from 'events';
 import { API, API_ERROR } from '../api';
 import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
-import {
+import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
 import { Message } from '../types';
 import { delay } from '../utils/delay';
 import { ConnectionError, KafkaTSApiError } from '../utils/error';
-import {
+import { log } from '../utils/logger';
+import { createTracer } from '../utils/tracer';
 import { ConsumerGroup } from './consumer-group';
 import { ConsumerMetadata } from './consumer-metadata';
-import {
+import { BatchGranularity, FetchManager } from './fetch-manager';
 import { OffsetManager } from './offset-manager';
 
+const trace = createTracer('Consumer');
+
 export type ConsumerOptions = {
     topics: string[];
     groupId?: string | null;
@@ -26,12 +30,11 @@ export type ConsumerOptions = {
     partitionMaxBytes?: number;
     allowTopicAutoCreation?: boolean;
     fromBeginning?: boolean;
-    retrier?: Retrier;
     batchGranularity?: BatchGranularity;
     concurrency?: number;
 } & ({ onBatch: (messages: Required<Message>[]) => unknown } | { onMessage: (message: Required<Message>) => unknown });
 
-export class Consumer {
+export class Consumer extends EventEmitter<{ offsetCommit: [] }> {
     private options: Required<ConsumerOptions>;
     private metadata: ConsumerMetadata;
     private consumerGroup: ConsumerGroup | undefined;
@@ -43,6 +46,8 @@ export class Consumer {
         private cluster: Cluster,
         options: ConsumerOptions,
     ) {
+        super();
+
         this.options = {
             ...options,
             groupId: options.groupId ?? null,
@@ -57,7 +62,6 @@ export class Consumer {
             isolationLevel: options.isolationLevel ?? IsolationLevel.READ_UNCOMMITTED,
             allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
             fromBeginning: options.fromBeginning ?? false,
-            retrier: options.retrier ?? defaultRetrier,
             batchGranularity: options.batchGranularity ?? 'partition',
             concurrency: options.concurrency ?? 1,
         };
@@ -80,8 +84,10 @@ export class Consumer {
                   offsetManager: this.offsetManager,
               })
             : undefined;
+        this.consumerGroup?.on('offsetCommit', () => this.emit('offsetCommit'));
     }
 
+    @trace()
     public async start(): Promise<void> {
         const { topics, allowTopicAutoCreation, fromBeginning } = this.options;
 
@@ -94,16 +100,17 @@ export class Consumer {
             await this.offsetManager.fetchOffsets({ fromBeginning });
             await this.consumerGroup?.join();
         } catch (error) {
-
-
+            log.warn('Failed to start consumer', error);
+            log.debug(`Restarting consumer in 1 second...`);
             await delay(1000);
 
             if (this.stopHook) return (this.stopHook as () => void)();
             return this.close(true).then(() => this.start());
         }
-
+        this.startFetchManager();
     }
 
+    @trace()
     public async close(force = false): Promise<void> {
         if (!force) {
             await new Promise<void>(async (resolve) => {
@@ -111,22 +118,33 @@ export class Consumer {
                 await this.fetchManager?.stop();
             });
         }
-        await this.consumerGroup
-            ?.leaveGroup()
-            .catch((error) => console.warn(`Failed to leave group: ${error.message}`));
-        await this.cluster.disconnect().catch((error) => console.warn(`Failed to disconnect: ${error.message}`));
+        await this.consumerGroup?.leaveGroup().catch((error) => log.debug(`Failed to leave group: ${error.message}`));
+        await this.cluster.disconnect().catch((error) => log.debug(`Failed to disconnect: ${error.message}`));
     }
 
-    private
+    private async startFetchManager() {
         const { batchGranularity, concurrency } = this.options;
 
         while (!this.stopHook) {
+            this.consumerGroup?.resetHeartbeat();
+
+            // TODO: If leader is not available, find another read replica
             const nodeAssignments = Object.entries(
-
-                this.metadata.getAssignment(),
-
+                distributeMessagesToTopicPartitionLeaders(
+                    Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) =>
+                        partitions.map((partition) => ({ topic, partition })),
+                    ),
+                    this.metadata.getTopicPartitionLeaderIds(),
                 ),
-            ).map(([nodeId, assignment]) => ({
+            ).map(([nodeId, assignment]) => ({
+                nodeId: parseInt(nodeId),
+                assignment: Object.fromEntries(
+                    Object.entries(assignment).map(([topic, partitions]) => [
+                        topic,
+                        Object.keys(partitions).map(Number),
+                    ]),
+                ),
+            }));
 
             const numPartitions = Object.values(this.metadata.getAssignment()).flat().length;
             const numProcessors = Math.min(concurrency, numPartitions);
@@ -145,19 +163,19 @@ export class Consumer {
                 await this.fetchManager.start();
 
                 if (!nodeAssignments.length) {
-
+                    log.debug('No partitions assigned. Waiting for reassignment...');
                     await delay(this.options.maxWaitMs);
-
+                    this.consumerGroup?.handleLastHeartbeat();
                 }
             } catch (error) {
                 await this.fetchManager.stop();
 
                 if ((error as KafkaTSApiError).errorCode === API_ERROR.REBALANCE_IN_PROGRESS) {
-
+                    log.debug('Rebalance in progress...');
                     continue;
                 }
                 if ((error as KafkaTSApiError).errorCode === API_ERROR.FENCED_INSTANCE_ID) {
-
+                    log.debug('New consumer with the same groupInstanceId joined. Exiting the consumer...');
                     this.close();
                     break;
                 }
@@ -165,42 +183,45 @@ export class Consumer {
                     error instanceof ConnectionError ||
                     (error instanceof KafkaTSApiError && error.errorCode === API_ERROR.NOT_COORDINATOR)
                 ) {
-
+                    log.debug(`${error.message}. Restarting consumer...`);
                     this.close().then(() => this.start());
                     break;
                 }
-
+                log.error((error as Error).message, error);
                 this.close();
                 break;
             }
         }
         this.stopHook?.();
-    }
+    }
 
+    @trace((messages) => ({ count: messages.length }))
     private async process(messages: Required<Message>[]) {
         const { options } = this;
-
+
+        const topicPartitions: Record<string, Set<number>> = {};
+        for (const { topic, partition } of messages) {
+            topicPartitions[topic] ??= new Set();
+            topicPartitions[topic].add(partition);
+        }
 
         if ('onBatch' in options) {
-            await
+            await options.onBatch(messages);
 
             messages.forEach(({ topic, partition, offset }) =>
                 this.offsetManager.resolve(topic, partition, offset + 1n),
             );
         } else if ('onMessage' in options) {
-
-
-            await retrier(() => options.onMessage(message));
+            for (const message of messages) {
+                await options.onMessage(message);
 
-
-
-            }
-        } catch (error) {
-            await this.consumerGroup?.offsetCommit().catch(() => {});
-            throw error;
+                const { topic, partition, offset } = message;
+                this.offsetManager.resolve(topic, partition, offset + 1n);
             }
         }
-
+
+        await this.consumerGroup?.offsetCommit(topicPartitions);
+        this.offsetManager.flush(topicPartitions);
     }
 
     private fetch(nodeId: number, assignment: Assignment) {
```
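Worth calling out from the `consumer.ts` diff: `Consumer` now extends `EventEmitter<{ offsetCommit: [] }>` and re-emits the consumer group's `offsetCommit` event, while the `retrier` option is gone; offsets are resolved and committed only after `onBatch`/`onMessage` return. A hedged usage sketch follows. How a `Consumer` instance is obtained is not part of this diff, so it is declared rather than constructed, and the export path is assumed:

```typescript
import { Consumer } from 'kafka-ts'; // export path assumed, not shown in this diff

declare const consumer: Consumer; // obtained via the library's client/cluster factory

// New in beta.6: successful offset commits are observable through the
// EventEmitter interface that Consumer now inherits.
consumer.on('offsetCommit', () => {
    console.log('offsets committed');
});

await consumer.start();
```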
package/src/consumer/fetch-manager.ts
CHANGED
```diff
@@ -1,5 +1,4 @@
-import
-import { API } from '../api';
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Metadata } from '../metadata';
 import { Batch, Message } from '../types';
@@ -14,7 +13,7 @@ const trace = createTracer('FetchManager');
 export type BatchGranularity = 'partition' | 'topic' | 'broker';
 
 type FetchManagerOptions = {
-    fetch: (nodeId: number, assignment: Assignment) => Promise<
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
     process: (batch: Batch) => Promise<void>;
     metadata: Metadata;
     consumerGroup?: ConsumerGroup;
@@ -26,15 +25,15 @@ type FetchManagerOptions = {
 type Checkpoint = { kind: 'checkpoint'; fetcherId: number };
 type Entry = Batch | Checkpoint;
 
-export class FetchManager
+export class FetchManager {
     private queue: Entry[] = [];
     private isRunning = false;
     private fetchers: Fetcher[];
     private processors: Processor[];
+    private pollQueue: (() => void)[] = [];
+    private fetcherCallbacks: Record<number, () => void> = {};
 
     constructor(private options: FetchManagerOptions) {
-        super();
-
         const { fetch, process, consumerGroup, nodeAssignments, concurrency } = this.options;
 
         this.fetchers = nodeAssignments.map(
@@ -52,6 +51,7 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
         );
     }
 
+    @trace(() => ({ root: true }))
     public async start() {
         this.queue = [];
         this.isRunning = true;
@@ -62,20 +62,25 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
                 ...this.processors.map((processor) => processor.loop()),
             ]);
         } finally {
-            this.
-            this.emit('stop');
+            await this.stop();
         }
     }
 
-    @trace()
     public async stop() {
         this.isRunning = false;
-        this.emit('stop');
 
-
+        const stopPromise = Promise.all([
             ...this.fetchers.map((fetcher) => fetcher.stop()),
             ...this.processors.map((processor) => processor.stop()),
         ]);
+
+        this.pollQueue.forEach((resolve) => resolve());
+        this.pollQueue = [];
+
+        Object.values(this.fetcherCallbacks).forEach((callback) => callback());
+        this.fetcherCallbacks = {};
+
+        await stopPromise;
     }
 
     @trace()
@@ -86,58 +91,43 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
 
         const batch = this.queue.shift();
         if (!batch) {
+            // wait until new data is available or fetch manager is requested to stop
            await new Promise<void>((resolve) => {
-                const onData = () => {
-                    this.removeListener('stop', onStop);
-                    resolve();
-                };
-                const onStop = () => {
-                    this.removeListener('data', onData);
-                    resolve();
-                };
-                this.once('data', onData);
-                this.once('stop', onStop);
+                this.pollQueue.push(resolve);
            });
            return this.poll();
        }
 
        if ('kind' in batch && batch.kind === 'checkpoint') {
-            this.
+            this.fetcherCallbacks[batch.fetcherId]?.();
            return this.poll();
        }
 
+        this.pollQueue?.shift()?.();
+
        return batch as Exclude<Entry, Checkpoint>;
    }
 
-
+    @trace()
+    private async onResponse(fetcherId: number, response: FetchResponse) {
        const { metadata, batchGranularity } = this.options;
 
        const batches = fetchResponseToBatches(response, batchGranularity, metadata);
-        if (batches.length) {
-
-            this.queue.push({ kind: 'checkpoint', fetcherId });
-
-            this.emit('data');
-            await new Promise<void>((resolve) => {
-                const onCheckpoint = (id: number) => {
-                    if (id === fetcherId) {
-                        this.removeListener('stop', onStop);
-                        resolve();
-                    }
-                };
-                const onStop = () => {
-                    this.removeListener('checkpoint', onCheckpoint);
-                    resolve();
-                };
-                this.once('checkpoint', onCheckpoint);
-                this.once('stop', onStop);
-            });
+        if (!batches.length) {
+            return;
        }
+
+        // wait until all broker batches have been processed or fetch manager is requested to stop
+        await new Promise<void>((resolve) => {
+            this.fetcherCallbacks[fetcherId] = resolve;
+            this.queue.push(...batches, { kind: 'checkpoint', fetcherId });
+            this.pollQueue?.shift()?.();
+        });
    }
 }
 
 const fetchResponseToBatches = (
-    batch:
+    batch: FetchResponse,
     batchGranularity: BatchGranularity,
     metadata: Metadata,
 ): Batch[] => {
@@ -170,9 +160,9 @@ const fetchResponseToBatches = (
             .map((topicPartition) => topicPartition.flatMap((partitionMessages) => partitionMessages))
             .filter((messages) => messages.length);
         case 'partition':
-            return brokerTopics
-                topicPartition.map((partitionMessages) => partitionMessages)
-
+            return brokerTopics
+                .flatMap((topicPartition) => topicPartition.map((partitionMessages) => partitionMessages))
+                .filter((messages) => messages.length);
         default:
             throw new KafkaTSError(`Unhandled batch granularity: ${batchGranularity}`);
     }
```
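The `FetchManager` rewrite replaces EventEmitter signaling (`data`/`checkpoint`/`stop` events with paired listeners that had to clean each other up) with two plain resolver queues: `pollQueue` for consumers waiting on data and `fetcherCallbacks` for fetchers waiting on their checkpoint. A minimal standalone sketch of the pattern, not the library's public API:

```typescript
// Resolver-queue pattern in isolation: waiters park their promise resolvers,
// producers wake one waiter per item, and shutdown wakes everyone at once
// without any listener bookkeeping.
class ResolverQueue {
    private waiters: (() => void)[] = [];

    wait(): Promise<void> {
        return new Promise<void>((resolve) => this.waiters.push(resolve));
    }

    wakeOne(): void {
        this.waiters.shift()?.(); // mirrors this.pollQueue?.shift()?.() in the diff
    }

    wakeAll(): void {
        this.waiters.forEach((resolve) => resolve()); // mirrors stop() in the diff
        this.waiters = [];
    }
}
```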
package/src/consumer/fetcher.ts
CHANGED
```diff
@@ -1,5 +1,5 @@
 import { EventEmitter } from 'stream';
-import {
+import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { createTracer } from '../utils/tracer';
 import { ConsumerGroup } from './consumer-group';
@@ -10,11 +10,11 @@ type FetcherOptions = {
     nodeId: number;
     assignment: Assignment;
     consumerGroup?: ConsumerGroup;
-    fetch: (nodeId: number, assignment: Assignment) => Promise<
-    onResponse: (fetcherId: number, response:
+    fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
+    onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
 };
 
-export class Fetcher extends EventEmitter<{
+export class Fetcher extends EventEmitter<{ stopped: [] }> {
     private isRunning = false;
 
     constructor(
@@ -25,17 +25,11 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; dra
     }
 
     public async loop() {
-        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
-
         this.isRunning = true;
-        this.once('stop', () => (this.isRunning = false));
 
         try {
             while (this.isRunning) {
-                const response = await fetch(nodeId, assignment);
-                await consumerGroup?.handleLastHeartbeat();
-                await onResponse(this.fetcherId, response);
-                await consumerGroup?.handleLastHeartbeat();
+                await this.step();
             }
         } finally {
             this.isRunning = false;
@@ -44,14 +38,27 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; dra
     }
 
     @trace()
+    private async step() {
+        const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
+
+        const response = await fetch(nodeId, assignment);
+        if (!this.isRunning) {
+            return;
+        }
+        consumerGroup?.handleLastHeartbeat();
+        await onResponse(this.fetcherId, response);
+        consumerGroup?.handleLastHeartbeat();
+    }
+
     public async stop() {
         if (!this.isRunning) {
             return;
         }
 
-
-        return new Promise<void>((resolve) => {
+        const stopPromise = new Promise<void>((resolve) => {
             this.once('stopped', resolve);
         });
+        this.isRunning = false;
+        return stopPromise;
     }
 }
```
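Note the ordering in the new `Fetcher.stop()`: the `'stopped'` listener is registered before `isRunning` is flipped, so the loop's final notification cannot be missed. A self-contained sketch of the same handshake, with the fetch/heartbeat work reduced to a timer (the `emit('stopped')` in `loop()` is an assumption, since that line sits outside the hunks shown):

```typescript
import { EventEmitter } from 'stream'; // same import the diff uses

class Loop extends EventEmitter<{ stopped: [] }> {
    private isRunning = false;

    public async loop() {
        this.isRunning = true;
        try {
            while (this.isRunning) {
                await new Promise((resolve) => setTimeout(resolve, 10)); // one unit of work
            }
        } finally {
            this.isRunning = false;
            this.emit('stopped'); // assumed: how loop() signals stop() in the real class
        }
    }

    public async stop() {
        if (!this.isRunning) return;
        // Subscribe first, then signal the loop to exit.
        const stopPromise = new Promise<void>((resolve) => this.once('stopped', resolve));
        this.isRunning = false;
        return stopPromise;
    }
}
```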
package/src/consumer/offset-manager.ts
CHANGED
```diff
@@ -3,8 +3,11 @@ import { IsolationLevel } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
 import { Cluster } from '../cluster';
 import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
+import { createTracer } from '../utils/tracer';
 import { ConsumerMetadata } from './consumer-metadata';
 
+const trace = createTracer('OffsetManager');
+
 type OffsetManagerOptions = {
     cluster: Cluster;
     metadata: ConsumerMetadata;
@@ -24,13 +27,18 @@ export class OffsetManager {
     public resolve(topic: string, partition: number, offset: bigint) {
         this.pendingOffsets[topic] ??= {};
         this.pendingOffsets[topic][partition] = offset;
-
-        this.currentOffsets[topic] ??= {};
-        this.currentOffsets[topic][partition] = offset;
     }
 
-    public flush() {
-
+    public flush(topicPartitions: Record<string, Set<number>>) {
+        Object.entries(topicPartitions).forEach(([topic, partitions]) => {
+            this.currentOffsets[topic] ??= {};
+            partitions.forEach((partition) => {
+                if (this.pendingOffsets[topic]?.[partition]) {
+                    this.currentOffsets[topic][partition] = this.pendingOffsets[topic][partition];
+                    delete this.pendingOffsets[topic][partition];
+                }
+            });
+        });
     }
 
     public async fetchOffsets(options: { fromBeginning: boolean }) {
@@ -58,7 +66,6 @@ export class OffsetManager {
                 }),
             ),
         );
-        this.flush();
     }
 
     private async listOffsets({
@@ -83,11 +90,15 @@ export class OffsetManager {
             })),
         });
 
+        const topicPartitions: Record<string, Set<number>> = {};
         offsets.topics.forEach(({ name, partitions }) => {
+            topicPartitions[name] ??= new Set();
             partitions.forEach(({ partitionIndex, offset }) => {
+                topicPartitions[name].add(partitionIndex);
                 this.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
             });
         });
-
+
+        this.flush(topicPartitions);
     }
 }
```
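The `OffsetManager` change splits bookkeeping in two: `resolve()` only records a pending offset, and `flush(topicPartitions)` promotes pending offsets to current for exactly the partitions that were committed. A standalone model of that bookkeeping (illustrative; the real class also talks to the cluster):

```typescript
// Pending/current offset bookkeeping as introduced in beta.6.
const pendingOffsets: Record<string, Record<number, bigint>> = {};
const currentOffsets: Record<string, Record<number, bigint>> = {};

const resolve = (topic: string, partition: number, offset: bigint) => {
    (pendingOffsets[topic] ??= {})[partition] = offset;
};

const flush = (topicPartitions: Record<string, Set<number>>) => {
    for (const [topic, partitions] of Object.entries(topicPartitions)) {
        currentOffsets[topic] ??= {};
        for (const partition of partitions) {
            const offset = pendingOffsets[topic]?.[partition];
            // The diff guards with a truthy check; an explicit undefined test
            // additionally lets a legitimate offset of 0n through.
            if (offset !== undefined) {
                currentOffsets[topic][partition] = offset;
                delete pendingOffsets[topic][partition];
            }
        }
    }
};

resolve('my-topic', 0, 43n);         // recorded after processing succeeds
flush({ 'my-topic': new Set([0]) }); // promoted once the commit scope is known
// currentOffsets['my-topic'][0] === 43n
```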
package/src/consumer/processor.ts
CHANGED
```diff
@@ -9,7 +9,7 @@ type ProcessorOptions = {
     process: (batch: Batch) => Promise<void>;
 };
 
-export class Processor extends EventEmitter<{
+export class Processor extends EventEmitter<{ stopped: [] }> {
     private isRunning = false;
 
     constructor(private options: ProcessorOptions) {
@@ -17,15 +17,11 @@ export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
     }
 
     public async loop() {
-        const { poll, process } = this.options;
-
         this.isRunning = true;
-        this.once('stop', () => (this.isRunning = false));
 
         try {
             while (this.isRunning) {
-                const batch = await poll();
-                await process(batch);
+                await this.step();
             }
         } finally {
             this.isRunning = false;
@@ -34,14 +30,24 @@ export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
     }
 
     @trace()
+    private async step() {
+        const { poll, process } = this.options;
+
+        const batch = await poll();
+        if (batch.length) {
+            await process(batch);
+        }
+    }
+
     public async stop() {
         if (!this.isRunning) {
             return;
         }
 
-        return new Promise<void>((resolve) => {
+        const stopPromise = new Promise<void>((resolve) => {
             this.once('stopped', resolve);
-            this.emit('stop');
         });
+        this.isRunning = false;
+        return stopPromise;
     }
 }
```
package/src/distributors/assignments-to-replicas.ts
CHANGED
```diff
@@ -1,6 +1,6 @@
 type Assignment = { [topicName: string]: number[] };
 type TopicPartitionReplicaIds = { [topicName: string]: { [partition: number]: number[] } };
-
+type NodeAssignment = { [replicaId: number]: Assignment };
 
 /** From replica ids pick the one with fewest assignments to balance the load across brokers */
 export const distributeAssignmentsToNodesBalanced = (
@@ -81,5 +81,3 @@ const getPartitionsByReplica = (assignment: Assignment, topicPartitionReplicaIds
     }
     return Object.entries(partitionsByReplicaId);
 };
-
-export const distributeAssignmentsToNodes = distributeAssignmentsToNodesBalanced;
```
package/src/index.ts
CHANGED
package/src/metadata.ts
CHANGED
```diff
@@ -2,6 +2,9 @@ import { API, API_ERROR } from './api';
 import { Cluster } from './cluster';
 import { delay } from './utils/delay';
 import { KafkaTSApiError } from './utils/error';
+import { createTracer } from './utils/tracer';
+
+const trace = createTracer('Metadata');
 
 type MetadataOptions = {
     cluster: Cluster;
@@ -36,6 +39,7 @@ export class Metadata {
         return this.topicNameById[id];
     }
 
+    @trace()
     public async fetchMetadataIfNecessary({
         topics,
         allowTopicAutoCreation,
```
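`metadata.ts`, `producer.ts`, and `offset-manager.ts` all pick up the same instrumentation pattern: a module-level `createTracer('Component')` plus `@trace(...)` method decorators. A sketch of the three decorator forms that appear in this diff; anything beyond these shapes, including the decorator's runtime behavior, is an assumption:

```typescript
import { createTracer } from './utils/tracer'; // path as used throughout the diff

const trace = createTracer('MyComponent');

class MyComponent {
    // Bare form, as on Metadata.fetchMetadataIfNecessary and Consumer.start/close.
    @trace()
    public async refresh(): Promise<void> {}

    // Root-span form, as on Producer.send and FetchManager.start.
    @trace(() => ({ root: true }))
    public async run(): Promise<void> {}

    // Argument-derived attributes, as on Consumer.process.
    @trace((items: unknown[]) => ({ count: items.length }))
    public async process(items: unknown[]): Promise<void> {}
}
```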
package/src/producer/producer.ts
CHANGED
```diff
@@ -7,6 +7,9 @@ import { Message } from '../types';
 import { delay } from '../utils/delay';
 import { KafkaTSApiError } from '../utils/error';
 import { memo } from '../utils/memo';
+import { createTracer } from '../utils/tracer';
+
+const trace = createTracer('Producer');
 
 export type ProducerOptions = {
     allowTopicAutoCreation?: boolean;
@@ -34,7 +37,8 @@ export class Producer {
         this.partition = this.options.partitioner({ metadata: this.metadata });
     }
 
-
+    @trace(() => ({ root: true }))
+    public async send(messages: Message[], { acks = -1 }: { acks?: -1 | 1 } = {}) {
         await this.ensureConnected();
 
         const { allowTopicAutoCreation } = this.options;
@@ -44,19 +48,20 @@ export class Producer {
         await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
 
         const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
-            messages.map(message => ({ ...message, partition: this.partition(message) })),
+            messages.map((message) => ({ ...message, partition: this.partition(message) })),
             this.metadata.getTopicPartitionLeaderIds(),
         );
 
         await Promise.all(
-            Object.entries(nodeTopicPartitionMessages).map(
-
+            Object.entries(nodeTopicPartitionMessages).map(([nodeId, topicPartitionMessages]) =>
+                this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
                     transactionalId: null,
-                    acks
+                    acks,
                     timeoutMs: 5000,
                     topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
                         name: topic,
                         partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+                            const partitionIndex = parseInt(partition);
                             let baseTimestamp: bigint | undefined;
                             let maxTimestamp: bigint | undefined;
 
@@ -69,9 +74,9 @@ export class Producer {
                                 }
                             });
 
-                            const baseSequence = this.nextSequence(topic,
+                            const baseSequence = this.nextSequence(topic, partitionIndex, messages.length);
                             return {
-                                index:
+                                index: partitionIndex,
                                 baseOffset: 0n,
                                 partitionLeaderEpoch: -1,
                                 attributes: 0,
@@ -95,8 +100,8 @@ export class Producer {
                             };
                         }),
                     })),
-                })
-
+                }),
+            ),
         );
     }
 
```
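`Producer.send()` gains an options argument: `acks` defaults to `-1` (wait for full ISR acknowledgement) and may be set to `1` (leader-only), and the method is now traced as a root span. A hedged usage sketch; the message fields and the way a `Producer` is constructed are assumptions, since neither appears in this diff:

```typescript
import { Producer } from 'kafka-ts'; // export path assumed

declare const producer: Producer; // obtained via the library's client factory

// acks?: -1 | 1, defaulting to -1, per the new send() signature.
await producer.send(
    [{ topic: 'my-topic', key: Buffer.from('key'), value: Buffer.from('value') }],
    { acks: 1 },
);
```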
package/src/utils/api.ts
CHANGED
```diff
@@ -5,7 +5,7 @@ export type Api<Request, Response> = {
     apiKey: number;
     apiVersion: number;
     request: (encoder: Encoder, body: Request) => Encoder;
-    response: (buffer: Decoder) => Response;
+    response: (buffer: Decoder) => Promise<Response> | Response;
 };
 
 export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
```
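Widening `response` to `Promise<Response> | Response` lets response decoders await asynchronous work, which plausibly supports the new compression codecs under `src/codecs/` (gzip decompression in Node is naturally async). A hedged sketch of a custom definition against the new `Api` shape; the `apiKey` and both bodies are placeholders, not a real Kafka API:

```typescript
import { createApi } from './utils/api';

const PING = createApi({
    apiKey: 9999, // placeholder
    apiVersion: 0,
    request: (encoder, _body: {}) => encoder, // would write request fields here
    response: async (_decoder) => {
        // Since beta.6 a decoder may return a Promise, e.g. to await decompression.
        return { ok: true };
    },
});
```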