kafka-ts 0.0.6 → 0.0.8
- package/dist/api/index.d.ts +1 -1
- package/dist/api/sync-group.d.ts +1 -1
- package/dist/api/sync-group.js +21 -2
- package/dist/consumer/consumer-group.d.ts +4 -6
- package/dist/consumer/consumer-group.js +17 -14
- package/dist/consumer/consumer.d.ts +1 -0
- package/dist/consumer/consumer.js +3 -3
- package/dist/consumer/fetch-manager.js +6 -3
- package/dist/consumer/fetcher.d.ts +0 -2
- package/dist/consumer/fetcher.js +1 -3
- package/package.json +3 -2
package/dist/api/index.d.ts
CHANGED
package/dist/api/sync-group.d.ts
CHANGED
package/dist/api/sync-group.js
CHANGED
@@ -2,6 +2,8 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.SYNC_GROUP = void 0;
 const api_1 = require("../utils/api");
+const decoder_1 = require("../utils/decoder");
+const encoder_1 = require("../utils/encoder");
 const error_1 = require("../utils/error");
 exports.SYNC_GROUP = (0, api_1.createApi)({
     apiKey: 14,
@@ -16,7 +18,7 @@ exports.SYNC_GROUP = (0, api_1.createApi)({
         .writeCompactString(data.protocolName)
         .writeCompactArray(data.assignments, (encoder, assignment) => encoder
         .writeCompactString(assignment.memberId)
-        .
+        .writeCompactBytes(encodeAssignment(assignment.assignment))
         .writeUVarInt(0))
         .writeUVarInt(0),
     response: (decoder) => {
@@ -26,7 +28,7 @@ exports.SYNC_GROUP = (0, api_1.createApi)({
             errorCode: decoder.readInt16(),
             protocolType: decoder.readCompactString(),
             protocolName: decoder.readCompactString(),
-            assignments: decoder.
+            assignments: decodeAssignment(decoder.readCompactBytes()),
             _tag2: decoder.readTagBuffer(),
         };
         if (result.errorCode)
@@ -34,3 +36,20 @@ exports.SYNC_GROUP = (0, api_1.createApi)({
         return result;
     },
 });
+const encodeAssignment = (data) => new encoder_1.Encoder()
+    .writeInt16(0)
+    .writeArray(Object.entries(data), (encoder, [topic, partitions]) => encoder.writeString(topic).writeArray(partitions, (encoder, partition) => encoder.writeInt32(partition)))
+    .writeBytes(Buffer.alloc(0))
+    .value();
+const decodeAssignment = (data) => {
+    const decoder = new decoder_1.Decoder(data);
+    const result = {
+        version: decoder.readInt16(),
+        assignment: decoder.readArray((decoder) => ({
+            topic: decoder.readString(),
+            partitions: decoder.readArray((decoder) => decoder.readInt32()),
+        })),
+        userData: decoder.readBytes(),
+    };
+    return Object.fromEntries(result.assignment.map(({ topic, partitions }) => [topic, partitions]));
+};
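The new encodeAssignment/decodeAssignment helpers above serialize the member assignment that SYNC_GROUP now carries as opaque bytes. A minimal sketch of the data shape they round-trip, using illustrative names and values that are not part of the package:

    // Illustrative only: the helpers themselves are module-private in dist/api/sync-group.js.
    type MemberAssignment = Record<string, number[]>; // topic -> partition ids

    const assignment: MemberAssignment = { 'example-topic': [0, 1, 2] };

    // encodeAssignment(assignment) writes: int16 version (0), then an array of
    // [string topic, int32[] partitions] entries, then empty userData bytes.
    // decodeAssignment(buffer) reads the same layout back and returns the record
    // form, e.g. { 'example-topic': [0, 1, 2] }.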
package/dist/consumer/consumer-group.d.ts
CHANGED

@@ -1,6 +1,5 @@
-/// <reference types="node" />
-import EventEmitter from 'events';
 import { Cluster } from '../cluster';
+import { Consumer } from './consumer';
 import { ConsumerMetadata } from './consumer-metadata';
 import { OffsetManager } from './offset-manager';
 type ConsumerGroupOptions = {
@@ -12,10 +11,9 @@ type ConsumerGroupOptions = {
     rebalanceTimeoutMs: number;
     metadata: ConsumerMetadata;
     offsetManager: OffsetManager;
+    consumer: Consumer;
 };
-export declare class ConsumerGroup
-    offsetCommit: [];
-}> {
+export declare class ConsumerGroup {
     private options;
     private coordinatorId;
     private memberId;
@@ -25,11 +23,11 @@ export declare class ConsumerGroup extends EventEmitter<{
     private heartbeatInterval;
     private heartbeatError;
     constructor(options: ConsumerGroupOptions);
+    init(): Promise<void>;
     join(): Promise<void>;
     private startHeartbeater;
     private stopHeartbeater;
     handleLastHeartbeat(): void;
-    resetHeartbeat(): void;
     private findCoordinator;
     private joinGroup;
     private syncGroup;
package/dist/consumer/consumer-group.js
CHANGED

@@ -8,17 +8,13 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key,
 var __metadata = (this && this.__metadata) || function (k, v) {
     if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ConsumerGroup = void 0;
-const events_1 = __importDefault(require("events"));
 const api_1 = require("../api");
 const find_coordinator_1 = require("../api/find-coordinator");
 const tracer_1 = require("../utils/tracer");
 const trace = (0, tracer_1.createTracer)('ConsumerGroup');
-class ConsumerGroup
+class ConsumerGroup {
     options;
     coordinatorId = -1;
     memberId = '';
@@ -28,19 +24,22 @@ class ConsumerGroup extends events_1.default {
     heartbeatInterval = null;
     heartbeatError = null;
     constructor(options) {
-        super();
         this.options = options;
     }
-    async
+    async init() {
         await this.findCoordinator();
         await this.options.cluster.setSeedBroker(this.coordinatorId);
         this.memberId = '';
+    }
+    async join() {
         await this.joinGroup();
         await this.syncGroup();
         await this.offsetFetch();
         this.startHeartbeater();
     }
     async startHeartbeater() {
+        this.stopHeartbeater();
+        this.heartbeatError = null;
         this.heartbeatInterval = setInterval(async () => {
             try {
                 await this.heartbeat();
@@ -61,9 +60,6 @@ class ConsumerGroup extends events_1.default {
             throw this.heartbeatError;
         }
     }
-    resetHeartbeat() {
-        this.heartbeatError = null;
-    }
     async findCoordinator() {
         const { coordinators } = await this.options.cluster.sendRequest(api_1.API.FIND_COORDINATOR, {
             keyType: find_coordinator_1.KEY_TYPE.GROUP,
@@ -121,7 +117,7 @@ class ConsumerGroup extends events_1.default {
             protocolName: 'RoundRobinAssigner',
             assignments,
         });
-        metadata.setAssignment(
+        metadata.setAssignment(response.assignments);
     }
     async offsetFetch() {
         const { cluster, groupId, topics, metadata, offsetManager } = this.options;
@@ -155,7 +151,7 @@ class ConsumerGroup extends events_1.default {
         offsetManager.flush(topicPartitions);
     }
     async offsetCommit(topicPartitions) {
-        const { cluster, groupId, groupInstanceId, offsetManager } = this.options;
+        const { cluster, groupId, groupInstanceId, offsetManager, consumer } = this.options;
         const request = {
             groupId,
             groupInstanceId,
@@ -175,16 +171,17 @@ class ConsumerGroup extends events_1.default {
             return;
         }
         await cluster.sendRequest(api_1.API.OFFSET_COMMIT, request);
-
+        consumer.emit('offsetCommit');
     }
     async heartbeat() {
-        const { cluster, groupId, groupInstanceId } = this.options;
+        const { cluster, groupId, groupInstanceId, consumer } = this.options;
         await cluster.sendRequest(api_1.API.HEARTBEAT, {
             groupId,
             groupInstanceId,
             memberId: this.memberId,
             generationId: this.generationId,
         });
+        consumer.emit('heartbeat');
     }
     async leaveGroup() {
         if (this.coordinatorId === -1) {
@@ -207,6 +204,12 @@ class ConsumerGroup extends events_1.default {
     }
 }
 exports.ConsumerGroup = ConsumerGroup;
+__decorate([
+    trace(),
+    __metadata("design:type", Function),
+    __metadata("design:paramtypes", []),
+    __metadata("design:returntype", Promise)
+], ConsumerGroup.prototype, "init", null);
 __decorate([
     trace(),
     __metadata("design:type", Function),
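With the EventEmitter base removed from ConsumerGroup, the 'heartbeat' and 'offsetCommit' events are now emitted through the injected Consumer, which still extends EventEmitter as the consumer.js diff below shows. A rough sketch of observing them, assuming an already-constructed Consumer instance named consumer (its construction is outside this diff):

    // Sketch only: `consumer` is assumed to be an existing kafka-ts Consumer instance.
    consumer.on('heartbeat', () => {
        // emitted after each successful group heartbeat
    });
    consumer.on('offsetCommit', () => {
        // emitted after offsets are committed to the group coordinator
    });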
package/dist/consumer/consumer.js
CHANGED

@@ -71,9 +71,9 @@ class Consumer extends events_1.default {
             rebalanceTimeoutMs: this.options.rebalanceTimeoutMs,
             metadata: this.metadata,
             offsetManager: this.offsetManager,
+            consumer: this,
         })
         : undefined;
-        this.consumerGroup?.on('offsetCommit', () => this.emit('offsetCommit'));
     }
     async start() {
         const { topics, allowTopicAutoCreation, fromBeginning } = this.options;
@@ -83,7 +83,7 @@ class Consumer extends events_1.default {
             await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
             this.metadata.setAssignment(this.metadata.getTopicPartitions());
             await this.offsetManager.fetchOffsets({ fromBeginning });
-            await this.consumerGroup?.
+            await this.consumerGroup?.init();
         }
         catch (error) {
             logger_1.log.error('Failed to start consumer', error);
@@ -108,7 +108,7 @@ class Consumer extends events_1.default {
     async startFetchManager() {
         const { batchGranularity, concurrency } = this.options;
         while (!this.stopHook) {
-            this.consumerGroup?.
+            await this.consumerGroup?.join();
             // TODO: If leader is not available, find another read replica
             const nodeAssignments = Object.entries((0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(Object.entries(this.metadata.getAssignment()).flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition }))), this.metadata.getTopicPartitionLeaderIds())).map(([nodeId, assignment]) => ({
                 nodeId: parseInt(nodeId),
package/dist/consumer/fetch-manager.js
CHANGED

@@ -25,11 +25,10 @@ class FetchManager {
     fetcherCallbacks = {};
     constructor(options) {
         this.options = options;
-        const { fetch, process,
+        const { fetch, process, nodeAssignments, concurrency } = this.options;
         this.fetchers = nodeAssignments.map(({ nodeId, assignment }, index) => new fetcher_1.Fetcher(index, {
             nodeId,
             assignment,
-            consumerGroup,
             fetch,
             onResponse: this.onResponse.bind(this),
         }));
@@ -64,6 +63,8 @@ class FetchManager {
         if (!this.isRunning) {
             return [];
         }
+        const { consumerGroup } = this.options;
+        consumerGroup?.handleLastHeartbeat();
         const batch = this.queue.shift();
         if (!batch) {
             // wait until new data is available or fetch manager is requested to stop
@@ -80,7 +81,8 @@ class FetchManager {
         return batch;
     }
     async onResponse(fetcherId, response) {
-        const { metadata, batchGranularity } = this.options;
+        const { metadata, batchGranularity, consumerGroup } = this.options;
+        consumerGroup?.handleLastHeartbeat();
         const batches = fetchResponseToBatches(response, batchGranularity, metadata);
         if (!batches.length) {
             return;
@@ -91,6 +93,7 @@ class FetchManager {
             this.queue.push(...batches, { kind: 'checkpoint', fetcherId });
             this.pollQueue?.shift()?.();
         });
+        consumerGroup?.handleLastHeartbeat();
     }
 }
 exports.FetchManager = FetchManager;
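The FetchManager changes above take over the heartbeat-liveness check that fetcher.js previously performed: handleLastHeartbeat() is now called when a batch is polled and around each fetch response. A rough sketch of that pattern under assumed types (GroupLike and pollOnce are hypothetical; the real method lives on ConsumerGroup):

    // Hypothetical helper: handleLastHeartbeat() is assumed to re-throw any error
    // recorded by the background heartbeat loop, failing the poll promptly.
    interface GroupLike {
        handleLastHeartbeat(): void;
    }

    async function pollOnce<T>(group: GroupLike | undefined, next: () => Promise<T>): Promise<T> {
        group?.handleLastHeartbeat(); // surface a dead session before doing more work
        return next();
    }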
package/dist/consumer/fetcher.d.ts
CHANGED

@@ -3,11 +3,9 @@
 import { EventEmitter } from 'stream';
 import { FetchResponse } from '../api/fetch';
 import { Assignment } from '../api/sync-group';
-import { ConsumerGroup } from './consumer-group';
 type FetcherOptions = {
     nodeId: number;
     assignment: Assignment;
-    consumerGroup?: ConsumerGroup;
     fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
     onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
 };
package/dist/consumer/fetcher.js
CHANGED
@@ -35,14 +35,12 @@ class Fetcher extends stream_1.EventEmitter {
         }
     }
     async step() {
-        const { nodeId, assignment,
+        const { nodeId, assignment, fetch, onResponse } = this.options;
         const response = await fetch(nodeId, assignment);
         if (!this.isRunning) {
             return;
         }
-        consumerGroup?.handleLastHeartbeat();
         await onResponse(this.fetcherId, response);
-        consumerGroup?.handleLastHeartbeat();
     }
     async stop() {
         if (!this.isRunning) {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "0.0.6",
+    "version": "0.0.8",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",
@@ -9,7 +9,8 @@
         "url": "https://github.com/priitkaard/kafka-ts.git"
     },
     "scripts": {
-        "
+        "up": "npm run down && KAFKA_VERSION=3.7.1 docker-compose up -d && sleep 5 && bash ./scripts/create-scram-user.sh",
+        "down": "KAFKA_VERSION=3.7.1 docker-compose down",
         "version:prerelease": "npm version prerelease",
         "version:patch": "npm version patch",
         "format": "prettier --write .",