kafka-ts 1.1.7 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cluster.js +1 -0
- package/dist/consumer/metadata.d.ts +24 -0
- package/dist/consumer/metadata.js +64 -0
- package/dist/examples/src/replicator.js +34 -0
- package/dist/examples/src/utils/json.js +5 -0
- package/dist/request-handler.d.ts +16 -0
- package/dist/request-handler.js +67 -0
- package/dist/request-handler.test.d.ts +1 -0
- package/dist/request-handler.test.js +340 -0
- package/dist/src/api/api-versions.js +18 -0
- package/dist/src/api/create-topics.js +46 -0
- package/dist/src/api/delete-topics.js +26 -0
- package/dist/src/api/fetch.js +95 -0
- package/dist/src/api/find-coordinator.js +34 -0
- package/dist/src/api/heartbeat.js +22 -0
- package/dist/src/api/index.js +38 -0
- package/dist/src/api/init-producer-id.js +24 -0
- package/dist/src/api/join-group.js +48 -0
- package/dist/src/api/leave-group.js +30 -0
- package/dist/src/api/list-offsets.js +39 -0
- package/dist/src/api/metadata.js +47 -0
- package/dist/src/api/offset-commit.js +39 -0
- package/dist/src/api/offset-fetch.js +44 -0
- package/dist/src/api/produce.js +119 -0
- package/dist/src/api/sync-group.js +31 -0
- package/dist/src/broker.js +35 -0
- package/dist/src/connection.js +21 -0
- package/dist/src/consumer/consumer-group.js +131 -0
- package/dist/src/consumer/consumer.js +103 -0
- package/dist/src/consumer/metadata.js +52 -0
- package/dist/src/consumer/offset-manager.js +23 -0
- package/dist/src/index.js +19 -0
- package/dist/src/producer/producer.js +84 -0
- package/dist/src/request-handler.js +57 -0
- package/dist/src/request-handler.test.js +321 -0
- package/dist/src/types.js +2 -0
- package/dist/src/utils/api.js +5 -0
- package/dist/src/utils/decoder.js +161 -0
- package/dist/src/utils/encoder.js +137 -0
- package/dist/src/utils/error.js +10 -0
- package/dist/utils/debug.d.ts +2 -0
- package/dist/utils/debug.js +11 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/mutex.d.ts +3 -0
- package/dist/utils/mutex.js +32 -0
- package/package.json +1 -1
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createConnection = void 0;
const net_1 = require("net");
/**
 * Creates a promise-based wrapper around a single TCP socket.
 *
 * @param options - options forwarded verbatim to `socket.connect()` (host/port).
 * @returns `{ connect, disconnect, socket }` where `connect()` resolves once the
 *          socket is connected and rejects on a connection error, and
 *          `disconnect()` resolves after the socket has been ended.
 */
const createConnection = (options) => {
    const socket = new net_1.Socket();
    const connect = () => {
        return new Promise((resolve, reject) => {
            // Remove the sibling listener when either event fires: previously both
            // `once` handlers were left registered, so every connect() attempt
            // leaked the handler for the event that did not fire.
            const onConnect = () => {
                socket.removeListener('error', onError);
                resolve();
            };
            const onError = (error) => {
                socket.removeListener('connect', onConnect);
                reject(error);
            };
            socket.once('connect', onConnect);
            socket.once('error', onError);
            socket.connect(options);
        });
    };
    const disconnect = () => {
        return new Promise((resolve) => {
            // `socket.end(cb)` invokes cb once the FIN has been sent.
            socket.end(resolve);
        });
    };
    return { connect, disconnect, socket };
};
exports.createConnection = createConnection;
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createConsumerGroup = void 0;
const api_1 = require("../api");
const find_coordinator_1 = require("../api/find-coordinator");
const broker_1 = require("../broker");
/**
 * Implements the Kafka consumer-group lifecycle: find the group coordinator,
 * join/sync the group (round-robin partition assignment), fetch committed
 * offsets, commit pending offsets, and leave the group on shutdown.
 *
 * @returns `{ join, commit, leave }` async operations driven by the consumer.
 */
const createConsumerGroup = ({ topics, groupId, groupInstanceId, sessionTimeoutMs, rebalanceTimeoutMs, coordinator, metadata, offsetManager, }) => {
    // Group membership state, populated by joinGroup().
    let memberId = "";
    let generationId = -1;
    let leaderId = "";
    let memberIds = [];
    // Ask the bootstrap broker for the group coordinator; if it is a different
    // broker, reconnect to it.
    // NOTE(review): this reassigns the local `coordinator` parameter only — the
    // caller's own reference (e.g. the consumer's fetch connection) is not
    // updated. Confirm callers do not rely on seeing the new coordinator.
    const findCoordinator = async () => {
        const { coordinators } = await coordinator.sendRequest(api_1.API.FIND_COORDINATOR, {
            keyType: find_coordinator_1.KEY_TYPE.GROUP,
            keys: [groupId],
        });
        if (coordinators[0].host !== coordinator.host || coordinators[0].port !== coordinator.port) {
            await coordinator.disconnect();
            coordinator = await (0, broker_1.connectBroker)({
                clientId: coordinator.clientId,
                options: { host: coordinators[0].host, port: coordinators[0].port },
            });
        }
    };
    const joinGroup = async () => {
        const response = await coordinator.sendRequest(api_1.API.JOIN_GROUP, {
            groupId,
            groupInstanceId,
            memberId,
            sessionTimeoutMs,
            rebalanceTimeoutMs,
            protocolType: "consumer",
            protocols: [{ name: "RoundRobinAssigner", metadata: { version: 0, topics } }],
            reason: null,
        });
        // On first contact the broker assigns a member id and rejects the join
        // with MEMBER_ID_REQUIRED (KIP-394); we must retry with that id. Capture
        // the member id and retry BEFORE trusting any other response field —
        // previously generationId/leader/members were overwritten with the
        // error response's placeholder values before the retry.
        memberId = response.memberId;
        if (response.errorCode === api_1.API_ERROR.MEMBER_ID_REQUIRED) {
            return joinGroup();
        }
        generationId = response.generationId;
        leaderId = response.leader;
        memberIds = response.members.map((member) => member.memberId);
    };
    const syncGroup = async () => {
        let assignments = [];
        // Only the elected leader computes the assignment; followers send an
        // empty list and receive their share from the coordinator's response.
        if (memberId === leaderId) {
            // Round-robin: distribute every (topic, partition) pair over members.
            const memberAssignments = Object.entries(metadata.getTopicPartitions())
                .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
                .reduce((acc, { topic, partition }, index) => {
                const memberId = memberIds[index % memberIds.length];
                acc[memberId] ??= {};
                acc[memberId][topic] ??= [];
                acc[memberId][topic].push(partition);
                return acc;
            }, {});
            assignments = Object.entries(memberAssignments).map(([memberId, assignment]) => ({ memberId, assignment }));
        }
        const response = await coordinator.sendRequest(api_1.API.SYNC_GROUP, {
            groupId,
            groupInstanceId,
            memberId,
            generationId,
            protocolType: "consumer",
            protocolName: "RoundRobinAssigner",
            assignments,
        });
        metadata.setAssignment(JSON.parse(response.assignments));
    };
    // Fetch committed offsets for our assigned partitions and seed the
    // offset manager with them (only offsets >= 0 are real commits).
    const offsetFetch = async () => {
        const response = await coordinator.sendRequest(api_1.API.OFFSET_FETCH, {
            groups: [
                {
                    groupId,
                    memberId,
                    memberEpoch: -1,
                    topics: topics.map((topic) => ({ name: topic, partitionIndexes: metadata.getAssignment()[topic] })),
                },
            ],
            requireStable: true,
        });
        response.groups.forEach((group) => {
            group.topics.forEach((topic) => {
                topic.partitions
                    .filter(({ committedOffset }) => committedOffset >= 0)
                    .forEach(({ partitionIndex, committedOffset }) => offsetManager.resolve(topic.name, partitionIndex, committedOffset));
            });
        });
    };
    // Commit all offsets resolved since the last flush.
    const offsetCommit = async () => {
        await coordinator.sendRequest(api_1.API.OFFSET_COMMIT, {
            groupId,
            groupInstanceId,
            memberId,
            generationIdOrMemberEpoch: generationId,
            topics: Object.entries(offsetManager.getPendingOffsets()).map(([topic, partitions]) => ({
                name: topic,
                partitions: Object.entries(partitions).map(([partition, offset]) => ({
                    partitionIndex: parseInt(partition, 10),
                    committedOffset: offset,
                    committedLeaderEpoch: -1,
                    committedMetadata: null,
                })),
            })),
        });
        offsetManager.flush();
    };
    const leaveGroup = async () => {
        if (!groupId) {
            return;
        }
        await coordinator.sendRequest(api_1.API.LEAVE_GROUP, {
            groupId,
            members: [{ memberId, groupInstanceId, reason: null }],
        });
    };
    return {
        join: async () => {
            await findCoordinator();
            await joinGroup();
            await syncGroup();
            await offsetFetch();
        },
        commit: async () => {
            await offsetCommit();
        },
        leave: async () => {
            await leaveGroup();
        },
    };
};
exports.createConsumerGroup = createConsumerGroup;
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.startConsumer = void 0;
const fetch_1 = require("../api/fetch");
const broker_1 = require("../broker");
const consumer_group_1 = require("./consumer-group");
const metadata_1 = require("./metadata");
const offset_manager_1 = require("./offset-manager");
// Starts a consumer: connects to the first bootstrap broker, initializes
// metadata/offsets, optionally joins a consumer group (when groupId is set),
// then runs a fetch loop delivering messages to options.onBatch or
// options.onMessage. Returns { close } to stop the loop and disconnect.
const startConsumer = async ({ brokers, clientId = null, groupId, topics, groupInstanceId = null, rackId = "", isolationLevel = 0 /* IsolationLevel.READ_UNCOMMITTED */, sessionTimeoutMs = 30_000, rebalanceTimeoutMs = 60_000, maxWaitMs = 5000, minBytes = 1, maxBytes = 1_000_000, partitionMaxBytes = 1_000_000, allowTopicAutoCreation = true, fromBeginning = false, ...options }) => {
    // NOTE(review): only brokers[0] is ever contacted here; partition leaders on
    // other brokers are not connected to — confirm single-broker assumption.
    let coordinator = await (0, broker_1.connectBroker)({ clientId, options: brokers[0] });
    // `stopHook` doubles as the loop-termination flag and as the resolver that
    // close() awaits: while undefined the loop runs; close() assigns the
    // resolve function, the loop observes it, exits, and calls it.
    let stopHook;
    const offsetManager = (0, offset_manager_1.createOffsetManager)();
    const metadata = (0, metadata_1.createMetadata)({
        topics,
        isolationLevel,
        allowTopicAutoCreation,
        fromBeginning,
        coordinator,
        offsetManager,
    });
    // Group coordination is optional: without a groupId the consumer reads all
    // partitions standalone and never commits.
    const consumerGroup = groupId
        ? (0, consumer_group_1.createConsumerGroup)({
            topics,
            groupId,
            groupInstanceId,
            sessionTimeoutMs,
            rebalanceTimeoutMs,
            coordinator,
            metadata,
            offsetManager,
        })
        : undefined;
    // Issues a single FETCH for every assigned topic-partition at its current
    // offset and returns the raw response.
    const fetch = async () => {
        const response = await coordinator.sendRequest(fetch_1.FETCH, {
            maxWaitMs,
            minBytes,
            maxBytes,
            isolationLevel,
            sessionId: 0,
            sessionEpoch: -1,
            topics: Object.entries(metadata.getAssignment())
                .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
                .map(({ topic, partition }) => ({
                topicId: metadata.getTopicIdByName(topic),
                partitions: [
                    {
                        partition,
                        currentLeaderEpoch: -1,
                        fetchOffset: offsetManager.getCurrentOffset(topic, partition),
                        lastFetchedEpoch: -1,
                        logStartOffset: 0n,
                        partitionMaxBytes,
                    },
                ],
            })),
            forgottenTopicsData: [],
            rackId,
        });
        return response;
    };
    const fetchLoop = async () => {
        while (!stopHook) {
            const batch = await fetch();
            // Flatten response -> partitions -> record batches -> records into a
            // single message list; timestamps/offsets are reconstructed from the
            // batch base value plus the per-record delta.
            const messages = batch.responses.flatMap(({ topicId, partitions }) => partitions.flatMap(({ partitionIndex, records }) => records.flatMap(({ baseTimestamp, baseOffset, records }) => records.map((message) => ({
                topic: metadata.getTopicNameById(topicId),
                partition: partitionIndex,
                key: message.key ?? null,
                value: message.value ?? null,
                headers: Object.fromEntries(message.headers.map(({ key, value }) => [key, value])),
                timestamp: baseTimestamp + BigInt(message.timestampDelta),
                offset: baseOffset + BigInt(message.offsetDelta),
            })))));
            // TODO: Implement exponential backoff
            try {
                if ("onBatch" in options) {
                    await options.onBatch(messages);
                }
                else if ("onMessage" in options) {
                    for (const message of messages) {
                        await options.onMessage(message);
                    }
                }
            }
            catch (error) {
                // Handler failure: skip offset resolution/commit so the same
                // messages are redelivered on the next iteration.
                console.error(error);
                continue;
            }
            messages.forEach(({ topic, partition, offset }) => offsetManager.resolve(topic, partition, offset));
            await consumerGroup?.commit();
        }
        // Loop exited because close() installed the resolver — signal it.
        stopHook();
    };
    const close = async () => {
        // Resolves once fetchLoop observes stopHook and invokes it.
        await new Promise((resolve) => (stopHook = resolve));
        await consumerGroup?.leave();
        await coordinator.disconnect();
    };
    await metadata.init();
    await consumerGroup?.join();
    // Intentionally not awaited: the loop runs in the background for the
    // lifetime of the consumer.
    // NOTE(review): if fetch() itself rejects (e.g. broker disconnect), this
    // floating promise rejects unhandled and close() would hang — confirm
    // whether that is acceptable or needs a try/catch around the loop.
    fetchLoop();
    return { close };
};
exports.startConsumer = startConsumer;
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createMetadata = void 0;
const api_1 = require("../api");
/**
 * Tracks cluster metadata for the consumer: topic -> partition lists,
 * topic id/name lookup tables, and the current partition assignment.
 * init() refreshes metadata and seeds the offset manager via LIST_OFFSETS.
 */
const createMetadata = ({ topics, isolationLevel, allowTopicAutoCreation, fromBeginning, coordinator, offsetManager, }) => {
    let topicPartitions = {};
    let topicNameById = {};
    let topicIdByName = {};
    let assignment = {};
    // Refresh the partition map and both id<->name lookup tables from METADATA.
    const fetchMetadata = async () => {
        const response = await coordinator.sendRequest(api_1.API.METADATA, {
            allowTopicAutoCreation,
            includeTopicAuthorizedOperations: false,
            topics: topics.map((name) => ({ id: null, name })),
        });
        const partitionsByName = {};
        const nameById = {};
        const idByName = {};
        for (const topic of response.topics) {
            partitionsByName[topic.name] = topic.partitions.map(({ partitionIndex }) => partitionIndex);
            nameById[topic.topicId] = topic.name;
            idByName[topic.name] = topic.topicId;
        }
        topicPartitions = partitionsByName;
        topicNameById = nameById;
        topicIdByName = idByName;
        // Until a group assignment arrives, assume ownership of every partition.
        assignment = topicPartitions;
    };
    // Resolve each assigned partition's starting offset: 0 when consuming from
    // the beginning, otherwise one before the latest offset (so the next fetch
    // starts at the latest offset).
    const listOffsets = async () => {
        const requestedTopics = Object.entries(assignment).flatMap(([name, partitions]) => partitions.map((partitionIndex) => ({
            name,
            partitions: [{ partitionIndex, currentLeaderEpoch: -1, timestamp: -1n }],
        })));
        const offsets = await coordinator.sendRequest(api_1.API.LIST_OFFSETS, {
            replicaId: -1,
            isolationLevel,
            topics: requestedTopics,
        });
        for (const { name, partitions } of offsets.topics) {
            for (const { partitionIndex, offset } of partitions) {
                offsetManager.resolve(name, partitionIndex, fromBeginning ? 0n : offset - 1n);
            }
        }
    };
    return {
        init: async () => {
            await fetchMetadata();
            await listOffsets();
        },
        getTopicPartitions: () => topicPartitions,
        getTopicIdByName: (name) => topicIdByName[name],
        getTopicNameById: (id) => topicNameById[id],
        getAssignment: () => assignment,
        setAssignment: (newAssignment) => {
            assignment = newAssignment;
        },
    };
};
exports.createMetadata = createMetadata;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createOffsetManager = void 0;
/**
 * Tracks per-topic-partition consumer offsets (BigInt).
 * `resolve(topic, partition, offset)` records `offset + 1n` — the next offset
 * to fetch/commit — in both the current and the pending maps; `flush()` clears
 * the pending map after a successful commit.
 */
const createOffsetManager = () => {
    const currentOffsets = {};
    let pendingOffsets = {};
    // Write the next offset into a nested { topic: { partition: offset } } map.
    const record = (store, topic, partition, nextOffset) => {
        (store[topic] ??= {})[partition] = nextOffset;
    };
    const resolve = (topic, partition, offset) => {
        const nextOffset = offset + 1n;
        record(pendingOffsets, topic, partition, nextOffset);
        record(currentOffsets, topic, partition, nextOffset);
    };
    const flush = () => {
        pendingOffsets = {};
    };
    return {
        getCurrentOffset: (topic, partition) => currentOffsets[topic]?.[partition] ?? 0n,
        resolve,
        flush,
        getPendingOffsets: () => pendingOffsets,
    };
};
exports.createOffsetManager = createOffsetManager;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"use strict";
// tslib-style interop helpers emitted by the TypeScript compiler for
// `export * from ...`: __createBinding re-exports a single property (as a
// live getter when possible), __exportStar copies every non-default own
// export not already present on the target.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public package surface: consumer, producer, and shared types.
__exportStar(require("./consumer/consumer"), exports);
__exportStar(require("./producer/producer"), exports);
__exportStar(require("./types"), exports);
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createProducer = void 0;
const api_1 = require("../api");
const broker_1 = require("../broker");
/**
 * Creates a producer bound to the first bootstrap broker. Connects eagerly and
 * obtains a producer id/epoch via INIT_PRODUCER_ID; `send(messages)` groups
 * messages by topic/partition and issues a single PRODUCE request (acks=1).
 *
 * @returns `{ send, close }`.
 */
const createProducer = ({ clientId = null, brokers }) => {
    let producerId = 0n;
    let producerEpoch = 0;
    let sequences = {};
    // `connect` is the shared connection promise; send()/close() await it.
    // NOTE(review): if this rejects before the first send(), the rejection is
    // unhandled — confirm callers are expected to surface it via send().
    let connect = (async () => {
        const broker = await (0, broker_1.connectBroker)({ clientId, options: brokers[0] });
        const result = await broker.sendRequest(api_1.API.INIT_PRODUCER_ID, {
            transactionalId: null,
            transactionTimeoutMs: 0,
            producerId,
            producerEpoch,
        });
        producerId = result.producerId;
        producerEpoch = result.producerEpoch;
        sequences = {};
        return broker;
    })();
    // Returns the current sequence for a topic-partition and post-increments it.
    // NOTE(review): the Kafka protocol expects baseSequence to advance by the
    // number of records in each batch; this advances by 1 per batch — confirm
    // against the idempotent-producer semantics before enabling retries.
    const getSequence = (topic, partition) => {
        sequences[topic] ??= {};
        sequences[topic][partition] ??= 0;
        return sequences[topic][partition]++;
    };
    const send = async (messages) => {
        const { sendRequest } = await connect;
        // Group messages into { topic: { partition: [message, ...] } }.
        const topicPartitionMessages = {};
        messages.forEach((message) => {
            topicPartitionMessages[message.topic] ??= {};
            topicPartitionMessages[message.topic][message.partition] ??= [];
            topicPartitionMessages[message.topic][message.partition].push(message);
        });
        await sendRequest(api_1.API.PRODUCE, {
            transactionalId: null,
            acks: 1,
            timeoutMs: 5000,
            topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
                name: topic,
                partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
                    // Batch timestamps: min becomes the base, max the batch maximum.
                    let baseTimestamp;
                    let maxTimestamp;
                    messages.forEach(message => {
                        if (!baseTimestamp || message.timestamp < baseTimestamp) {
                            baseTimestamp = message.timestamp;
                        }
                        if (!maxTimestamp || message.timestamp > maxTimestamp) {
                            maxTimestamp = message.timestamp;
                        }
                    });
                    return ({
                        index: parseInt(partition, 10),
                        baseOffset: 0n,
                        partitionLeaderEpoch: -1,
                        attributes: 0,
                        lastOffsetDelta: messages.length - 1,
                        baseTimestamp: baseTimestamp ?? 0n,
                        maxTimestamp: maxTimestamp ?? 0n,
                        producerId,
                        // BUG FIX: was hard-coded to 0 — use the epoch assigned by
                        // INIT_PRODUCER_ID, consistent with `producerId` above.
                        producerEpoch,
                        baseSequence: getSequence(topic, parseInt(partition, 10)),
                        records: messages.map((message, index) => ({
                            attributes: 0,
                            timestampDelta: message.timestamp - (baseTimestamp ?? 0n),
                            offsetDelta: index,
                            key: message.key,
                            value: message.value,
                            headers: Object.entries(message.headers).map(([key, value]) => ({ key, value })),
                        })),
                    });
                }),
            })),
        });
    };
    const close = async () => {
        const { disconnect } = await connect;
        // TODO: wait for inflight requests to complete
        await disconnect();
    };
    return { send, close };
};
exports.createProducer = createProducer;
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createRequestHandler = void 0;
const node_assert_1 = __importDefault(require("node:assert"));
const api_1 = require("./api");
const decoder_1 = require("./utils/decoder");
const encoder_1 = require("./utils/encoder");
/**
 * Correlates Kafka wire-protocol requests with their responses over a single
 * socket. Each request is framed as [int32 size][int32 correlationId][payload];
 * responses are matched back to pending requests by correlation id.
 *
 * @returns `{ sendRequest }` where sendRequest(api, args) resolves with the
 *          decoded response.
 */
const createRequestHandler = ({ clientId, connection }) => {
    // Pending requests keyed by correlation id.
    const requestQueue = {};
    // BUG FIX: correlation ids were random (Math.random), which can collide and
    // resolve the wrong pending request; use a monotonic counter instead.
    let nextCorrelationId = 0;
    // Accumulates partial frames across 'data' events.
    let currentBuffer = null;
    const handleData = (buffer) => {
        currentBuffer = currentBuffer ? Buffer.concat([currentBuffer, buffer]) : buffer;
        // A single 'data' event may carry several responses, or a partial one:
        // consume complete frames in a loop and keep any remainder buffered.
        // (Previously only the first frame was handled and the rest discarded.)
        while (currentBuffer) {
            if (currentBuffer.length < 4) {
                return; // not even the length prefix yet
            }
            const decoder = (0, decoder_1.createDecoder)({ buffer: currentBuffer });
            const size = decoder.readInt32();
            // BUG FIX: `size` excludes the 4-byte length prefix, so compare it
            // against the payload bytes actually available (`length - 4`); the old
            // check let frames short by up to 4 bytes through.
            if (size > currentBuffer.length - 4) {
                return;
            }
            const correlationId = decoder.readInt32();
            const request = requestQueue[correlationId];
            delete requestQueue[correlationId];
            if (request) {
                request.callback(decoder);
                // The response decoder must consume exactly this frame.
                (0, node_assert_1.default)(decoder.offset - 4 === size, `Buffer not correctly consumed: ${decoder.offset - 4} !== ${size}`);
            }
            // Drop the consumed frame; loop again if more bytes remain.
            currentBuffer = currentBuffer.length > size + 4 ? currentBuffer.subarray(size + 4) : null;
        }
    };
    // NOTE(review): a request whose response never arrives leaves its promise
    // pending forever — consider a timeout. TODO confirm desired policy.
    const sendRequest = (api, args) => {
        const [apiName] = Object.entries(api_1.API).find(([, value]) => value === api) ?? ["UNKNOWN"];
        console.log(`[sendRequest] ${apiName}`);
        const correlationId = nextCorrelationId++;
        const encoder = (0, encoder_1.createEncoder)()
            .writeInt16(api.apiKey)
            .writeInt16(api.apiVersion)
            .writeInt32(correlationId)
            .writeString(clientId);
        const request = api.request(encoder, args);
        // Prepend the int32 size prefix to the encoded request.
        const buffer = (0, encoder_1.createEncoder)()
            .writeInt32(request.length)
            .write(request)
            .value();
        return new Promise((resolve, reject) => {
            requestQueue[correlationId] = {
                callback: (decoder) => resolve(api.response(decoder)),
            };
            connection.socket.write(buffer, (err) => (err ? reject(err) : undefined));
        });
    };
    connection.socket.on("data", handleData);
    return { sendRequest };
};
exports.createRequestHandler = createRequestHandler;
|