kafka-ts 1.1.8 → 1.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/consumer/consumer-group.js +84 -99
  2. package/dist/consumer/consumer.js +16 -25
  3. package/dist/producer/producer.js +5 -12
  4. package/dist/utils/retry.d.ts +1 -0
  5. package/dist/utils/retry.js +19 -0
  6. package/package.json +1 -1
  7. package/dist/consumer/metadata.d.ts +0 -24
  8. package/dist/consumer/metadata.js +0 -64
  9. package/dist/examples/src/replicator.js +0 -34
  10. package/dist/examples/src/utils/json.js +0 -5
  11. package/dist/request-handler.d.ts +0 -16
  12. package/dist/request-handler.js +0 -67
  13. package/dist/request-handler.test.d.ts +0 -1
  14. package/dist/request-handler.test.js +0 -340
  15. package/dist/src/api/api-versions.js +0 -18
  16. package/dist/src/api/create-topics.js +0 -46
  17. package/dist/src/api/delete-topics.js +0 -26
  18. package/dist/src/api/fetch.js +0 -95
  19. package/dist/src/api/find-coordinator.js +0 -34
  20. package/dist/src/api/heartbeat.js +0 -22
  21. package/dist/src/api/index.js +0 -38
  22. package/dist/src/api/init-producer-id.js +0 -24
  23. package/dist/src/api/join-group.js +0 -48
  24. package/dist/src/api/leave-group.js +0 -30
  25. package/dist/src/api/list-offsets.js +0 -39
  26. package/dist/src/api/metadata.js +0 -47
  27. package/dist/src/api/offset-commit.js +0 -39
  28. package/dist/src/api/offset-fetch.js +0 -44
  29. package/dist/src/api/produce.js +0 -119
  30. package/dist/src/api/sync-group.js +0 -31
  31. package/dist/src/broker.js +0 -35
  32. package/dist/src/connection.js +0 -21
  33. package/dist/src/consumer/consumer-group.js +0 -131
  34. package/dist/src/consumer/consumer.js +0 -103
  35. package/dist/src/consumer/metadata.js +0 -52
  36. package/dist/src/consumer/offset-manager.js +0 -23
  37. package/dist/src/index.js +0 -19
  38. package/dist/src/producer/producer.js +0 -84
  39. package/dist/src/request-handler.js +0 -57
  40. package/dist/src/request-handler.test.js +0 -321
  41. package/dist/src/types.js +0 -2
  42. package/dist/src/utils/api.js +0 -5
  43. package/dist/src/utils/decoder.js +0 -161
  44. package/dist/src/utils/encoder.js +0 -137
  45. package/dist/src/utils/error.js +0 -10
  46. package/dist/utils/debug.d.ts +0 -2
  47. package/dist/utils/debug.js +0 -11
  48. package/dist/utils/memo.d.ts +0 -1
  49. package/dist/utils/memo.js +0 -16
  50. package/dist/utils/mutex.d.ts +0 -3
  51. package/dist/utils/mutex.js +0 -32
package/dist/consumer/consumer-group.js CHANGED
@@ -14,6 +14,7 @@ const api_1 = require("../api");
 const find_coordinator_1 = require("../api/find-coordinator");
 const error_1 = require("../utils/error");
 const logger_1 = require("../utils/logger");
+const retry_1 = require("../utils/retry");
 const tracer_1 = require("../utils/tracer");
 const trace = (0, tracer_1.createTracer)('ConsumerGroup');
 class ConsumerGroup {
@@ -62,7 +63,7 @@ class ConsumerGroup {
         }
     }
     async findCoordinator() {
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
             const { coordinators } = await this.options.cluster.sendRequest(api_1.API.FIND_COORDINATOR, {
                 keyType: find_coordinator_1.KEY_TYPE.GROUP,
                 keys: [this.options.groupId],
@@ -70,15 +71,11 @@
             this.coordinatorId = coordinators[0].nodeId;
             await this.options.cluster.setSeedBroker(this.coordinatorId);
             this.heartbeatError = null;
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.findCoordinator();
-        }
+        });
     }
     async joinGroup() {
-        const { cluster, groupId, groupInstanceId, sessionTimeoutMs, rebalanceTimeoutMs, topics } = this.options;
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { cluster, groupId, groupInstanceId, sessionTimeoutMs, rebalanceTimeoutMs, topics } = this.options;
             const response = await cluster.sendRequest(api_1.API.JOIN_GROUP, {
                 groupId,
                 groupInstanceId,
@@ -93,28 +90,27 @@
             this.generationId = response.generationId;
             this.leaderId = response.leader;
             this.memberIds = response.members.map((member) => member.memberId);
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.joinGroup();
-        }
+        });
     }
     async syncGroup() {
-        const { cluster, metadata, groupId, groupInstanceId } = this.options;
-        let assignments = [];
-        if (this.memberId === this.leaderId) {
-            const memberAssignments = Object.entries(metadata.getTopicPartitions())
-                .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
-                .reduce((acc, { topic, partition }, index) => {
-                const memberId = this.memberIds[index % this.memberIds.length];
-                acc[memberId] ??= {};
-                acc[memberId][topic] ??= [];
-                acc[memberId][topic].push(partition);
-                return acc;
-            }, {});
-            assignments = Object.entries(memberAssignments).map(([memberId, assignment]) => ({ memberId, assignment }));
-        }
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { cluster, metadata, groupId, groupInstanceId } = this.options;
+            let assignments = [];
+            if (this.memberId === this.leaderId) {
+                const memberAssignments = Object.entries(metadata.getTopicPartitions())
+                    .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
+                    .reduce((acc, { topic, partition }, index) => {
+                    const memberId = this.memberIds[index % this.memberIds.length];
+                    acc[memberId] ??= {};
+                    acc[memberId][topic] ??= [];
+                    acc[memberId][topic].push(partition);
+                    return acc;
+                }, {});
+                assignments = Object.entries(memberAssignments).map(([memberId, assignment]) => ({
+                    memberId,
+                    assignment,
+                }));
+            }
             const response = await cluster.sendRequest(api_1.API.SYNC_GROUP, {
                 groupId,
                 groupInstanceId,
@@ -125,29 +121,25 @@
                 assignments,
             });
             metadata.setAssignment(response.assignments);
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.syncGroup();
-        }
+        });
     }
     async offsetFetch() {
-        const { cluster, groupId, topics, metadata, offsetManager } = this.options;
-        const assignment = metadata.getAssignment();
-        const request = {
-            groups: [
-                {
-                    groupId,
-                    topics: topics
-                        .map((topic) => ({ name: topic, partitionIndexes: assignment[topic] ?? [] }))
-                        .filter(({ partitionIndexes }) => partitionIndexes.length),
-                },
-            ].filter(({ topics }) => topics.length),
-            requireStable: true,
-        };
-        if (!request.groups.length)
-            return;
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { cluster, groupId, topics, metadata, offsetManager } = this.options;
+            const assignment = metadata.getAssignment();
+            const request = {
+                groups: [
+                    {
+                        groupId,
+                        topics: topics
+                            .map((topic) => ({ name: topic, partitionIndexes: assignment[topic] ?? [] }))
+                            .filter(({ partitionIndexes }) => partitionIndexes.length),
+                    },
+                ].filter(({ topics }) => topics.length),
+                requireStable: true,
+            };
+            if (!request.groups.length)
+                return;
             const response = await cluster.sendRequest(api_1.API.OFFSET_FETCH, request);
             const topicPartitions = {};
             response.groups.forEach((group) => {
@@ -162,44 +154,36 @@
                 });
             });
             offsetManager.flush(topicPartitions);
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.offsetFetch();
-        }
+        });
     }
     async offsetCommit(topicPartitions) {
-        const { cluster, groupId, groupInstanceId, offsetManager, consumer } = this.options;
-        const request = {
-            groupId,
-            groupInstanceId,
-            memberId: this.memberId,
-            generationIdOrMemberEpoch: this.generationId,
-            topics: Object.entries(topicPartitions)
-                .filter(([topic]) => topic in offsetManager.pendingOffsets)
-                .map(([topic, partitions]) => ({
-                name: topic,
-                partitions: [...partitions]
-                    .filter((partition) => partition in offsetManager.pendingOffsets[topic])
-                    .map((partitionIndex) => ({
-                    partitionIndex,
-                    committedOffset: offsetManager.pendingOffsets[topic][partitionIndex],
-                    committedLeaderEpoch: -1,
-                    committedMetadata: null,
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { cluster, groupId, groupInstanceId, offsetManager, consumer } = this.options;
+            const request = {
+                groupId,
+                groupInstanceId,
+                memberId: this.memberId,
+                generationIdOrMemberEpoch: this.generationId,
+                topics: Object.entries(topicPartitions)
+                    .filter(([topic]) => topic in offsetManager.pendingOffsets)
+                    .map(([topic, partitions]) => ({
+                    name: topic,
+                    partitions: [...partitions]
+                        .filter((partition) => partition in offsetManager.pendingOffsets[topic])
+                        .map((partitionIndex) => ({
+                        partitionIndex,
+                        committedOffset: offsetManager.pendingOffsets[topic][partitionIndex],
+                        committedLeaderEpoch: -1,
+                        committedMetadata: null,
+                    })),
                 })),
-            })),
-        };
-        if (!request.topics.length) {
-            return;
-        }
-        try {
+            };
+            if (!request.topics.length) {
+                return;
+            }
             await cluster.sendRequest(api_1.API.OFFSET_COMMIT, request);
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.offsetCommit(topicPartitions);
-        }
-        consumer.emit('offsetCommit');
+            consumer.emit('offsetCommit');
+        });
     }
     async heartbeat() {
         const { cluster, groupId, groupInstanceId, consumer } = this.options;
@@ -212,24 +196,25 @@
         consumer.emit('heartbeat');
     }
     async leaveGroup() {
-        if (this.coordinatorId === -1) {
-            return;
-        }
-        const { cluster, groupId, groupInstanceId } = this.options;
-        this.stopHeartbeater();
-        try {
-            await cluster.sendRequest(api_1.API.LEAVE_GROUP, {
-                groupId,
-                members: [{ memberId: this.memberId, groupInstanceId, reason: null }],
-            });
-        }
-        catch (error) {
-            if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.FENCED_INSTANCE_ID) {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            if (this.coordinatorId === -1) {
                 return;
             }
-            await this.handleError(error);
-            return this.leaveGroup();
-        }
+            const { cluster, groupId, groupInstanceId } = this.options;
+            this.stopHeartbeater();
+            try {
+                await cluster.sendRequest(api_1.API.LEAVE_GROUP, {
+                    groupId,
+                    members: [{ memberId: this.memberId, groupInstanceId, reason: null }],
+                });
+            }
+            catch (error) {
+                if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.FENCED_INSTANCE_ID) {
+                    return;
+                }
+                throw error;
+            }
+        });
     }
     async handleError(error) {
         await (0, api_1.handleApiError)(error).catch(async (error) => {
package/dist/consumer/consumer.js CHANGED
@@ -20,6 +20,7 @@ const delay_1 = require("../utils/delay");
 const error_1 = require("../utils/error");
 const logger_1 = require("../utils/logger");
 const retrier_1 = require("../utils/retrier");
+const retry_1 = require("../utils/retry");
 const tracer_1 = require("../utils/tracer");
 const consumer_group_1 = require("./consumer-group");
 const consumer_metadata_1 = require("./consumer-metadata");
@@ -100,7 +101,9 @@ class Consumer extends events_1.default {
                 await this.fetchManager?.stop();
             });
         }
-        await this.consumerGroup?.leaveGroup().catch((error) => logger_1.log.debug('Failed to leave group', { reason: error.message }));
+        await this.consumerGroup
+            ?.leaveGroup()
+            .catch((error) => logger_1.log.debug('Failed to leave group', { reason: error.message }));
         await this.cluster.disconnect().catch(() => { });
     }
     async startFetchManager() {
@@ -140,7 +143,7 @@ class Consumer extends events_1.default {
             if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.NOT_COORDINATOR) {
                 logger_1.log.debug('Not coordinator. Searching for new coordinator...');
                 await this.consumerGroup?.findCoordinator();
-                return;
+                continue;
             }
             if (error instanceof error_1.ConnectionError) {
                 logger_1.log.debug(`${error.message}. Restarting consumer...`, { stack: error.stack });
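Note on the return → continue fix above: the NOT_COORDINATOR branch runs inside the consumer's restart loop, so in 1.1.8 the return exited the loop right after the new coordinator was found and the consumer went idle; continue starts the next iteration against the new coordinator instead. A simplified TypeScript sketch of that control flow (the loop shape, runOnce, and findCoordinator are illustrative stand-ins, not the actual method body):

// Hypothetical reduction of the consumer restart loop to the part this hunk touches.
async function consumeForever(runOnce: () => Promise<void>, findCoordinator: () => Promise<void>): Promise<void> {
    while (true) {
        try {
            await runOnce(); // join group, fetch, process...
        }
        catch (error) {
            // NOT_COORDINATOR (Kafka error code 16): the group moved to another broker.
            if ((error as { errorCode?: number }).errorCode === 16) {
                await findCoordinator();
                continue; // 1.1.9: retry against the newly found coordinator
                // (1.1.8 had `return` here, which silently ended consumption)
            }
            throw error; // anything else still propagates
        }
    }
}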
@@ -210,10 +213,10 @@
         this.offsetManager.flush(topicPartitions);
     }
     async fetch(nodeId, assignment) {
-        const { rackId, maxWaitMs, minBytes, maxBytes, partitionMaxBytes, isolationLevel } = this.options;
-        this.consumerGroup?.handleLastHeartbeat();
-        try {
-            return await this.cluster.sendRequestToNode(nodeId)(api_1.API.FETCH, {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { rackId, maxWaitMs, minBytes, maxBytes, partitionMaxBytes, isolationLevel } = this.options;
+            this.consumerGroup?.handleLastHeartbeat();
+            return this.cluster.sendRequestToNode(nodeId)(api_1.API.FETCH, {
                 maxWaitMs,
                 minBytes,
                 maxBytes,
@@ -234,31 +237,19 @@
                 forgottenTopicsData: [],
                 rackId,
             });
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.fetch(nodeId, assignment);
-        }
+        });
     }
     async fetchMetadata() {
-        const { topics, allowTopicAutoCreation } = this.options;
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { topics, allowTopicAutoCreation } = this.options;
             await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation });
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.fetchMetadata();
-        }
+        });
     }
     async fetchOffsets() {
-        const { fromTimestamp } = this.options;
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
+            const { fromTimestamp } = this.options;
             await this.offsetManager.fetchOffsets({ fromTimestamp });
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.fetchOffsets();
-        }
+        });
     }
     async handleError(error) {
         await (0, api_1.handleApiError)(error).catch(async (error) => {
package/dist/producer/producer.js CHANGED
@@ -17,6 +17,7 @@ const metadata_1 = require("../metadata");
 const error_1 = require("../utils/error");
 const lock_1 = require("../utils/lock");
 const logger_1 = require("../utils/logger");
+const retry_1 = require("../utils/retry");
 const shared_1 = require("../utils/shared");
 const tracer_1 = require("../utils/tracer");
 const trace = (0, tracer_1.createTracer)('Producer');
@@ -124,7 +125,7 @@
         }
     });
     async initProducerId() {
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
             const result = await this.cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
                 transactionalId: null,
                 transactionTimeoutMs: 0,
@@ -134,11 +135,7 @@
             this.producerId = result.producerId;
             this.producerEpoch = result.producerEpoch;
             this.sequences = {};
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.initProducerId();
-        }
+        });
     }
     getSequence(topic, partition) {
         return this.sequences[topic]?.[partition] ?? 0;
@@ -149,13 +146,9 @@
         this.sequences[topic][partition] += messagesCount;
     }
     async fetchMetadata(topics, allowTopicAutoCreation) {
-        try {
+        return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
             await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation });
-        }
-        catch (error) {
-            await this.handleError(error);
-            return this.fetchMetadata(topics, allowTopicAutoCreation);
-        }
+        });
     }
     async handleError(error) {
         await (0, api_1.handleApiError)(error).catch(async (error) => {
package/dist/utils/retry.d.ts ADDED
@@ -0,0 +1 @@
+export declare const withRetry: (handleError: (error: unknown) => Promise<void>) => <T>(func: () => Promise<T>) => Promise<T>;
package/dist/utils/retry.js ADDED
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.withRetry = void 0;
+const logger_1 = require("./logger");
+const withRetry = (handleError) => async (func) => {
+    let lastError;
+    for (let i = 0; i < 15; i++) {
+        try {
+            return await func();
+        }
+        catch (error) {
+            await handleError(error);
+            lastError = error;
+        }
+    }
+    logger_1.log.warn('Retries exhausted', { lastError });
+    throw lastError;
+};
+exports.withRetry = withRetry;
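Taken together, the hunks above replace the 1.1.8 pattern of catching an error, handling it, and recursively calling the same method again (unbounded if the error persists) with this shared helper, which caps attempts at 15 and rethrows the last error once retries are exhausted. A minimal TypeScript sketch of the same pattern; recover and fetchSomething are illustrative stand-ins for the library's handleError and request methods, not part of its public API:

// Typed restatement of dist/utils/retry.js shown above.
const withRetry =
    (handleError: (error: unknown) => Promise<void>) =>
    async <T>(func: () => Promise<T>): Promise<T> => {
        let lastError: unknown;
        for (let attempt = 0; attempt < 15; attempt++) {
            try {
                return await func(); // success: stop retrying
            }
            catch (error) {
                await handleError(error); // attempt recovery, e.g. refresh metadata
                lastError = error;
            }
        }
        throw lastError; // 15 attempts exhausted: surface the last error
    };

// Usage in the shape of the refactored methods:
const recover = async (error: unknown) => { /* e.g. find a new coordinator */ };
const fetchSomething = async () => 'ok';
const result = await withRetry(recover)(fetchSomething); // retried up to 15 times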
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "1.1.8",
+    "version": "1.1.9",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",
package/dist/consumer/metadata.d.ts DELETED
@@ -1,24 +0,0 @@
-import { IsolationLevel } from "../api/fetch";
-import { Assignment } from "../api/sync-group";
-import { Cluster } from "../cluster";
-import { OffsetManager } from "./offset-manager";
-export type Metadata = ReturnType<typeof createMetadata>;
-type MetadataOptions = {
-    cluster: Cluster;
-    topics?: string[];
-    isolationLevel?: IsolationLevel;
-    allowTopicAutoCreation?: boolean;
-    fromBeginning?: boolean;
-    offsetManager?: OffsetManager;
-};
-export declare const createMetadata: ({ cluster, topics, isolationLevel, allowTopicAutoCreation, fromBeginning, offsetManager, }: MetadataOptions) => {
-    init: () => Promise<void>;
-    getTopicPartitions: () => Record<string, number[]>;
-    getTopicIdByName: (name: string) => string;
-    getTopicNameById: (id: string) => string;
-    getAssignment: () => Assignment;
-    setAssignment: (newAssignment: Assignment) => void;
-    getLeaderIdByTopicPartition: (topic: string, partition: number) => number;
-    getIsrNodeIdsByTopicPartition: (topic: string, partition: number) => number[];
-};
-export {};
package/dist/consumer/metadata.js DELETED
@@ -1,64 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createMetadata = void 0;
-const api_1 = require("../api");
-const createMetadata = ({ cluster, topics, isolationLevel = 0 /* IsolationLevel.READ_UNCOMMITTED */, allowTopicAutoCreation = true, fromBeginning = false, offsetManager, }) => {
-    let topicPartitions = {};
-    let topicNameById = {};
-    let topicIdByName = {};
-    let leaderIdByTopicPartition = {};
-    let isrNodesByTopicPartition;
-    let assignment = {};
-    const fetchMetadata = async () => {
-        const response = await cluster.sendRequest(api_1.API.METADATA, {
-            allowTopicAutoCreation,
-            includeTopicAuthorizedOperations: false,
-            topics: topics?.map((name) => ({ id: null, name })) ?? null,
-        });
-        topicPartitions = Object.fromEntries(response.topics.map((topic) => [topic.name, topic.partitions.map((partition) => partition.partitionIndex)]));
-        topicNameById = Object.fromEntries(response.topics.map((topic) => [topic.topicId, topic.name]));
-        topicIdByName = Object.fromEntries(response.topics.map((topic) => [topic.name, topic.topicId]));
-        leaderIdByTopicPartition = Object.fromEntries(response.topics.map((topic) => [
-            topic.name,
-            Object.fromEntries(topic.partitions.map((partition) => [partition.partitionIndex, partition.leaderId])),
-        ]));
-        isrNodesByTopicPartition = Object.fromEntries(response.topics.map((topic) => [
-            topic.name,
-            Object.fromEntries(topic.partitions.map((partition) => [partition.partitionIndex, partition.isrNodes])),
-        ]));
-        assignment = topicPartitions;
-    };
-    const listOffsets = async () => {
-        const offsets = await cluster.sendRequest(api_1.API.LIST_OFFSETS, {
-            replicaId: -1,
-            isolationLevel,
-            topics: Object.entries(assignment)
-                .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
-                .map(({ topic, partition }) => ({
-                name: topic,
-                partitions: [{ partitionIndex: partition, currentLeaderEpoch: -1, timestamp: -1n }],
-            })),
-        });
-        offsets.topics.forEach(({ name, partitions }) => {
-            partitions.forEach(({ partitionIndex, offset }) => {
-                offsetManager?.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
-            });
-        });
-    };
-    return {
-        init: async () => {
-            await fetchMetadata();
-            await listOffsets();
-        },
-        getTopicPartitions: () => topicPartitions,
-        getTopicIdByName: (name) => topicIdByName[name],
-        getTopicNameById: (id) => topicNameById[id],
-        getAssignment: () => assignment,
-        setAssignment: (newAssignment) => {
-            assignment = newAssignment;
-        },
-        getLeaderIdByTopicPartition: (topic, partition) => leaderIdByTopicPartition[topic][partition],
-        getIsrNodeIdsByTopicPartition: (topic, partition) => isrNodesByTopicPartition[topic][partition],
-    };
-};
-exports.createMetadata = createMetadata;
package/dist/examples/src/replicator.js DELETED
@@ -1,34 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const kafkats_1 = require("kafkats");
-const json_1 = require("./utils/json");
-(async () => {
-    const brokers = [{ host: "localhost", port: 9092 }];
-    const topic = "playground-topic";
-    // const producer = createProducer({ brokers });
-    // const producerInterval = setInterval(async () => {
-    //     await producer.send([
-    //         {
-    //             topic,
-    //             partition: 0,
-    //             offset: 1n,
-    //             timestamp: BigInt(Date.now()),
-    //             key: null,
-    //             value: `PING ${Math.random()}`,
-    //             headers: { timestamp: Date.now().toString() }
-    //         }
-    //     ])
-    // }, 5000);
-    const consumer = await (0, kafkats_1.startConsumer)({
-        topics: [topic],
-        brokers,
-        onBatch: (messages) => {
-            console.log(JSON.stringify(messages, json_1.serializer, 2));
-        },
-    });
-    process.on("SIGINT", async () => {
-        await consumer.close();
-        // clearInterval(producerInterval);
-        // await producer.close();
-    });
-})();
package/dist/examples/src/utils/json.js DELETED
@@ -1,5 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.serializer = void 0;
-const serializer = (_, value) => (typeof value === "bigint" ? value.toString() : value);
-exports.serializer = serializer;
package/dist/request-handler.d.ts DELETED
@@ -1,16 +0,0 @@
-import { Connection } from "./connection";
-import { Api } from "./utils/api";
-type RequestHandlerOptions = {
-    clientId: string | null;
-};
-export declare class RequestHandler {
-    private connection;
-    private options;
-    private queue;
-    private currentBuffer;
-    constructor(connection: Connection, options: RequestHandlerOptions);
-    private handleData;
-    sendRequest<Request, Response>(api: Api<Request, Response>, args: Request): Promise<Response>;
-}
-export type SendRequest = typeof RequestHandler.prototype.sendRequest;
-export {};
package/dist/request-handler.js DELETED
@@ -1,67 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RequestHandler = void 0;
-const node_assert_1 = __importDefault(require("node:assert"));
-const decoder_1 = require("./utils/decoder");
-const encoder_1 = require("./utils/encoder");
-class RequestHandler {
-    connection;
-    options;
-    queue = {};
-    currentBuffer = null;
-    constructor(connection, options) {
-        this.connection = connection;
-        this.options = options;
-        this.connection.on("data", this.handleData);
-    }
-    handleData(buffer) {
-        this.currentBuffer = this.currentBuffer ? Buffer.concat([this.currentBuffer, buffer]) : buffer;
-        if (this.currentBuffer.length < 4) {
-            return;
-        }
-        const decoder = (0, decoder_1.createDecoder)({ buffer: this.currentBuffer });
-        const size = decoder.readInt32();
-        if (size > decoder.buffer.length) {
-            return;
-        }
-        const correlationId = decoder.readInt32();
-        const request = this.queue[correlationId];
-        delete this.queue[correlationId];
-        request.callback(decoder);
-        // debug(handleData.name, 'Response offsets', {
-        //     offset: decoder.offset,
-        //     length: decoder.buffer.length,
-        //     rest: decoder.buffer.subarray(decoder.offset, decoder.buffer.length)?.toString(),
-        // });
-        (0, node_assert_1.default)(decoder.offset - 4 === size, `Buffer not correctly consumed: ${decoder.offset - 4} !== ${buffer.length}`);
-        this.currentBuffer = null;
-    }
-    async sendRequest(api, args) {
-        const correlationId = Math.floor(Math.random() * 1000000);
-        const encoder = (0, encoder_1.createEncoder)()
-            .writeInt16(api.apiKey)
-            .writeInt16(api.apiVersion)
-            .writeInt32(correlationId)
-            .writeString(this.options.clientId);
-        const request = api.request(encoder, args).value();
-        const buffer = (0, encoder_1.createEncoder)().writeInt32(request.length).write(request).value();
-        return new Promise(async (resolve, reject) => {
-            await this.connection.write(buffer);
-            this.queue[correlationId] = {
-                callback: (decoder) => {
-                    try {
-                        const response = api.response(decoder);
-                        resolve(response);
-                    }
-                    catch (error) {
-                        reject(error);
-                    }
-                },
-            };
-        });
-    }
-}
-exports.RequestHandler = RequestHandler;
package/dist/request-handler.test.d.ts DELETED
@@ -1 +0,0 @@
-export declare const kafka: import("./client").Client;