@platformatic/kafka 0.4.0 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -85,7 +85,7 @@ export class MessagesStream extends Readable {
  });
  });
  if (consumer[kPrometheus]) {
-   this.#metricsConsumedMessages = ensureMetric(consumer[kPrometheus], 'Counter', 'kafka_consumers_messages', 'Number of consumed Kafka messages');
+   this.#metricsConsumedMessages = ensureMetric(consumer[kPrometheus], 'Counter', 'kafka_consumed_messages', 'Number of consumed Kafka messages');
  }
  }
  close(callback) {
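Note on this hunk: the only functional change is the metric name, so the Prometheus counter created through ensureMetric is now exported as kafka_consumed_messages instead of kafka_consumers_messages, and any dashboards or alerts keyed on the old name need updating. Below is a minimal sketch of checking the renamed counter with prom-client; the Consumer options and the metrics wiring shown are assumptions for illustration, only the counter name comes from this diff.

// Sketch, not from the package: verify the renamed counter on a prom-client registry.
import { Registry } from 'prom-client'
import { Consumer } from '@platformatic/kafka'

const registry = new Registry()

// Assumption: the registry reaches consumer[kPrometheus] through a `metrics` option;
// the exact option shape is not visible in this diff.
const consumer = new Consumer({
  clientId: 'example',                  // hypothetical values
  groupId: 'example-group',
  bootstrapBrokers: ['localhost:9092'],
  metrics: { registry }
})

// ...after some messages have been consumed:
console.log(registry.getSingleMetric('kafka_consumed_messages'))  // the counter, as of 0.4.2
console.log(registry.getSingleMetric('kafka_consumers_messages')) // undefined after the rename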
@@ -260,44 +260,50 @@ export class MessagesStream extends Readable {
  const headerKeyDeserializer = this.#headerKeyDeserializer;
  const headerValueDeserializer = this.#headerValueDeserializer;
  // Parse results
- for (const topicResponse of response.responses) {
-   const topic = topicIds.get(topicResponse.topicId);
-   for (const { records, partitionIndex: partition } of topicResponse.partitions) {
-     if (!records) {
-       continue;
-     }
-     const firstTimestamp = records.firstTimestamp;
-     const leaderEpoch = metadata.topics.get(topic).partitions[partition].leaderEpoch;
-     for (const record of records.records) {
-       const headers = new Map();
-       for (const [headerKey, headerValue] of record.headers) {
-         headers.set(headerKeyDeserializer(headerKey), headerValueDeserializer(headerValue));
+ try {
+   for (const topicResponse of response.responses) {
+     const topic = topicIds.get(topicResponse.topicId);
+     for (const { records, partitionIndex: partition } of topicResponse.partitions) {
+       if (!records) {
+         continue;
+       }
+       const firstTimestamp = records.firstTimestamp;
+       const leaderEpoch = metadata.topics.get(topic).partitions[partition].leaderEpoch;
+       for (const record of records.records) {
+         const headers = new Map();
+         for (const [headerKey, headerValue] of record.headers) {
+           headers.set(headerKeyDeserializer(headerKey), headerValueDeserializer(headerValue));
+         }
+         const key = keyDeserializer(record.key, headers);
+         const value = valueDeserializer(record.value, headers);
+         this.#metricsConsumedMessages?.inc();
+         canPush = this.push({
+           key,
+           value,
+           headers,
+           topic,
+           partition,
+           timestamp: firstTimestamp + record.timestampDelta,
+           offset: records.firstOffset + BigInt(record.offsetDelta),
+           commit: autocommit
+             ? noopCallback
+             : this.#commit.bind(this, topic, partition, records.firstOffset + BigInt(record.offsetDelta), leaderEpoch)
+         });
+       }
+       // Track the last read offset
+       const lastOffset = records.firstOffset + BigInt(records.lastOffsetDelta);
+       this.#offsetsToFetch.set(`${topic}:${partition}`, lastOffset + 1n);
+       // Autocommit if needed
+       if (autocommit) {
+         this.#offsetsToCommit.set(`${topic}:${partition}`, { topic, partition, offset: lastOffset, leaderEpoch });
        }
-       const key = keyDeserializer(record.key, headers);
-       const value = valueDeserializer(record.value, headers);
-       this.#metricsConsumedMessages?.inc();
-       canPush = this.push({
-         key,
-         value,
-         headers,
-         topic,
-         partition,
-         timestamp: firstTimestamp + record.timestampDelta,
-         offset: records.firstOffset + BigInt(record.offsetDelta),
-         commit: autocommit
-           ? noopCallback
-           : this.#commit.bind(this, topic, partition, records.firstOffset + BigInt(record.offsetDelta), leaderEpoch)
-       });
-     }
-     // Track the last read offset
-     const lastOffset = records.firstOffset + BigInt(records.lastOffsetDelta);
-     this.#offsetsToFetch.set(`${topic}:${partition}`, lastOffset + 1n);
-     // Autocommit if needed
-     if (autocommit) {
-       this.#offsetsToCommit.set(`${topic}:${partition}`, { topic, partition, offset: lastOffset, leaderEpoch });
      }
    }
  }
+ catch (error) {
+   this.destroy(new UserError('Failed to deserialize a message.', { cause: error }));
+   return;
+ }
  if (this.#autocommitEnabled && !this.#autocommitInterval) {
    this.#autocommit();
  }
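Note on this hunk: the whole record-parsing loop is now wrapped in try/catch, so a throwing key, value, or header deserializer no longer propagates out of the parsing routine; instead the stream is destroyed with a UserError whose cause carries the original exception, and the early return also skips the autocommit step that follows. To callers this surfaces as an ordinary 'error' event on the messages stream. A usage sketch under that assumption follows; the consume() options are illustrative, not taken from this diff.

// Sketch: how the new failure mode looks from the consuming side.
const stream = await consumer.consume({ topics: ['events'] }) // hypothetical options

stream.on('data', message => {
  // Each pushed message carries the fields visible in the diff above.
  console.log(message.topic, message.partition, message.offset, message.value)
})

stream.on('error', error => {
  // As of 0.4.2 a failing deserializer ends up here as a wrapped error.
  console.error(error.message) // 'Failed to deserialize a message.'
  console.error(error.cause)   // the exception thrown by the deserializer
})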
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@platformatic/kafka",
-   "version": "0.4.0",
+   "version": "0.4.2",
    "description": "Modern and performant client for Apache Kafka",
    "homepage": "https://github.com/platformatic/kafka",
    "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",
@@ -30,8 +30,7 @@
      "ajv-errors": "^3.0.0",
      "debug": "^4.4.0",
      "fastq": "^1.19.1",
-     "mnemonist": "^0.40.3",
-     "semver": "^7.7.1"
+     "mnemonist": "^0.40.3"
    },
    "optionalDependencies": {
      "lz4-napi": "^2.8.0",
@@ -41,6 +40,7 @@
      "@platformatic/rdkafka": "^4.0.0",
      "@types/debug": "^4.1.12",
      "@types/node": "^22.13.5",
+     "@types/semver": "^7.7.0",
      "c8": "^10.1.3",
      "cleaner-spec-reporter": "^0.4.0",
      "cronometro": "^5.3.0",
@@ -54,6 +54,7 @@
      "prettier": "^3.5.3",
      "prom-client": "^15.1.3",
      "scule": "^1.3.0",
+     "semver": "^7.7.1",
      "typescript": "^5.7.3"
    },
    "engines": {