@platformatic/kafka 1.15.0 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,4 @@
-import { ResponseError } from "../../errors.js";
+import { OutOfBoundsError, ResponseError } from "../../errors.js";
 import { Reader } from "../../protocol/reader.js";
 import { readRecordsBatch } from "../../protocol/records.js";
 import { Writer } from "../../protocol/writer.js";
@@ -99,16 +99,27 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
                }),
                preferredReadReplica: r.readInt32()
            };
-           let recordsSize = r.readUnsignedVarInt();
            if (partition.errorCode !== 0) {
                errors.push([`/responses/${i}/partitions/${j}`, partition.errorCode]);
            }
-           if (recordsSize > 1) {
-               recordsSize--;
+           // We need to reduce the size by one to follow the COMPACT_RECORDS specification.
+           const recordsSize = r.readUnsignedVarInt() - 1;
+           if (recordsSize > 0) {
                const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
                partition.records = [];
                do {
-                   partition.records.push(readRecordsBatch(recordsBatchesReader));
+                   try {
+                       partition.records.push(readRecordsBatch(recordsBatchesReader));
+                   }
+                   catch (err) {
+                       // Contrary to other places in the protocol, records batches CAN BE truncated due to maxBytes argument.
+                       // In that case we just ignore the error.
+                       if (err.code === OutOfBoundsError.code) {
+                           break;
+                       }
+                       /* c8 ignore next 3 - Hard to test */
+                       throw err;
+                   }
                } while (recordsBatchesReader.position < recordsSize);
                r.skip(recordsSize);
            }
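The parser change above follows from Kafka's COMPACT_RECORDS encoding: in flexible protocol versions, a compact field is prefixed with an unsigned varint holding its byte length plus one, with a raw value of 0 denoting null. Reading the varint and subtracting one up front replaces the old `recordsSize > 1` / decrement dance, while the new `try`/`catch` acknowledges that a broker may truncate the final records batch to honor the request's `maxBytes`, so an `OutOfBoundsError` mid-batch simply ends the loop. A minimal sketch of the length rule (`decodeCompactLength` is an illustrative helper, not part of the package):

```ts
// Sketch of COMPACT_* length decoding in Kafka's flexible protocol versions.
// The wire carries UNSIGNED_VARINT(N + 1); a raw value of 0 means null.
function decodeCompactLength(readUnsignedVarInt: () => number): number | null {
  const raw = readUnsignedVarInt();
  if (raw === 0) {
    return null; // null field, no payload bytes follow
  }
  return raw - 1; // actual payload length N
}

// A 100-byte records payload is prefixed with varint(101), so
// `r.readUnsignedVarInt() - 1` recovers 100; varint(0) (null) and
// varint(1) (empty) both yield recordsSize <= 0 and skip the parse loop.
```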
package/dist/errors.d.ts CHANGED
@@ -1,7 +1,7 @@
 declare const kGenericError: unique symbol;
 declare const kMultipleErrors: unique symbol;
 export declare const ERROR_PREFIX = "PLT_KFK_";
-export declare const errorCodes: readonly ["PLT_KFK_AUTHENTICATION", "PLT_KFK_MULTIPLE", "PLT_KFK_NETWORK", "PLT_KFK_PROTOCOL", "PLT_KFK_RESPONSE", "PLT_KFK_TIMEOUT", "PLT_KFK_UNEXPECTED_CORRELATION_ID", "PLT_KFK_UNFINISHED_WRITE_BUFFER", "PLT_KFK_UNSUPPORTED_API", "PLT_KFK_UNSUPPORTED_COMPRESSION", "PLT_KFK_UNSUPPORTED", "PLT_KFK_USER"];
+export declare const errorCodes: readonly ["PLT_KFK_AUTHENTICATION", "PLT_KFK_MULTIPLE", "PLT_KFK_NETWORK", "PLT_KFK_OUT_OF_BOUNDS", "PLT_KFK_PROTOCOL", "PLT_KFK_RESPONSE", "PLT_KFK_TIMEOUT", "PLT_KFK_UNEXPECTED_CORRELATION_ID", "PLT_KFK_UNFINISHED_WRITE_BUFFER", "PLT_KFK_UNSUPPORTED_API", "PLT_KFK_UNSUPPORTED_COMPRESSION", "PLT_KFK_UNSUPPORTED", "PLT_KFK_USER"];
 export type ErrorCode = (typeof errorCodes)[number];
 export type ErrorProperties = {
     cause?: Error;
@@ -38,6 +38,10 @@ export declare class ProtocolError extends GenericError {
     static code: ErrorCode;
     constructor(codeOrId: string | number, properties?: ErrorProperties, response?: unknown);
 }
+export declare class OutOfBoundsError extends GenericError {
+    static code: ErrorCode;
+    constructor(message: string, properties?: ErrorProperties);
+}
 export declare class ResponseError extends MultipleErrors {
     static code: ErrorCode;
     constructor(apiName: number, apiVersion: number, errors: Record<string, number>, response: unknown, properties?: ErrorProperties);
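The declaration file also shows the pattern that keeps the codes and the `ErrorCode` type in lockstep: `errorCodes` is a readonly tuple, and `ErrorCode` is derived from it with an indexed access type, so adding `'PLT_KFK_OUT_OF_BOUNDS'` to the array widens the union automatically. The same pattern in isolation (the codes below are placeholders, not the package's list):

```ts
// Readonly-tuple-to-union pattern, as used by errorCodes/ErrorCode.
const codes = ['CODE_A', 'CODE_B', 'CODE_C'] as const;
type Code = (typeof codes)[number]; // 'CODE_A' | 'CODE_B' | 'CODE_C'

// A type guard built on the tuple; narrows strings to the union.
function isKnownCode(value: string): value is Code {
  return (codes as readonly string[]).includes(value);
}

console.log(isKnownCode('CODE_B')); // true
console.log(isKnownCode('OTHER')); // false
```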
package/dist/errors.js CHANGED
@@ -7,6 +7,7 @@ export const errorCodes = [
     'PLT_KFK_AUTHENTICATION',
     'PLT_KFK_MULTIPLE',
     'PLT_KFK_NETWORK',
+    'PLT_KFK_OUT_OF_BOUNDS',
     'PLT_KFK_PROTOCOL',
     'PLT_KFK_RESPONSE',
     'PLT_KFK_TIMEOUT',
@@ -117,6 +118,12 @@ export class ProtocolError extends GenericError {
         });
     }
 }
+export class OutOfBoundsError extends GenericError {
+    static code = 'PLT_KFK_OUT_OF_BOUNDS';
+    constructor(message, properties = {}) {
+        super(OutOfBoundsError.code, message, { isOut: true, ...properties });
+    }
+}
 export class ResponseError extends MultipleErrors {
     static code = 'PLT_KFK_RESPONSE';
     constructor(apiName, apiVersion, errors, response, properties = {}) {
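Every error class stores its code both as a static property and, via `GenericError`, on instances, which lets callers detect the new condition by code rather than `instanceof` — exactly what the fetch parser above does with `err.code === OutOfBoundsError.code`. A hedged consumer-side sketch (it assumes the package's root entry re-exports the error classes, as it does for `ResponseError` and friends):

```ts
// Sketch: matching the new error by code rather than instanceof, which
// stays reliable even if multiple copies of the package are installed.
import { OutOfBoundsError } from '@platformatic/kafka';

function isOutOfBounds(err: unknown): boolean {
  return (
    typeof err === 'object' &&
    err !== null &&
    (err as { code?: string }).code === OutOfBoundsError.code // 'PLT_KFK_OUT_OF_BOUNDS'
  );
}
```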
@@ -1,2 +1,4 @@
 import { DynamicBuffer } from './dynamic-buffer.ts';
-export declare function crc32c(data: Buffer | Uint8Array | DynamicBuffer): number;
+export declare function loadNativeCRC32C(): typeof jsCRC32C | null;
+export declare function jsCRC32C(data: Buffer | Uint8Array | DynamicBuffer): number;
+export declare const crc32c: typeof jsCRC32C;
@@ -1,4 +1,5 @@
 // Based on the work from: https://github.com/tulios/kafkajs/blob/master/src/protocol/recordBatch/crc32C/crc32C.js
+import { createRequire } from 'node:module';
 import { DynamicBuffer } from "./dynamic-buffer.js";
 /* prettier-ignore */
 const CRC = [
@@ -67,7 +68,22 @@ const CRC = [
     0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
     0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351
 ];
-export function crc32c(data) {
+export function loadNativeCRC32C() {
+    try {
+        const require = createRequire(import.meta.url);
+        const { crc32c: nativeImplementation } = require('@node-rs/crc32');
+        return function nativeCRC32C(data) {
+            const bytes = DynamicBuffer.isDynamicBuffer(data)
+                ? data.buffer
+                : new Uint8Array(data);
+            return nativeImplementation(bytes);
+        };
+    }
+    catch (error) {
+        return null;
+    }
+}
+export function jsCRC32C(data) {
     const bytes = DynamicBuffer.isDynamicBuffer(data)
         ? data.buffer
         : new Uint8Array(data);
@@ -77,3 +93,4 @@ export function crc32c(data) {
     }
     return (crc ^ 0xffffffff) >>> 0;
 }
+export const crc32c = loadNativeCRC32C() ?? jsCRC32C;
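The two CRC32C hunks above (type declarations, then the implementation) introduce an optional native fast path: when the `@node-rs/crc32` addon is installed, its implementation is used; otherwise `loadNativeCRC32C()` returns `null` and the table-driven JS version remains in place. `createRequire` is the standard way for an ESM module to probe for a CJS addon without failing at load time, and npm tolerates optional dependencies that fail to build or are skipped with `--omit=optional`. The pattern in isolation (`some-native-addon` and `checksum` are placeholders):

```ts
// Optional-native-dependency pattern: probe with a lazy CJS require,
// fall back to a pure-JS implementation when the addon is unavailable.
import { createRequire } from 'node:module';

function jsFallback(data: Uint8Array): number {
  // Stand-in for a pure-JS implementation.
  let sum = 0;
  for (const byte of data) {
    sum = (sum + byte) >>> 0;
  }
  return sum;
}

function loadNative(): typeof jsFallback | null {
  try {
    const require = createRequire(import.meta.url);
    const { checksum } = require('some-native-addon'); // placeholder module
    return (data: Uint8Array) => checksum(data);
  } catch {
    return null; // addon missing; caller falls back to JS
  }
}

export const checksum = loadNative() ?? jsFallback;
```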
@@ -1,4 +1,4 @@
-import { UserError } from "../errors.js";
+import { OutOfBoundsError } from "../errors.js";
 import { EMPTY_BUFFER, INT16_SIZE, INT32_SIZE, INT64_SIZE, INT8_SIZE } from "./definitions.js";
 import { BITS_8PLUS_MASK, BITS_8PLUS_MASK_64, int64ZigZagDecode, int64ZigZagEncode, intZigZagDecode, intZigZagEncode, LEAST_SIGNIFICANT_7_BITS, LEAST_SIGNIFICANT_7_BITS_64, MOST_SIGNIFICANT_BIT_FLAG, MOST_SIGNIFICANT_BIT_FLAG_64, sizeOfUnsignedVarInt, sizeOfUnsignedVarInt64 } from "./varint.js";
 const instanceIdentifier = Symbol('plt.kafka.dynamicBuffer.instanceIdentifier');
@@ -62,7 +62,7 @@ export class DynamicBuffer {
            end = this.length;
        }
        if (start < 0 || start > this.length || end > this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        if (this.buffers.length === 0) {
            return new DynamicBuffer(EMPTY_BUFFER);
@@ -93,13 +93,13 @@
            end = this.length;
        }
        if (start < 0 || start > this.length || end > this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        if (this.buffers.length === 0) {
            return EMPTY_BUFFER;
        }
        else if (this.buffers.length === 1) {
-           return this.buffers[0].slice(start, end);
+           return this.buffers[0].subarray(start, end);
        }
        let position = 0;
        let length = end - start;
@@ -133,7 +133,7 @@
    }
    consume(offset) {
        if (offset < 0 || offset > this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        if (offset === 0) {
            return this;
@@ -159,14 +159,14 @@
    }
    get(offset) {
        if (offset < 0 || offset >= this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        const [finalIndex, current] = this.#findInitialBuffer(offset);
        return this.buffers[current][finalIndex];
    }
    readUInt8(offset = 0) {
        if (offset < 0 || offset >= this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        const [finalIndex, current] = this.#findInitialBuffer(offset);
        this.#readBuffer[0] = this.buffers[current][finalIndex];
@@ -202,7 +202,7 @@
        let value = 0;
        let read = 0;
        if (offset < 0 || offset >= this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        // Find the initial buffer
        let [startOffset, current] = this.#findInitialBuffer(offset);
@@ -224,7 +224,7 @@
        let value = 0n;
        let read = 0;
        if (offset < 0 || offset >= this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        // Find the initial buffer
        let [startOffset, current] = this.#findInitialBuffer(offset);
@@ -242,7 +242,7 @@
    }
    readInt8(offset = 0) {
        if (offset < 0 || offset >= this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        const [finalIndex, current] = this.#findInitialBuffer(offset);
        this.#readBuffer[0] = this.buffers[current][finalIndex];
@@ -543,7 +543,7 @@
    }
    #readMultiple(index, length) {
        if (index < 0 || index + length > this.length) {
-           throw new UserError('Out of bounds.');
+           throw new OutOfBoundsError('Out of bounds.');
        }
        let [startOffset, current] = this.#findInitialBuffer(index);
        for (let i = 0; i < length; i++) {
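Two things change in `DynamicBuffer`: every bounds check now throws the dedicated `OutOfBoundsError` (the catchable signal the fetch parser relies on), and the single-buffer fast path uses `subarray` instead of the deprecated `Buffer#slice`. Both return a view over the same memory on a Node.js `Buffer`, but `slice` shadows `Uint8Array.prototype.slice` (which copies), so `subarray` states the zero-copy intent unambiguously:

```ts
// subarray returns a view sharing memory with the parent buffer.
const buf = Buffer.from([1, 2, 3, 4]);

const view = buf.subarray(1, 3); // no copy
view[0] = 99;
console.log(buf[1]); // 99 - the mutation is visible through the parent

const copy = Uint8Array.from(view); // an explicit, independent copy
copy[0] = 7;
console.log(buf[1]); // still 99
```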
@@ -1,6 +1,7 @@
 import { UnsupportedCompressionError } from "../errors.js";
 import { compressionsAlgorithms, compressionsAlgorithmsByBitmask } from "./compression.js";
 import { crc32c } from "./crc32c.js";
+import { INT32_SIZE, INT64_SIZE } from "./definitions.js";
 import { DynamicBuffer } from "./dynamic-buffer.js";
 import { Reader } from "./reader.js";
 import { Writer } from "./writer.js";
@@ -117,6 +118,7 @@ export function createRecordsBatch(messages, options = {}) {
        .appendInt64(0n, false));
 }
 export function readRecordsBatch(reader) {
+    const initialPosition = reader.position;
     const batch = {
         firstOffset: reader.readInt64(),
         length: reader.readInt32(),
@@ -139,9 +141,12 @@ export function readRecordsBatch(reader) {
        if (!algorithm) {
            throw new UnsupportedCompressionError(`Unsupported compression algorithm with bitmask ${compression}`);
        }
-       const buffer = algorithm.decompressSync(reader.buffer.slice(reader.position, reader.buffer.length));
-       // Move the original reader to the end
-       reader.skip(reader.buffer.length - reader.position);
+       // The length of all headers immediately following Length up to the length of the Records array
+       const headersLength = reader.position - initialPosition - INT32_SIZE - INT64_SIZE;
+       const compressedDataLen = batch.length - headersLength;
+       const buffer = algorithm.decompressSync(reader.buffer.slice(reader.position, reader.position + compressedDataLen));
+       // Move the original reader to the end of the compressed data
+       reader.skip(compressedDataLen);
        // Replace the reader with the decompressed buffer
        reader = Reader.from(buffer);
    }
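The records fix computes an exact compressed-payload length instead of decompressing to the end of the reader's buffer, which could previously swallow the bytes of any batch that followed in the same response. In the record-batch format, `length` is an INT32 immediately after the INT64 `firstOffset` and counts every byte after itself, so the compressed records payload is `batch.length` minus the header bytes consumed since those two fields. A worked check of the arithmetic (the 49-byte figure assumes the standard v2 batch header layout):

```ts
// Arithmetic behind compressedDataLen, assuming a record batch v2 header:
// after `length` come partitionLeaderEpoch(4) + magic(1) + crc(4) +
// attributes(2) + lastOffsetDelta(4) + firstTimestamp(8) + maxTimestamp(8) +
// producerId(8) + producerEpoch(2) + firstSequence(4) + recordsCount(4)
// = 49 bytes of headers, followed by the (possibly compressed) records.
const INT32_SIZE = 4;
const INT64_SIZE = 8;

const initialPosition = 0; // reader.position before firstOffset
const positionAfterHeaders = INT64_SIZE + INT32_SIZE + 49; // 61 bytes consumed
const batchLength = 49 + 1000; // length field: headers + 1000 payload bytes

const headersLength = positionAfterHeaders - initialPosition - INT32_SIZE - INT64_SIZE;
const compressedDataLen = batchLength - headersLength;
console.log(headersLength, compressedDataLen); // 49 1000
```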
package/dist/version.js CHANGED
@@ -1,2 +1,2 @@
 export const name = "@platformatic/kafka";
-export const version = "1.15.0";
+export const version = "1.16.0";
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@platformatic/kafka",
-  "version": "1.15.0",
+  "version": "1.16.0",
   "description": "Modern and performant client for Apache Kafka",
   "homepage": "https://github.com/platformatic/kafka",
   "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",
@@ -32,6 +32,7 @@
     "scule": "^1.3.0"
   },
   "optionalDependencies": {
+    "@node-rs/crc32": "^1.10.6",
     "lz4-napi": "^2.9.0",
     "snappy": "^7.3.3"
   },