@platformatic/kafka 1.16.0 → 1.17.0

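This release adds an opt-in handleBackPressure connection option, makes Fetch record-batch parsing tolerant of maxBytes truncation via a new readRecordsBatches() helper, extends Reader with length/remaining and position-aware peek* methods, and applies several small performance tweaks (eager request serialization, Buffer.allocUnsafe for varint scratch buffers, a preallocated tagged-fields buffer, and loop-based buffer concatenation). In devDependencies, node-rdkafka is dropped in favor of @confluentinc/kafka-javascript.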
@@ -1,6 +1,6 @@
-import { OutOfBoundsError, ResponseError } from "../../errors.js";
+import { ResponseError } from "../../errors.js";
 import { Reader } from "../../protocol/reader.js";
-import { readRecordsBatch } from "../../protocol/records.js";
+import { readRecordsBatches } from "../../protocol/records.js";
 import { Writer } from "../../protocol/writer.js";
 import { createAPI } from "../definitions.js";
 /*
@@ -105,22 +105,7 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
         // We need to reduce the size by one to follow the COMPACT_RECORDS specification.
         const recordsSize = r.readUnsignedVarInt() - 1;
         if (recordsSize > 0) {
-            const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
-            partition.records = [];
-            do {
-                try {
-                    partition.records.push(readRecordsBatch(recordsBatchesReader));
-                }
-                catch (err) {
-                    // Contrary to other places in the protocol, records batches CAN BE truncated due to maxBytes argument.
-                    // In that case we just ignore the error.
-                    if (err.code === OutOfBoundsError.code) {
-                        break;
-                    }
-                    /* c8 ignore next 3 - Hard to test */
-                    throw err;
-                }
-            } while (recordsBatchesReader.position < recordsSize);
+            partition.records = readRecordsBatches(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
             r.skip(recordsSize);
         }
         return partition;

@@ -1,6 +1,6 @@
-import { OutOfBoundsError, ResponseError } from "../../errors.js";
+import { ResponseError } from "../../errors.js";
 import { Reader } from "../../protocol/reader.js";
-import { readRecordsBatch } from "../../protocol/records.js";
+import { readRecordsBatches } from "../../protocol/records.js";
 import { Writer } from "../../protocol/writer.js";
 import { createAPI } from "../definitions.js";
 /*
@@ -105,22 +105,7 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
         // We need to reduce the size by one to follow the COMPACT_RECORDS specification.
         const recordsSize = r.readUnsignedVarInt() - 1;
         if (recordsSize > 0) {
-            const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
-            partition.records = [];
-            do {
-                try {
-                    partition.records.push(readRecordsBatch(recordsBatchesReader));
-                }
-                catch (err) {
-                    // Contrary to other places in the protocol, records batches CAN BE truncated due to maxBytes argument.
-                    // In that case we just ignore the error.
-                    if (err.code === OutOfBoundsError.code) {
-                        break;
-                    }
-                    /* c8 ignore next 3 - Hard to test */
-                    throw err;
-                }
-            } while (recordsBatchesReader.position < recordsSize);
+            partition.records = readRecordsBatches(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
             r.skip(recordsSize);
         }
         return partition;

@@ -1,6 +1,6 @@
-import { OutOfBoundsError, ResponseError } from "../../errors.js";
+import { ResponseError } from "../../errors.js";
 import { Reader } from "../../protocol/reader.js";
-import { readRecordsBatch } from "../../protocol/records.js";
+import { readRecordsBatches } from "../../protocol/records.js";
 import { Writer } from "../../protocol/writer.js";
 import { createAPI } from "../definitions.js";
 /*
@@ -105,22 +105,7 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
         // We need to reduce the size by one to follow the COMPACT_RECORDS specification.
         const recordsSize = r.readUnsignedVarInt() - 1;
         if (recordsSize > 0) {
-            const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
-            partition.records = [];
-            do {
-                try {
-                    partition.records.push(readRecordsBatch(recordsBatchesReader));
-                }
-                catch (err) {
-                    // Contrary to other places in the protocol, records batches CAN BE truncated due to maxBytes argument.
-                    // In that case we just ignore the error.
-                    if (err.code === OutOfBoundsError.code) {
-                        break;
-                    }
-                    /* c8 ignore next 3 - Hard to test */
-                    throw err;
-                }
-            } while (recordsBatchesReader.position < recordsSize);
+            partition.records = readRecordsBatches(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
             r.skip(recordsSize);
         }
         return partition;
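
All three hunks above apply the same change to the Fetch response parsers: the per-call-site loop that caught `OutOfBoundsError` is replaced by a single `readRecordsBatches()` call, moving truncation handling into the protocol layer. A minimal sketch of the contract, assuming `Reader` and `readRecordsBatches` are reachable from the package exports:

```js
// A Fetch response may end with a record batch truncated by the maxBytes
// argument. The parser bounds a Reader to the declared recordsSize and lets
// readRecordsBatches() stop before any batch that would overrun it, instead
// of catching OutOfBoundsError at every call site as in 1.16.
const bounded = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize))
partition.records = readRecordsBatches(bounded) // a partial trailing batch is dropped
r.skip(recordsSize) // still advance the outer reader past the declared size
```
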
@@ -74,6 +74,10 @@ export declare const baseOptionsSchema: {
         type: string;
         minimum: number;
     };
+    handleBackPressure: {
+        type: string;
+        default: boolean;
+    };
     tls: {
         type: string;
         additionalProperties: boolean;

@@ -31,6 +31,7 @@ export const baseOptionsSchema = {
     retries: { oneOf: [{ type: 'number', minimum: 0 }, { type: 'boolean' }] },
     retryDelay: { type: 'number', minimum: 0 },
     maxInflights: { type: 'number', minimum: 0 },
+    handleBackPressure: { type: 'boolean', default: false },
     tls: { type: 'object', additionalProperties: true }, // No validation as they come from Node.js
     tlsServerName: { oneOf: [{ type: 'boolean' }, { type: 'string' }] },
     sasl: {
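
The new `handleBackPressure` flag is declared in the shared base options schema (boolean, default `false`), so every client type inherits it. A hedged usage sketch, following the constructor shape documented in the package README:

```js
import { Producer, stringSerializers } from '@platformatic/kafka'

// With handleBackPressure enabled, connections honor socket.write()
// backpressure and pause outgoing requests until the socket drains.
const producer = new Producer({
  clientId: 'my-producer', // hypothetical values
  bootstrapBrokers: ['localhost:9092'],
  serializers: stringSerializers,
  handleBackPressure: true
})
```
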
@@ -24,14 +24,15 @@ export interface ConnectionOptions {
     tlsServerName?: string | boolean;
     sasl?: SASLOptions;
     ownerId?: number;
+    handleBackPressure?: boolean;
 }
 export interface Request {
     correlationId: number;
     apiKey: number;
     apiVersion: number;
-    hasRequestHeaderTaggedFields: boolean;
     hasResponseHeaderTaggedFields: boolean;
-    payload: () => Writer;
+    noResponse: boolean;
+    payload: Buffer;
     parser: ResponseParser<unknown>;
     callback: Callback<any>;
     diagnostic: Record<string, unknown>;
@@ -62,5 +63,5 @@ export declare class Connection extends EventEmitter {
     ready(): Promise<void>;
     close(callback: CallbackWithPromise<void>): void;
     close(): Promise<void>;
-    send<ReturnType>(apiKey: number, apiVersion: number, payload: () => Writer, responseParser: ResponseParser<ReturnType>, hasRequestHeaderTaggedFields: boolean, hasResponseHeaderTaggedFields: boolean, callback: Callback<ReturnType>): void;
+    send<ReturnType>(apiKey: number, apiVersion: number, createPayload: () => Writer, responseParser: ResponseParser<ReturnType>, hasRequestHeaderTaggedFields: boolean, hasResponseHeaderTaggedFields: boolean, callback: Callback<ReturnType>): void;
 }

@@ -38,6 +38,7 @@ export class Connection extends EventEmitter {
     #clientId;
     // @ts-ignore This is used just for debugging
     #ownerId;
+    #handleBackPressure;
     #correlationId;
     #nextMessage;
     #afterDrainRequests;
@@ -56,6 +57,7 @@
         this.#status = ConnectionStatuses.NONE;
         this.#clientId = clientId;
         this.#ownerId = options.ownerId;
+        this.#handleBackPressure = options.handleBackPressure ?? false;
         this.#correlationId = 0;
         this.#nextMessage = 0;
         this.#afterDrainRequests = [];
@@ -134,7 +136,9 @@
             this.#socket.removeListener('error', connectionErrorHandler);
             this.#socket.on('error', this.#onError.bind(this));
             this.#socket.on('data', this.#onData.bind(this));
-            this.#socket.on('drain', this.#onDrain.bind(this));
+            if (this.#handleBackPressure) {
+                this.#socket.on('drain', this.#onDrain.bind(this));
+            }
             this.#socket.on('close', this.#onClose.bind(this));
             this.#socket.setTimeout(0);
             if (this.#options.sasl) {
@@ -205,26 +209,43 @@
         this.#socket.end();
         return callback[kCallbackPromise];
     }
-    send(apiKey, apiVersion, payload, responseParser, hasRequestHeaderTaggedFields, hasResponseHeaderTaggedFields, callback) {
+    send(apiKey, apiVersion, createPayload, responseParser, hasRequestHeaderTaggedFields, hasResponseHeaderTaggedFields, callback) {
+        const correlationId = ++this.#correlationId;
+        const diagnostic = createDiagnosticContext({
+            connection: this,
+            operation: 'send',
+            apiKey,
+            apiVersion,
+            correlationId
+        });
+        const writer = Writer.create();
+        writer.appendInt16(apiKey).appendInt16(apiVersion).appendInt32(correlationId).appendString(this.#clientId, false);
+        if (hasRequestHeaderTaggedFields) {
+            writer.appendTaggedFields();
+        }
+        let payload;
+        try {
+            payload = createPayload();
+        }
+        catch (err) {
+            diagnostic.error = err;
+            connectionsApiChannel.error.publish(diagnostic);
+            throw err;
+        }
+        writer.appendFrom(payload).prependLength();
+        const request = {
+            correlationId,
+            apiKey,
+            apiVersion,
+            parser: responseParser,
+            payload: writer.buffer,
+            callback: null, // Will be set later
+            hasResponseHeaderTaggedFields,
+            noResponse: payload.context.noResponse ?? false,
+            diagnostic
+        };
         this.#requestsQueue.push(fastQueueCallback => {
-            const correlationId = ++this.#correlationId;
-            const request = {
-                correlationId,
-                apiKey,
-                apiVersion,
-                hasRequestHeaderTaggedFields,
-                hasResponseHeaderTaggedFields,
-                parser: responseParser,
-                payload,
-                callback: fastQueueCallback,
-                diagnostic: createDiagnosticContext({
-                    connection: this,
-                    operation: 'send',
-                    apiKey,
-                    apiVersion,
-                    correlationId
-                })
-            };
+            request.callback = fastQueueCallback;
            if (this.#socketMustBeDrained) {
                this.#afterDrainRequests.push(request);
                return false;
@@ -274,37 +295,31 @@
                 request.callback(new NetworkError('Connection closed'), undefined);
                 return false;
             }
-            const writer = Writer.create();
-            writer
-                .appendInt16(request.apiKey)
-                .appendInt16(request.apiVersion)
-                .appendInt32(request.correlationId)
-                .appendString(this.#clientId, false);
-            if (request.hasRequestHeaderTaggedFields) {
-                writer.appendTaggedFields();
-            }
-            const payload = request.payload();
-            writer.appendFrom(payload).prependLength();
-            const expectResponse = !payload.context.noResponse;
-            if (expectResponse)
+            if (!request.noResponse) {
                 this.#inflightRequests.set(request.correlationId, request);
-            const canWrite = this.#socket.write(writer.buffer);
-            if (!canWrite)
+            }
+            let canWrite = this.#socket.write(request.payload);
+            if (!this.#handleBackPressure) {
+                canWrite = true;
+            }
+            if (!canWrite) {
                 this.#socketMustBeDrained = true;
-            if (!expectResponse)
+            }
+            if (request.noResponse) {
                 request.callback(null, canWrite);
+            }
             loggers.protocol('Sending request.', {
                 apiKey: protocolAPIsById[request.apiKey],
                 correlationId: request.correlationId,
                 request
             });
             return canWrite;
+            /* c8 ignore next 8 - Hard to test */
         }
         catch (err) {
             request.diagnostic.error = err;
             connectionsApiChannel.error.publish(request.diagnostic);
             throw err;
-            /* c8 ignore next 3 - Hard to test */
         }
         finally {
             connectionsApiChannel.end.publish(request.diagnostic);
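
The write path above is where the new option takes effect: when `handleBackPressure` is `false` (the default) the return value of `socket.write()` is forced to `true`, so the queue never pauses and Node.js buffers writes internally; when it is `true`, a `false` return sets `#socketMustBeDrained` and subsequent requests wait for `'drain'`. The underlying pattern is standard Node.js stream backpressure, shown here in isolation (illustrative only, not the library's internals):

```js
import net from 'node:net'

// Write chunks one at a time, pausing whenever the socket's internal buffer
// is full and resuming once it flushes - the same contract the Connection
// follows when handleBackPressure is enabled.
function writeAll (socket, chunks, onDone) {
  let i = 0
  const next = () => {
    while (i < chunks.length) {
      if (!socket.write(chunks[i++])) {
        socket.once('drain', next) // resume after the buffer drains
        return
      }
    }
    onDone()
  }
  next()
}

const socket = net.connect(9092, 'localhost') // hypothetical endpoint
writeAll(socket, [Buffer.from('a'), Buffer.from('b')], () => socket.end())
```
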
@@ -453,8 +468,7 @@
         this.emit('close');
         const error = new NetworkError('Connection closed');
         for (const request of this.#afterDrainRequests) {
-            const payload = request.payload();
-            if (!payload.context.noResponse) {
+            if (!request.noResponse) {
                 request.callback(error, undefined);
             }
         }

@@ -78,6 +78,7 @@ export function loadNativeCRC32C() {
                 : new Uint8Array(data);
             return nativeImplementation(bytes);
         };
+        /* c8 ignore next 3 - Hard to test */
    }
    catch (error) {
        return null;
@@ -93,4 +94,5 @@ export function jsCRC32C(data) {
     }
     return (crc ^ 0xffffffff) >>> 0;
 }
+/* c8 ignore next - Hard to test */
 export const crc32c = loadNativeCRC32C() ?? jsCRC32C;

@@ -48,12 +48,18 @@ export class DynamicBuffer {
         return this;
     }
     appendFrom(DynamicBuffer) {
-        this.buffers.push(...DynamicBuffer.buffers);
+        const buffers = DynamicBuffer.buffers;
+        for (let i = 0; i < buffers.length; i++) {
+            this.buffers.push(buffers[i]);
+        }
         this.length += DynamicBuffer.length;
         return this;
     }
     prependFrom(DynamicBuffer) {
-        this.buffers.unshift(...DynamicBuffer.buffers);
+        const buffers = DynamicBuffer.buffers;
+        for (let i = buffers.length - 1; i >= 0; i--) {
+            this.buffers.unshift(buffers[i]);
+        }
         this.length += DynamicBuffer.length;
         return this;
     }
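
`appendFrom()` and `prependFrom()` no longer spread the source chunk list into `push()`/`unshift()`: spreading passes every element as a call argument, which can throw a `RangeError` once the array is large enough, while an indexed loop works for any length. Note that `prependFrom()` iterates in reverse so repeated single-element `unshift()` calls preserve the original chunk order. A small illustration (the exact threshold is engine-dependent):

```js
const many = new Array(500_000).fill(Buffer.alloc(0))
const target = []
// target.push(...many) // can throw "RangeError: Maximum call stack size exceeded"
for (let i = 0; i < many.length; i++) {
  target.push(many[i]) // safe at any size
}
```
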
@@ -374,7 +380,7 @@ export class DynamicBuffer {
         return this;
     }
     writeUnsignedVarInt(value, append = true) {
-        const buffer = Buffer.alloc(sizeOfUnsignedVarInt(value));
+        const buffer = Buffer.allocUnsafe(sizeOfUnsignedVarInt(value));
         let position = 0;
         while ((value & BITS_8PLUS_MASK) !== 0) {
             buffer.writeUInt8((value & LEAST_SIGNIFICANT_7_BITS) | MOST_SIGNIFICANT_BIT_FLAG, position);
@@ -390,7 +396,7 @@
         }
     }
     writeUnsignedVarInt64(value, append = true) {
-        const buffer = Buffer.alloc(sizeOfUnsignedVarInt64(value));
+        const buffer = Buffer.allocUnsafe(sizeOfUnsignedVarInt64(value));
         let position = 0;
         while ((value & BITS_8PLUS_MASK_64) !== 0n) {
             buffer.writeUInt8(Number((value & LEAST_SIGNIFICANT_7_BITS_64) | MOST_SIGNIFICANT_BIT_FLAG_64), position);
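
Switching the varint scratch buffers to `Buffer.allocUnsafe()` skips the zero-fill that `Buffer.alloc()` performs, which is safe here only because the encoders write every byte before the buffer escapes:

```js
const zeroed = Buffer.alloc(4)    // zero-filled: contents are predictable
const raw = Buffer.allocUnsafe(4) // uninitialized: may expose stale memory
raw.writeUInt32BE(0xdeadbeef, 0)  // must be fully overwritten before use
```
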
@@ -5,28 +5,30 @@ declare const instanceIdentifier: unique symbol;
 export declare class Reader {
     buffer: DynamicBuffer;
     position: number;
+    length: number;
     [instanceIdentifier]: boolean;
     static isReader(target: any): boolean;
     static from(buffer: Buffer | DynamicBuffer | Writer): Reader;
     constructor(buffer: DynamicBuffer);
+    get remaining(): number;
     reset(buffer?: Buffer | DynamicBuffer): void;
     inspect(): string;
     skip(length: number): this;
-    peekUnsignedInt8(): number;
-    peekUnsignedInt16(): number;
-    peekUnsignedInt32(): number;
-    peekUnsignedInt64(): bigint;
-    peekUnsignedVarInt(): number;
-    peekUnsignedVarInt64(): bigint;
-    peekInt8(): number;
-    peekInt16(): number;
-    peekInt32(): number;
-    peekInt64(): bigint;
-    peekFloat64(): number;
-    peekVarInt(): number;
-    peekVarInt64(): bigint;
-    peekBoolean(): boolean;
-    peekUUID(): string;
+    peekUnsignedInt8(position?: number): number;
+    peekUnsignedInt16(position?: number): number;
+    peekUnsignedInt32(position?: number): number;
+    peekUnsignedInt64(position?: number): bigint;
+    peekUnsignedVarInt(position?: number): number;
+    peekUnsignedVarInt64(position?: number): bigint;
+    peekInt8(position?: number): number;
+    peekInt16(position?: number): number;
+    peekInt32(position?: number): number;
+    peekInt64(position?: number): bigint;
+    peekFloat64(position?: number): number;
+    peekVarInt(position?: number): number;
+    peekVarInt64(position?: number): bigint;
+    peekBoolean(position?: number): boolean;
+    peekUUID(position?: number): string;
     readUnsignedInt8(): number;
     readUnsignedInt16(): number;
     readUnsignedInt32(): number;

@@ -5,6 +5,7 @@ const instanceIdentifier = Symbol('plt.kafka.reader.instanceIdentifier');
 export class Reader {
     buffer;
     position;
+    length;
     [instanceIdentifier];
     static isReader(target) {
         return target?.[instanceIdentifier] === true;
@@ -21,8 +22,12 @@
     constructor(buffer) {
         this.buffer = buffer;
         this.position = 0;
+        this.length = this.buffer.length;
         this[instanceIdentifier] = true;
     }
+    get remaining() {
+        return this.length - this.position;
+    }
     reset(buffer) {
         if (buffer) {
             if (Buffer.isBuffer(buffer)) {
@@ -43,50 +48,51 @@
         this.position += length;
         return this;
     }
-    peekUnsignedInt8() {
-        return this.buffer.readUInt8(this.position);
+    peekUnsignedInt8(position) {
+        return this.buffer.readUInt8(position ?? this.position);
     }
-    peekUnsignedInt16() {
-        return this.buffer.readUInt16BE(this.position);
+    peekUnsignedInt16(position) {
+        return this.buffer.readUInt16BE(position ?? this.position);
     }
-    peekUnsignedInt32() {
-        return this.buffer.readUInt32BE(this.position);
+    peekUnsignedInt32(position) {
+        return this.buffer.readUInt32BE(position ?? this.position);
     }
-    peekUnsignedInt64() {
-        return this.buffer.readBigUInt64BE(this.position);
+    peekUnsignedInt64(position) {
+        return this.buffer.readBigUInt64BE(position ?? this.position);
     }
-    peekUnsignedVarInt() {
-        return this.buffer.readUnsignedVarInt(this.position)[0];
+    peekUnsignedVarInt(position) {
+        return this.buffer.readUnsignedVarInt(position ?? this.position)[0];
     }
-    peekUnsignedVarInt64() {
-        return this.buffer.readUnsignedVarInt64(this.position)[0];
+    peekUnsignedVarInt64(position) {
+        return this.buffer.readUnsignedVarInt64(position ?? this.position)[0];
     }
-    peekInt8() {
-        return this.buffer.readInt8(this.position);
+    peekInt8(position) {
+        return this.buffer.readInt8(position ?? this.position);
     }
-    peekInt16() {
-        return this.buffer.readInt16BE(this.position);
+    peekInt16(position) {
+        return this.buffer.readInt16BE(position ?? this.position);
     }
-    peekInt32() {
-        return this.buffer.readInt32BE(this.position);
+    peekInt32(position) {
+        return this.buffer.readInt32BE(position ?? this.position);
    }
-    peekInt64() {
-        return this.buffer.readBigInt64BE(this.position);
+    peekInt64(position) {
+        return this.buffer.readBigInt64BE(position ?? this.position);
     }
-    peekFloat64() {
-        return this.buffer.readDoubleBE(this.position);
+    peekFloat64(position) {
+        return this.buffer.readDoubleBE(position ?? this.position);
     }
-    peekVarInt() {
-        return this.buffer.readVarInt(this.position)[0];
+    peekVarInt(position) {
+        return this.buffer.readVarInt(position ?? this.position)[0];
     }
-    peekVarInt64() {
-        return this.buffer.readVarInt64(this.position)[0];
+    peekVarInt64(position) {
+        return this.buffer.readVarInt64(position ?? this.position)[0];
     }
-    peekBoolean() {
-        return this.buffer.readInt8(this.position) === 1;
+    peekBoolean(position) {
+        return this.buffer.readInt8(position ?? this.position) === 1;
     }
-    peekUUID() {
-        return this.buffer.toString('hex', this.position, this.position + UUID_SIZE);
+    peekUUID(position) {
+        position ??= this.position;
+        return this.buffer.toString('hex', position, position + UUID_SIZE);
     }
     readUnsignedInt8() {
         const value = this.peekUnsignedInt8();
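
Every `peek*` method now accepts an optional absolute `position` (defaulting to the current one) and, unlike the `read*` methods, never advances the cursor; combined with the new `length` field and `remaining` getter, this enables the look-ahead used by `readRecordsBatches()` below. A minimal sketch, assuming `Reader` is reachable from the package exports:

```js
const reader = Reader.from(someBuffer)
const INT64_SIZE = 8 // assumed sizes, matching the protocol definitions
const INT32_SIZE = 4

// Peek at an explicit offset without moving reader.position:
const batchLength = reader.peekInt32(reader.position + INT64_SIZE)
if (reader.remaining - (INT64_SIZE + INT32_SIZE) >= batchLength) {
  // The whole batch is present in the buffer and can be read safely.
}
```
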
@@ -108,3 +108,4 @@ export declare function createRecord(message: MessageRecord, offsetDelta: number
 export declare function readRecord(reader: Reader): KafkaRecord;
 export declare function createRecordsBatch(messages: MessageRecord[], options?: Partial<CreateRecordsBatchOptions>): Writer;
 export declare function readRecordsBatch(reader: Reader): RecordsBatch;
+export declare function readRecordsBatches(reader: Reader): RecordsBatch[];

@@ -8,6 +8,7 @@ import { Writer } from "./writer.js";
 const CURRENT_RECORD_VERSION = 2;
 const IS_TRANSACTIONAL = 0b10000; // Bit 4 set
 const IS_COMPRESSED = 0b111; // Bits 0, 1 and/or 2 set
+const BATCH_HEAD = INT64_SIZE + INT32_SIZE; // FirstOffset + Length
 export const messageSchema = {
     type: 'object',
     properties: {
@@ -117,6 +118,7 @@ export function createRecordsBatch(messages, options = {}) {
     // FirstOffset is 0
         .appendInt64(0n, false));
 }
+// TODO: Early bail out if there are not enough bytes to read all the records as it might be truncated
 export function readRecordsBatch(reader) {
     const initialPosition = reader.position;
     const batch = {
@@ -155,3 +157,11 @@ export function readRecordsBatch(reader) {
     }
     return batch;
 }
+export function readRecordsBatches(reader) {
+    const batches = [];
+    while (reader.remaining >= BATCH_HEAD &&
+        reader.remaining - BATCH_HEAD >= reader.peekInt32(reader.position + INT64_SIZE)) {
+        batches.push(readRecordsBatch(reader));
+    }
+    return batches;
+}
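
`readRecordsBatches()` peeks at each batch head (an int64 `FirstOffset` followed by an int32 `Length`, i.e. `BATCH_HEAD` bytes) and only calls `readRecordsBatch()` when the declared length fits in `reader.remaining`, so a batch truncated by `maxBytes` is skipped rather than raising `OutOfBoundsError`. A hedged usage sketch (field access is an assumption based on the `RecordsBatch` declaration above):

```js
const batches = readRecordsBatches(Reader.from(partitionPayload))
for (const batch of batches) {
  // Every entry is a complete RecordsBatch; a partial trailing batch has
  // been silently omitted instead of throwing.
  console.log(batch.records?.length ?? 0)
}
```
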
@@ -1,5 +1,5 @@
 import { humanize } from "../utils.js";
-import { EMPTY_UUID } from "./definitions.js";
+import { EMPTY_TAGGED_FIELDS_BUFFER, EMPTY_UUID } from "./definitions.js";
 import { DynamicBuffer } from "./dynamic-buffer.js";
 const instanceIdentifier = Symbol('plt.kafka.writer.instanceIdentifier');
 export class Writer {
@@ -212,7 +212,7 @@ export class Writer {
     }
     // TODO(ShogunPanda): Tagged fields are not supported yet
     appendTaggedFields(_ = []) {
-        return this.appendInt8(0);
+        return this.append(EMPTY_TAGGED_FIELDS_BUFFER);
     }
     prependLength() {
         return this.appendInt32(this.length, false);
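
`appendTaggedFields()` now appends a shared preallocated buffer instead of encoding an int8 zero on every call, saving one small allocation per request header. Presumably (an assumption, since `definitions.js` is not part of this diff) the constant is a one-byte zero buffer:

```js
// Hypothetical equivalent of the imported constant:
const EMPTY_TAGGED_FIELDS_BUFFER = Buffer.from([0]) // "no tagged fields" marker
```
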
package/dist/version.js CHANGED
@@ -1,2 +1,2 @@
 export const name = "@platformatic/kafka";
-export const version = "1.16.0";
+export const version = "1.17.0";

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@platformatic/kafka",
-  "version": "1.16.0",
+  "version": "1.17.0",
   "description": "Modern and performant client for Apache Kafka",
   "homepage": "https://github.com/platformatic/kafka",
   "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",
@@ -37,6 +37,7 @@
     "snappy": "^7.3.3"
   },
   "devDependencies": {
+    "@confluentinc/kafka-javascript": "^1.5.0",
     "@platformatic/rdkafka": "^4.0.0",
     "@types/debug": "^4.1.12",
     "@types/node": "^22.18.5",
@@ -49,10 +50,9 @@
     "eslint": "^9.35.0",
     "fast-jwt": "^6.0.2",
     "hwp": "^0.4.1",
-    "json5": "^2.2.3",
     "kafkajs": "^2.2.4",
+    "json5": "^2.2.3",
     "neostandard": "^0.12.2",
-    "node-rdkafka": "^3.5.0",
     "parse5": "^7.3.0",
     "prettier": "^3.6.2",
     "prettier-plugin-space-before-function-paren": "^0.0.8",