kafka-ts 0.0.17-beta.2 → 0.0.17-beta.3

@@ -41,17 +41,17 @@ export declare const FETCH: import("../utils/api").Api<{
             value: string;
         }[];
     }[];
-    compression: number;
-    timestampType: string;
-    isTransactional: boolean;
-    isControlBatch: boolean;
-    hasDeleteHorizonMs: boolean;
     baseOffset: bigint;
     batchLength: number;
     partitionLeaderEpoch: number;
     magic: number;
     crc: number;
     attributes: number;
+    compression: number;
+    timestampType: string;
+    isTransactional: boolean;
+    isControlBatch: boolean;
+    hasDeleteHorizonMs: boolean;
     lastOffsetDelta: number;
     baseTimestamp: bigint;
     maxTimestamp: bigint;
package/dist/api/fetch.js CHANGED
@@ -5,6 +5,7 @@ const codecs_1 = require("../codecs");
 const api_1 = require("../utils/api");
 const decoder_1 = require("../utils/decoder");
 const error_1 = require("../utils/error");
+const logger_1 = require("../utils/logger");
 exports.FETCH = (0, api_1.createApi)({
     apiKey: 1,
     apiVersion: 15,
@@ -90,41 +91,62 @@ const decodeRecordBatch = (decoder) => {
     }
     const recordBatchDecoder = new decoder_1.Decoder(decoder.read(size));
     const results = [];
-    while (recordBatchDecoder.getBufferLength() > recordBatchDecoder.getOffset() + 12) {
+    while (recordBatchDecoder.canReadBytes(12)) {
         const baseOffset = recordBatchDecoder.readInt64();
         const batchLength = recordBatchDecoder.readInt32();
         if (!batchLength) {
             continue;
         }
+        if (!recordBatchDecoder.canReadBytes(batchLength)) {
+            // likely running into maxBytes limit
+            logger_1.log.debug('Record batch is incomplete, skipping last batch.');
+            recordBatchDecoder.read();
+            continue;
+        }
         const batchDecoder = new decoder_1.Decoder(recordBatchDecoder.read(batchLength));
-        const result = {
+        const partitionLeaderEpoch = batchDecoder.readInt32();
+        const magic = batchDecoder.readInt8();
+        if (magic !== 2) {
+            throw new error_1.KafkaTSError(`Unsupported magic byte: ${magic}`);
+        }
+        const crc = batchDecoder.readInt32();
+        const attributes = batchDecoder.readInt16();
+        const compression = attributes & 0x07;
+        const timestampType = (attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime';
+        const isTransactional = !!((attributes & 0x10) >> 4);
+        const isControlBatch = !!((attributes & 0x20) >> 5);
+        const hasDeleteHorizonMs = !!((attributes & 0x40) >> 6);
+        if (compression !== 0) {
+            throw new error_1.KafkaTSError(`Unsupported compression: ${compression}`);
+        }
+        const lastOffsetDelta = batchDecoder.readInt32();
+        const baseTimestamp = batchDecoder.readInt64();
+        const maxTimestamp = batchDecoder.readInt64();
+        const producerId = batchDecoder.readInt64();
+        const producerEpoch = batchDecoder.readInt16();
+        const baseSequence = batchDecoder.readInt32();
+        const recordsLength = batchDecoder.read(4);
+        const compressedRecords = batchDecoder.read();
+        results.push({
             baseOffset,
             batchLength,
-            partitionLeaderEpoch: batchDecoder.readInt32(),
-            magic: batchDecoder.readInt8(),
-            crc: batchDecoder.readUInt32(),
-            attributes: batchDecoder.readInt16(),
-            lastOffsetDelta: batchDecoder.readInt32(),
-            baseTimestamp: batchDecoder.readInt64(),
-            maxTimestamp: batchDecoder.readInt64(),
-            producerId: batchDecoder.readInt64(),
-            producerEpoch: batchDecoder.readInt16(),
-            baseSequence: batchDecoder.readInt32(),
-            recordsLength: batchDecoder.read(4),
-            compressedRecords: batchDecoder.read(),
-        };
-        const compression = result.attributes & 0x07;
-        const timestampType = (result.attributes & 0x08) >> 3 ? 'LogAppendTime' : 'CreateTime';
-        const isTransactional = !!((result.attributes & 0x10) >> 4);
-        const isControlBatch = !!((result.attributes & 0x20) >> 5);
-        const hasDeleteHorizonMs = !!((result.attributes & 0x40) >> 6);
-        results.push({
-            ...result,
+            partitionLeaderEpoch,
+            magic,
+            crc,
+            attributes,
             compression,
             timestampType,
             isTransactional,
             isControlBatch,
             hasDeleteHorizonMs,
+            lastOffsetDelta,
+            baseTimestamp,
+            maxTimestamp,
+            producerId,
+            producerEpoch,
+            baseSequence,
+            recordsLength,
+            compressedRecords,
         });
     }
     return results;
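
Note: the rewrite above unrolls the batch header into individual reads so the magic byte and compression codec can be validated before the rest of the batch is decoded (the old code read every field into `result` first). The new loop guard `canReadBytes(12)` also fixes an off-by-one: the old condition required strictly more than 12 remaining bytes, while the `baseOffset` (int64) plus `batchLength` (int32) header is exactly 12 bytes. The `attributes` field is the record batch v2 bit field from the Kafka protocol: bits 0-2 are the compression codec, bit 3 the timestamp type, bit 4 the transactional flag, bit 5 the control-batch flag, and bit 6 the delete-horizon flag. A minimal TypeScript sketch of that bit-field decoding; `parseAttributes` and `BatchAttributes` are hypothetical names, not part of kafka-ts:

    // Hypothetical helper illustrating the attributes bit field decoded above.
    type BatchAttributes = {
        compression: number;                           // bits 0-2 (0 = uncompressed)
        timestampType: 'CreateTime' | 'LogAppendTime'; // bit 3
        isTransactional: boolean;                      // bit 4
        isControlBatch: boolean;                       // bit 5
        hasDeleteHorizonMs: boolean;                   // bit 6
    };

    const parseAttributes = (attributes: number): BatchAttributes => ({
        compression: attributes & 0x07,
        timestampType: attributes & 0x08 ? 'LogAppendTime' : 'CreateTime',
        isTransactional: !!(attributes & 0x10),
        isControlBatch: !!(attributes & 0x20),
        hasDeleteHorizonMs: !!(attributes & 0x40),
    });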
@@ -104,17 +104,17 @@ export declare const API: {
             value: string;
         }[];
     }[];
-    compression: number;
-    timestampType: string;
-    isTransactional: boolean;
-    isControlBatch: boolean;
-    hasDeleteHorizonMs: boolean;
     baseOffset: bigint;
     batchLength: number;
     partitionLeaderEpoch: number;
     magic: number;
     crc: number;
     attributes: number;
+    compression: number;
+    timestampType: string;
+    isTransactional: boolean;
+    isControlBatch: boolean;
+    hasDeleteHorizonMs: boolean;
     lastOffsetDelta: number;
     baseTimestamp: bigint;
     maxTimestamp: bigint;
@@ -144,7 +144,7 @@ class Connection {
     handleData(buffer) {
         this.chunks.push(buffer);
         const decoder = new decoder_1.Decoder(Buffer.concat(this.chunks));
-        if (decoder.getBufferLength() < 4) {
+        if (!decoder.canReadBytes(4)) {
             return;
         }
         const size = decoder.readInt32();
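
Note: Kafka wire responses are int32 length-prefixed frames, so handleData has to buffer chunks until at least the 4-byte size prefix (and then the full body) has arrived. A standalone sketch of that framing check under those assumptions; `tryReadFrame` is illustrative, not the actual Connection code:

    // Illustrative framing check, not kafka-ts code: returns one complete
    // frame body once enough bytes have been buffered, else null.
    const tryReadFrame = (chunks: Buffer[]): Buffer | null => {
        const buffer = Buffer.concat(chunks);
        if (buffer.length < 4) return null;          // size prefix incomplete
        const size = buffer.readInt32BE(0);          // Kafka sizes are big-endian
        if (buffer.length < 4 + size) return null;   // body incomplete
        return buffer.subarray(4, 4 + size);         // one full response frame
    };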
@@ -5,6 +5,7 @@ export declare class Decoder {
     constructor(buffer: Buffer);
     getOffset(): number;
     getBufferLength(): number;
+    canReadBytes(bytes: number): boolean;
     readInt8(): number;
     readInt16(): number;
     readInt32(): number;
@@ -13,6 +13,9 @@ class Decoder {
     getBufferLength() {
         return this.buffer.length;
     }
+    canReadBytes(bytes) {
+        return this.getBufferLength() - this.getOffset() >= bytes;
+    }
     readInt8() {
         const value = this.buffer.readInt8(this.offset);
         this.offset += 1;
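
Note: canReadBytes centralizes the bounds arithmetic (getBufferLength() - getOffset() >= bytes) that the call sites in fetch.js and the Connection class previously wrote by hand. A reduced TypeScript sketch of how a caller uses it, assuming a Decoder holding only the members this diff shows:

    // Reduced Decoder sketch (internals assumed; only members from the diff).
    class Decoder {
        private offset = 0;
        constructor(private buffer: Buffer) {}
        getOffset() { return this.offset; }
        getBufferLength() { return this.buffer.length; }
        canReadBytes(bytes: number) {
            return this.getBufferLength() - this.getOffset() >= bytes;
        }
        readInt32() {
            const value = this.buffer.readInt32BE(this.offset); // assumed big-endian
            this.offset += 4;
            return value;
        }
    }

    // Call sites guard before reading, as fetch.js now does:
    const decoder = new Decoder(Buffer.from([0, 0, 0, 42]));
    if (decoder.canReadBytes(4)) {
        console.log(decoder.readInt32()); // 42
    }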
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "0.0.17-beta.2",
+    "version": "0.0.17-beta.3",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",