kafka-ts 0.0.6-beta.3 → 0.0.6-beta.4

@@ -1,4 +1,3 @@
- /// <reference types="node" />
  export declare const enum IsolationLevel {
  READ_UNCOMMITTED = 0,
  READ_COMMITTED = 1
@@ -35,11 +34,11 @@ export declare const FETCH: import("../utils/api").Api<{
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer | null;
- value: Buffer | null;
+ key: string;
+ value: string;
  }[];
  }[];
  compression: number;
package/dist/api/fetch.js CHANGED
@@ -133,10 +133,10 @@ const decodeRecord = (decoder) => decoder.readRecords((record) => ({
  attributes: record.readInt8(),
  timestampDelta: record.readVarLong(),
  offsetDelta: record.readVarInt(),
- key: record.readVarIntBuffer(),
- value: record.readVarIntBuffer(),
+ key: record.readVarIntString(),
+ value: record.readVarIntString(),
  headers: record.readVarIntArray((header) => ({
- key: header.readVarIntBuffer(),
- value: header.readVarIntBuffer(),
+ key: header.readVarIntString(),
+ value: header.readVarIntString(),
  })),
  }));
@@ -97,11 +97,11 @@ export declare const API: {
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer | null;
- value: Buffer | null;
+ key: string;
+ value: string;
  }[];
  }[];
  compression: number;
@@ -384,11 +384,11 @@ export declare const API: {
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer;
- value: Buffer;
+ key: string;
+ value: string;
  }[];
  }[];
  }[];
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  export declare const PRODUCE: import("../utils/api.js").Api<{
  transactionalId: string | null;
  acks: number;
@@ -20,11 +19,11 @@ export declare const PRODUCE: import("../utils/api.js").Api<{
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer;
- value: Buffer;
+ key: string;
+ value: string;
  }[];
  }[];
  }[];
@@ -28,10 +28,10 @@ exports.PRODUCE = (0, api_js_1.createApi)({
  .writeInt8(record.attributes)
  .writeVarLong(record.timestampDelta)
  .writeVarInt(record.offsetDelta)
- .writeVarIntBuffer(record.key)
- .writeVarIntBuffer(record.value)
- .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value));
- return encoder.writeVarInt(recordBody.getByteLength()).writeEncoder(recordBody);
+ .writeVarIntString(record.key)
+ .writeVarIntString(record.value)
+ .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntString(header.key).writeVarIntString(header.value));
+ return encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody);
  })
  .value();
  const batchHeader = new encoder_js_1.Encoder()
@@ -41,11 +41,11 @@ exports.PRODUCE = (0, api_js_1.createApi)({
  .write(batchBody);
  const batch = new encoder_js_1.Encoder()
  .writeInt64(partition.baseOffset)
- .writeInt32(batchHeader.getByteLength())
+ .writeInt32(batchHeader.getBufferLength())
  .writeEncoder(batchHeader);
  return encoder
  .writeInt32(partition.index)
- .writeUVarInt(batch.getByteLength() + 1) // batch size
+ .writeUVarInt(batch.getBufferLength() + 1)
  .writeEncoder(batch)
  .writeUVarInt(0);
  })
@@ -136,12 +136,12 @@ vitest_1.describe.sequential('Low-level API', () => {
  attributes: 0,
  offsetDelta: 0,
  timestampDelta: 0n,
- key: Buffer.from('key'),
- value: Buffer.from('value'),
+ key: 'key',
+ value: 'value',
  headers: [
  {
- key: Buffer.from('header-key'),
- value: Buffer.from('header-value'),
+ key: 'header-key',
+ value: 'header-value',
  },
  ],
  },
@@ -96,7 +96,7 @@ class Connection {
  .writeInt32(correlationId)
  .writeString(this.options.clientId);
  const request = api.request(encoder, body);
- const requestEncoder = new encoder_1.Encoder().writeInt32(request.getByteLength()).writeEncoder(request);
+ const requestEncoder = new encoder_1.Encoder().writeInt32(request.getBufferLength()).writeEncoder(request);
  let timeout;
  const { responseDecoder, responseSize } = await new Promise(async (resolve, reject) => {
  timeout = setTimeout(() => {
@@ -15,7 +15,7 @@ const defaultPartitioner = ({ metadata }) => {
  const partitions = metadata.getTopicPartitions()[topic];
  const numPartitions = partitions.length;
  if (key) {
- return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(key)) % numPartitions;
+ return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(Buffer.from(key))) % numPartitions;
  }
  return (0, murmur2_1.toPositive)(getNextValue(topic)) % numPartitions;
  };
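
Since message keys are now strings rather than Buffers, the default partitioner converts the key back to bytes before hashing, because murmur2 operates on raw bytes. A minimal TypeScript sketch of the equivalent computation, with the hash helpers passed in rather than imported (their module path is not shown in this diff):

// Sketch only: partition selection for a string key, mirroring the updated
// defaultPartitioner, which hashes the UTF-8 bytes of the key.
const partitionForKey = (
    key: string,
    numPartitions: number,
    murmur2: (bytes: Buffer) => number,   // the package's murmur2 util, supplied by the caller
    toPositive: (n: number) => number,
): number => toPositive(murmur2(Buffer.from(key))) % numPartitions;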
@@ -20,6 +20,6 @@ export declare class Producer {
  close(): Promise<void>;
  private ensureConnected;
  private initProducerId;
- private nextBaseSequence;
- private revertBaseSequence;
+ private getSequence;
+ private updateSequence;
  }
@@ -58,7 +58,6 @@ class Producer {
  maxTimestamp = timestamp;
  }
  });
- const baseSequence = this.nextBaseSequence(topic, partitionIndex, messages.length);
  return {
  index: partitionIndex,
  baseOffset: 0n,
@@ -69,37 +68,29 @@ class Producer {
  maxTimestamp: maxTimestamp ?? 0n,
  producerId: this.producerId,
  producerEpoch: 0,
- baseSequence,
+ baseSequence: this.getSequence(topic, partitionIndex),
  records: messages.map((message, index) => ({
  attributes: 0,
  timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
  offsetDelta: index,
  key: message.key ?? null,
  value: message.value,
- headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
- key: Buffer.from(key),
- value: Buffer.from(value),
- })),
+ headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({ key, value })),
  })),
  };
  }),
  }));
- try {
- return await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
- transactionalId: null,
- acks,
- timeoutMs: 5000,
- topicData,
- });
- }
- catch (error) {
- topicData.forEach(({ name, partitionData }) => {
- partitionData.forEach(({ index, records }) => {
- this.revertBaseSequence(name, index, records.length);
- });
+ await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
+ transactionalId: null,
+ acks,
+ timeoutMs: 5000,
+ topicData,
+ });
+ topicData.forEach(({ name, partitionData }) => {
+ partitionData.forEach(({ index, records }) => {
+ this.updateSequence(name, index, records.length);
  });
- throw error;
- }
+ });
  }));
  }
  async close() {
@@ -129,15 +120,13 @@ class Producer {
  throw error;
  }
  }
- nextBaseSequence(topic, partition, messagesCount) {
+ getSequence(topic, partition) {
+ return this.sequences[topic]?.[partition] ?? 0;
+ }
+ updateSequence(topic, partition, messagesCount) {
  this.sequences[topic] ??= {};
  this.sequences[topic][partition] ??= 0;
- const baseSequence = this.sequences[topic][partition];
  this.sequences[topic][partition] += messagesCount;
- return baseSequence;
- }
- revertBaseSequence(topic, partition, messagesCount) {
- this.sequences[topic][partition] -= messagesCount;
  }
  }
  exports.Producer = Producer;
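
The idempotent-sequence bookkeeping around PRODUCE also changes shape: the base sequence is read with getSequence() when the batch is built, and it is only advanced with updateSequence() after the request succeeds; failures now propagate instead of being caught and reverted. A simplified, self-contained sketch of that ordering (hypothetical names, not the package's code):

// Simplified sketch of the new sequence handling; names are hypothetical.
class SequenceTracker {
    private sequences: Record<string, Record<number, number>> = {};

    // Current base sequence for a topic-partition (0 if unseen).
    getSequence(topic: string, partition: number): number {
        return this.sequences[topic]?.[partition] ?? 0;
    }

    // Advance the sequence only after the broker has accepted the batch.
    updateSequence(topic: string, partition: number, count: number): void {
        this.sequences[topic] ??= {};
        this.sequences[topic][partition] ??= 0;
        this.sequences[topic][partition] += count;
    }
}

async function produceOnce(
    tracker: SequenceTracker,
    topic: string,
    partition: number,
    recordCount: number,
    send: (baseSequence: number) => Promise<void>,
): Promise<void> {
    const baseSequence = tracker.getSequence(topic, partition);
    await send(baseSequence);                             // errors propagate; nothing to roll back
    tracker.updateSequence(topic, partition, recordCount);
}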
package/dist/types.d.ts CHANGED
@@ -1,11 +1,10 @@
- /// <reference types="node" />
  export type Message = {
  topic: string;
  partition?: number;
  offset?: bigint;
  timestamp?: bigint;
- key?: Buffer | null;
- value: Buffer | null;
+ key?: string | null;
+ value: string | null;
  headers?: Record<string, string>;
  };
  export type Batch = Required<Message>[];
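
With these types, keys, values, and header values are plain UTF-8 strings end to end. A minimal usage sketch (assuming the Message type is exported from the package root; binary payloads would now need an explicit text encoding such as base64):

// Sketch: a Message under the 0.0.6-beta.4 types. The root export of Message is assumed.
import type { Message } from 'kafka-ts';

const message: Message = {
    topic: 'orders',
    key: 'order-42',                                 // was Buffer | null in 0.0.6-beta.3
    value: JSON.stringify({ id: 42, total: 99.5 }),
    headers: { 'content-type': 'application/json' },
};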
@@ -16,7 +16,7 @@ export declare class Decoder {
  readVarLong(): bigint;
  readString(): string | null;
  readCompactString(): string | null;
- readVarIntBuffer(): Buffer | null;
+ readVarIntString(): string | null;
  readUUID(): string;
  readBoolean(): boolean;
  readArray<T>(callback: (opts: Decoder) => T): T[];
@@ -86,12 +86,12 @@ class Decoder {
  this.offset += length;
  return value;
  }
- readVarIntBuffer() {
+ readVarIntString() {
  const length = this.readVarInt();
  if (length < 0) {
  return null;
  }
- const value = this.buffer.subarray(this.offset, this.offset + length);
+ const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
  this.offset += length;
  return value;
  }
@@ -131,7 +131,7 @@ class Decoder {
  }
  read(length) {
  const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
- this.offset += Buffer.byteLength(value);
+ this.offset += value.length;
  return value;
  }
  readBytes() {
@@ -2,7 +2,7 @@
  export declare class Encoder {
  private chunks;
  getChunks(): Buffer[];
- getByteLength(): number;
+ getBufferLength(): number;
  write(...buffers: Buffer[]): this;
  writeEncoder(encoder: Encoder): this;
  writeInt8(value: number): this;
@@ -16,7 +16,7 @@ export declare class Encoder {
  writeVarLong(value: bigint): this;
  writeString(value: string | null): this;
  writeCompactString(value: string | null): this;
- writeVarIntBuffer(buffer: Buffer | null): this;
+ writeVarIntString(value: string | null): this;
  writeUUID(value: string | null): this;
  writeBoolean(value: boolean): this;
  writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder): this;
@@ -6,8 +6,8 @@ class Encoder {
  getChunks() {
  return this.chunks;
  }
- getByteLength() {
- return this.chunks.reduce((acc, chunk) => acc + chunk.byteLength, 0);
+ getBufferLength() {
+ return this.chunks.reduce((acc, chunk) => acc + chunk.length, 0);
  }
  write(...buffers) {
  this.chunks.push(...buffers);
@@ -71,25 +71,22 @@ class Encoder {
  if (value === null) {
  return this.writeInt16(-1);
  }
- const byteLength = Buffer.byteLength(value, 'utf-8');
- const buffer = Buffer.allocUnsafe(byteLength);
- buffer.write(value, 0, byteLength, 'utf-8');
- return this.writeInt16(byteLength).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeInt16(buffer.length).write(buffer);
  }
  writeCompactString(value) {
  if (value === null) {
  return this.writeUVarInt(0);
  }
- const byteLength = Buffer.byteLength(value, 'utf-8');
- const buffer = Buffer.allocUnsafe(byteLength);
- buffer.write(value, 0, byteLength, 'utf-8');
- return this.writeUVarInt(byteLength + 1).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeUVarInt(buffer.length + 1).write(buffer);
  }
- writeVarIntBuffer(buffer) {
- if (buffer === null) {
+ writeVarIntString(value) {
+ if (value === null) {
  return this.writeVarInt(-1);
  }
- return this.writeVarInt(buffer.byteLength).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeVarInt(buffer.length).write(buffer);
  }
  writeUUID(value) {
  if (value === null) {
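
The renamed writeVarIntString/readVarIntString helpers keep the same wire framing as the old Buffer-based variants: a signed (zigzag) varint byte length, with -1 marking null, followed by the UTF-8 bytes. A standalone TypeScript illustration of that framing (not the package's code; the zigzag details follow the Kafka record format):

// Standalone illustration of the length-prefixed string framing; not the library's code.
const zigzag = (n: number): number => (n << 1) ^ (n >> 31);
const unzigzag = (u: number): number => (u >>> 1) ^ -(u & 1);

// Encode a signed 32-bit value as a zigzag varint (7 bits per byte, MSB = continuation).
const writeVarInt = (n: number): Buffer => {
    let u = zigzag(n) >>> 0;
    const bytes: number[] = [];
    do {
        let byte = u & 0x7f;
        u >>>= 7;
        if (u !== 0) byte |= 0x80;
        bytes.push(byte);
    } while (u !== 0);
    return Buffer.from(bytes);
};

// null is written as length -1; otherwise a varint byte length followed by UTF-8 bytes.
const writeVarIntString = (value: string | null): Buffer => {
    if (value === null) return writeVarInt(-1);
    const body = Buffer.from(value, 'utf-8');
    return Buffer.concat([writeVarInt(body.length), body]);
};

// Read the varint length, then decode that many bytes as UTF-8 (negative length means null).
const readVarIntString = (buf: Buffer, offset = 0): string | null => {
    let u = 0;
    let shift = 0;
    let byte = 0;
    do {
        byte = buf[offset++];
        u |= (byte & 0x7f) << shift;
        shift += 7;
    } while (byte & 0x80);
    const length = unzigzag(u >>> 0);
    return length < 0 ? null : buf.toString('utf-8', offset, offset + length);
};

console.log(readVarIntString(writeVarIntString('hello'))); // 'hello'
console.log(readVarIntString(writeVarIntString(null)));    // null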
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "kafka-ts",
- "version": "0.0.6-beta.3",
+ "version": "0.0.6-beta.4",
  "main": "dist/index.js",
  "author": "Priit Käärd",
  "license": "MIT",