kafka-ts 0.0.6-beta.3 → 0.0.6-beta.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  export declare const enum IsolationLevel {
  READ_UNCOMMITTED = 0,
  READ_COMMITTED = 1
@@ -35,11 +34,11 @@ export declare const FETCH: import("../utils/api").Api<{
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer | null;
- value: Buffer | null;
+ key: string;
+ value: string;
  }[];
  }[];
  compression: number;
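
The visible theme of this release: record keys, values, and header fields move from Buffer to UTF-8 string across the fetch and produce APIs. A sketch of the per-record shape after the change (fields taken from the FETCH declaration above; the surrounding response type is omitted):

// Per-record shape in 0.0.6-beta.5, per the FETCH declaration above.
type FetchedRecord = {
    attributes: number;
    timestampDelta: bigint;
    offsetDelta: number;
    key: string | null;   // was Buffer | null
    value: string | null; // was Buffer | null
    headers: { key: string; value: string }[]; // were Buffer | null fields
};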
package/dist/api/fetch.js CHANGED
@@ -133,10 +133,10 @@ const decodeRecord = (decoder) => decoder.readRecords((record) => ({
  attributes: record.readInt8(),
  timestampDelta: record.readVarLong(),
  offsetDelta: record.readVarInt(),
- key: record.readVarIntBuffer(),
- value: record.readVarIntBuffer(),
+ key: record.readVarIntString(),
+ value: record.readVarIntString(),
  headers: record.readVarIntArray((header) => ({
- key: header.readVarIntBuffer(),
- value: header.readVarIntBuffer(),
+ key: header.readVarIntString(),
+ value: header.readVarIntString(),
  })),
  }));
@@ -97,11 +97,11 @@ export declare const API: {
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer | null;
- value: Buffer | null;
+ key: string;
+ value: string;
  }[];
  }[];
  compression: number;
@@ -384,11 +384,11 @@ export declare const API: {
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer;
- value: Buffer;
+ key: string;
+ value: string;
  }[];
  }[];
  }[];
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  export declare const PRODUCE: import("../utils/api.js").Api<{
  transactionalId: string | null;
  acks: number;
@@ -20,11 +19,11 @@ export declare const PRODUCE: import("../utils/api.js").Api<{
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: Buffer | null;
- value: Buffer | null;
+ key: string | null;
+ value: string | null;
  headers: {
- key: Buffer;
- value: Buffer;
+ key: string;
+ value: string;
  }[];
  }[];
  }[];
@@ -28,10 +28,10 @@ exports.PRODUCE = (0, api_js_1.createApi)({
  .writeInt8(record.attributes)
  .writeVarLong(record.timestampDelta)
  .writeVarInt(record.offsetDelta)
- .writeVarIntBuffer(record.key)
- .writeVarIntBuffer(record.value)
- .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value));
- return encoder.writeVarInt(recordBody.getByteLength()).writeEncoder(recordBody);
+ .writeVarIntString(record.key)
+ .writeVarIntString(record.value)
+ .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntString(header.key).writeVarIntString(header.value));
+ return encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody);
  })
  .value();
  const batchHeader = new encoder_js_1.Encoder()
@@ -41,11 +41,11 @@ exports.PRODUCE = (0, api_js_1.createApi)({
  .write(batchBody);
  const batch = new encoder_js_1.Encoder()
  .writeInt64(partition.baseOffset)
- .writeInt32(batchHeader.getByteLength())
+ .writeInt32(batchHeader.getBufferLength())
  .writeEncoder(batchHeader);
  return encoder
  .writeInt32(partition.index)
- .writeUVarInt(batch.getByteLength() + 1) // batch size
+ .writeUVarInt(batch.getBufferLength() + 1)
  .writeEncoder(batch)
  .writeUVarInt(0);
  })
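
On the wire, each serialized record body gets a signed var-int byte-length prefix, the batch gets an int32 length after the base offset, and the partition entry a uvarint size. A standalone sketch of the record framing step, under the assumption that the var-int uses Kafka's zigzag + LEB128 encoding:

import { Buffer } from 'node:buffer';

// Zigzag + LEB128 var-int, as Kafka uses for signed var-ints in record batches.
const encodeVarInt = (n: number): Buffer => {
    let v = (n << 1) ^ (n >> 31); // zigzag-encode the sign bit
    const out: number[] = [];
    while ((v & ~0x7f) !== 0) {
        out.push((v & 0x7f) | 0x80); // low 7 bits, continuation flag set
        v >>>= 7;
    }
    out.push(v);
    return Buffer.from(out);
};

// Hypothetical standalone equivalent of
// encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody):
const frameRecord = (recordBody: Buffer): Buffer =>
    Buffer.concat([encodeVarInt(recordBody.length), recordBody]);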
@@ -136,12 +136,12 @@ vitest_1.describe.sequential('Low-level API', () => {
  attributes: 0,
  offsetDelta: 0,
  timestampDelta: 0n,
- key: Buffer.from('key'),
- value: Buffer.from('value'),
+ key: 'key',
+ value: 'value',
  headers: [
  {
- key: Buffer.from('header-key'),
- value: Buffer.from('header-value'),
+ key: 'header-key',
+ value: 'header-value',
  },
  ],
  },
@@ -96,7 +96,7 @@ class Connection {
  .writeInt32(correlationId)
  .writeString(this.options.clientId);
  const request = api.request(encoder, body);
- const requestEncoder = new encoder_1.Encoder().writeInt32(request.getByteLength()).writeEncoder(request);
+ const requestEncoder = new encoder_1.Encoder().writeInt32(request.getBufferLength()).writeEncoder(request);
  let timeout;
  const { responseDecoder, responseSize } = await new Promise(async (resolve, reject) => {
  timeout = setTimeout(() => {
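
The only change here is the getByteLength → getBufferLength rename; the framing itself is unchanged: every Kafka request goes out as a 4-byte big-endian size followed by the payload. A minimal sketch of that framing (hypothetical helper, not the package's Connection code):

import { Buffer } from 'node:buffer';

// Hypothetical helper: prepend the 4-byte big-endian size Kafka expects
// in front of an already-encoded request payload.
const frameRequest = (payload: Buffer): Buffer => {
    const size = Buffer.allocUnsafe(4);
    size.writeInt32BE(payload.length, 0);
    return Buffer.concat([size, payload]);
};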
@@ -15,7 +15,7 @@ const defaultPartitioner = ({ metadata }) => {
  const partitions = metadata.getTopicPartitions()[topic];
  const numPartitions = partitions.length;
  if (key) {
- return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(key)) % numPartitions;
+ return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(Buffer.from(key))) % numPartitions;
  }
  return (0, murmur2_1.toPositive)(getNextValue(topic)) % numPartitions;
  };
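
Because keys are strings now, the default partitioner re-encodes the key to bytes before hashing, which keeps key-to-partition assignment identical to the Buffer-based versions. A sketch of the mapping, assuming the package's murmur2 and toPositive helpers with the signatures used above:

import { Buffer } from 'node:buffer';

// Assumed helpers from the package's murmur2 utils (signatures inferred
// from the call site above).
declare const murmur2: (buf: Buffer) => number;
declare const toPositive: (n: number) => number;

const partitionForKey = (key: string, numPartitions: number): number =>
    toPositive(murmur2(Buffer.from(key))) % numPartitions;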
@@ -20,6 +20,6 @@ export declare class Producer {
  close(): Promise<void>;
  private ensureConnected;
  private initProducerId;
- private nextBaseSequence;
- private revertBaseSequence;
+ private getSequence;
+ private updateSequence;
  }
@@ -15,6 +15,7 @@ const messages_to_topic_partition_leaders_1 = require("../distributors/messages-
  const partitioner_1 = require("../distributors/partitioner");
  const metadata_1 = require("../metadata");
  const delay_1 = require("../utils/delay");
+ const error_1 = require("../utils/error");
  const memo_1 = require("../utils/memo");
  const tracer_1 = require("../utils/tracer");
  const trace = (0, tracer_1.createTracer)('Producer');
@@ -43,64 +44,63 @@ class Producer {
  const topics = Array.from(new Set(messages.map((message) => message.topic)));
  await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
  const nodeTopicPartitionMessages = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(messages.map((message) => ({ ...message, partition: this.partition(message) })), this.metadata.getTopicPartitionLeaderIds());
- await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
- const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
- name: topic,
- partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
- const partitionIndex = parseInt(partition);
- let baseTimestamp;
- let maxTimestamp;
- messages.forEach(({ timestamp = defaultTimestamp }) => {
- if (!baseTimestamp || timestamp < baseTimestamp) {
- baseTimestamp = timestamp;
- }
- if (!maxTimestamp || timestamp > maxTimestamp) {
- maxTimestamp = timestamp;
- }
- });
- const baseSequence = this.nextBaseSequence(topic, partitionIndex, messages.length);
- return {
- index: partitionIndex,
- baseOffset: 0n,
- partitionLeaderEpoch: -1,
- attributes: 0,
- lastOffsetDelta: messages.length - 1,
- baseTimestamp: baseTimestamp ?? 0n,
- maxTimestamp: maxTimestamp ?? 0n,
- producerId: this.producerId,
- producerEpoch: 0,
- baseSequence,
- records: messages.map((message, index) => ({
+ try {
+ await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
+ const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
+ name: topic,
+ partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+ const partitionIndex = parseInt(partition);
+ let baseTimestamp;
+ let maxTimestamp;
+ messages.forEach(({ timestamp = defaultTimestamp }) => {
+ if (!baseTimestamp || timestamp < baseTimestamp) {
+ baseTimestamp = timestamp;
+ }
+ if (!maxTimestamp || timestamp > maxTimestamp) {
+ maxTimestamp = timestamp;
+ }
+ });
+ return {
+ index: partitionIndex,
+ baseOffset: 0n,
+ partitionLeaderEpoch: -1,
  attributes: 0,
- timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
- offsetDelta: index,
- key: message.key ?? null,
- value: message.value,
- headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
- key: Buffer.from(key),
- value: Buffer.from(value),
+ lastOffsetDelta: messages.length - 1,
+ baseTimestamp: baseTimestamp ?? 0n,
+ maxTimestamp: maxTimestamp ?? 0n,
+ producerId: this.producerId,
+ producerEpoch: 0,
+ baseSequence: this.getSequence(topic, partitionIndex),
+ records: messages.map((message, index) => ({
+ attributes: 0,
+ timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
+ offsetDelta: index,
+ key: message.key ?? null,
+ value: message.value,
+ headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({ key, value })),
  })),
- })),
- };
- }),
- }));
- try {
- return await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
+ };
+ }),
+ }));
+ await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
  transactionalId: null,
  acks,
  timeoutMs: 5000,
  topicData,
  });
- }
- catch (error) {
  topicData.forEach(({ name, partitionData }) => {
  partitionData.forEach(({ index, records }) => {
- this.revertBaseSequence(name, index, records.length);
+ this.updateSequence(name, index, records.length);
  });
  });
- throw error;
+ }));
+ }
+ catch (error) {
+ if ((error instanceof error_1.KafkaTSApiError) && error.errorCode === api_1.API_ERROR.OUT_OF_ORDER_SEQUENCE_NUMBER) {
+ await this.initProducerId();
  }
- }));
+ throw error;
+ }
  }
  async close() {
  await this.cluster.disconnect();
@@ -129,15 +129,13 @@ class Producer {
  throw error;
  }
  }
- nextBaseSequence(topic, partition, messagesCount) {
+ getSequence(topic, partition) {
+ return this.sequences[topic]?.[partition] ?? 0;
+ }
+ updateSequence(topic, partition, messagesCount) {
  this.sequences[topic] ??= {};
  this.sequences[topic][partition] ??= 0;
- const baseSequence = this.sequences[topic][partition];
  this.sequences[topic][partition] += messagesCount;
- return baseSequence;
- }
- revertBaseSequence(topic, partition, messagesCount) {
- this.sequences[topic][partition] -= messagesCount;
  }
  }
  exports.Producer = Producer;
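
The sequence handling changes semantics: instead of reserving the base sequence up front and rolling it back on failure, the producer now reads the current sequence when building the batch and only advances it after the PRODUCE request succeeds; on an OUT_OF_ORDER_SEQUENCE_NUMBER error it re-initializes the producer id before rethrowing. A minimal standalone sketch of the new bookkeeping (mirroring the compiled code above, names simplified):

// Per-(topic, partition) sequence counters, as in the Producer above.
class SequenceTracker {
    private sequences: Record<string, Record<number, number>> = {};

    // Read the base sequence for the next batch without reserving it.
    getSequence(topic: string, partition: number): number {
        return this.sequences[topic]?.[partition] ?? 0;
    }

    // Advance the counter only after the broker acknowledged the batch.
    updateSequence(topic: string, partition: number, messagesCount: number): void {
        this.sequences[topic] ??= {};
        this.sequences[topic][partition] ??= 0;
        this.sequences[topic][partition] += messagesCount;
    }
}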
package/dist/types.d.ts CHANGED
@@ -1,11 +1,10 @@
- /// <reference types="node" />
  export type Message = {
  topic: string;
  partition?: number;
  offset?: bigint;
  timestamp?: bigint;
- key?: Buffer | null;
- value: Buffer | null;
+ key?: string | null;
+ value: string | null;
  headers?: Record<string, string>;
  };
  export type Batch = Required<Message>[];
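
For callers this is the breaking part of the release: Message.key and Message.value are now plain strings, so Buffer.from(...) wrapping can be dropped. A hypothetical before/after (only the Message shape comes from the diff; the literal values are illustrative):

// Shape from the types.d.ts diff above.
type Message = {
    topic: string;
    partition?: number;
    offset?: bigint;
    timestamp?: bigint;
    key?: string | null;
    value: string | null;
    headers?: Record<string, string>;
};

// 0.0.6-beta.3 style (no longer type-checks):
// { topic: 'events', key: Buffer.from('user-1'), value: Buffer.from('{"a":1}') }

// 0.0.6-beta.5 style:
const message: Message = {
    topic: 'events',
    key: 'user-1',
    value: '{"a":1}',
    headers: { 'content-type': 'application/json' },
};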
@@ -16,7 +16,7 @@ export declare class Decoder {
  readVarLong(): bigint;
  readString(): string | null;
  readCompactString(): string | null;
- readVarIntBuffer(): Buffer | null;
+ readVarIntString(): string | null;
  readUUID(): string;
  readBoolean(): boolean;
  readArray<T>(callback: (opts: Decoder) => T): T[];
@@ -86,12 +86,12 @@ class Decoder {
  this.offset += length;
  return value;
  }
- readVarIntBuffer() {
+ readVarIntString() {
  const length = this.readVarInt();
  if (length < 0) {
  return null;
  }
- const value = this.buffer.subarray(this.offset, this.offset + length);
+ const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
  this.offset += length;
  return value;
  }
@@ -131,7 +131,7 @@ class Decoder {
  }
  read(length) {
  const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
- this.offset += Buffer.byteLength(value);
+ this.offset += value.length;
  return value;
  }
  readBytes() {
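
readVarIntString is the decoding counterpart of the encoder change further down: a signed var-int length (negative encodes null) followed by that many UTF-8 bytes, decoded in place with buffer.toString. A standalone sketch, assuming a readVarInt helper that decodes a zigzag var-int and returns the new offset:

import { Buffer } from 'node:buffer';

// Assumed helper: decode a zigzag-encoded LEB128 var-int starting at
// offset, returning the value and the offset past it.
declare const readVarInt: (buf: Buffer, offset: number) => { value: number; offset: number };

const readVarIntString = (
    buf: Buffer,
    offset: number,
): { value: string | null; offset: number } => {
    const { value: length, offset: next } = readVarInt(buf, offset);
    if (length < 0) {
        return { value: null, offset: next }; // length -1 encodes null
    }
    return {
        value: buf.toString('utf-8', next, next + length),
        offset: next + length,
    };
};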
@@ -2,7 +2,7 @@
  export declare class Encoder {
  private chunks;
  getChunks(): Buffer[];
- getByteLength(): number;
+ getBufferLength(): number;
  write(...buffers: Buffer[]): this;
  writeEncoder(encoder: Encoder): this;
  writeInt8(value: number): this;
@@ -16,7 +16,7 @@ export declare class Encoder {
  writeVarLong(value: bigint): this;
  writeString(value: string | null): this;
  writeCompactString(value: string | null): this;
- writeVarIntBuffer(buffer: Buffer | null): this;
+ writeVarIntString(value: string | null): this;
  writeUUID(value: string | null): this;
  writeBoolean(value: boolean): this;
  writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder): this;
@@ -6,8 +6,8 @@ class Encoder {
  getChunks() {
  return this.chunks;
  }
- getByteLength() {
- return this.chunks.reduce((acc, chunk) => acc + chunk.byteLength, 0);
+ getBufferLength() {
+ return this.chunks.reduce((acc, chunk) => acc + chunk.length, 0);
  }
  write(...buffers) {
  this.chunks.push(...buffers);
@@ -71,25 +71,22 @@ class Encoder {
  if (value === null) {
  return this.writeInt16(-1);
  }
- const byteLength = Buffer.byteLength(value, 'utf-8');
- const buffer = Buffer.allocUnsafe(byteLength);
- buffer.write(value, 0, byteLength, 'utf-8');
- return this.writeInt16(byteLength).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeInt16(buffer.length).write(buffer);
  }
  writeCompactString(value) {
  if (value === null) {
  return this.writeUVarInt(0);
  }
- const byteLength = Buffer.byteLength(value, 'utf-8');
- const buffer = Buffer.allocUnsafe(byteLength);
- buffer.write(value, 0, byteLength, 'utf-8');
- return this.writeUVarInt(byteLength + 1).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeUVarInt(buffer.length + 1).write(buffer);
  }
- writeVarIntBuffer(buffer) {
- if (buffer === null) {
+ writeVarIntString(value) {
+ if (value === null) {
  return this.writeVarInt(-1);
  }
- return this.writeVarInt(buffer.byteLength).write(buffer);
+ const buffer = Buffer.from(value, 'utf-8');
+ return this.writeVarInt(buffer.length).write(buffer);
  }
  writeUUID(value) {
  if (value === null) {
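
And the encoding side: writeVarIntString converts the string to UTF-8 itself and frames the bytes with a signed var-int length, writing -1 for null, so callers no longer hand the encoder Buffers. A standalone sketch of the framing (encodeVarInt as sketched after the PRODUCE hunks above):

import { Buffer } from 'node:buffer';

// Assumed helper from the earlier sketch: zigzag + LEB128 var-int.
declare const encodeVarInt: (n: number) => Buffer;

const encodeVarIntString = (value: string | null): Buffer => {
    if (value === null) {
        return encodeVarInt(-1); // -1 length encodes null
    }
    const bytes = Buffer.from(value, 'utf-8');
    return Buffer.concat([encodeVarInt(bytes.length), bytes]);
};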
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "kafka-ts",
- "version": "0.0.6-beta.3",
+ "version": "0.0.6-beta.5",
  "main": "dist/index.js",
  "author": "Priit Käärd",
  "license": "MIT",