kafka-ts 0.0.6-beta.2 → 0.0.6-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/fetch.d.ts +4 -5
- package/dist/api/fetch.js +4 -4
- package/dist/api/index.d.ts +8 -8
- package/dist/api/produce.d.ts +4 -5
- package/dist/api/produce.js +6 -6
- package/dist/cluster.test.js +4 -4
- package/dist/connection.js +2 -2
- package/dist/distributors/partitioner.js +1 -1
- package/dist/producer/producer.d.ts +2 -2
- package/dist/producer/producer.js +16 -27
- package/dist/types.d.ts +2 -3
- package/dist/utils/decoder.d.ts +1 -1
- package/dist/utils/decoder.js +3 -3
- package/dist/utils/encoder.d.ts +2 -2
- package/dist/utils/encoder.js +10 -13
- package/dist/utils/error.d.ts +1 -1
- package/dist/utils/retrier.js +2 -0
- package/package.json +1 -1
package/dist/api/fetch.d.ts
CHANGED
@@ -1,4 +1,3 @@
-/// <reference types="node" />
 export declare const enum IsolationLevel {
     READ_UNCOMMITTED = 0,
     READ_COMMITTED = 1
@@ -35,11 +34,11 @@ export declare const FETCH: import("../utils/api").Api<{
             attributes: number;
             timestampDelta: bigint;
             offsetDelta: number;
-            key:
-            value:
+            key: string | null;
+            value: string | null;
             headers: {
-                key:
-                value:
+                key: string;
+                value: string;
             }[];
         }[];
         compression: number;
package/dist/api/fetch.js
CHANGED
@@ -133,10 +133,10 @@ const decodeRecord = (decoder) => decoder.readRecords((record) => ({
     attributes: record.readInt8(),
     timestampDelta: record.readVarLong(),
     offsetDelta: record.readVarInt(),
-    key: record.
-    value: record.
+    key: record.readVarIntString(),
+    value: record.readVarIntString(),
     headers: record.readVarIntArray((header) => ({
-        key: header.
-        value: header.
+        key: header.readVarIntString(),
+        value: header.readVarIntString(),
     })),
 }));
package/dist/api/index.d.ts
CHANGED
@@ -97,11 +97,11 @@ export declare const API: {
             attributes: number;
             timestampDelta: bigint;
             offsetDelta: number;
-            key:
-            value:
+            key: string | null;
+            value: string | null;
             headers: {
-                key:
-                value:
+                key: string;
+                value: string;
             }[];
         }[];
         compression: number;
@@ -384,11 +384,11 @@ export declare const API: {
             attributes: number;
             timestampDelta: bigint;
             offsetDelta: number;
-            key:
-            value:
+            key: string | null;
+            value: string | null;
             headers: {
-                key:
-                value:
+                key: string;
+                value: string;
             }[];
         }[];
     }[];
package/dist/api/produce.d.ts
CHANGED
@@ -1,4 +1,3 @@
-/// <reference types="node" />
 export declare const PRODUCE: import("../utils/api.js").Api<{
     transactionalId: string | null;
     acks: number;
@@ -20,11 +19,11 @@ export declare const PRODUCE: import("../utils/api.js").Api<{
             attributes: number;
             timestampDelta: bigint;
             offsetDelta: number;
-            key:
-            value:
+            key: string | null;
+            value: string | null;
             headers: {
-                key:
-                value:
+                key: string;
+                value: string;
             }[];
         }[];
     }[];
package/dist/api/produce.js
CHANGED
@@ -28,10 +28,10 @@ exports.PRODUCE = (0, api_js_1.createApi)({
     .writeInt8(record.attributes)
     .writeVarLong(record.timestampDelta)
     .writeVarInt(record.offsetDelta)
-    .
-    .
-    .writeVarIntArray(record.headers, (encoder, header) => encoder.
-    return encoder.writeVarInt(recordBody.
+    .writeVarIntString(record.key)
+    .writeVarIntString(record.value)
+    .writeVarIntArray(record.headers, (encoder, header) => encoder.writeVarIntString(header.key).writeVarIntString(header.value));
+    return encoder.writeVarInt(recordBody.getBufferLength()).writeEncoder(recordBody);
 })
 .value();
 const batchHeader = new encoder_js_1.Encoder()
@@ -41,11 +41,11 @@ exports.PRODUCE = (0, api_js_1.createApi)({
     .write(batchBody);
 const batch = new encoder_js_1.Encoder()
     .writeInt64(partition.baseOffset)
-    .writeInt32(batchHeader.
+    .writeInt32(batchHeader.getBufferLength())
     .writeEncoder(batchHeader);
 return encoder
     .writeInt32(partition.index)
-    .writeUVarInt(batch.
+    .writeUVarInt(batch.getBufferLength() + 1)
     .writeEncoder(batch)
     .writeUVarInt(0);
 })
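Record keys, values, and headers are now written with the new writeVarIntString: a signed-varint byte length followed by the UTF-8 bytes, with -1 marking null. A rough standalone sketch of that layout, assuming the zigzag varint scheme Kafka's record format uses; the helper below is illustrative and is not the package's Encoder:

```ts
// Sketch: varint-string layout as writeVarIntString produces it.
// null -> varint(-1); otherwise varint(byteLength) followed by UTF-8 bytes.
const zigzagVarInt = (value: number): Buffer => {
    let v = ((value << 1) ^ (value >> 31)) >>> 0; // zigzag-encode the signed int32
    const out: number[] = [];
    while (v >= 0x80) {
        out.push((v & 0x7f) | 0x80);
        v >>>= 7;
    }
    out.push(v);
    return Buffer.from(out);
};

const writeVarIntString = (value: string | null): Buffer =>
    value === null
        ? zigzagVarInt(-1) // encoded as a single 0x01 byte
        : Buffer.concat([zigzagVarInt(Buffer.byteLength(value, 'utf-8')), Buffer.from(value, 'utf-8')]);

// 'abc' -> <06 61 62 63>, null -> <01>
console.log(writeVarIntString('abc'), writeVarIntString(null));
```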
package/dist/cluster.test.js
CHANGED
@@ -136,12 +136,12 @@ vitest_1.describe.sequential('Low-level API', () => {
     attributes: 0,
     offsetDelta: 0,
     timestampDelta: 0n,
-    key:
-    value:
+    key: 'key',
+    value: 'value',
     headers: [
         {
-            key:
-            value:
+            key: 'header-key',
+            value: 'header-value',
         },
     ],
 },
package/dist/connection.js
CHANGED
@@ -96,7 +96,7 @@ class Connection {
     .writeInt32(correlationId)
     .writeString(this.options.clientId);
 const request = api.request(encoder, body);
-const requestEncoder = new encoder_1.Encoder().writeInt32(request.
+const requestEncoder = new encoder_1.Encoder().writeInt32(request.getBufferLength()).writeEncoder(request);
 let timeout;
 const { responseDecoder, responseSize } = await new Promise(async (resolve, reject) => {
     timeout = setTimeout(() => {
@@ -119,7 +119,7 @@ class Connection {
 }
 catch (error) {
     if (error instanceof error_1.KafkaTSApiError) {
-        error.request =
+        error.request = body;
     }
     throw error;
 }
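The connection now frames each request with the encoder's total byte length via getBufferLength(): a 4-byte big-endian size followed by the payload, which is the standard Kafka wire framing. A minimal standalone illustration (the helper name is hypothetical, not part of the package's API):

```ts
// Sketch: int32 size-prefixed framing, as in
// writeInt32(request.getBufferLength()).writeEncoder(request).
const frameRequest = (payload: Buffer): Buffer => {
    const size = Buffer.alloc(4);
    size.writeInt32BE(payload.length, 0); // total request size, big-endian
    return Buffer.concat([size, payload]);
};

// Usage: a 10-byte payload becomes a 14-byte frame starting with 00 00 00 0a.
console.log(frameRequest(Buffer.alloc(10)).subarray(0, 4));
```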
package/dist/distributors/partitioner.js
CHANGED
@@ -15,7 +15,7 @@ const defaultPartitioner = ({ metadata }) => {
     const partitions = metadata.getTopicPartitions()[topic];
     const numPartitions = partitions.length;
     if (key) {
-        return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(key)) % numPartitions;
+        return (0, murmur2_1.toPositive)((0, murmur2_1.murmur2)(Buffer.from(key))) % numPartitions;
     }
     return (0, murmur2_1.toPositive)(getNextValue(topic)) % numPartitions;
 };
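Since keys are now plain strings, the default partitioner converts them to bytes before hashing (murmur2(Buffer.from(key))). The selection logic, sketched with a placeholder hash; kafka-ts ships its own murmur2, so the stand-in below is only for illustration:

```ts
// Sketch of the default partitioning decision after this change: string keys are
// hashed as UTF-8 bytes; keyless messages rotate round-robin across partitions.
// The hash below is a simple FNV-1a stand-in, NOT the murmur2 kafka-ts actually uses.
const standInHash = (key: Buffer): number => {
    let h = 0x811c9dc5;
    for (const byte of key) {
        h ^= byte;
        h = Math.imul(h, 0x01000193);
    }
    return h | 0;
};

const toPositive = (n: number) => n & 0x7fffffff;

let roundRobin = 0;
const pickPartition = (key: string | null, numPartitions: number): number =>
    key !== null
        ? toPositive(standInHash(Buffer.from(key))) % numPartitions
        : toPositive(roundRobin++) % numPartitions;

console.log(pickPartition('user-42', 6), pickPartition(null, 6));
```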
package/dist/producer/producer.js
CHANGED
@@ -58,7 +58,6 @@ class Producer {
     maxTimestamp = timestamp;
 }
 });
-const baseSequence = this.nextBaseSequence(topic, partitionIndex, messages.length);
 return {
     index: partitionIndex,
     baseOffset: 0n,
@@ -69,37 +68,29 @@ class Producer {
     maxTimestamp: maxTimestamp ?? 0n,
     producerId: this.producerId,
     producerEpoch: 0,
-    baseSequence,
+    baseSequence: this.getSequence(topic, partitionIndex),
     records: messages.map((message, index) => ({
         attributes: 0,
         timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
         offsetDelta: index,
         key: message.key ?? null,
         value: message.value,
-        headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
-            key: Buffer.from(key),
-            value: Buffer.from(value),
-        })),
+        headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({ key, value })),
     })),
 };
 }),
 }));
-
-
-
-
-
-
-
-
-
-topicData.forEach(({ name, partitionData }) => {
-    partitionData.forEach(({ index, records }) => {
-        this.revertBaseSequence(name, index, records.length);
-    });
+await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
+    transactionalId: null,
+    acks,
+    timeoutMs: 5000,
+    topicData,
+});
+topicData.forEach(({ name, partitionData }) => {
+    partitionData.forEach(({ index, records }) => {
+        this.updateSequence(name, index, records.length);
     });
-
-}
+});
 }));
 }
 async close() {
@@ -129,15 +120,13 @@ class Producer {
     throw error;
 }
 }
-
+getSequence(topic, partition) {
+    return this.sequences[topic]?.[partition] ?? 0;
+}
+updateSequence(topic, partition, messagesCount) {
     this.sequences[topic] ??= {};
     this.sequences[topic][partition] ??= 0;
-    const baseSequence = this.sequences[topic][partition];
     this.sequences[topic][partition] += messagesCount;
-    return baseSequence;
-}
-revertBaseSequence(topic, partition, messagesCount) {
-    this.sequences[topic][partition] -= messagesCount;
 }
 }
 exports.Producer = Producer;
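The producer no longer pre-increments and then reverts the per-partition sequence around a failed request; it reads the current sequence when building the batch and only advances it after the PRODUCE request succeeds. A simplified model of that bookkeeping, mirroring the getSequence/updateSequence shape from the diff (the class name here is made up):

```ts
// Sketch: per-topic/partition base sequences, advanced only after a successful produce.
class SequenceTracker {
    private sequences: Record<string, Record<number, number>> = {};

    getSequence(topic: string, partition: number): number {
        return this.sequences[topic]?.[partition] ?? 0;
    }

    updateSequence(topic: string, partition: number, messagesCount: number): void {
        this.sequences[topic] ??= {};
        this.sequences[topic][partition] ??= 0;
        this.sequences[topic][partition] += messagesCount;
    }
}

// Usage: the batch is built with the current sequence, and the tracker is only
// bumped once the broker accepts the request, so a retried batch reuses the same baseSequence.
const tracker = new SequenceTracker();
const baseSequence = tracker.getSequence('my-topic', 0); // 0 on the first send
// ...send PRODUCE with { baseSequence, records } and await the response...
tracker.updateSequence('my-topic', 0, 10); // next batch starts at 10
```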
package/dist/types.d.ts
CHANGED
@@ -1,11 +1,10 @@
-/// <reference types="node" />
 export type Message = {
     topic: string;
     partition?: number;
     offset?: bigint;
     timestamp?: bigint;
-    key?:
-    value:
+    key?: string | null;
+    value: string | null;
     headers?: Record<string, string>;
 };
 export type Batch = Required<Message>[];
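With the Buffer-based fields gone, a Message carries string keys, values, and headers directly. For illustration, a literal that satisfies the new shape; the type is restated from the diff above and the topic, key, and header names are made up:

```ts
// The Message shape after this change, as shown in dist/types.d.ts.
type Message = {
    topic: string;
    partition?: number;
    offset?: bigint;
    timestamp?: bigint;
    key?: string | null;
    value: string | null;
    headers?: Record<string, string>;
};

// Example value (all contents are hypothetical):
const message: Message = {
    topic: 'orders',
    key: 'order-123',
    value: JSON.stringify({ total: 42 }),
    headers: { 'content-type': 'application/json' },
};
```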
package/dist/utils/decoder.d.ts
CHANGED
@@ -16,7 +16,7 @@ export declare class Decoder {
     readVarLong(): bigint;
     readString(): string | null;
     readCompactString(): string | null;
-
+    readVarIntString(): string | null;
     readUUID(): string;
     readBoolean(): boolean;
     readArray<T>(callback: (opts: Decoder) => T): T[];
package/dist/utils/decoder.js
CHANGED
@@ -86,12 +86,12 @@ class Decoder {
     this.offset += length;
     return value;
 }
-
+readVarIntString() {
     const length = this.readVarInt();
     if (length < 0) {
         return null;
     }
-    const value = this.buffer.
+    const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
     this.offset += length;
     return value;
 }
@@ -131,7 +131,7 @@ class Decoder {
 }
 read(length) {
     const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
-    this.offset +=
+    this.offset += value.length;
     return value;
 }
 readBytes() {
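readVarIntString is the decode-side counterpart: read a signed varint length (negative meaning null), then decode that many bytes as UTF-8. A standalone sketch over a plain Buffer with an explicit offset, assuming the zigzag varint encoding used for record fields; it is illustrative, not the package's Decoder:

```ts
// Sketch: read a zigzag-varint length, then that many UTF-8 bytes; length < 0 means null.
const readVarIntString = (buffer: Buffer, offset: number): { value: string | null; offset: number } => {
    // Decode an unsigned varint, then zigzag-decode it into a signed length.
    let raw = 0;
    let shift = 0;
    while (true) {
        const byte = buffer[offset++];
        raw |= (byte & 0x7f) << shift;
        if ((byte & 0x80) === 0) break;
        shift += 7;
    }
    const length = (raw >>> 1) ^ -(raw & 1); // zigzag decode
    if (length < 0) {
        return { value: null, offset };
    }
    const value = buffer.toString('utf-8', offset, offset + length);
    return { value, offset: offset + length };
};

// Usage: 0x06 (zigzag for 3) followed by "abc" decodes to { value: 'abc', offset: 4 }.
console.log(readVarIntString(Buffer.concat([Buffer.from([0x06]), Buffer.from('abc')]), 0));
```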
package/dist/utils/encoder.d.ts
CHANGED
@@ -2,7 +2,7 @@
 export declare class Encoder {
     private chunks;
     getChunks(): Buffer[];
-
+    getBufferLength(): number;
     write(...buffers: Buffer[]): this;
     writeEncoder(encoder: Encoder): this;
     writeInt8(value: number): this;
@@ -16,7 +16,7 @@ export declare class Encoder {
     writeVarLong(value: bigint): this;
     writeString(value: string | null): this;
     writeCompactString(value: string | null): this;
-
+    writeVarIntString(value: string | null): this;
     writeUUID(value: string | null): this;
     writeBoolean(value: boolean): this;
     writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder): this;
package/dist/utils/encoder.js
CHANGED
@@ -6,8 +6,8 @@ class Encoder {
 getChunks() {
     return this.chunks;
 }
-
-    return this.chunks.reduce((acc, chunk) => acc + chunk.
+getBufferLength() {
+    return this.chunks.reduce((acc, chunk) => acc + chunk.length, 0);
 }
 write(...buffers) {
     this.chunks.push(...buffers);
@@ -71,25 +71,22 @@ class Encoder {
     if (value === null) {
         return this.writeInt16(-1);
     }
-    const
-
-    buffer.write(value, 0, byteLength, 'utf-8');
-    return this.writeInt16(byteLength).write(buffer);
+    const buffer = Buffer.from(value, 'utf-8');
+    return this.writeInt16(buffer.length).write(buffer);
 }
 writeCompactString(value) {
     if (value === null) {
         return this.writeUVarInt(0);
     }
-    const
-
-    buffer.write(value, 0, byteLength, 'utf-8');
-    return this.writeUVarInt(byteLength + 1).write(buffer);
+    const buffer = Buffer.from(value, 'utf-8');
+    return this.writeUVarInt(buffer.length + 1).write(buffer);
 }
-
-    if (
+writeVarIntString(value) {
+    if (value === null) {
         return this.writeVarInt(-1);
     }
-
+    const buffer = Buffer.from(value, 'utf-8');
+    return this.writeVarInt(buffer.length).write(buffer);
 }
 writeUUID(value) {
     if (value === null) {
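The string writers now build the payload with Buffer.from(value, 'utf-8') and take the length from the resulting buffer, and the new getBufferLength() simply sums the byte lengths of the written chunks. A simplified stand-in encoder showing both ideas; this is a sketch, not the kafka-ts Encoder itself:

```ts
// Sketch: an Encoder that keeps written chunks, sums their byte lengths for
// getBufferLength(), and sizes strings via Buffer.from instead of a precomputed byteLength.
class MiniEncoder {
    private chunks: Buffer[] = [];

    getBufferLength(): number {
        return this.chunks.reduce((acc, chunk) => acc + chunk.length, 0);
    }

    write(...buffers: Buffer[]): this {
        this.chunks.push(...buffers);
        return this;
    }

    writeInt16(value: number): this {
        const buffer = Buffer.alloc(2);
        buffer.writeInt16BE(value, 0);
        return this.write(buffer);
    }

    // null -> int16 -1; otherwise int16 byte length followed by the UTF-8 bytes.
    writeString(value: string | null): this {
        if (value === null) {
            return this.writeInt16(-1);
        }
        const buffer = Buffer.from(value, 'utf-8');
        return this.writeInt16(buffer.length).write(buffer);
    }

    value(): Buffer {
        return Buffer.concat(this.chunks);
    }
}

// Usage: 'kafka' -> 2-byte length (5) + 5 UTF-8 bytes; getBufferLength() reports 7.
const enc = new MiniEncoder().writeString('kafka');
console.log(enc.getBufferLength(), enc.value());
```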
package/dist/utils/error.d.ts
CHANGED
@@ -5,7 +5,7 @@ export declare class KafkaTSApiError<T = any> extends KafkaTSError {
     errorCode: number;
     errorMessage: string | null;
     response: T;
-    request:
+    request: unknown | undefined;
     constructor(errorCode: number, errorMessage: string | null, response: T);
 }
 export declare class ConnectionError extends KafkaTSError {
package/dist/utils/retrier.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.defaultRetrier = exports.createExponentialBackoffRetrier = void 0;
 const delay_1 = require("./delay");
+const logger_1 = require("./logger");
 const createExponentialBackoffRetrier = ({ retries = 5, initialDelayMs = 100, maxDelayMs = 3000, multiplier = 2, onFailure = (error) => {
     throw error;
 }, } = {}) => async (func) => {
@@ -16,6 +17,7 @@ const createExponentialBackoffRetrier = ({ retries = 5, initialDelayMs = 100, ma
     catch (error) {
         lastError = error;
     }
+    logger_1.log.debug(`Failed to process batch (retriesLeft: ${retriesLeft})`);
     if (--retriesLeft < 1)
         break;
     await (0, delay_1.delay)(delayMs);
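The retrier now logs each failed attempt before deciding whether to retry. For reference, a self-contained sketch of an exponential-backoff retrier with a subset of the same options (retries, initialDelayMs, maxDelayMs, multiplier; onFailure is omitted); console.debug stands in for the package's logger, and the exact loop is an approximation of the behaviour visible in the hunk:

```ts
// Sketch: exponential backoff with per-attempt debug logging, roughly the
// behaviour of createExponentialBackoffRetrier after this change.
const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

const createExponentialBackoffRetrier =
    ({ retries = 5, initialDelayMs = 100, maxDelayMs = 3000, multiplier = 2 } = {}) =>
    async <T>(func: () => Promise<T>): Promise<T> => {
        let retriesLeft = retries;
        let delayMs = initialDelayMs;
        let lastError: unknown;
        while (true) {
            try {
                return await func();
            } catch (error) {
                lastError = error;
            }
            console.debug(`Failed to process batch (retriesLeft: ${retriesLeft})`); // logger stand-in
            if (--retriesLeft < 1) break;
            await delay(delayMs);
            delayMs = Math.min(delayMs * multiplier, maxDelayMs);
        }
        throw lastError;
    };

// Usage: retry a flaky call up to 5 times with 100ms -> 200ms -> ... backoff.
// createExponentialBackoffRetrier()(() => fetchSomething());
```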