kafka-ts 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/dist/client.d.ts +1 -2
- package/package.json +1 -1
- package/.prettierrc +0 -8
- package/src/__snapshots__/cluster.test.ts.snap +0 -1281
- package/src/api/api-versions.ts +0 -21
- package/src/api/create-topics.ts +0 -78
- package/src/api/delete-topics.ts +0 -42
- package/src/api/fetch.ts +0 -198
- package/src/api/find-coordinator.ts +0 -39
- package/src/api/heartbeat.ts +0 -33
- package/src/api/index.ts +0 -166
- package/src/api/init-producer-id.ts +0 -35
- package/src/api/join-group.ts +0 -67
- package/src/api/leave-group.ts +0 -48
- package/src/api/list-offsets.ts +0 -65
- package/src/api/metadata.ts +0 -66
- package/src/api/offset-commit.ts +0 -67
- package/src/api/offset-fetch.ts +0 -70
- package/src/api/produce.ts +0 -170
- package/src/api/sasl-authenticate.ts +0 -21
- package/src/api/sasl-handshake.ts +0 -16
- package/src/api/sync-group.ts +0 -54
- package/src/auth/index.ts +0 -2
- package/src/auth/plain.ts +0 -10
- package/src/auth/scram.ts +0 -52
- package/src/broker.ts +0 -72
- package/src/client.ts +0 -47
- package/src/cluster.test.ts +0 -371
- package/src/cluster.ts +0 -85
- package/src/codecs/gzip.ts +0 -9
- package/src/codecs/index.ts +0 -16
- package/src/codecs/none.ts +0 -6
- package/src/codecs/types.ts +0 -4
- package/src/connection.ts +0 -157
- package/src/consumer/consumer-group.ts +0 -229
- package/src/consumer/consumer-metadata.ts +0 -14
- package/src/consumer/consumer.ts +0 -252
- package/src/consumer/fetch-manager.ts +0 -169
- package/src/consumer/fetcher.ts +0 -64
- package/src/consumer/offset-manager.ts +0 -104
- package/src/consumer/processor.ts +0 -53
- package/src/distributors/assignments-to-replicas.test.ts +0 -43
- package/src/distributors/assignments-to-replicas.ts +0 -83
- package/src/distributors/messages-to-topic-partition-leaders.test.ts +0 -32
- package/src/distributors/messages-to-topic-partition-leaders.ts +0 -19
- package/src/distributors/partitioner.ts +0 -27
- package/src/index.ts +0 -9
- package/src/metadata.ts +0 -126
- package/src/producer/producer.ts +0 -142
- package/src/types.ts +0 -11
- package/src/utils/api.ts +0 -11
- package/src/utils/crypto.ts +0 -15
- package/src/utils/decoder.ts +0 -174
- package/src/utils/delay.ts +0 -1
- package/src/utils/encoder.ts +0 -148
- package/src/utils/error.ts +0 -21
- package/src/utils/logger.ts +0 -37
- package/src/utils/memo.ts +0 -12
- package/src/utils/murmur2.ts +0 -44
- package/src/utils/retrier.ts +0 -39
- package/src/utils/tracer.ts +0 -49
- package/tsconfig.json +0 -17
package/src/metadata.ts
DELETED
@@ -1,126 +0,0 @@
-import { API, API_ERROR } from './api';
-import { Cluster } from './cluster';
-import { delay } from './utils/delay';
-import { KafkaTSApiError } from './utils/error';
-import { createTracer } from './utils/tracer';
-
-const trace = createTracer('Metadata');
-
-type MetadataOptions = {
-    cluster: Cluster;
-};
-
-export class Metadata {
-    private topicPartitions: Record<string, number[]> = {};
-    private topicNameById: Record<string, string> = {};
-    private topicIdByName: Record<string, string> = {};
-    private leaderIdByTopicPartition: Record<string, Record<number, number>> = {};
-    private isrNodesByTopicPartition: Record<string, Record<number, number[]>> = {};
-
-    constructor(private options: MetadataOptions) {}
-
-    public getTopicPartitionLeaderIds() {
-        return this.leaderIdByTopicPartition;
-    }
-
-    public getTopicPartitionReplicaIds() {
-        return this.isrNodesByTopicPartition;
-    }
-
-    public getTopicPartitions() {
-        return this.topicPartitions;
-    }
-
-    public getTopicIdByName(name: string) {
-        return this.topicIdByName[name];
-    }
-
-    public getTopicNameById(id: string) {
-        return this.topicNameById[id];
-    }
-
-    @trace()
-    public async fetchMetadataIfNecessary({
-        topics,
-        allowTopicAutoCreation,
-    }: {
-        topics: string[];
-        allowTopicAutoCreation: boolean;
-    }) {
-        const missingTopics = topics.filter((topic) => !this.topicPartitions[topic]);
-        if (!missingTopics.length) {
-            return;
-        }
-
-        try {
-            return await this.fetchMetadata({ topics: missingTopics, allowTopicAutoCreation });
-        } catch (error) {
-            if (
-                error instanceof KafkaTSApiError &&
-                error.errorCode === API_ERROR.UNKNOWN_TOPIC_OR_PARTITION &&
-                allowTopicAutoCreation
-            ) {
-                // TODO: investigate if we can avoid the delay
-                await delay(1000);
-                return await this.fetchMetadata({ topics: missingTopics, allowTopicAutoCreation });
-            }
-            throw error;
-        }
-    }
-
-    private async fetchMetadata({
-        topics,
-        allowTopicAutoCreation,
-    }: {
-        topics: string[] | null;
-        allowTopicAutoCreation: boolean;
-    }) {
-        const { cluster } = this.options;
-
-        const response = await cluster.sendRequest(API.METADATA, {
-            allowTopicAutoCreation,
-            includeTopicAuthorizedOperations: false,
-            topics: topics?.map((name) => ({ id: null, name })) ?? null,
-        });
-
-        this.topicPartitions = {
-            ...this.topicPartitions,
-            ...Object.fromEntries(
-                response.topics.map((topic) => [
-                    topic.name,
-                    topic.partitions.map((partition) => partition.partitionIndex),
-                ]),
-            ),
-        };
-        this.topicNameById = {
-            ...this.topicNameById,
-            ...Object.fromEntries(response.topics.map((topic) => [topic.topicId, topic.name])),
-        };
-        this.topicIdByName = {
-            ...this.topicIdByName,
-            ...Object.fromEntries(response.topics.map((topic) => [topic.name, topic.topicId])),
-        };
-        this.leaderIdByTopicPartition = {
-            ...this.leaderIdByTopicPartition,
-            ...Object.fromEntries(
-                response.topics.map((topic) => [
-                    topic.name,
-                    Object.fromEntries(
-                        topic.partitions.map((partition) => [partition.partitionIndex, partition.leaderId]),
-                    ),
-                ]),
-            ),
-        };
-        this.isrNodesByTopicPartition = {
-            ...this.isrNodesByTopicPartition,
-            ...Object.fromEntries(
-                response.topics.map((topic) => [
-                    topic.name,
-                    Object.fromEntries(
-                        topic.partitions.map((partition) => [partition.partitionIndex, partition.isrNodes]),
-                    ),
-                ]),
-            ),
-        };
-    }
-}
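The deleted Metadata class fetched metadata lazily: fetchMetadataIfNecessary only issues a METADATA request for topics missing from its cache, and retries once after a delay when topic auto-creation races UNKNOWN_TOPIC_OR_PARTITION. A minimal sketch of that fetch-if-missing pattern in isolation; the `fetchTopics` callback is a hypothetical stand-in for `cluster.sendRequest(API.METADATA, ...)`:

```ts
// Sketch of the fetch-if-missing caching pattern used by Metadata.
// `fetchTopics` is a hypothetical stand-in for the METADATA request.
type TopicInfo = { partitions: number[] };

class TopicCache {
    private byName: Record<string, TopicInfo> = {};

    constructor(private fetchTopics: (names: string[]) => Promise<Record<string, TopicInfo>>) {}

    public async ensure(topics: string[]) {
        const missing = topics.filter((name) => !this.byName[name]);
        if (!missing.length) return; // cache hit: no network round trip
        Object.assign(this.byName, await this.fetchTopics(missing));
    }

    public get(name: string) {
        return this.byName[name];
    }
}
```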
package/src/producer/producer.ts
DELETED
@@ -1,142 +0,0 @@
-import { API, API_ERROR } from '../api';
-import { Cluster } from '../cluster';
-import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
-import { defaultPartitioner, Partition, Partitioner } from '../distributors/partitioner';
-import { Metadata } from '../metadata';
-import { Message } from '../types';
-import { delay } from '../utils/delay';
-import { KafkaTSApiError } from '../utils/error';
-import { memo } from '../utils/memo';
-import { createTracer } from '../utils/tracer';
-
-const trace = createTracer('Producer');
-
-export type ProducerOptions = {
-    allowTopicAutoCreation?: boolean;
-    partitioner?: Partitioner;
-};
-
-export class Producer {
-    private options: Required<ProducerOptions>;
-    private metadata: Metadata;
-    private producerId = 0n;
-    private producerEpoch = 0;
-    private sequences: Record<string, Record<number, number>> = {};
-    private partition: Partition;
-
-    constructor(
-        private cluster: Cluster,
-        options: ProducerOptions,
-    ) {
-        this.options = {
-            ...options,
-            allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
-            partitioner: options.partitioner ?? defaultPartitioner,
-        };
-        this.metadata = new Metadata({ cluster });
-        this.partition = this.options.partitioner({ metadata: this.metadata });
-    }
-
-    @trace(() => ({ root: true }))
-    public async send(messages: Message[], { acks = -1 }: { acks?: -1 | 1 } = {}) {
-        await this.ensureConnected();
-
-        const { allowTopicAutoCreation } = this.options;
-        const defaultTimestamp = BigInt(Date.now());
-
-        const topics = Array.from(new Set(messages.map((message) => message.topic)));
-        await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
-
-        const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
-            messages.map((message) => ({ ...message, partition: this.partition(message) })),
-            this.metadata.getTopicPartitionLeaderIds(),
-        );
-
-        await Promise.all(
-            Object.entries(nodeTopicPartitionMessages).map(([nodeId, topicPartitionMessages]) =>
-                this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
-                    transactionalId: null,
-                    acks,
-                    timeoutMs: 5000,
-                    topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
-                        name: topic,
-                        partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
-                            const partitionIndex = parseInt(partition);
-                            let baseTimestamp: bigint | undefined;
-                            let maxTimestamp: bigint | undefined;
-
-                            messages.forEach(({ timestamp = defaultTimestamp }) => {
-                                if (!baseTimestamp || timestamp < baseTimestamp) {
-                                    baseTimestamp = timestamp;
-                                }
-                                if (!maxTimestamp || timestamp > maxTimestamp) {
-                                    maxTimestamp = timestamp;
-                                }
-                            });
-
-                            const baseSequence = this.nextSequence(topic, partitionIndex, messages.length);
-                            return {
-                                index: partitionIndex,
-                                baseOffset: 0n,
-                                partitionLeaderEpoch: -1,
-                                attributes: 0,
-                                lastOffsetDelta: messages.length - 1,
-                                baseTimestamp: baseTimestamp ?? 0n,
-                                maxTimestamp: maxTimestamp ?? 0n,
-                                producerId: this.producerId,
-                                producerEpoch: 0,
-                                baseSequence,
-                                records: messages.map((message, index) => ({
-                                    attributes: 0,
-                                    timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
-                                    offsetDelta: index,
-                                    key: message.key ?? null,
-                                    value: message.value,
-                                    headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
-                                        key: Buffer.from(key),
-                                        value: Buffer.from(value),
-                                    })),
-                                })),
-                            };
-                        }),
-                    })),
-                }),
-            ),
-        );
-    }
-
-    public async close() {
-        await this.cluster.disconnect();
-    }
-
-    private ensureConnected = memo(async () => {
-        await this.cluster.connect();
-        await this.initProducerId();
-    });
-
-    private async initProducerId(): Promise<void> {
-        try {
-            const result = await this.cluster.sendRequest(API.INIT_PRODUCER_ID, {
-                transactionalId: null,
-                transactionTimeoutMs: 0,
-                producerId: this.producerId,
-                producerEpoch: this.producerEpoch,
-            });
-            this.producerId = result.producerId;
-            this.producerEpoch = result.producerEpoch;
-            this.sequences = {};
-        } catch (error) {
-            if ((error as KafkaTSApiError).errorCode === API_ERROR.COORDINATOR_LOAD_IN_PROGRESS) {
-                await delay(100);
-                return this.initProducerId();
-            }
-            throw error;
-        }
-    }
-
-    private nextSequence(topic: string, partition: number, messagesCount: number) {
-        this.sequences[topic] ??= {};
-        this.sequences[topic][partition] ??= 0;
-        return (this.sequences[topic][partition] += messagesCount || 1);
-    }
-}
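The send() path above batches records per topic-partition leader and stores each record's timestamp as a delta from the batch minimum (baseTimestamp). A small worked illustration of that delta computation, outside any kafka-ts types:

```ts
// Record-batch timestamps: each record carries a delta from the batch minimum,
// mirroring the baseTimestamp / maxTimestamp / timestampDelta fields in send().
const timestamps: bigint[] = [1700000000123n, 1700000000100n, 1700000000150n];

const baseTimestamp = timestamps.reduce((min, t) => (t < min ? t : min));
const maxTimestamp = timestamps.reduce((max, t) => (t > max ? t : max));
const timestampDeltas = timestamps.map((t) => t - baseTimestamp); // [23n, 0n, 50n]

// A reader reconstructs each timestamp as baseTimestamp + timestampDelta.
console.log({ baseTimestamp, maxTimestamp, timestampDeltas });
```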
package/src/types.ts
DELETED
package/src/utils/api.ts
DELETED
@@ -1,11 +0,0 @@
-import { Decoder } from './decoder';
-import { Encoder } from './encoder';
-
-export type Api<Request, Response> = {
-    apiKey: number;
-    apiVersion: number;
-    request: (encoder: Encoder, body: Request) => Encoder;
-    response: (buffer: Decoder) => Promise<Response> | Response;
-};
-
-export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
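createApi is an identity helper whose only job is to pin the Request/Response type parameters of the Api shape. A hypothetical definition showing how it would be used; the apiKey, version, and field layout here are illustrative, not a real Kafka API:

```ts
import { createApi } from './utils/api';

// Illustrative only: a made-up API pairing an encoder for the request body
// with a decoder for the response body.
const EXAMPLE_API = createApi<{ name: string }, { errorCode: number }>({
    apiKey: 9999, // hypothetical key, not a real Kafka API
    apiVersion: 0,
    request: (encoder, body) => encoder.writeString(body.name),
    response: (decoder) => ({ errorCode: decoder.readInt16() }),
});
```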
package/src/utils/crypto.ts
DELETED
@@ -1,15 +0,0 @@
-import { createHash, createHmac, pbkdf2, randomBytes } from 'crypto';
-
-export const generateNonce = () => randomBytes(16).toString('base64').replace(/[\/=]/g, '');
-
-export const saltPassword = (password: string, salt: string, iterations: number, keyLength: number, digest: string) =>
-    new Promise<Buffer>((resolve, reject) =>
-        pbkdf2(password, salt, iterations, keyLength, digest, (err, key) => (err ? reject(err) : resolve(key))),
-    );
-
-export const base64Encode = (input: Buffer | string) => Buffer.from(input).toString('base64');
-export const base64Decode = (input: string) => Buffer.from(input, 'base64').toString();
-export const hash = (data: Buffer, digest: string) => createHash(digest).update(data).digest();
-export const hmac = (key: Buffer, data: Buffer | string, digest: string) =>
-    createHmac(digest, key).update(data).digest();
-export const xor = (a: Buffer, b: Buffer) => Buffer.from(a.map((byte, i) => byte ^ b[i]));
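These helpers are the SCRAM building blocks used by src/auth/scram.ts (listed above). A sketch of how they compose into a client proof per RFC 5802; the password, salt, and authMessage values are placeholders, since in a real exchange the auth message is assembled from the client-first, server-first, and client-final-without-proof messages:

```ts
import { base64Encode, hash, hmac, saltPassword, xor } from './utils/crypto';

// Assumed composition per RFC 5802; all inputs are placeholders.
const digest = 'sha256';
const saltedPassword = await saltPassword('password', 'salt', 4096, 32, digest);
const clientKey = hmac(saltedPassword, 'Client Key', digest);
const storedKey = hash(clientKey, digest);
const authMessage = 'client-first,server-first,client-final-without-proof'; // placeholder
const clientSignature = hmac(storedKey, authMessage, digest);
const clientProof = base64Encode(xor(clientKey, clientSignature));
```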
package/src/utils/decoder.ts
DELETED
@@ -1,174 +0,0 @@
-export class Decoder {
-    private offset = 0;
-
-    constructor(private buffer: Buffer) {}
-
-    public getOffset() {
-        return this.offset;
-    }
-
-    public getBufferLength() {
-        return this.buffer.length;
-    }
-
-    public readInt8() {
-        const value = this.buffer.readInt8(this.offset);
-        this.offset += 1;
-        return value;
-    }
-
-    public readInt16() {
-        const value = this.buffer.readInt16BE(this.offset);
-        this.offset += 2;
-        return value;
-    }
-
-    public readInt32() {
-        const value = this.buffer.readInt32BE(this.offset);
-        this.offset += 4;
-        return value;
-    }
-
-    public readUInt32() {
-        const value = this.buffer.readUInt32BE(this.offset);
-        this.offset += 4;
-        return value;
-    }
-
-    public readInt64() {
-        const value = this.buffer.readBigInt64BE(this.offset);
-        this.offset += 8;
-        return value;
-    }
-
-    public readUVarInt() {
-        let result = 0;
-        let shift = 0;
-        let currentByte;
-        do {
-            currentByte = this.buffer[this.offset++];
-            result |= (currentByte & 0x7f) << shift;
-            shift += 7;
-        } while ((currentByte & 0x80) !== 0);
-        return result;
-    }
-
-    public readVarInt() {
-        const decodedValue = this.readUVarInt();
-        return (decodedValue >>> 1) ^ -(decodedValue & 1);
-    }
-
-    public readUVarLong() {
-        let result = BigInt(0);
-        let shift = BigInt(0);
-        let currentByte;
-        do {
-            currentByte = BigInt(this.buffer[this.offset++]);
-            result |= (currentByte & BigInt(0x7f)) << shift;
-            shift += BigInt(7);
-        } while ((currentByte & BigInt(0x80)) !== BigInt(0));
-        return result;
-    }
-
-    public readVarLong() {
-        const decodedValue = this.readUVarLong();
-        return (decodedValue >> BigInt(1)) ^ -(decodedValue & BigInt(1));
-    }
-
-    public readString() {
-        const length = this.readInt16();
-        if (length < 0) {
-            return null;
-        }
-
-        const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
-        this.offset += length;
-        return value;
-    }
-
-    public readCompactString() {
-        const length = this.readUVarInt() - 1;
-        if (length < 0) {
-            return null;
-        }
-
-        const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
-        this.offset += length;
-        return value;
-    }
-
-    public readVarIntBuffer() {
-        const length = this.readVarInt();
-        if (length < 0) {
-            return null;
-        }
-
-        const value = this.buffer.subarray(this.offset, this.offset + length);
-        this.offset += length;
-        return value;
-    }
-
-    public readUUID() {
-        const value = this.buffer.toString('hex', this.offset, this.offset + 16);
-        this.offset += 16;
-        return value;
-    }
-
-    public readBoolean() {
-        const value = this.buffer.readInt8(this.offset) === 1;
-        this.offset += 1;
-        return value;
-    }
-
-    public readArray<T>(callback: (opts: Decoder) => T): T[] {
-        const length = this.readInt32();
-        const results = Array.from({ length }).map(() => callback(this));
-        return results;
-    }
-
-    public readCompactArray<T>(callback: (opts: Decoder) => T): T[] {
-        const length = this.readUVarInt() - 1;
-        const results = Array.from({ length }).map(() => callback(this));
-        return results;
-    }
-
-    public readVarIntArray<T>(callback: (opts: Decoder) => T): T[] {
-        const length = this.readVarInt();
-        const results = Array.from({ length }).map(() => callback(this));
-        return results;
-    }
-
-    public readRecords<T>(callback: (opts: Decoder) => T): T[] {
-        const length = this.readInt32();
-
-        return Array.from({ length }).map(() => {
-            const size = this.readVarInt();
-            const child = new Decoder(this.buffer.subarray(this.offset, this.offset + size));
-            this.offset += size;
-            return callback(child);
-        });
-    }
-
-    public read(length?: number) {
-        const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
-        this.offset += Buffer.byteLength(value);
-        return value;
-    }
-
-    public readBytes() {
-        const length = this.readInt32();
-        return this.read(length);
-    }
-
-    public readCompactBytes() {
-        const length = this.readUVarInt() - 1;
-        if (length < 0) {
-            return null;
-        }
-        return this.read(length);
-    }
-
-    public readTagBuffer() {
-        this.readUVarInt();
-    }
-}
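readVarInt and readVarLong apply ZigZag decoding ((n >>> 1) ^ -(n & 1)) on top of the base-128 varint, so small negative integers stay short on the wire. A quick sanity check against hand-encoded bytes (import path assumes the former src layout):

```ts
import { Decoder } from './utils/decoder';

// ZigZag varints: unsigned 0, 1, 2, 3, 4 decode to 0, -1, 1, -2, 2.
const decoder = new Decoder(Buffer.from([0x00, 0x01, 0x02, 0x03, 0x04]));
console.log([
    decoder.readVarInt(), //  0
    decoder.readVarInt(), // -1
    decoder.readVarInt(), //  1
    decoder.readVarInt(), // -2
    decoder.readVarInt(), //  2
]);
```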
package/src/utils/delay.ts
DELETED
@@ -1 +0,0 @@
-export const delay = (delayMs: number) => new Promise<void>((resolve) => setTimeout(resolve, delayMs));
package/src/utils/encoder.ts
DELETED
@@ -1,148 +0,0 @@
-export class Encoder {
-    private chunks: Buffer[] = [];
-
-    public getChunks() {
-        return this.chunks;
-    }
-
-    public getByteLength() {
-        return this.chunks.reduce((acc, chunk) => acc + chunk.byteLength, 0);
-    }
-
-    public write(...buffers: Buffer[]) {
-        this.chunks.push(...buffers);
-        return this;
-    }
-
-    public writeEncoder(encoder: Encoder) {
-        return this.write(...encoder.getChunks());
-    }
-
-    public writeInt8(value: number) {
-        const buffer = Buffer.allocUnsafe(1);
-        buffer.writeInt8(value);
-        return this.write(buffer);
-    }
-
-    public writeInt16(value: number) {
-        const buffer = Buffer.allocUnsafe(2);
-        buffer.writeInt16BE(value);
-        return this.write(buffer);
-    }
-
-    public writeInt32(value: number) {
-        const buffer = Buffer.allocUnsafe(4);
-        buffer.writeInt32BE(value);
-        return this.write(buffer);
-    }
-
-    public writeUInt32(value: number) {
-        const buffer = Buffer.allocUnsafe(4);
-        buffer.writeUInt32BE(value);
-        return this.write(buffer);
-    }
-
-    public writeInt64(value: bigint) {
-        const buffer = Buffer.allocUnsafe(8);
-        buffer.writeBigInt64BE(value);
-        return this.write(buffer);
-    }
-
-    public writeUVarInt(value: number) {
-        const byteArray = [];
-        while ((value & 0xffffffff) !== 0) {
-            byteArray.push((value & 0x7f) | 0x80);
-            value >>>= 7;
-        }
-        byteArray.push(value & 0x7f);
-        return this.write(Buffer.from(byteArray));
-    }
-
-    public writeVarInt(value: number) {
-        const encodedValue = (value << 1) ^ (value >> 31);
-        return this.writeUVarInt(encodedValue);
-    }
-
-    public writeUVarLong(value: bigint) {
-        const byteArray = [];
-        while ((value & BigInt(0xffffffffffffffff)) !== BigInt(0)) {
-            byteArray.push(Number((value & BigInt(0x7f)) | BigInt(0x80)));
-            value = value >> BigInt(7);
-        }
-        byteArray.push(Number(value));
-        return this.write(Buffer.from(byteArray));
-    }
-
-    public writeVarLong(value: bigint) {
-        const encodedValue = (value << BigInt(1)) ^ (value >> BigInt(63));
-        return this.writeUVarLong(encodedValue);
-    }
-
-    public writeString(value: string | null) {
-        if (value === null) {
-            return this.writeInt16(-1);
-        }
-        const byteLength = Buffer.byteLength(value, 'utf-8');
-        const buffer = Buffer.allocUnsafe(byteLength);
-        buffer.write(value, 0, byteLength, 'utf-8');
-        return this.writeInt16(byteLength).write(buffer);
-    }
-
-    public writeCompactString(value: string | null) {
-        if (value === null) {
-            return this.writeUVarInt(0);
-        }
-
-        const byteLength = Buffer.byteLength(value, 'utf-8');
-        const buffer = Buffer.allocUnsafe(byteLength);
-        buffer.write(value, 0, byteLength, 'utf-8');
-        return this.writeUVarInt(byteLength + 1).write(buffer);
-    }
-
-    public writeVarIntBuffer(buffer: Buffer | null) {
-        if (buffer === null) {
-            return this.writeVarInt(-1);
-        }
-        return this.writeVarInt(buffer.byteLength).write(buffer);
-    }
-
-    public writeUUID(value: string | null) {
-        if (value === null) {
-            return this.write(Buffer.alloc(16));
-        }
-        return this.write(Buffer.from(value, 'hex'));
-    }
-
-    public writeBoolean(value: boolean) {
-        return this.writeInt8(value ? 1 : 0);
-    }
-
-    public writeArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder) {
-        return this.writeInt32(arr.length).write(...arr.flatMap((item) => callback(new Encoder(), item).getChunks()));
-    }
-
-    public writeCompactArray<T>(arr: T[] | null, callback: (encoder: Encoder, item: T) => Encoder) {
-        if (arr === null) {
-            return this.writeUVarInt(0);
-        }
-        return this.writeUVarInt(arr.length + 1).write(
-            ...arr.flatMap((item) => callback(new Encoder(), item).getChunks()),
-        );
-    }
-
-    public writeVarIntArray<T>(arr: T[], callback: (encoder: Encoder, item: T) => Encoder) {
-        return this.writeVarInt(arr.length).write(...arr.flatMap((item) => callback(new Encoder(), item).getChunks()));
-    }
-
-    public writeBytes(value: Buffer) {
-        return this.writeInt32(value.length).write(value);
-    }
-
-    public writeCompactBytes(value: Buffer) {
-        return this.writeUVarInt(value.length + 1).write(value);
-    }
-
-    public value() {
-        return Buffer.concat(this.chunks);
-    }
-}
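Encoder is the write-side mirror of Decoder: it accumulates chunks and concatenates them in value(). A round-trip sanity check pairing the two classes (import paths assume the former src layout):

```ts
import { Decoder } from './utils/decoder';
import { Encoder } from './utils/encoder';

// Encode a few primitives, then read them back in the same order.
const payload = new Encoder()
    .writeInt16(42)
    .writeCompactString('kafka-ts')
    .writeVarLong(-1n)
    .value();

const decoder = new Decoder(payload);
console.log(decoder.readInt16());         // 42
console.log(decoder.readCompactString()); // 'kafka-ts'
console.log(decoder.readVarLong());       // -1n
```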
package/src/utils/error.ts
DELETED
@@ -1,21 +0,0 @@
-import { API_ERROR } from '../api';
-
-export class KafkaTSError extends Error {
-    constructor(message: string) {
-        super(message);
-        this.name = this.constructor.name;
-    }
-}
-
-export class KafkaTSApiError<T = any> extends KafkaTSError {
-    constructor(
-        public errorCode: number,
-        public errorMessage: string | null,
-        public response: T,
-    ) {
-        const [errorName] = Object.entries(API_ERROR).find(([, value]) => value === errorCode) ?? ['UNKNOWN'];
-        super(`${errorName}${errorMessage ? `: ${errorMessage}` : ''}`);
-    }
-}
-
-export class ConnectionError extends KafkaTSError {}
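KafkaTSApiError maps its numeric errorCode back to the matching API_ERROR name for the message, and callers branch on the code; the deleted Metadata class used exactly this pattern around UNKNOWN_TOPIC_OR_PARTITION. A brief sketch (API_ERROR comes from the deleted src/api/index.ts):

```ts
import { API_ERROR } from './api';
import { KafkaTSApiError } from './utils/error';

try {
    // ...some request, e.g. a metadata fetch
} catch (error) {
    if (error instanceof KafkaTSApiError && error.errorCode === API_ERROR.UNKNOWN_TOPIC_OR_PARTITION) {
        // retryable: the topic may still be auto-creating
    } else {
        throw error;
    }
}
```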
package/src/utils/logger.ts
DELETED
@@ -1,37 +0,0 @@
-export interface Logger {
-    debug: (message: string, metadata?: unknown) => void;
-    info: (message: string, metadata?: unknown) => void;
-    warn: (message: string, metadata?: unknown) => void;
-    error: (message: string, metadata?: unknown) => void;
-}
-
-export const jsonSerializer = (_: unknown, v: unknown) => {
-    if (v instanceof Error) {
-        return { name: v.name, message: v.message, stack: v.stack, cause: v.cause };
-    }
-    if (typeof v === 'bigint') {
-        return v.toString();
-    }
-    return v;
-};
-
-class JsonLogger implements Logger {
-    debug(message: string, metadata?: unknown) {
-        console.debug(JSON.stringify({ message, metadata, level: 'debug' }, jsonSerializer));
-    }
-    info(message: string, metadata?: unknown) {
-        console.info(JSON.stringify({ message, metadata, level: 'info' }, jsonSerializer));
-    }
-    warn(message: string, metadata?: unknown) {
-        console.warn(JSON.stringify({ message, metadata, level: 'warning' }, jsonSerializer));
-    }
-    error(message: string, metadata?: unknown) {
-        console.error(JSON.stringify({ message, metadata, level: 'error' }, jsonSerializer));
-    }
-}
-
-export let log: Logger = new JsonLogger();
-
-export const setLogger = (newLogger: Logger) => {
-    log = newLogger;
-};
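The module-level log binding defaults to the JSON logger but can be swapped via setLogger. A minimal sketch of plugging in a custom backend (import path assumes the former src layout):

```ts
import { setLogger, type Logger } from './utils/logger';

// Any object implementing Logger can receive kafka-ts logs.
const quietLogger: Logger = {
    debug: () => {}, // drop debug noise
    info: (message, metadata) => console.info(message, metadata),
    warn: (message, metadata) => console.warn(message, metadata),
    error: (message, metadata) => console.error(message, metadata),
};

setLogger(quietLogger);
```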