kafka-ts 0.0.1-beta.0 → 0.0.1-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -2
- package/examples/src/client.ts +2 -2
- package/package.json +1 -1
- package/src/auth/index.ts +2 -0
- package/src/auth/plain.ts +10 -0
- package/src/auth/scram.ts +52 -0
- package/src/broker.ts +5 -79
- package/src/client.ts +2 -2
- package/src/cluster.test.ts +2 -1
- package/src/cluster.ts +2 -2
- package/src/index.ts +4 -2
- package/src/utils/crypto.ts +3 -2
package/README.md
CHANGED
|
@@ -92,10 +92,18 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu
|
|
|
92
92
|
- **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
|
|
93
93
|
- **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.
|
|
94
94
|
|
|
95
|
+
|
|
96
|
+
## Supported SASL mechanisms
|
|
97
|
+
|
|
98
|
+
- PLAIN
|
|
99
|
+
- SCRAM-SHA-256
|
|
100
|
+
- SCRAM-SHA-512
|
|
101
|
+
|
|
102
|
+
Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.
|
|
103
|
+
|
|
95
104
|
## Backlog
|
|
96
105
|
|
|
97
|
-
Minimal set of features
|
|
106
|
+
Minimal set of features left to implement before a stable release:
|
|
98
107
|
|
|
99
108
|
- Partitioner (Currently have to specify the partition on producer.send())
|
|
100
109
|
- API versioning (Currently only tested against Kafka 3.7+)
|
|
101
|
-
- SASL SCRAM-SHA-512 support
|
package/examples/src/client.ts
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
import { readFileSync } from 'fs';
|
|
2
|
-
import { createKafkaClient } from 'kafka-ts';
|
|
2
|
+
import { createKafkaClient, saslScramSha512 } from 'kafka-ts';
|
|
3
3
|
|
|
4
4
|
export const kafka = createKafkaClient({
|
|
5
5
|
clientId: 'examples',
|
|
6
6
|
bootstrapServers: [{ host: 'localhost', port: 9092 }],
|
|
7
|
-
sasl: {
|
|
7
|
+
sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
|
|
8
8
|
ssl: { ca: readFileSync('../certs/ca.crt').toString() },
|
|
9
9
|
});
|
package/src/auth/plain.ts
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { API } from "../api";
|
|
2
|
+
import { SASLProvider } from "../broker";
|
|
3
|
+
|
|
4
|
+
export const saslPlain = ({ username, password }: { username: string; password: string }): SASLProvider => ({
|
|
5
|
+
mechanism: 'PLAIN',
|
|
6
|
+
authenticate: async ({ sendRequest }) => {
|
|
7
|
+
const authBytes = [null, username, password].join('\u0000');
|
|
8
|
+
await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
|
|
9
|
+
},
|
|
10
|
+
});
|
|
package/src/auth/scram.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { API } from '../api';
|
|
2
|
+
import { SASLProvider } from '../broker';
|
|
3
|
+
import { base64Decode, base64Encode, generateNonce, hash, hmac, saltPassword, xor } from '../utils/crypto';
|
|
4
|
+
import { KafkaTSError } from '../utils/error';
|
|
5
|
+
|
|
6
|
+
const saslScram =
|
|
7
|
+
({ mechanism, keyLength, digest }: { mechanism: string; keyLength: number; digest: string }) =>
|
|
8
|
+
({ username, password }: { username: string; password: string }): SASLProvider => ({
|
|
9
|
+
mechanism,
|
|
10
|
+
authenticate: async ({ sendRequest }) => {
|
|
11
|
+
const nonce = generateNonce();
|
|
12
|
+
const firstMessage = `n=${username},r=${nonce}`;
|
|
13
|
+
|
|
14
|
+
const { authBytes } = await sendRequest(API.SASL_AUTHENTICATE, {
|
|
15
|
+
authBytes: Buffer.from(`n,,${firstMessage}`),
|
|
16
|
+
});
|
|
17
|
+
if (!authBytes) {
|
|
18
|
+
throw new KafkaTSError('No auth response');
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
const response = Object.fromEntries(
|
|
22
|
+
authBytes
|
|
23
|
+
.toString()
|
|
24
|
+
.split(',')
|
|
25
|
+
.map((pair) => pair.split('=')),
|
|
26
|
+
) as { r: string; s: string; i: string };
|
|
27
|
+
|
|
28
|
+
const rnonce = response.r;
|
|
29
|
+
if (!rnonce.startsWith(nonce)) {
|
|
30
|
+
throw new KafkaTSError('Invalid nonce');
|
|
31
|
+
}
|
|
32
|
+
const iterations = parseInt(response.i);
|
|
33
|
+
const salt = base64Decode(response.s);
|
|
34
|
+
|
|
35
|
+
const saltedPassword = await saltPassword(password, salt, iterations, keyLength, digest);
|
|
36
|
+
const clientKey = hmac(saltedPassword, 'Client Key', digest);
|
|
37
|
+
const clientKeyHash = hash(clientKey, digest);
|
|
38
|
+
|
|
39
|
+
let finalMessage = `c=${base64Encode('n,,')},r=${rnonce}`;
|
|
40
|
+
|
|
41
|
+
const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
|
|
42
|
+
const clientSignature = hmac(clientKeyHash, fullMessage, digest);
|
|
43
|
+
const clientProof = base64Encode(xor(clientKey, clientSignature));
|
|
44
|
+
|
|
45
|
+
finalMessage += `,p=${clientProof}`;
|
|
46
|
+
|
|
47
|
+
await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
|
|
48
|
+
},
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
export const saslScramSha256 = saslScram({ mechanism: 'SCRAM-SHA-256', keyLength: 32, digest: 'sha256' });
|
|
52
|
+
export const saslScramSha512 = saslScram({ mechanism: 'SCRAM-SHA-512', keyLength: 64, digest: 'sha512' });
|
package/src/broker.ts
CHANGED
|
@@ -2,20 +2,18 @@ import { TcpSocketConnectOpts } from 'net';
|
|
|
2
2
|
import { TLSSocketOptions } from 'tls';
|
|
3
3
|
import { API } from './api';
|
|
4
4
|
import { Connection, SendRequest } from './connection';
|
|
5
|
-
import { base64Decode, base64Encode, generateNonce, hash, hmac, saltPassword, xor } from './utils/crypto';
|
|
6
5
|
import { KafkaTSError } from './utils/error';
|
|
7
6
|
import { memo } from './utils/memo';
|
|
8
7
|
|
|
9
|
-
export type
|
|
10
|
-
mechanism:
|
|
11
|
-
|
|
12
|
-
password: string;
|
|
8
|
+
export type SASLProvider = {
|
|
9
|
+
mechanism: string;
|
|
10
|
+
authenticate: (context: { sendRequest: SendRequest }) => Promise<void>;
|
|
13
11
|
};
|
|
14
12
|
|
|
15
13
|
type BrokerOptions = {
|
|
16
14
|
clientId: string | null;
|
|
17
15
|
options: TcpSocketConnectOpts;
|
|
18
|
-
sasl:
|
|
16
|
+
sasl: SASLProvider | null;
|
|
19
17
|
ssl: TLSSocketOptions | null;
|
|
20
18
|
};
|
|
21
19
|
|
|
@@ -69,78 +67,6 @@ export class Broker {
|
|
|
69
67
|
}
|
|
70
68
|
|
|
71
69
|
private async saslAuthenticate() {
|
|
72
|
-
|
|
73
|
-
return;
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
const { mechanism } = this.options.sasl;
|
|
77
|
-
const authenticate = { PLAIN: plainProvider, 'SCRAM-SHA-256': scramSha256Provider }[mechanism as string];
|
|
78
|
-
if (!authenticate) {
|
|
79
|
-
throw new KafkaTSError(`SASL mechanism ${mechanism} is not supported`);
|
|
80
|
-
}
|
|
81
|
-
|
|
82
|
-
await authenticate({
|
|
83
|
-
...this.options.sasl,
|
|
84
|
-
sendRequest: this.sendRequest.bind(this),
|
|
85
|
-
});
|
|
70
|
+
await this.options.sasl?.authenticate({ sendRequest: this.sendRequest });
|
|
86
71
|
}
|
|
87
72
|
}
|
|
88
|
-
|
|
89
|
-
const plainProvider = async ({
|
|
90
|
-
username,
|
|
91
|
-
password,
|
|
92
|
-
sendRequest,
|
|
93
|
-
}: {
|
|
94
|
-
username: string;
|
|
95
|
-
password: string;
|
|
96
|
-
sendRequest: SendRequest;
|
|
97
|
-
}) => {
|
|
98
|
-
const authBytes = [null, username, password].join('\u0000');
|
|
99
|
-
await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
|
|
100
|
-
};
|
|
101
|
-
|
|
102
|
-
const scramSha256Provider = async ({
|
|
103
|
-
username,
|
|
104
|
-
password,
|
|
105
|
-
sendRequest,
|
|
106
|
-
}: {
|
|
107
|
-
username: string;
|
|
108
|
-
password: string;
|
|
109
|
-
sendRequest: SendRequest;
|
|
110
|
-
}) => {
|
|
111
|
-
const nonce = generateNonce();
|
|
112
|
-
const firstMessage = `n=${username},r=${nonce}`;
|
|
113
|
-
|
|
114
|
-
const { authBytes } = await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(`n,,${firstMessage}`) });
|
|
115
|
-
if (!authBytes) {
|
|
116
|
-
throw new KafkaTSError('No auth response');
|
|
117
|
-
}
|
|
118
|
-
|
|
119
|
-
const response = Object.fromEntries(
|
|
120
|
-
authBytes
|
|
121
|
-
.toString()
|
|
122
|
-
.split(',')
|
|
123
|
-
.map((pair) => pair.split('=')),
|
|
124
|
-
) as { r: string; s: string; i: string };
|
|
125
|
-
|
|
126
|
-
const rnonce = response.r;
|
|
127
|
-
if (!rnonce.startsWith(nonce)) {
|
|
128
|
-
throw new KafkaTSError('Invalid nonce');
|
|
129
|
-
}
|
|
130
|
-
const iterations = parseInt(response.i);
|
|
131
|
-
const salt = base64Decode(response.s);
|
|
132
|
-
|
|
133
|
-
const saltedPassword = await saltPassword(password, salt, iterations, 32, 'sha256');
|
|
134
|
-
const clientKey = hmac(saltedPassword, 'Client Key');
|
|
135
|
-
const clientKeyHash = hash(clientKey);
|
|
136
|
-
|
|
137
|
-
let finalMessage = `c=${base64Encode('n,,')},r=${rnonce}`;
|
|
138
|
-
|
|
139
|
-
const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
|
|
140
|
-
const clientSignature = hmac(clientKeyHash, fullMessage);
|
|
141
|
-
const clientProof = base64Encode(xor(clientKey, clientSignature));
|
|
142
|
-
|
|
143
|
-
finalMessage += `,p=${clientProof}`;
|
|
144
|
-
|
|
145
|
-
await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
|
|
146
|
-
};
|
package/src/client.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { TcpSocketConnectOpts } from 'net';
|
|
2
2
|
import { TLSSocketOptions } from 'tls';
|
|
3
|
-
import {
|
|
3
|
+
import { SASLProvider } from './broker';
|
|
4
4
|
import { Cluster } from './cluster';
|
|
5
5
|
import { Consumer, ConsumerOptions } from './consumer/consumer';
|
|
6
6
|
import { Producer, ProducerOptions } from './producer/producer';
|
|
@@ -8,7 +8,7 @@ import { Producer, ProducerOptions } from './producer/producer';
|
|
|
8
8
|
type ClientOptions = {
|
|
9
9
|
clientId?: string | null;
|
|
10
10
|
bootstrapServers: TcpSocketConnectOpts[];
|
|
11
|
-
sasl?:
|
|
11
|
+
sasl?: SASLProvider | null;
|
|
12
12
|
ssl?: TLSSocketOptions | null;
|
|
13
13
|
};
|
|
14
14
|
|
package/src/cluster.test.ts
CHANGED
|
@@ -3,6 +3,7 @@ import { readFileSync } from 'fs';
|
|
|
3
3
|
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
|
|
4
4
|
import { API } from './api';
|
|
5
5
|
import { KEY_TYPE } from './api/find-coordinator';
|
|
6
|
+
import { saslPlain } from './auth';
|
|
6
7
|
import { createKafkaClient } from './client';
|
|
7
8
|
import { Cluster } from './cluster';
|
|
8
9
|
import { KafkaTSApiError } from './utils/error';
|
|
@@ -10,7 +11,7 @@ import { KafkaTSApiError } from './utils/error';
|
|
|
10
11
|
export const kafka = createKafkaClient({
|
|
11
12
|
clientId: 'kafka-ts',
|
|
12
13
|
bootstrapServers: [{ host: 'localhost', port: 9092 }],
|
|
13
|
-
sasl: {
|
|
14
|
+
sasl: saslPlain({ username: 'admin', password: 'admin' }),
|
|
14
15
|
ssl: { ca: readFileSync('./certs/ca.crt').toString() },
|
|
15
16
|
});
|
|
16
17
|
|
package/src/cluster.ts
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
import { TcpSocketConnectOpts } from 'net';
|
|
2
2
|
import { TLSSocketOptions } from 'tls';
|
|
3
3
|
import { API } from './api';
|
|
4
|
-
import { Broker,
|
|
4
|
+
import { Broker, SASLProvider } from './broker';
|
|
5
5
|
import { SendRequest } from './connection';
|
|
6
6
|
import { ConnectionError, KafkaTSError } from './utils/error';
|
|
7
7
|
|
|
8
8
|
type ClusterOptions = {
|
|
9
9
|
clientId: string | null;
|
|
10
10
|
bootstrapServers: TcpSocketConnectOpts[];
|
|
11
|
-
sasl:
|
|
11
|
+
sasl: SASLProvider | null;
|
|
12
12
|
ssl: TLSSocketOptions | null;
|
|
13
13
|
};
|
|
14
14
|
|
package/src/index.ts
CHANGED
package/src/utils/crypto.ts
CHANGED
|
@@ -9,6 +9,7 @@ export const saltPassword = (password: string, salt: string, iterations: number,
|
|
|
9
9
|
|
|
10
10
|
export const base64Encode = (input: Buffer | string) => Buffer.from(input).toString('base64');
|
|
11
11
|
export const base64Decode = (input: string) => Buffer.from(input, 'base64').toString();
|
|
12
|
-
export const hash = (data: Buffer) => createHash(
|
|
13
|
-
export const hmac = (key: Buffer, data: Buffer | string
|
|
12
|
+
export const hash = (data: Buffer, digest: string) => createHash(digest).update(data).digest();
|
|
13
|
+
export const hmac = (key: Buffer, data: Buffer | string, digest: string) =>
|
|
14
|
+
createHmac(digest, key).update(data).digest();
|
|
14
15
|
export const xor = (a: Buffer, b: Buffer) => Buffer.from(a.map((byte, i) => byte ^ b[i]));
|