kafka-ts 0.0.1-beta.0 → 0.0.1-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -92,10 +92,56 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu
  - **Consuming messages without consumer groups** - When you don't need the consumer to track partition offsets, you can simply create a consumer without a groupId and start consuming either from the beginning or from the latest partition offset.
  - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the Kafka API protocol.
 
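To illustrate the group-less mode mentioned above, here is a minimal sketch using the `startConsumer()` options documented below (the topic name and callback are placeholders):

```ts
// Sketch: a consumer without a groupId; offsets are not tracked or committed,
// so every run starts either from the beginning or from the latest offset.
await kafka.startConsumer({
    topics: ['example-topic'],
    fromBeginning: true,
    onMessage: async (message) => {
        console.log(message.value?.toString());
    },
});
```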
+ ## Configuration
+
+ ### `createKafkaClient()`
+
+ | Name | Type | Required | Default | Description |
+ | ---------------- | ---------------------- | -------- | ------- | ------------------------------------------------------ |
+ | clientId | string | false | _null_ | The client id used for all requests. |
+ | bootstrapServers | TcpSocketConnectOpts[] | true | | List of Kafka brokers for initial cluster discovery. |
+ | sasl | SASLProvider | false | | SASL provider (see "Supported SASL mechanisms" below). |
+ | ssl | TLSSocketOptions | false | | SSL configuration. |
+
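For illustration, a minimal client built from the options above might look like the following sketch (host, port, and credentials are placeholders):

```ts
import { createKafkaClient, saslScramSha512 } from 'kafka-ts';

// Placeholder broker address and credentials; adjust for your cluster.
export const kafka = createKafkaClient({
    clientId: 'my-app',
    bootstrapServers: [{ host: 'localhost', port: 9092 }],
    sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
});
```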
+ ### `kafka.startConsumer()`
+
+ | Name | Type | Required | Default | Description |
+ | ---------------------- | -------------------------------------- | -------- | --------- | ----------- |
+ | topics | string[] | true | | List of topics to subscribe to |
+ | groupId | string | false | _null_ | Consumer group id |
+ | groupInstanceId | string | false | _null_ | Consumer group instance id |
+ | rackId | string | false | _null_ | Rack id |
+ | isolationLevel | IsolationLevel | false | | Isolation level |
+ | sessionTimeoutMs | number | false | 30000 | Session timeout in milliseconds |
+ | rebalanceTimeoutMs | number | false | 60000 | Rebalance timeout in milliseconds |
+ | maxWaitMs | number | false | 5000 | Fetch long-poll timeout in milliseconds |
+ | minBytes | number | false | 1 | Minimum number of bytes to wait for before returning a fetch response |
+ | maxBytes | number | false | 1_048_576 | Maximum number of bytes to return in the fetch response |
+ | partitionMaxBytes | number | false | 1_048_576 | Maximum number of bytes to return per partition in the fetch response |
+ | allowTopicAutoCreation | boolean | false | false | Allow Kafka to auto-create the topic when it doesn't exist |
+ | fromBeginning | boolean | false | false | Start consuming from the beginning of the topic |
+ | batchGranularity | BatchGranularity | false | partition | Controls how messages from a fetch response are split into batches and how often offsets are committed. **onBatch** will include messages:<br/>- **partition** - from a single partition<br/>- **topic** - from all topic partitions<br/>- **broker** - from all assigned topics and partitions |
+ | concurrency | number | false | 1 | How many batches to process concurrently |
+ | onMessage | (message: Message) => Promise<unknown> | true | | Callback executed on every message (use either **onMessage** or **onBatch**) |
+ | onBatch | (batch: Message[]) => Promise<unknown> | true | | Callback executed on every batch of messages (based on **batchGranularity**) |
+
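As a rough sketch of these options in use (topic and group names are placeholders, and the callback simply logs each batch):

```ts
// Sketch: one batch per broker response, processing up to 10 batches concurrently.
await kafka.startConsumer({
    topics: ['example-topic'],
    groupId: 'example-group',
    batchGranularity: 'broker',
    concurrency: 10,
    onBatch: async (batch) => {
        console.log(batch);
    },
});
```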
+ ### `kafka.createProducer()`
+
+ | Name | Type | Required | Default | Description |
+ | ---------------------- | ----------- | -------- | ------- | ----------- |
+ | allowTopicAutoCreation | boolean | false | false | Allow Kafka to auto-create the topic when it doesn't exist |
+ | partitioner | Partitioner | false | | Custom partitioner function. Defaults to a Java-compatible partitioner. |
+
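A sketch of producing a message with these options (topic name and payload are placeholders; keys and values are Buffers in this version, and the partition is written out explicitly here even though the default partitioner can derive it from the key):

```ts
// Sketch: create a producer and send a single keyed message.
const producer = kafka.createProducer({ allowTopicAutoCreation: false });

await producer.send([
    {
        topic: 'example-topic',
        key: Buffer.from('example-key'),
        value: Buffer.from('example-value'),
        partition: 0,
    },
]);
```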
+ ### Supported SASL mechanisms
+
+ - PLAIN: `saslPlain({ username, password })`
+ - SCRAM-SHA-256: `saslScramSha256({ username, password })`
+ - SCRAM-SHA-512: `saslScramSha512({ username, password })`
+
+ Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.
+
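As a rough sketch of such a provider (modeled on the built-in PLAIN implementation that appears later in this diff; the `API` and `SASLProvider` exports are assumed to be available from the package root):

```ts
import { API, SASLProvider } from 'kafka-ts';

// Minimal custom provider: a mechanism name plus an authenticate() callback
// that exchanges SaslAuthenticate requests with the broker.
export const myPlainSasl = ({ username, password }: { username: string; password: string }): SASLProvider => ({
    mechanism: 'PLAIN',
    authenticate: async ({ sendRequest }) => {
        const authBytes = [null, username, password].join('\u0000');
        await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
    },
});
```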
  ## Backlog
 
- Minimal set of features required before a stable release:
+ Minimal set of features left to implement before a stable release:
 
- - Partitioner (Currently have to specify the partition on producer.send())
  - API versioning (Currently only tested against Kafka 3.7+)
- - SASL SCRAM-SHA-512 support
@@ -4,7 +4,8 @@
  "description": "",
  "main": "dist/replicator.js",
  "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
+ "test": "echo \"Error: no test specified\" && exit 1",
+ "build": "tsc"
  },
  "dependencies": {
  "kafka-ts": "file:../"
@@ -1,9 +1,9 @@
  import { readFileSync } from 'fs';
- import { createKafkaClient } from 'kafka-ts';
+ import { createKafkaClient, saslScramSha512 } from 'kafka-ts';
 
  export const kafka = createKafkaClient({
  clientId: 'examples',
  bootstrapServers: [{ host: 'localhost', port: 9092 }],
- sasl: { mechanism: 'SCRAM-SHA-256', username: 'admin', password: 'admin' },
+ sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
  ssl: { ca: readFileSync('../certs/ca.crt').toString() },
  });
@@ -8,7 +8,7 @@ import { kafka } from './client';
  onBatch: (batch) => {
  console.log(batch);
  },
- granularity: 'broker',
+ batchGranularity: 'broker',
  concurrency: 10,
  });
 
@@ -11,7 +11,7 @@ rl.on('line', async (line) => {
  {
  topic: 'example-topic-f',
  key: null,
- value: line,
+ value: Buffer.from(line),
  partition: 0,
  },
  ]);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "kafka-ts",
- "version": "0.0.1-beta.0",
+ "version": "0.0.1-beta.2",
  "main": "dist/index.js",
  "author": "Priit Käärd",
  "license": "MIT",
@@ -360,14 +360,60 @@ exports[`Request handler > should fetch messages 1`] = `
  "attributes": 0,
  "headers": [
  {
- "key": "header-key",
- "value": "header-value",
+ "key": {
+ "data": [
+ 104,
+ 101,
+ 97,
+ 100,
+ 101,
+ 114,
+ 45,
+ 107,
+ 101,
+ 121,
+ ],
+ "type": "Buffer",
+ },
+ "value": {
+ "data": [
+ 104,
+ 101,
+ 97,
+ 100,
+ 101,
+ 114,
+ 45,
+ 118,
+ 97,
+ 108,
+ 117,
+ 101,
+ ],
+ "type": "Buffer",
+ },
  },
  ],
- "key": "key",
+ "key": {
+ "data": [
+ 107,
+ 101,
+ 121,
+ ],
+ "type": "Buffer",
+ },
  "offsetDelta": 0,
  "timestampDelta": 0n,
- "value": "value",
+ "value": {
+ "data": [
+ 118,
+ 97,
+ 108,
+ 117,
+ 101,
+ ],
+ "type": "Buffer",
+ },
  },
  ],
  },
@@ -937,6 +983,156 @@ exports[`Request handler > should request metadata for all topics 1`] = `
  "controllerId": 0,
  "throttleTimeMs": 0,
  "topics": [
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isInternal": false,
+ "name": "my-topic",
+ "partitions": [
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 5,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 6,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 0,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 2,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 7,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 1,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 8,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 9,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 3,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ {
+ "_tag": undefined,
+ "errorCode": 0,
+ "isrNodes": [
+ 0,
+ ],
+ "leaderEpoch": 0,
+ "leaderId": 0,
+ "offlineReplicas": [],
+ "partitionIndex": 4,
+ "replicaNodes": [
+ 0,
+ ],
+ },
+ ],
+ "topicAuthorizedOperations": -2147483648,
+ "topicId": "Any<UUID>",
+ },
  {
  "_tag": undefined,
  "errorCode": 0,
package/src/api/fetch.ts CHANGED
@@ -130,11 +130,11 @@ const decodeRecords = (decoder: Decoder) => {
  attributes: record.readInt8(),
  timestampDelta: record.readVarLong(),
  offsetDelta: record.readVarInt(),
- key: record.readVarIntString(),
- value: record.readVarIntString(),
+ key: record.readVarIntBuffer(),
+ value: record.readVarIntBuffer(),
  headers: record.readCompactArray((header) => ({
- key: header.readVarIntString(),
- value: header.readVarIntString(),
+ key: header.readVarIntBuffer(),
+ value: header.readVarIntBuffer(),
  })),
  })),
  });
@@ -28,11 +28,11 @@ export const PRODUCE = createApi({
  attributes: number;
  timestampDelta: bigint;
  offsetDelta: number;
- key: string | null;
- value: string | null;
+ key: Buffer | null;
+ value: Buffer | null;
  headers: {
- key: string;
- value: string;
+ key: Buffer;
+ value: Buffer;
  }[];
  }[];
  }[];
@@ -61,10 +61,10 @@ export const PRODUCE = createApi({
  .writeInt8(record.attributes)
  .writeVarLong(record.timestampDelta)
  .writeVarInt(record.offsetDelta)
- .writeVarIntString(record.key)
- .writeVarIntString(record.value)
+ .writeVarIntBuffer(record.key)
+ .writeVarIntBuffer(record.value)
  .writeVarIntArray(record.headers, (encoder, header) =>
- encoder.writeVarIntString(header.key).writeVarIntString(header.value),
+ encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value),
  )
  .value();
 
@@ -0,0 +1,2 @@
+ export { saslPlain } from './plain';
+ export { saslScramSha256, saslScramSha512 } from './scram';
@@ -0,0 +1,10 @@
+ import { API } from "../api";
+ import { SASLProvider } from "../broker";
+
+ export const saslPlain = ({ username, password }: { username: string; password: string }): SASLProvider => ({
+ mechanism: 'PLAIN',
+ authenticate: async ({ sendRequest }) => {
+ const authBytes = [null, username, password].join('\u0000');
+ await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
+ },
+ });
@@ -0,0 +1,52 @@
+ import { API } from '../api';
+ import { SASLProvider } from '../broker';
+ import { base64Decode, base64Encode, generateNonce, hash, hmac, saltPassword, xor } from '../utils/crypto';
+ import { KafkaTSError } from '../utils/error';
+
+ const saslScram =
+ ({ mechanism, keyLength, digest }: { mechanism: string; keyLength: number; digest: string }) =>
+ ({ username, password }: { username: string; password: string }): SASLProvider => ({
+ mechanism,
+ authenticate: async ({ sendRequest }) => {
+ const nonce = generateNonce();
+ const firstMessage = `n=${username},r=${nonce}`;
+
+ const { authBytes } = await sendRequest(API.SASL_AUTHENTICATE, {
+ authBytes: Buffer.from(`n,,${firstMessage}`),
+ });
+ if (!authBytes) {
+ throw new KafkaTSError('No auth response');
+ }
+
+ const response = Object.fromEntries(
+ authBytes
+ .toString()
+ .split(',')
+ .map((pair) => pair.split('=')),
+ ) as { r: string; s: string; i: string };
+
+ const rnonce = response.r;
+ if (!rnonce.startsWith(nonce)) {
+ throw new KafkaTSError('Invalid nonce');
+ }
+ const iterations = parseInt(response.i);
+ const salt = base64Decode(response.s);
+
+ const saltedPassword = await saltPassword(password, salt, iterations, keyLength, digest);
+ const clientKey = hmac(saltedPassword, 'Client Key', digest);
+ const clientKeyHash = hash(clientKey, digest);
+
+ let finalMessage = `c=${base64Encode('n,,')},r=${rnonce}`;
+
+ const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
+ const clientSignature = hmac(clientKeyHash, fullMessage, digest);
+ const clientProof = base64Encode(xor(clientKey, clientSignature));
+
+ finalMessage += `,p=${clientProof}`;
+
+ await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
+ },
+ });
+
+ export const saslScramSha256 = saslScram({ mechanism: 'SCRAM-SHA-256', keyLength: 32, digest: 'sha256' });
+ export const saslScramSha512 = saslScram({ mechanism: 'SCRAM-SHA-512', keyLength: 64, digest: 'sha512' });
package/src/broker.ts CHANGED
@@ -2,20 +2,18 @@ import { TcpSocketConnectOpts } from 'net';
  import { TLSSocketOptions } from 'tls';
  import { API } from './api';
  import { Connection, SendRequest } from './connection';
- import { base64Decode, base64Encode, generateNonce, hash, hmac, saltPassword, xor } from './utils/crypto';
  import { KafkaTSError } from './utils/error';
  import { memo } from './utils/memo';
 
- export type SASLOptions = {
- mechanism: 'PLAIN' | 'SCRAM-SHA-256';
- username: string;
- password: string;
+ export type SASLProvider = {
+ mechanism: string;
+ authenticate: (context: { sendRequest: SendRequest }) => Promise<void>;
  };
 
  type BrokerOptions = {
  clientId: string | null;
  options: TcpSocketConnectOpts;
- sasl: SASLOptions | null;
+ sasl: SASLProvider | null;
  ssl: TLSSocketOptions | null;
  };
 
@@ -69,78 +67,6 @@ export class Broker {
  }
 
  private async saslAuthenticate() {
- if (!this.options.sasl) {
- return;
- }
-
- const { mechanism } = this.options.sasl;
- const authenticate = { PLAIN: plainProvider, 'SCRAM-SHA-256': scramSha256Provider }[mechanism as string];
- if (!authenticate) {
- throw new KafkaTSError(`SASL mechanism ${mechanism} is not supported`);
- }
-
- await authenticate({
- ...this.options.sasl,
- sendRequest: this.sendRequest.bind(this),
- });
+ await this.options.sasl?.authenticate({ sendRequest: this.sendRequest });
  }
  }
-
- const plainProvider = async ({
- username,
- password,
- sendRequest,
- }: {
- username: string;
- password: string;
- sendRequest: SendRequest;
- }) => {
- const authBytes = [null, username, password].join('\u0000');
- await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
- };
-
- const scramSha256Provider = async ({
- username,
- password,
- sendRequest,
- }: {
- username: string;
- password: string;
- sendRequest: SendRequest;
- }) => {
- const nonce = generateNonce();
- const firstMessage = `n=${username},r=${nonce}`;
-
- const { authBytes } = await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(`n,,${firstMessage}`) });
- if (!authBytes) {
- throw new KafkaTSError('No auth response');
- }
-
- const response = Object.fromEntries(
- authBytes
- .toString()
- .split(',')
- .map((pair) => pair.split('=')),
- ) as { r: string; s: string; i: string };
-
- const rnonce = response.r;
- if (!rnonce.startsWith(nonce)) {
- throw new KafkaTSError('Invalid nonce');
- }
- const iterations = parseInt(response.i);
- const salt = base64Decode(response.s);
-
- const saltedPassword = await saltPassword(password, salt, iterations, 32, 'sha256');
- const clientKey = hmac(saltedPassword, 'Client Key');
- const clientKeyHash = hash(clientKey);
-
- let finalMessage = `c=${base64Encode('n,,')},r=${rnonce}`;
-
- const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
- const clientSignature = hmac(clientKeyHash, fullMessage);
- const clientProof = base64Encode(xor(clientKey, clientSignature));
-
- finalMessage += `,p=${clientProof}`;
-
- await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
- };
package/src/client.ts CHANGED
@@ -1,6 +1,6 @@
  import { TcpSocketConnectOpts } from 'net';
  import { TLSSocketOptions } from 'tls';
- import { SASLOptions } from './broker';
+ import { SASLProvider } from './broker';
  import { Cluster } from './cluster';
  import { Consumer, ConsumerOptions } from './consumer/consumer';
  import { Producer, ProducerOptions } from './producer/producer';
@@ -8,7 +8,7 @@ import { Producer, ProducerOptions } from './producer/producer';
  type ClientOptions = {
  clientId?: string | null;
  bootstrapServers: TcpSocketConnectOpts[];
- sasl?: SASLOptions | null;
+ sasl?: SASLProvider | null;
  ssl?: TLSSocketOptions | null;
  };
 
@@ -3,6 +3,7 @@ import { readFileSync } from 'fs';
  import { afterAll, beforeAll, describe, expect, it } from 'vitest';
  import { API } from './api';
  import { KEY_TYPE } from './api/find-coordinator';
+ import { saslPlain } from './auth';
  import { createKafkaClient } from './client';
  import { Cluster } from './cluster';
  import { KafkaTSApiError } from './utils/error';
@@ -10,7 +11,7 @@ import { KafkaTSApiError } from './utils/error';
  export const kafka = createKafkaClient({
  clientId: 'kafka-ts',
  bootstrapServers: [{ host: 'localhost', port: 9092 }],
- sasl: { mechanism: 'PLAIN', username: 'admin', password: 'admin' },
+ sasl: saslPlain({ username: 'admin', password: 'admin' }),
  ssl: { ca: readFileSync('./certs/ca.crt').toString() },
  });
 
@@ -148,12 +149,12 @@ describe.sequential('Request handler', () => {
  attributes: 0,
  offsetDelta: 0,
  timestampDelta: 0n,
- key: 'key',
- value: 'value',
+ key: Buffer.from('key'),
+ value: Buffer.from('value'),
  headers: [
  {
- key: 'header-key',
- value: 'header-value',
+ key: Buffer.from('header-key'),
+ value: Buffer.from('header-value'),
  },
  ],
  },
package/src/cluster.ts CHANGED
@@ -1,14 +1,14 @@
  import { TcpSocketConnectOpts } from 'net';
  import { TLSSocketOptions } from 'tls';
  import { API } from './api';
- import { Broker, SASLOptions } from './broker';
+ import { Broker, SASLProvider } from './broker';
  import { SendRequest } from './connection';
  import { ConnectionError, KafkaTSError } from './utils/error';
 
  type ClusterOptions = {
  clientId: string | null;
  bootstrapServers: TcpSocketConnectOpts[];
- sasl: SASLOptions | null;
+ sasl: SASLProvider | null;
  ssl: TLSSocketOptions | null;
  };
 
@@ -9,7 +9,7 @@ import { ConnectionError, KafkaTSApiError } from '../utils/error';
  import { defaultRetrier, Retrier } from '../utils/retrier';
  import { ConsumerGroup } from './consumer-group';
  import { ConsumerMetadata } from './consumer-metadata';
- import { FetchManager, Granularity } from './fetch-manager';
+ import { FetchManager, BatchGranularity } from './fetch-manager';
  import { OffsetManager } from './offset-manager';
 
  export type ConsumerOptions = {
@@ -27,7 +27,7 @@ export type ConsumerOptions = {
  allowTopicAutoCreation?: boolean;
  fromBeginning?: boolean;
  retrier?: Retrier;
- granularity?: Granularity;
+ batchGranularity?: BatchGranularity;
  concurrency?: number;
  } & ({ onBatch: (messages: Required<Message>[]) => unknown } | { onMessage: (message: Required<Message>) => unknown });
 
@@ -52,13 +52,13 @@ export class Consumer {
  rebalanceTimeoutMs: options.rebalanceTimeoutMs ?? 60_000,
  maxWaitMs: options.maxWaitMs ?? 5000,
  minBytes: options.minBytes ?? 1,
- maxBytes: options.maxBytes ?? 1_000_000,
- partitionMaxBytes: options.partitionMaxBytes ?? 1_000_000,
+ maxBytes: options.maxBytes ?? 1_048_576,
+ partitionMaxBytes: options.partitionMaxBytes ?? 1_048_576,
  isolationLevel: options.isolationLevel ?? IsolationLevel.READ_UNCOMMITTED,
  allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
  fromBeginning: options.fromBeginning ?? false,
  retrier: options.retrier ?? defaultRetrier,
- granularity: options.granularity ?? 'broker',
+ batchGranularity: options.batchGranularity ?? 'partition',
  concurrency: options.concurrency ?? 1,
  };
 
@@ -118,7 +118,7 @@ export class Consumer {
  }
 
  private startFetchManager = async () => {
- const { granularity, concurrency } = this.options;
+ const { batchGranularity, concurrency } = this.options;
 
  while (!this.stopHook) {
  const nodeAssignments = Object.entries(
@@ -137,7 +137,7 @@ export class Consumer {
  metadata: this.metadata,
  consumerGroup: this.consumerGroup,
  nodeAssignments,
- granularity,
+ batchGranularity,
  concurrency: numProcessors,
  });
 
@@ -11,7 +11,7 @@ import { Processor } from './processor';
 
  const trace = createTracer('FetchManager');
 
- export type Granularity = 'partition' | 'topic' | 'broker';
+ export type BatchGranularity = 'partition' | 'topic' | 'broker';
 
  type FetchManagerOptions = {
  fetch: (nodeId: number, assignment: Assignment) => Promise<ReturnType<(typeof API.FETCH)['response']>>;
@@ -19,7 +19,7 @@ type FetchManagerOptions = {
  metadata: Metadata;
  consumerGroup?: ConsumerGroup;
  nodeAssignments: { nodeId: number; assignment: Assignment }[];
- granularity: Granularity;
+ batchGranularity: BatchGranularity;
  concurrency: number;
  };
 
@@ -110,9 +110,9 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
  }
 
  private async onResponse(fetcherId: number, response: ReturnType<(typeof API.FETCH)['response']>) {
- const { metadata, granularity } = this.options;
+ const { metadata, batchGranularity } = this.options;
 
- const batches = fetchResponseToBatches(response, granularity, metadata);
+ const batches = fetchResponseToBatches(response, batchGranularity, metadata);
  if (batches.length) {
  this.queue.push(...batches);
  this.queue.push({ kind: 'checkpoint', fetcherId });
@@ -138,7 +138,7 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
 
  const fetchResponseToBatches = (
  batch: ReturnType<typeof API.FETCH.response>,
- granularity: Granularity,
+ batchGranularity: BatchGranularity,
  metadata: Metadata,
  ): Batch[] => {
  const brokerTopics = batch.responses.map(({ topicId, partitions }) =>
@@ -159,7 +159,7 @@ const fetchResponseToBatches = (
  ),
  );
 
- switch (granularity) {
+ switch (batchGranularity) {
  case 'broker':
  const messages = brokerTopics.flatMap((topicPartition) =>
  topicPartition.flatMap((partitionMessages) => partitionMessages),
@@ -174,6 +174,6 @@ const fetchResponseToBatches = (
  topicPartition.map((partitionMessages) => partitionMessages),
  );
  default:
- throw new KafkaTSError(`Unhandled batch granularity: ${granularity}`);
+ throw new KafkaTSError(`Unhandled batch granularity: ${batchGranularity}`);
  }
  };
@@ -0,0 +1,27 @@
+ import { Metadata } from '../metadata';
+ import { Message } from '../types';
+ import { murmur2, toPositive } from '../utils/murmur2';
+
+ export type Partition = (message: Message) => number;
+ export type Partitioner = (context: { metadata: Metadata }) => Partition;
+
+ export const defaultPartitioner: Partitioner = ({ metadata }) => {
+ const topicCounterMap: Record<string, number> = {};
+
+ const getNextValue = (topic: string) => {
+ topicCounterMap[topic] ??= 0;
+ return topicCounterMap[topic]++;
+ };
+
+ return ({ topic, partition, key }: Message) => {
+ if (partition !== null && partition !== undefined) {
+ return partition;
+ }
+ const partitions = metadata.getTopicPartitions()[topic];
+ const numPartitions = partitions.length;
+ if (key) {
+ return toPositive(murmur2(key)) % numPartitions;
+ }
+ return toPositive(getNextValue(topic)) % numPartitions;
+ };
+ };
package/src/index.ts CHANGED
@@ -1,4 +1,7 @@
- export * from './utils/error';
- export * from './client';
  export * from './api';
+ export * from './auth';
+ export { SASLProvider } from './broker';
+ export * from './client';
+ export * from './distributors/partitioner';
  export * from './types';
+ export * from './utils/error';
@@ -1,6 +1,7 @@
  import { API, API_ERROR } from '../api';
  import { Cluster } from '../cluster';
  import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
+ import { defaultPartitioner, Partition, Partitioner } from '../distributors/partitioner';
  import { Metadata } from '../metadata';
  import { Message } from '../types';
  import { delay } from '../utils/delay';
@@ -9,6 +10,7 @@ import { memo } from '../utils/memo';
 
  export type ProducerOptions = {
  allowTopicAutoCreation?: boolean;
+ partitioner?: Partitioner;
  };
 
  export class Producer {
@@ -17,6 +19,7 @@ export class Producer {
  private producerId = 0n;
  private producerEpoch = 0;
  private sequences: Record<string, Record<number, number>> = {};
+ private partition: Partition;
 
  constructor(
  private cluster: Cluster,
@@ -25,8 +28,10 @@ export class Producer {
  this.options = {
  ...options,
  allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
+ partitioner: options.partitioner ?? defaultPartitioner,
  };
  this.metadata = new Metadata({ cluster });
+ this.partition = this.options.partitioner({ metadata: this.metadata });
  }
 
  public async send(messages: Message[]) {
@@ -39,7 +44,7 @@ export class Producer {
  await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
 
  const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
- messages,
+ messages.map(message => ({ ...message, partition: this.partition(message) })),
  this.metadata.getTopicPartitionLeaderIds(),
  );
 
@@ -83,8 +88,8 @@ export class Producer {
  key: message.key,
  value: message.value,
  headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
- key,
- value,
+ key: Buffer.from(key),
+ value: Buffer.from(value),
  })),
  })),
  };
package/src/types.ts CHANGED
@@ -3,8 +3,8 @@ export type Message = {
  partition: number;
  offset?: bigint;
  timestamp?: bigint;
- key: string | null;
- value: string | null;
+ key: Buffer | null;
+ value: Buffer | null;
  headers?: Record<string, string>;
  };
 
@@ -9,6 +9,7 @@ export const saltPassword = (password: string, salt: string, iterations: number,
 
  export const base64Encode = (input: Buffer | string) => Buffer.from(input).toString('base64');
  export const base64Decode = (input: string) => Buffer.from(input, 'base64').toString();
- export const hash = (data: Buffer) => createHash('sha256').update(data).digest();
- export const hmac = (key: Buffer, data: Buffer | string) => createHmac('sha256', key).update(data).digest();
+ export const hash = (data: Buffer, digest: string) => createHash(digest).update(data).digest();
+ export const hmac = (key: Buffer, data: Buffer | string, digest: string) =>
+ createHmac(digest, key).update(data).digest();
  export const xor = (a: Buffer, b: Buffer) => Buffer.from(a.map((byte, i) => byte ^ b[i]));
@@ -97,13 +97,13 @@ export class Decoder {
  return value;
  }
 
- public readVarIntString() {
+ public readVarIntBuffer() {
  const length = this.readVarInt();
  if (length < 0) {
  return null;
  }
 
- const value = this.buffer.toString('utf-8', this.offset, this.offset + length);
+ const value = this.buffer.subarray(this.offset, this.offset + length);
  this.offset += length;
  return value;
  }
@@ -91,11 +91,11 @@ export class Encoder {
  return this.writeUVarInt(byteLength + 1).write(buffer);
  }
 
- public writeVarIntString(value: string | null) {
- if (value === null) {
+ public writeVarIntBuffer(buffer: Buffer | null) {
+ if (buffer === null) {
  return this.writeVarInt(-1);
  }
- return this.writeVarInt(Buffer.byteLength(value, 'utf-8')).write(Buffer.from(value, 'utf-8'));
+ return this.writeVarInt(buffer.byteLength).write(buffer);
  }
 
  public writeUUID(value: string | null) {
@@ -0,0 +1,44 @@
+ /* https://github.com/apache/kafka/blob/0.10.2/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L364 */
+
+ export const murmur2 = (data: Buffer): number => {
+ const length = data.length;
+ const seed = 0x9747b28c;
+
+ const m = 0x5bd1e995;
+ const r = 24;
+
+ let h = seed ^ length;
+ let length4 = Math.floor(length / 4);
+
+ for (let i = 0; i < length4; i++) {
+ const i4 = i * 4;
+ let k =
+ (data[i4 + 0] & 0xff) +
+ ((data[i4 + 1] & 0xff) << 8) +
+ ((data[i4 + 2] & 0xff) << 16) +
+ ((data[i4 + 3] & 0xff) << 24);
+ k *= m;
+ k ^= k >> r;
+ k *= m;
+ h *= m;
+ h ^= k;
+ }
+
+ switch (length % 4) {
+ case 3:
+ h = h ^ ((data[(length & ~3) + 2] & 0xff) << 16);
+ case 2:
+ h = h ^ ((data[(length & ~3) + 1] & 0xff) << 8);
+ case 1:
+ h = h ^ (data[length & ~3] & 0xff);
+ h *= m;
+ }
+
+ h ^= h >> 13;
+ h *= m;
+ h ^= h >> 15;
+
+ return h;
+ };
+
+ export const toPositive = (input: number) => input & 0x7fffffff;