kafka-ts 0.0.2-beta → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (195)
  1. package/.prettierrc +3 -2
  2. package/README.md +109 -39
  3. package/dist/api/api-versions.d.ts +9 -0
  4. package/dist/api/api-versions.js +24 -0
  5. package/dist/api/create-topics.d.ts +38 -0
  6. package/dist/api/create-topics.js +53 -0
  7. package/dist/api/delete-topics.d.ts +18 -0
  8. package/dist/api/delete-topics.js +33 -0
  9. package/dist/api/fetch.d.ts +84 -0
  10. package/dist/api/fetch.js +142 -0
  11. package/dist/api/find-coordinator.d.ts +21 -0
  12. package/dist/api/find-coordinator.js +39 -0
  13. package/dist/api/heartbeat.d.ts +11 -0
  14. package/dist/api/heartbeat.js +27 -0
  15. package/dist/api/index.d.ts +578 -0
  16. package/dist/api/index.js +165 -0
  17. package/dist/api/init-producer-id.d.ts +13 -0
  18. package/dist/api/init-producer-id.js +29 -0
  19. package/dist/api/join-group.d.ts +34 -0
  20. package/dist/api/join-group.js +51 -0
  21. package/dist/api/leave-group.d.ts +19 -0
  22. package/dist/api/leave-group.js +39 -0
  23. package/dist/api/list-offsets.d.ts +29 -0
  24. package/dist/api/list-offsets.js +48 -0
  25. package/dist/api/metadata.d.ts +40 -0
  26. package/dist/api/metadata.js +58 -0
  27. package/dist/api/offset-commit.d.ts +28 -0
  28. package/dist/api/offset-commit.js +48 -0
  29. package/dist/api/offset-fetch.d.ts +33 -0
  30. package/dist/api/offset-fetch.js +57 -0
  31. package/dist/api/produce.d.ts +54 -0
  32. package/dist/api/produce.js +126 -0
  33. package/dist/api/sasl-authenticate.d.ts +11 -0
  34. package/dist/api/sasl-authenticate.js +23 -0
  35. package/dist/api/sasl-handshake.d.ts +6 -0
  36. package/dist/api/sasl-handshake.js +19 -0
  37. package/dist/api/sync-group.d.ts +24 -0
  38. package/dist/api/sync-group.js +36 -0
  39. package/dist/auth/index.d.ts +2 -0
  40. package/dist/auth/index.js +8 -0
  41. package/dist/auth/plain.d.ts +5 -0
  42. package/dist/auth/plain.js +12 -0
  43. package/dist/auth/scram.d.ts +9 -0
  44. package/dist/auth/scram.js +40 -0
  45. package/dist/broker.d.ts +30 -0
  46. package/dist/broker.js +55 -0
  47. package/dist/client.d.ts +23 -0
  48. package/dist/client.js +36 -0
  49. package/dist/cluster.d.ts +27 -0
  50. package/dist/cluster.js +70 -0
  51. package/dist/cluster.test.d.ts +1 -0
  52. package/dist/cluster.test.js +345 -0
  53. package/dist/codecs/gzip.d.ts +2 -0
  54. package/dist/codecs/gzip.js +8 -0
  55. package/dist/codecs/index.d.ts +2 -0
  56. package/dist/codecs/index.js +17 -0
  57. package/dist/codecs/none.d.ts +2 -0
  58. package/dist/codecs/none.js +7 -0
  59. package/dist/codecs/types.d.ts +5 -0
  60. package/dist/codecs/types.js +2 -0
  61. package/dist/connection.d.ts +26 -0
  62. package/dist/connection.js +175 -0
  63. package/dist/consumer/consumer-group.d.ts +41 -0
  64. package/dist/consumer/consumer-group.js +217 -0
  65. package/dist/consumer/consumer-metadata.d.ts +7 -0
  66. package/dist/consumer/consumer-metadata.js +14 -0
  67. package/dist/consumer/consumer.d.ts +44 -0
  68. package/dist/consumer/consumer.js +225 -0
  69. package/dist/consumer/fetch-manager.d.ts +33 -0
  70. package/dist/consumer/fetch-manager.js +140 -0
  71. package/dist/consumer/fetcher.d.ts +25 -0
  72. package/dist/consumer/fetcher.js +64 -0
  73. package/dist/consumer/offset-manager.d.ts +22 -0
  74. package/dist/consumer/offset-manager.js +66 -0
  75. package/dist/consumer/processor.d.ts +19 -0
  76. package/dist/consumer/processor.js +59 -0
  77. package/dist/distributors/assignments-to-replicas.d.ts +16 -0
  78. package/dist/distributors/assignments-to-replicas.js +59 -0
  79. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  80. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  81. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  82. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  83. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  84. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  85. package/dist/distributors/partitioner.d.ts +7 -0
  86. package/dist/distributors/partitioner.js +23 -0
  87. package/dist/index.d.ts +9 -0
  88. package/dist/index.js +26 -0
  89. package/dist/metadata.d.ts +24 -0
  90. package/dist/metadata.js +106 -0
  91. package/dist/producer/producer.d.ts +24 -0
  92. package/dist/producer/producer.js +131 -0
  93. package/dist/types.d.ts +11 -0
  94. package/dist/types.js +2 -0
  95. package/dist/utils/api.d.ts +9 -0
  96. package/dist/utils/api.js +5 -0
  97. package/dist/utils/crypto.d.ts +8 -0
  98. package/dist/utils/crypto.js +18 -0
  99. package/dist/utils/decoder.d.ts +30 -0
  100. package/dist/utils/decoder.js +152 -0
  101. package/dist/utils/delay.d.ts +1 -0
  102. package/dist/utils/delay.js +5 -0
  103. package/dist/utils/encoder.d.ts +28 -0
  104. package/dist/utils/encoder.js +125 -0
  105. package/dist/utils/error.d.ts +11 -0
  106. package/dist/utils/error.js +27 -0
  107. package/dist/utils/logger.d.ts +9 -0
  108. package/dist/utils/logger.js +32 -0
  109. package/dist/utils/memo.d.ts +1 -0
  110. package/dist/utils/memo.js +16 -0
  111. package/dist/utils/murmur2.d.ts +3 -0
  112. package/dist/utils/murmur2.js +40 -0
  113. package/dist/utils/retrier.d.ts +10 -0
  114. package/dist/utils/retrier.js +22 -0
  115. package/dist/utils/tracer.d.ts +5 -0
  116. package/dist/utils/tracer.js +39 -0
  117. package/package.json +30 -19
  118. package/src/__snapshots__/{request-handler.test.ts.snap → cluster.test.ts.snap} +329 -26
  119. package/src/api/api-versions.ts +2 -2
  120. package/src/api/create-topics.ts +2 -2
  121. package/src/api/delete-topics.ts +2 -2
  122. package/src/api/fetch.ts +86 -31
  123. package/src/api/find-coordinator.ts +2 -2
  124. package/src/api/heartbeat.ts +2 -2
  125. package/src/api/index.ts +21 -19
  126. package/src/api/init-producer-id.ts +2 -2
  127. package/src/api/join-group.ts +3 -3
  128. package/src/api/leave-group.ts +2 -2
  129. package/src/api/list-offsets.ts +3 -3
  130. package/src/api/metadata.ts +3 -3
  131. package/src/api/offset-commit.ts +2 -2
  132. package/src/api/offset-fetch.ts +2 -2
  133. package/src/api/produce.ts +17 -20
  134. package/src/api/sasl-authenticate.ts +2 -2
  135. package/src/api/sasl-handshake.ts +2 -2
  136. package/src/api/sync-group.ts +2 -2
  137. package/src/auth/index.ts +2 -0
  138. package/src/auth/plain.ts +10 -0
  139. package/src/auth/scram.ts +52 -0
  140. package/src/broker.ts +12 -14
  141. package/src/client.ts +7 -7
  142. package/src/cluster.test.ts +78 -74
  143. package/src/cluster.ts +43 -45
  144. package/src/codecs/gzip.ts +9 -0
  145. package/src/codecs/index.ts +16 -0
  146. package/src/codecs/none.ts +6 -0
  147. package/src/codecs/types.ts +4 -0
  148. package/src/connection.ts +49 -33
  149. package/src/consumer/consumer-group.ts +57 -35
  150. package/src/consumer/consumer-metadata.ts +2 -2
  151. package/src/consumer/consumer.ts +115 -92
  152. package/src/consumer/fetch-manager.ts +169 -0
  153. package/src/consumer/fetcher.ts +64 -0
  154. package/src/consumer/offset-manager.ts +24 -13
  155. package/src/consumer/processor.ts +53 -0
  156. package/src/distributors/assignments-to-replicas.test.ts +7 -7
  157. package/src/distributors/assignments-to-replicas.ts +2 -4
  158. package/src/distributors/messages-to-topic-partition-leaders.test.ts +6 -6
  159. package/src/distributors/partitioner.ts +27 -0
  160. package/src/index.ts +9 -3
  161. package/src/metadata.ts +8 -4
  162. package/src/producer/producer.ts +30 -20
  163. package/src/types.ts +5 -3
  164. package/src/utils/api.ts +5 -5
  165. package/src/utils/crypto.ts +15 -0
  166. package/src/utils/decoder.ts +14 -8
  167. package/src/utils/encoder.ts +34 -27
  168. package/src/utils/error.ts +3 -3
  169. package/src/utils/logger.ts +37 -0
  170. package/src/utils/murmur2.ts +44 -0
  171. package/src/utils/retrier.ts +1 -1
  172. package/src/utils/tracer.ts +41 -20
  173. package/tsconfig.json +16 -16
  174. package/.github/workflows/release.yml +0 -17
  175. package/certs/ca.crt +0 -29
  176. package/certs/ca.key +0 -52
  177. package/certs/ca.srl +0 -1
  178. package/certs/kafka.crt +0 -29
  179. package/certs/kafka.csr +0 -26
  180. package/certs/kafka.key +0 -52
  181. package/certs/kafka.keystore.jks +0 -0
  182. package/certs/kafka.truststore.jks +0 -0
  183. package/docker-compose.yml +0 -104
  184. package/examples/package-lock.json +0 -31
  185. package/examples/package.json +0 -14
  186. package/examples/src/client.ts +0 -9
  187. package/examples/src/consumer.ts +0 -17
  188. package/examples/src/create-topic.ts +0 -37
  189. package/examples/src/producer.ts +0 -24
  190. package/examples/src/replicator.ts +0 -25
  191. package/examples/src/utils/json.ts +0 -1
  192. package/examples/tsconfig.json +0 -7
  193. package/log4j.properties +0 -95
  194. package/scripts/generate-certs.sh +0 -24
  195. package/src/utils/debug.ts +0 -9
@@ -0,0 +1,169 @@
1
+ import { FetchResponse } from '../api/fetch';
2
+ import { Assignment } from '../api/sync-group';
3
+ import { Metadata } from '../metadata';
4
+ import { Batch, Message } from '../types';
5
+ import { KafkaTSError } from '../utils/error';
6
+ import { createTracer } from '../utils/tracer';
7
+ import { ConsumerGroup } from './consumer-group';
8
+ import { Fetcher } from './fetcher';
9
+ import { Processor } from './processor';
10
+
11
+ const trace = createTracer('FetchManager');
12
+
13
+ export type BatchGranularity = 'partition' | 'topic' | 'broker';
14
+
15
+ type FetchManagerOptions = {
16
+ fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
17
+ process: (batch: Batch) => Promise<void>;
18
+ metadata: Metadata;
19
+ consumerGroup?: ConsumerGroup;
20
+ nodeAssignments: { nodeId: number; assignment: Assignment }[];
21
+ batchGranularity: BatchGranularity;
22
+ concurrency: number;
23
+ };
24
+
25
+ type Checkpoint = { kind: 'checkpoint'; fetcherId: number };
26
+ type Entry = Batch | Checkpoint;
27
+
28
+ export class FetchManager {
29
+ private queue: Entry[] = [];
30
+ private isRunning = false;
31
+ private fetchers: Fetcher[];
32
+ private processors: Processor[];
33
+ private pollQueue: (() => void)[] = [];
34
+ private fetcherCallbacks: Record<number, () => void> = {};
35
+
36
+ constructor(private options: FetchManagerOptions) {
37
+ const { fetch, process, consumerGroup, nodeAssignments, concurrency } = this.options;
38
+
39
+ this.fetchers = nodeAssignments.map(
40
+ ({ nodeId, assignment }, index) =>
41
+ new Fetcher(index, {
42
+ nodeId,
43
+ assignment,
44
+ consumerGroup,
45
+ fetch,
46
+ onResponse: this.onResponse.bind(this),
47
+ }),
48
+ );
49
+ this.processors = Array.from({ length: concurrency }).map(
50
+ () => new Processor({ process, poll: this.poll.bind(this) }),
51
+ );
52
+ }
53
+
54
+ @trace(() => ({ root: true }))
55
+ public async start() {
56
+ this.queue = [];
57
+ this.isRunning = true;
58
+
59
+ try {
60
+ await Promise.all([
61
+ ...this.fetchers.map((fetcher) => fetcher.loop()),
62
+ ...this.processors.map((processor) => processor.loop()),
63
+ ]);
64
+ } finally {
65
+ await this.stop();
66
+ }
67
+ }
68
+
69
+ public async stop() {
70
+ this.isRunning = false;
71
+
72
+ const stopPromise = Promise.all([
73
+ ...this.fetchers.map((fetcher) => fetcher.stop()),
74
+ ...this.processors.map((processor) => processor.stop()),
75
+ ]);
76
+
77
+ this.pollQueue.forEach((resolve) => resolve());
78
+ this.pollQueue = [];
79
+
80
+ Object.values(this.fetcherCallbacks).forEach((callback) => callback());
81
+ this.fetcherCallbacks = {};
82
+
83
+ await stopPromise;
84
+ }
85
+
86
+ @trace()
87
+ public async poll(): Promise<Batch> {
88
+ if (!this.isRunning) {
89
+ return [];
90
+ }
91
+
92
+ const batch = this.queue.shift();
93
+ if (!batch) {
94
+ // wait until new data is available or fetch manager is requested to stop
95
+ await new Promise<void>((resolve) => {
96
+ this.pollQueue.push(resolve);
97
+ });
98
+ return this.poll();
99
+ }
100
+
101
+ if ('kind' in batch && batch.kind === 'checkpoint') {
102
+ this.fetcherCallbacks[batch.fetcherId]?.();
103
+ return this.poll();
104
+ }
105
+
106
+ this.pollQueue?.shift()?.();
107
+
108
+ return batch as Exclude<Entry, Checkpoint>;
109
+ }
110
+
111
+ @trace()
112
+ private async onResponse(fetcherId: number, response: FetchResponse) {
113
+ const { metadata, batchGranularity } = this.options;
114
+
115
+ const batches = fetchResponseToBatches(response, batchGranularity, metadata);
116
+ if (!batches.length) {
117
+ return;
118
+ }
119
+
120
+ // wait until all broker batches have been processed or fetch manager is requested to stop
121
+ await new Promise<void>((resolve) => {
122
+ this.fetcherCallbacks[fetcherId] = resolve;
123
+ this.queue.push(...batches, { kind: 'checkpoint', fetcherId });
124
+ this.pollQueue?.shift()?.();
125
+ });
126
+ }
127
+ }
128
+
129
+ const fetchResponseToBatches = (
130
+ batch: FetchResponse,
131
+ batchGranularity: BatchGranularity,
132
+ metadata: Metadata,
133
+ ): Batch[] => {
134
+ const brokerTopics = batch.responses.map(({ topicId, partitions }) =>
135
+ partitions.map(({ partitionIndex, records }) =>
136
+ records.flatMap(({ baseTimestamp, baseOffset, records }) =>
137
+ records.map(
138
+ (message): Required<Message> => ({
139
+ topic: metadata.getTopicNameById(topicId),
140
+ partition: partitionIndex,
141
+ key: message.key ?? null,
142
+ value: message.value ?? null,
143
+ headers: Object.fromEntries(message.headers.map(({ key, value }) => [key, value])),
144
+ timestamp: baseTimestamp + BigInt(message.timestampDelta),
145
+ offset: baseOffset + BigInt(message.offsetDelta),
146
+ }),
147
+ ),
148
+ ),
149
+ ),
150
+ );
151
+
152
+ switch (batchGranularity) {
153
+ case 'broker':
154
+ const messages = brokerTopics.flatMap((topicPartition) =>
155
+ topicPartition.flatMap((partitionMessages) => partitionMessages),
156
+ );
157
+ return messages.length ? [messages] : [];
158
+ case 'topic':
159
+ return brokerTopics
160
+ .map((topicPartition) => topicPartition.flatMap((partitionMessages) => partitionMessages))
161
+ .filter((messages) => messages.length);
162
+ case 'partition':
163
+ return brokerTopics
164
+ .flatMap((topicPartition) => topicPartition.map((partitionMessages) => partitionMessages))
165
+ .filter((messages) => messages.length);
166
+ default:
167
+ throw new KafkaTSError(`Unhandled batch granularity: ${batchGranularity}`);
168
+ }
169
+ };
@@ -0,0 +1,64 @@
1
+ import { EventEmitter } from 'stream';
2
+ import { FetchResponse } from '../api/fetch';
3
+ import { Assignment } from '../api/sync-group';
4
+ import { createTracer } from '../utils/tracer';
5
+ import { ConsumerGroup } from './consumer-group';
6
+
7
+ const trace = createTracer('Fetcher');
8
+
9
+ type FetcherOptions = {
10
+ nodeId: number;
11
+ assignment: Assignment;
12
+ consumerGroup?: ConsumerGroup;
13
+ fetch: (nodeId: number, assignment: Assignment) => Promise<FetchResponse>;
14
+ onResponse: (fetcherId: number, response: FetchResponse) => Promise<void>;
15
+ };
16
+
17
+ export class Fetcher extends EventEmitter<{ stopped: [] }> {
18
+ private isRunning = false;
19
+
20
+ constructor(
21
+ private fetcherId: number,
22
+ private options: FetcherOptions,
23
+ ) {
24
+ super();
25
+ }
26
+
27
+ public async loop() {
28
+ this.isRunning = true;
29
+
30
+ try {
31
+ while (this.isRunning) {
32
+ await this.step();
33
+ }
34
+ } finally {
35
+ this.isRunning = false;
36
+ this.emit('stopped');
37
+ }
38
+ }
39
+
40
+ @trace()
41
+ private async step() {
42
+ const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
43
+
44
+ const response = await fetch(nodeId, assignment);
45
+ if (!this.isRunning) {
46
+ return;
47
+ }
48
+ consumerGroup?.handleLastHeartbeat();
49
+ await onResponse(this.fetcherId, response);
50
+ consumerGroup?.handleLastHeartbeat();
51
+ }
52
+
53
+ public async stop() {
54
+ if (!this.isRunning) {
55
+ return;
56
+ }
57
+
58
+ const stopPromise = new Promise<void>((resolve) => {
59
+ this.once('stopped', resolve);
60
+ });
61
+ this.isRunning = false;
62
+ return stopPromise;
63
+ }
64
+ }
@@ -1,9 +1,12 @@
1
- import { API } from "../api";
2
- import { IsolationLevel } from "../api/fetch";
3
- import { Assignment } from "../api/sync-group";
4
- import { Cluster } from "../cluster";
5
- import { distributeMessagesToTopicPartitionLeaders } from "../distributors/messages-to-topic-partition-leaders";
6
- import { ConsumerMetadata } from "./consumer-metadata";
1
+ import { API } from '../api';
2
+ import { IsolationLevel } from '../api/fetch';
3
+ import { Assignment } from '../api/sync-group';
4
+ import { Cluster } from '../cluster';
5
+ import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
6
+ import { createTracer } from '../utils/tracer';
7
+ import { ConsumerMetadata } from './consumer-metadata';
8
+
9
+ const trace = createTracer('OffsetManager');
7
10
 
8
11
  type OffsetManagerOptions = {
9
12
  cluster: Cluster;
@@ -24,13 +27,18 @@ export class OffsetManager {
24
27
  public resolve(topic: string, partition: number, offset: bigint) {
25
28
  this.pendingOffsets[topic] ??= {};
26
29
  this.pendingOffsets[topic][partition] = offset;
27
-
28
- this.currentOffsets[topic] ??= {};
29
- this.currentOffsets[topic][partition] = offset;
30
30
  }
31
31
 
32
- public flush() {
33
- this.pendingOffsets = {};
32
+ public flush(topicPartitions: Record<string, Set<number>>) {
33
+ Object.entries(topicPartitions).forEach(([topic, partitions]) => {
34
+ this.currentOffsets[topic] ??= {};
35
+ partitions.forEach((partition) => {
36
+ if (this.pendingOffsets[topic]?.[partition]) {
37
+ this.currentOffsets[topic][partition] = this.pendingOffsets[topic][partition];
38
+ delete this.pendingOffsets[topic][partition];
39
+ }
40
+ });
41
+ });
34
42
  }
35
43
 
36
44
  public async fetchOffsets(options: { fromBeginning: boolean }) {
@@ -58,7 +66,6 @@ export class OffsetManager {
58
66
  }),
59
67
  ),
60
68
  );
61
- this.flush();
62
69
  }
63
70
 
64
71
  private async listOffsets({
@@ -83,11 +90,15 @@ export class OffsetManager {
83
90
  })),
84
91
  });
85
92
 
93
+ const topicPartitions: Record<string, Set<number>> = {};
86
94
  offsets.topics.forEach(({ name, partitions }) => {
95
+ topicPartitions[name] ??= new Set();
87
96
  partitions.forEach(({ partitionIndex, offset }) => {
97
+ topicPartitions[name].add(partitionIndex);
88
98
  this.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
89
99
  });
90
100
  });
91
- this.flush();
101
+
102
+ this.flush(topicPartitions);
92
103
  }
93
104
  }
@@ -0,0 +1,53 @@
1
+ import { EventEmitter } from 'stream';
2
+ import { Batch } from '../types';
3
+ import { createTracer } from '../utils/tracer';
4
+
5
+ const trace = createTracer('Processor');
6
+
7
+ type ProcessorOptions = {
8
+ poll: () => Promise<Batch>;
9
+ process: (batch: Batch) => Promise<void>;
10
+ };
11
+
12
+ export class Processor extends EventEmitter<{ stopped: [] }> {
13
+ private isRunning = false;
14
+
15
+ constructor(private options: ProcessorOptions) {
16
+ super();
17
+ }
18
+
19
+ public async loop() {
20
+ this.isRunning = true;
21
+
22
+ try {
23
+ while (this.isRunning) {
24
+ await this.step();
25
+ }
26
+ } finally {
27
+ this.isRunning = false;
28
+ this.emit('stopped');
29
+ }
30
+ }
31
+
32
+ @trace()
33
+ private async step() {
34
+ const { poll, process } = this.options;
35
+
36
+ const batch = await poll();
37
+ if (batch.length) {
38
+ await process(batch);
39
+ }
40
+ }
41
+
42
+ public async stop() {
43
+ if (!this.isRunning) {
44
+ return;
45
+ }
46
+
47
+ const stopPromise = new Promise<void>((resolve) => {
48
+ this.once('stopped', resolve);
49
+ });
50
+ this.isRunning = false;
51
+ return stopPromise;
52
+ }
53
+ }
@@ -1,9 +1,9 @@
1
- import { describe, expect, it } from "vitest";
2
- import { distributeAssignmentsToNodesBalanced, distributeAssignmentsToNodesOptimized } from "./assignments-to-replicas";
1
+ import { describe, expect, it } from 'vitest';
2
+ import { distributeAssignmentsToNodesBalanced, distributeAssignmentsToNodesOptimized } from './assignments-to-replicas';
3
3
 
4
- describe("Distribute assignments to replica ids", () => {
5
- describe("distributeAssignmentsToNodesBalanced", () => {
6
- it("smoke", () => {
4
+ describe('Distribute assignments to replica ids', () => {
5
+ describe('distributeAssignmentsToNodesBalanced', () => {
6
+ it('smoke', () => {
7
7
  const result = distributeAssignmentsToNodesBalanced({ topic: [0, 1] }, { topic: { 0: [0, 1], 1: [1, 2] } });
8
8
  expect(result).toMatchInlineSnapshot(`
9
9
  {
@@ -22,8 +22,8 @@ describe("Distribute assignments to replica ids", () => {
22
22
  });
23
23
  });
24
24
 
25
- describe("distributeAssignmentsToNodesOptimized", () => {
26
- it("smoke", () => {
25
+ describe('distributeAssignmentsToNodesOptimized', () => {
26
+ it('smoke', () => {
27
27
  const result = distributeAssignmentsToNodesOptimized(
28
28
  { topic: [0, 1] },
29
29
  { topic: { 0: [0, 1], 1: [1, 2] } },
@@ -1,6 +1,6 @@
1
1
  type Assignment = { [topicName: string]: number[] };
2
2
  type TopicPartitionReplicaIds = { [topicName: string]: { [partition: number]: number[] } };
3
- export type NodeAssignment = { [replicaId: number]: Assignment };
3
+ type NodeAssignment = { [replicaId: number]: Assignment };
4
4
 
5
5
  /** From replica ids pick the one with fewest assignments to balance the load across brokers */
6
6
  export const distributeAssignmentsToNodesBalanced = (
@@ -50,7 +50,7 @@ export const distributeAssignmentsToNodesOptimized = (
50
50
  }
51
51
 
52
52
  result[parseInt(replicaId)] = partitions.reduce((acc, partition) => {
53
- const [topicName, partitionId] = partition.split(":");
53
+ const [topicName, partitionId] = partition.split(':');
54
54
  acc[topicName] ??= [];
55
55
  acc[topicName].push(parseInt(partitionId));
56
56
  return acc;
@@ -81,5 +81,3 @@ const getPartitionsByReplica = (assignment: Assignment, topicPartitionReplicaIds
81
81
  }
82
82
  return Object.entries(partitionsByReplicaId);
83
83
  };
84
-
85
- export const distributeAssignmentsToNodes = distributeAssignmentsToNodesBalanced;
@@ -1,11 +1,11 @@
1
- import { describe, expect, it } from "vitest";
2
- import { distributeMessagesToTopicPartitionLeaders } from "./messages-to-topic-partition-leaders";
1
+ import { describe, expect, it } from 'vitest';
2
+ import { distributeMessagesToTopicPartitionLeaders } from './messages-to-topic-partition-leaders';
3
3
 
4
- describe("Distribute messages to partition leader ids", () => {
5
- describe("distributeMessagesToTopicPartitionLeaders", () => {
6
- it("snoke", () => {
4
+ describe('Distribute messages to partition leader ids', () => {
5
+ describe('distributeMessagesToTopicPartitionLeaders', () => {
6
+ it('snoke', () => {
7
7
  const result = distributeMessagesToTopicPartitionLeaders(
8
- [{ topic: "topic", partition: 0, key: null, value: null, offset: 0n, timestamp: 0n, headers: {} }],
8
+ [{ topic: 'topic', partition: 0, key: null, value: null, offset: 0n, timestamp: 0n, headers: {} }],
9
9
  { topic: { 0: 1 } },
10
10
  );
11
11
  expect(result).toMatchInlineSnapshot(`
@@ -0,0 +1,27 @@
1
+ import { Metadata } from '../metadata';
2
+ import { Message } from '../types';
3
+ import { murmur2, toPositive } from '../utils/murmur2';
4
+
5
+ export type Partition = (message: Message) => number;
6
+ export type Partitioner = (context: { metadata: Metadata }) => Partition;
7
+
8
+ export const defaultPartitioner: Partitioner = ({ metadata }) => {
9
+ const topicCounterMap: Record<string, number> = {};
10
+
11
+ const getNextValue = (topic: string) => {
12
+ topicCounterMap[topic] ??= 0;
13
+ return topicCounterMap[topic]++;
14
+ };
15
+
16
+ return ({ topic, partition, key }: Message) => {
17
+ if (partition !== null && partition !== undefined) {
18
+ return partition;
19
+ }
20
+ const partitions = metadata.getTopicPartitions()[topic];
21
+ const numPartitions = partitions.length;
22
+ if (key) {
23
+ return toPositive(murmur2(key)) % numPartitions;
24
+ }
25
+ return toPositive(getNextValue(topic)) % numPartitions;
26
+ };
27
+ };
package/src/index.ts CHANGED
@@ -1,3 +1,9 @@
1
- export * from "./client";
2
- export * from "./api";
3
- export * from "./types";
1
+ export * from './api';
2
+ export * from './auth';
3
+ export { SASLProvider } from './broker';
4
+ export * from './client';
5
+ export * from './distributors/partitioner';
6
+ export * from './types';
7
+ export * from './utils/error';
8
+ export * from './utils/logger';
9
+ export { Tracer, setTracer } from './utils/tracer';
package/src/metadata.ts CHANGED
@@ -1,7 +1,10 @@
1
- import { API, API_ERROR } from "./api";
2
- import { Cluster } from "./cluster";
3
- import { delay } from "./utils/delay";
4
- import { KafkaTSApiError } from "./utils/error";
1
+ import { API, API_ERROR } from './api';
2
+ import { Cluster } from './cluster';
3
+ import { delay } from './utils/delay';
4
+ import { KafkaTSApiError } from './utils/error';
5
+ import { createTracer } from './utils/tracer';
6
+
7
+ const trace = createTracer('Metadata');
5
8
 
6
9
  type MetadataOptions = {
7
10
  cluster: Cluster;
@@ -36,6 +39,7 @@ export class Metadata {
36
39
  return this.topicNameById[id];
37
40
  }
38
41
 
42
+ @trace()
39
43
  public async fetchMetadataIfNecessary({
40
44
  topics,
41
45
  allowTopicAutoCreation,
@@ -1,14 +1,19 @@
1
- import { API, API_ERROR } from "../api";
2
- import { Cluster } from "../cluster";
3
- import { distributeMessagesToTopicPartitionLeaders } from "../distributors/messages-to-topic-partition-leaders";
4
- import { Metadata } from "../metadata";
5
- import { Message } from "../types";
6
- import { delay } from "../utils/delay";
7
- import { KafkaTSApiError } from "../utils/error";
8
- import { memo } from "../utils/memo";
1
+ import { API, API_ERROR } from '../api';
2
+ import { Cluster } from '../cluster';
3
+ import { distributeMessagesToTopicPartitionLeaders } from '../distributors/messages-to-topic-partition-leaders';
4
+ import { defaultPartitioner, Partition, Partitioner } from '../distributors/partitioner';
5
+ import { Metadata } from '../metadata';
6
+ import { Message } from '../types';
7
+ import { delay } from '../utils/delay';
8
+ import { KafkaTSApiError } from '../utils/error';
9
+ import { memo } from '../utils/memo';
10
+ import { createTracer } from '../utils/tracer';
11
+
12
+ const trace = createTracer('Producer');
9
13
 
10
14
  export type ProducerOptions = {
11
15
  allowTopicAutoCreation?: boolean;
16
+ partitioner?: Partitioner;
12
17
  };
13
18
 
14
19
  export class Producer {
@@ -17,6 +22,7 @@ export class Producer {
17
22
  private producerId = 0n;
18
23
  private producerEpoch = 0;
19
24
  private sequences: Record<string, Record<number, number>> = {};
25
+ private partition: Partition;
20
26
 
21
27
  constructor(
22
28
  private cluster: Cluster,
@@ -25,11 +31,14 @@ export class Producer {
25
31
  this.options = {
26
32
  ...options,
27
33
  allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
34
+ partitioner: options.partitioner ?? defaultPartitioner,
28
35
  };
29
36
  this.metadata = new Metadata({ cluster });
37
+ this.partition = this.options.partitioner({ metadata: this.metadata });
30
38
  }
31
39
 
32
- public async send(messages: Message[]) {
40
+ @trace(() => ({ root: true }))
41
+ public async send(messages: Message[], { acks = -1 }: { acks?: -1 | 1 } = {}) {
33
42
  await this.ensureConnected();
34
43
 
35
44
  const { allowTopicAutoCreation } = this.options;
@@ -39,19 +48,20 @@ export class Producer {
39
48
  await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
40
49
 
41
50
  const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
42
- messages,
51
+ messages.map((message) => ({ ...message, partition: this.partition(message) })),
43
52
  this.metadata.getTopicPartitionLeaderIds(),
44
53
  );
45
54
 
46
55
  await Promise.all(
47
- Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
48
- await this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
56
+ Object.entries(nodeTopicPartitionMessages).map(([nodeId, topicPartitionMessages]) =>
57
+ this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
49
58
  transactionalId: null,
50
- acks: 1,
59
+ acks,
51
60
  timeoutMs: 5000,
52
61
  topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
53
62
  name: topic,
54
63
  partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
64
+ const partitionIndex = parseInt(partition);
55
65
  let baseTimestamp: bigint | undefined;
56
66
  let maxTimestamp: bigint | undefined;
57
67
 
@@ -64,9 +74,9 @@ export class Producer {
64
74
  }
65
75
  });
66
76
 
67
- const baseSequence = this.nextSequence(topic, parseInt(partition), messages.length);
77
+ const baseSequence = this.nextSequence(topic, partitionIndex, messages.length);
68
78
  return {
69
- index: parseInt(partition),
79
+ index: partitionIndex,
70
80
  baseOffset: 0n,
71
81
  partitionLeaderEpoch: -1,
72
82
  attributes: 0,
@@ -80,18 +90,18 @@ export class Producer {
80
90
  attributes: 0,
81
91
  timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
82
92
  offsetDelta: index,
83
- key: message.key,
93
+ key: message.key ?? null,
84
94
  value: message.value,
85
95
  headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
86
- key,
87
- value,
96
+ key: Buffer.from(key),
97
+ value: Buffer.from(value),
88
98
  })),
89
99
  })),
90
100
  };
91
101
  }),
92
102
  })),
93
- });
94
- }),
103
+ }),
104
+ ),
95
105
  );
96
106
  }
97
107
 
package/src/types.ts CHANGED
@@ -1,9 +1,11 @@
1
1
  export type Message = {
2
2
  topic: string;
3
- partition: number;
3
+ partition?: number;
4
4
  offset?: bigint;
5
5
  timestamp?: bigint;
6
- key: string | null;
7
- value: string | null;
6
+ key?: Buffer | null;
7
+ value: Buffer | null;
8
8
  headers?: Record<string, string>;
9
9
  };
10
+
11
+ export type Batch = Required<Message>[];
package/src/utils/api.ts CHANGED
@@ -1,11 +1,11 @@
1
- import { Decoder } from "./decoder";
2
- import { Encoder } from "./encoder";
1
+ import { Decoder } from './decoder';
2
+ import { Encoder } from './encoder';
3
3
 
4
4
  export type Api<Request, Response> = {
5
5
  apiKey: number;
6
6
  apiVersion: number;
7
7
  request: (encoder: Encoder, body: Request) => Encoder;
8
- response: (buffer: Decoder) => Response;
9
- }
8
+ response: (buffer: Decoder) => Promise<Response> | Response;
9
+ };
10
10
 
11
- export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
11
+ export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
@@ -0,0 +1,15 @@
1
+ import { createHash, createHmac, pbkdf2, randomBytes } from 'crypto';
2
+
3
+ export const generateNonce = () => randomBytes(16).toString('base64').replace(/[\/=]/g, '');
4
+
5
+ export const saltPassword = (password: string, salt: string, iterations: number, keyLength: number, digest: string) =>
6
+ new Promise<Buffer>((resolve, reject) =>
7
+ pbkdf2(password, salt, iterations, keyLength, digest, (err, key) => (err ? reject(err) : resolve(key))),
8
+ );
9
+
10
+ export const base64Encode = (input: Buffer | string) => Buffer.from(input).toString('base64');
11
+ export const base64Decode = (input: string) => Buffer.from(input, 'base64').toString();
12
+ export const hash = (data: Buffer, digest: string) => createHash(digest).update(data).digest();
13
+ export const hmac = (key: Buffer, data: Buffer | string, digest: string) =>
14
+ createHmac(digest, key).update(data).digest();
15
+ export const xor = (a: Buffer, b: Buffer) => Buffer.from(a.map((byte, i) => byte ^ b[i]));