kafka-ts 1.2.1 → 1.3.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -148,6 +148,7 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial features
  - PLAIN: `saslPlain({ username, password })`
  - SCRAM-SHA-256: `saslScramSha256({ username, password })`
  - SCRAM-SHA-512: `saslScramSha512({ username, password })`
+ - OAUTHBEARER: `oAuthBearer(oAuthAuthenticator({ endpoint, clientId, clientSecret }))`

  Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.

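For context, wiring the new mechanism into a client could look like the sketch below. Only `oAuthBearer` and `oAuthAuthenticator` are confirmed by this diff; `createKafkaClient`, `bootstrapServers`, and the `sasl` option are assumptions about the surrounding API.

```ts
// Hedged sketch: `createKafkaClient` and its options are assumed for
// illustration; `oAuthBearer`/`oAuthAuthenticator` are the exports added here.
import { createKafkaClient, oAuthBearer, oAuthAuthenticator } from 'kafka-ts';

const kafka = createKafkaClient({
    clientId: 'my-app', // hypothetical application id
    bootstrapServers: [{ host: 'localhost', port: 9092 }],
    sasl: oAuthBearer(
        oAuthAuthenticator({
            endpoint: 'https://auth.example.com/oauth/token', // placeholder token endpoint
            clientId: 'kafka-client', // placeholder OAuth client credentials
            clientSecret: process.env.OAUTH_CLIENT_SECRET ?? '',
        }),
    ),
});
```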
@@ -1,2 +1,3 @@
+ export { oAuthAuthenticator, oAuthBearer } from './oauthbearer';
  export { saslPlain } from './plain';
  export { saslScramSha256, saslScramSha512 } from './scram';
@@ -1,6 +1,9 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.saslScramSha512 = exports.saslScramSha256 = exports.saslPlain = void 0;
+ exports.saslScramSha512 = exports.saslScramSha256 = exports.saslPlain = exports.oAuthBearer = exports.oAuthAuthenticator = void 0;
+ var oauthbearer_1 = require("./oauthbearer");
+ Object.defineProperty(exports, "oAuthAuthenticator", { enumerable: true, get: function () { return oauthbearer_1.oAuthAuthenticator; } });
+ Object.defineProperty(exports, "oAuthBearer", { enumerable: true, get: function () { return oauthbearer_1.oAuthBearer; } });
  var plain_1 = require("./plain");
  Object.defineProperty(exports, "saslPlain", { enumerable: true, get: function () { return plain_1.saslPlain; } });
  var scram_1 = require("./scram");
@@ -0,0 +1,16 @@
+ import { SASLProvider } from '../broker';
+ export declare const oAuthBearer: (getToken: () => Promise<{
+     access_token: string;
+ }>) => SASLProvider;
+ export declare const oAuthAuthenticator: ({ endpoint, clientId, clientSecret, refreshThresholdSeconds, }: {
+     endpoint: string;
+     clientId: string;
+     clientSecret: string;
+     refreshThresholdSeconds?: number;
+ }) => () => Promise<TokenResponse>;
+ type TokenResponse = {
+     access_token: string;
+     refresh_token: string;
+     expires_in: number;
+ };
+ export {};
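Note that `oAuthBearer` is decoupled from `oAuthAuthenticator`: per the declaration above, any `getToken` callback resolving to an object with an `access_token` satisfies it. A minimal sketch with a hypothetical pre-issued token:

```ts
import { oAuthBearer } from 'kafka-ts';

// Any async token source works; here a static token from the environment
// (hypothetical setup; no refresh handling).
const sasl = oAuthBearer(async () => ({
    access_token: process.env.KAFKA_OAUTH_TOKEN ?? '',
}));
```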
@@ -0,0 +1,54 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.oAuthAuthenticator = exports.oAuthBearer = void 0;
+ const api_1 = require("../api");
+ const retry_1 = require("../utils/retry");
+ const oAuthBearer = (getToken) => {
+     return {
+         mechanism: 'OAUTHBEARER',
+         authenticate: async ({ sendRequest }) => {
+             const { access_token: accessToken } = await getToken();
+             const sep = String.fromCharCode(1);
+             const authBytes = `n,,${sep}auth=Bearer ${accessToken}${sep}${sep}`;
+             await sendRequest(api_1.API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
+         },
+     };
+ };
+ exports.oAuthBearer = oAuthBearer;
+ const oAuthAuthenticator = ({ endpoint, clientId, clientSecret, refreshThresholdSeconds = 15, }) => {
+     let tokenPromise = createToken(endpoint, {
+         grant_type: 'client_credentials',
+         client_id: clientId,
+         client_secret: clientSecret,
+     });
+     const scheduleRefresh = () => {
+         tokenPromise.then((token) => {
+             const refreshInMs = (token.expires_in - refreshThresholdSeconds) * 1000;
+             setTimeout(() => {
+                 tokenPromise = createToken(endpoint, {
+                     grant_type: 'refresh_token',
+                     client_id: clientId,
+                     client_secret: clientSecret,
+                     refresh_token: token.refresh_token,
+                 });
+                 scheduleRefresh();
+             }, refreshInMs);
+         });
+     };
+     scheduleRefresh();
+     return () => tokenPromise;
+ };
+ exports.oAuthAuthenticator = oAuthAuthenticator;
+ const createToken = async (endpoint, body) => {
+     return (0, retry_1.withRetry)((0, retry_1.exponentialBackoff)(100), 5)(async () => {
+         const response = await fetch(endpoint, {
+             method: 'POST',
+             headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
+             body: new URLSearchParams(body),
+         });
+         if (!response.ok) {
+             throw new Error(`Failed to obtain OAuth token: ${await response.text()}`);
+         }
+         return response.json();
+     });
+ };
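The authenticator above fetches a token eagerly with the client_credentials grant, then keeps rescheduling a refresh_token grant `refreshThresholdSeconds` before each `expires_in` elapses, so `getToken` always resolves the freshest cached promise. Used standalone, that looks roughly like the sketch below (endpoint and credentials are placeholders):

```ts
import { oAuthAuthenticator } from 'kafka-ts';

const getToken = oAuthAuthenticator({
    endpoint: 'https://auth.example.com/oauth/token', // placeholder
    clientId: 'kafka-client',                         // placeholder
    clientSecret: 'secret',                           // placeholder
    refreshThresholdSeconds: 30, // refresh 30s before expiry (default is 15)
});

const { access_token } = await getToken(); // resolves the cached token promise
```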
@@ -48,6 +48,9 @@ class ConsumerGroup {
          }
          catch (error) {
              this.heartbeatError = error;
+             if (error instanceof error_1.KafkaTSApiError && error.errorCode === api_1.API_ERROR.REBALANCE_IN_PROGRESS) {
+                 this.options.consumer.emit('rebalanceInProgress');
+             }
          }
      }, 5000);
  }
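With this change, a heartbeat that fails with `REBALANCE_IN_PROGRESS` is surfaced to the consumer as a `rebalanceInProgress` event (the event name is confirmed by the `Consumer` typings below). A sketch of observing it, assuming `Consumer` is reachable from the package root and the consumer instance is created elsewhere:

```ts
import type { Consumer } from 'kafka-ts';

declare const consumer: Consumer; // obtained elsewhere via the consumer API (elided)

consumer.on('rebalanceInProgress', () => {
    console.warn('group rebalance in progress; the current batch will be aborted');
});
```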
@@ -21,11 +21,13 @@ export type ConsumerOptions = {
      retrier?: Retrier;
      onBatch: (messages: Required<Message>[], context: {
          resolveOffset: (message: Pick<Required<Message>, 'topic' | 'partition' | 'offset'>) => void;
+         abortSignal: AbortSignal;
      }) => unknown;
  };
  export declare class Consumer extends EventEmitter<{
      offsetCommit: [];
      heartbeat: [];
+     rebalanceInProgress: [];
  }> {
      private cluster;
      private options;
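An `onBatch` handler can honor the new `abortSignal` to stop work once a rebalance aborts the batch. A minimal sketch (`processMessage` is hypothetical, and importing `ConsumerOptions` from the package root is assumed):

```ts
import type { ConsumerOptions } from 'kafka-ts';

declare function processMessage(message: unknown): Promise<void>; // hypothetical

const onBatch: ConsumerOptions['onBatch'] = async (messages, { resolveOffset, abortSignal }) => {
    for (const message of messages) {
        if (abortSignal.aborted) return; // partitions are being reassigned; stop early
        await processMessage(message);
        resolveOffset(message); // mark as processed so the offset gets committed
    }
};
```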
@@ -75,6 +75,7 @@ class Consumer extends events_1.default {
              consumer: this,
          })
          : undefined;
+         this.setMaxListeners(Infinity);
      }
      async start() {
          this.stopHook = undefined;
@@ -191,26 +192,34 @@ class Consumer extends events_1.default {
          if (!messages.length) {
              return;
          }
+         const commitOffset = () => this.consumerGroup?.offsetCommit(topicPartitions).then(() => this.offsetManager.flush(topicPartitions));
          const resolveOffset = (message) => this.offsetManager.resolve(message.topic, message.partition, message.offset + 1n);
+         const abortController = new AbortController();
+         const onRebalance = () => {
+             abortController.abort();
+             commitOffset()?.catch();
+         };
+         this.once('rebalanceInProgress', onRebalance);
          try {
-             await retrier(() => options.onBatch(messages.filter((message) => !this.offsetManager.isResolved(message)), { resolveOffset }));
+             await retrier(() => options.onBatch(messages.filter((message) => !this.offsetManager.isResolved(message)), { resolveOffset, abortSignal: abortController.signal }));
          }
          catch (error) {
-             await this.consumerGroup
-                 ?.offsetCommit(topicPartitions)
-                 .then(() => this.offsetManager.flush(topicPartitions))
-                 .catch();
+             await commitOffset()?.catch();
              throw error;
          }
-         response.responses.forEach(({ topicId, partitions }) => {
-             partitions.forEach(({ partitionIndex, records }) => {
-                 records.forEach(({ baseOffset, lastOffsetDelta }) => {
-                     this.offsetManager.resolve(this.metadata.getTopicNameById(topicId), partitionIndex, baseOffset + BigInt(lastOffsetDelta) + 1n);
+         finally {
+             this.off('rebalanceInProgress', onRebalance);
+         }
+         if (!abortController.signal.aborted) {
+             response.responses.forEach(({ topicId, partitions }) => {
+                 partitions.forEach(({ partitionIndex, records }) => {
+                     records.forEach(({ baseOffset, lastOffsetDelta }) => {
+                         this.offsetManager.resolve(this.metadata.getTopicNameById(topicId), partitionIndex, baseOffset + BigInt(lastOffsetDelta) + 1n);
+                     });
                  });
              });
-         });
-         await this.consumerGroup?.offsetCommit(topicPartitions);
-         this.offsetManager.flush(topicPartitions);
+         }
+         await commitOffset();
      }
      async fetch(nodeId, assignment) {
          return (0, retry_1.withRetry)(this.handleError.bind(this))(async () => {
@@ -1 +1,2 @@
- export declare const withRetry: (handleError: (error: unknown) => Promise<void>) => <T>(func: () => Promise<T>) => Promise<T>;
+ export declare const withRetry: (handleError: (error: unknown, retry: number) => Promise<void>, maxRetries?: number) => <T>(func: () => Promise<T>) => Promise<T>;
+ export declare const exponentialBackoff: (initialDelayMs: number, maxDelayMs?: number) => (_: unknown, retry: number) => Promise<void>;
@@ -1,15 +1,16 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.withRetry = void 0;
+ exports.exponentialBackoff = exports.withRetry = void 0;
+ const delay_1 = require("./delay");
  const logger_1 = require("./logger");
- const withRetry = (handleError) => async (func) => {
+ const withRetry = (handleError, maxRetries = 15) => async (func) => {
      let lastError;
-     for (let i = 0; i < 15; i++) {
+     for (let i = 0; i < maxRetries; i++) {
          try {
              return await func();
          }
          catch (error) {
-             await handleError(error);
+             await handleError(error, i + 1);
              lastError = error;
          }
      }
@@ -17,3 +18,8 @@ const withRetry = (handleError) => async (func) => {
      throw lastError;
  };
  exports.withRetry = withRetry;
+ const exponentialBackoff = (initialDelayMs, maxDelayMs = 5_000) => async (_, retry) => {
+     const delayMs = Math.min(maxDelayMs, initialDelayMs * 2 ** retry);
+     await (0, delay_1.delay)(delayMs);
+ };
+ exports.exponentialBackoff = exponentialBackoff;
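The pairing used by `createToken` above generalizes to any flaky async call. A sketch, assuming the deep import path matches the compiled layout shown in this diff:

```ts
import { withRetry, exponentialBackoff } from 'kafka-ts/dist/utils/retry'; // path assumed

// Retry up to 5 attempts with doubling delays; handleError receives retry
// starting at 1, so the waits are 200 ms, 400 ms, 800 ms, ... capped at the 5 s default.
const fetchJsonWithRetry = withRetry(exponentialBackoff(100), 5);

const data = await fetchJsonWithRetry(async () => {
    const res = await fetch('https://example.com/health'); // placeholder URL
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    return res.json();
});
```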
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "kafka-ts",
-     "version": "1.2.1",
+     "version": "1.3.1-beta.0",
      "main": "dist/index.js",
      "author": "Priit Käärd",
      "license": "MIT",
@@ -13,6 +13,7 @@
      "down": "KAFKA_VERSION=4.0.0 docker-compose down",
      "version:prerelease": "npm version prerelease --preid=beta",
      "version:patch": "npm version patch",
+     "version:minor": "npm version minor",
      "version:major": "npm version major",
      "format": "prettier --write .",
      "build": "tsc",