@platformatic/kafka 1.8.0 → 1.10.0

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (42)
  1. package/dist/apis/callbacks.js +3 -0
  2. package/dist/apis/consumer/fetch-v15.d.ts +1 -1
  3. package/dist/apis/consumer/fetch-v15.js +5 -1
  4. package/dist/apis/consumer/fetch-v16.d.ts +1 -1
  5. package/dist/apis/consumer/fetch-v16.js +5 -1
  6. package/dist/apis/consumer/fetch-v17.d.ts +1 -1
  7. package/dist/apis/consumer/fetch-v17.js +5 -1
  8. package/dist/apis/enumerations.d.ts +1 -1
  9. package/dist/apis/enumerations.js +1 -1
  10. package/dist/clients/admin/admin.js +7 -4
  11. package/dist/clients/admin/types.d.ts +2 -0
  12. package/dist/clients/base/base.d.ts +4 -0
  13. package/dist/clients/base/base.js +70 -3
  14. package/dist/clients/base/options.d.ts +4 -1
  15. package/dist/clients/base/options.js +6 -6
  16. package/dist/clients/consumer/consumer.d.ts +2 -0
  17. package/dist/clients/consumer/consumer.js +52 -21
  18. package/dist/clients/consumer/messages-stream.d.ts +3 -1
  19. package/dist/clients/consumer/messages-stream.js +93 -60
  20. package/dist/clients/consumer/options.d.ts +5 -0
  21. package/dist/clients/consumer/options.js +1 -0
  22. package/dist/clients/consumer/types.d.ts +1 -0
  23. package/dist/clients/producer/producer.js +1 -1
  24. package/dist/errors.d.ts +1 -0
  25. package/dist/errors.js +7 -2
  26. package/dist/network/connection-pool.d.ts +2 -0
  27. package/dist/network/connection-pool.js +26 -1
  28. package/dist/network/connection.d.ts +4 -2
  29. package/dist/network/connection.js +30 -34
  30. package/dist/protocol/crc32c.d.ts +1 -1
  31. package/dist/protocol/crc32c.js +7 -11
  32. package/dist/protocol/index.d.ts +1 -0
  33. package/dist/protocol/index.js +1 -0
  34. package/dist/protocol/records.js +8 -7
  35. package/dist/protocol/sasl/oauth-bearer.d.ts +5 -0
  36. package/dist/protocol/sasl/oauth-bearer.js +8 -0
  37. package/dist/protocol/writer.js +2 -2
  38. package/dist/utils.d.ts +1 -1
  39. package/dist/utils.js +6 -12
  40. package/dist/version.d.ts +2 -0
  41. package/dist/version.js +2 -0
  42. package/package.json +5 -1

package/dist/apis/callbacks.js CHANGED
@@ -37,6 +37,9 @@ export function runConcurrentCallbacks(errorMessage, collection, operation, call
  callback(hasErrors ? new MultipleErrors(errorMessage, errors) : null, results);
  }
  }
+ if (remaining === 0) {
+ callback(null, results);
+ }
  for (const item of collection) {
  operation(item, operationCallback.bind(null, i++));
  }
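
The guard above makes `runConcurrentCallbacks` complete immediately when the collection is empty; previously the final callback was never invoked for zero items. A minimal sketch of the contract (the import path is an assumption; the helper lives in the package internals shown above):

```ts
import { runConcurrentCallbacks } from '@platformatic/kafka' // assumption: re-exported from the package root

// With the new guard, an empty collection completes with (null, [])
// instead of never calling back.
runConcurrentCallbacks('Operation failed.', [], (_item, done) => done(null), (error, results) => {
  console.log(error, results) // null []
})
```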

package/dist/apis/consumer/fetch-v15.d.ts CHANGED
@@ -29,7 +29,7 @@ export interface FetchResponsePartition {
  logStartOffset: bigint;
  abortedTransactions: FetchResponsePartitionAbortedTransaction[];
  preferredReadReplica: number;
- records?: RecordsBatch;
+ records?: RecordsBatch[];
  }
  export interface FetchResponseTopic {
  topicId: string;

package/dist/apis/consumer/fetch-v15.js CHANGED
@@ -105,7 +105,11 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
  }
  if (recordsSize > 1) {
  recordsSize--;
- partition.records = readRecordsBatch(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
+ const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
+ partition.records = [];
+ do {
+ partition.records.push(readRecordsBatch(recordsBatchesReader));
+ } while (recordsBatchesReader.position < recordsSize);
  r.skip(recordsSize);
  }
  return partition;

package/dist/apis/consumer/fetch-v16.d.ts CHANGED
@@ -29,7 +29,7 @@ export interface FetchResponsePartition {
  logStartOffset: bigint;
  abortedTransactions: FetchResponsePartitionAbortedTransaction[];
  preferredReadReplica: number;
- records?: RecordsBatch;
+ records?: RecordsBatch[];
  }
  export interface FetchResponseTopic {
  topicId: string;

package/dist/apis/consumer/fetch-v16.js CHANGED
@@ -105,7 +105,11 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
  }
  if (recordsSize > 1) {
  recordsSize--;
- partition.records = readRecordsBatch(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
+ const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
+ partition.records = [];
+ do {
+ partition.records.push(readRecordsBatch(recordsBatchesReader));
+ } while (recordsBatchesReader.position < recordsSize);
  r.skip(recordsSize);
  }
  return partition;

package/dist/apis/consumer/fetch-v17.d.ts CHANGED
@@ -29,7 +29,7 @@ export interface FetchResponsePartition {
  logStartOffset: bigint;
  abortedTransactions: FetchResponsePartitionAbortedTransaction[];
  preferredReadReplica: number;
- records?: RecordsBatch;
+ records?: RecordsBatch[];
  }
  export interface FetchResponseTopic {
  topicId: string;

package/dist/apis/consumer/fetch-v17.js CHANGED
@@ -105,7 +105,11 @@ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
  }
  if (recordsSize > 1) {
  recordsSize--;
- partition.records = readRecordsBatch(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
+ const recordsBatchesReader = Reader.from(r.buffer.subarray(r.position, r.position + recordsSize));
+ partition.records = [];
+ do {
+ partition.records.push(readRecordsBatch(recordsBatchesReader));
+ } while (recordsBatchesReader.position < recordsSize);
  r.skip(recordsSize);
  }
  return partition;
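
Fetch v15, v16 and v17 responses can carry more than one record batch per partition, so `records` is now an array and the parser loops until the sub-reader is exhausted. Code consuming raw `fetch()` responses should iterate the batches, along these lines (field names follow the `FetchResponsePartition` interface above):

```ts
// Sketch of walking the new per-partition batch array.
for (const topicResponse of response.responses) {
  for (const partition of topicResponse.partitions) {
    for (const batch of partition.records ?? []) {
      for (const record of batch.records) {
        // The absolute offset is reconstructed from the batch base offset
        const offset = batch.firstOffset + BigInt(record.offsetDelta)
        console.log(offset, record.key, record.value)
      }
    }
  }
}
```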

package/dist/apis/enumerations.d.ts CHANGED
@@ -1,4 +1,4 @@
- export declare const SASLMechanisms: readonly ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"];
+ export declare const SASLMechanisms: readonly ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512", "OAUTHBEARER"];
  export type SASLMechanism = (typeof SASLMechanisms)[number];
  export declare const FindCoordinatorKeyTypes: {
  readonly GROUP: 0;

package/dist/apis/enumerations.js CHANGED
@@ -1,5 +1,5 @@
  // SASL Authentication
- export const SASLMechanisms = ['PLAIN', 'SCRAM-SHA-256', 'SCRAM-SHA-512'];
+ export const SASLMechanisms = ['PLAIN', 'SCRAM-SHA-256', 'SCRAM-SHA-512', 'OAUTHBEARER'];
  // Metadata API
  // ./metadata/find-coordinator.ts
  export const FindCoordinatorKeyTypes = { GROUP: 0, TRANSACTION: 1, SHARE: 2 };

package/dist/clients/admin/admin.js CHANGED
@@ -144,6 +144,7 @@ export class Admin extends Base {
  const numPartitions = options.partitions ?? 1;
  const replicationFactor = options.replicas ?? 1;
  const assignments = [];
+ const configs = options.configs ?? [];
  for (const { partition, brokers } of options.assignments ?? []) {
  assignments.push({ partitionIndex: partition, brokerIds: brokers });
  }
@@ -154,7 +155,7 @@
  numPartitions,
  replicationFactor,
  assignments,
- configs: []
+ configs
  });
  }
  this[kPerformDeduplicated]('createTopics', deduplicateCallback => {
@@ -322,10 +323,12 @@
  metadata: reader.readBytes(false)
  };
  reader.reset(member.memberAssignment);
+ reader.skip(2); // Ignore Version information
  const memberAssignments = reader.readMap(r => {
- const topic = r.readString();
- return [topic, { topic, partitions: reader.readArray(r => r.readInt32(), true, false) }];
- }, true, false);
+ const topic = r.readString(false);
+ return [topic, { topic, partitions: reader.readArray(r => r.readInt32(), false, false) }];
+ }, false, false);
+ reader.readBytes(); // Ignore the user data
  group.members.set(member.memberId, {
  id: member.memberId,
  groupInstanceId: member.groupInstanceId,

package/dist/clients/admin/types.d.ts CHANGED
@@ -1,3 +1,4 @@
+ import { type CreateTopicsRequestTopicConfig } from '../../apis/admin/create-topics-v7.ts';
  import { type ConsumerGroupState } from '../../apis/enumerations.ts';
  import { type NullableString } from '../../protocol/definitions.ts';
  import { type BaseOptions } from '../base/types.ts';
@@ -39,6 +40,7 @@ export interface CreateTopicsOptions {
  partitions?: number;
  replicas?: number;
  assignments?: BrokerAssignment[];
+ configs?: CreateTopicsRequestTopicConfig[];
  }
  export interface ListTopicsOptions {
  includeInternals?: boolean;
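
`createTopics()` now forwards per-topic configuration entries instead of always sending an empty list. A hedged usage sketch; the `{ name, value }` entry shape is an assumption based on Kafka's CreateTopics API:

```ts
import { Admin } from '@platformatic/kafka'

const admin = new Admin({ clientId: 'my-admin', bootstrapBrokers: ['localhost:9092'] })

await admin.createTopics({
  topics: ['events'],
  partitions: 3,
  replicas: 1,
  // Forwarded verbatim to the CreateTopics request
  configs: [{ name: 'retention.ms', value: '86400000' }]
})

await admin.close()
```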

package/dist/clients/base/base.d.ts CHANGED
@@ -56,6 +56,10 @@ export declare class Base<OptionsType extends BaseOptions = BaseOptions> extends
  listApis(): Promise<ApiVersionsResponseApi[]>;
  metadata(options: MetadataOptions, callback: CallbackWithPromise<ClusterMetadata>): void;
  metadata(options: MetadataOptions): Promise<ClusterMetadata>;
+ connectToBrokers(nodeIds: number[] | null, callback: CallbackWithPromise<Map<number, Connection>>): void;
+ connectToBrokers(nodeIds?: number[] | null): Promise<Map<number, Connection>>;
+ isActive(): boolean;
+ isConnected(): boolean;
  [kCreateConnectionPool](): ConnectionPool;
  [kListApis](callback: CallbackWithPromise<ApiVersionsResponseApi[]>): void;
  [kMetadata](options: MetadataOptions, callback: CallbackWithPromise<ClusterMetadata>): void;
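
These helpers are inherited by Producer, Consumer and Admin. `connectToBrokers()` warms up connections to the given node ids (or to every broker found in the metadata), while `isActive()` and `isConnected()` report lifecycle state. A hedged sketch:

```ts
// client is any Producer/Consumer/Admin instance
const connections = await client.connectToBrokers() // Map<nodeId, Connection>
console.log([...connections.keys()])                // connected broker ids

console.log(client.isActive())    // false only once close() has been called
console.log(client.isConnected()) // true when the pooled connections are up
```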

package/dist/clients/base/base.js CHANGED
@@ -1,5 +1,5 @@
  import { EventEmitter } from 'node:events';
- import { createPromisifiedCallback, kCallbackPromise } from "../../apis/callbacks.js";
+ import { createPromisifiedCallback, kCallbackPromise, runConcurrentCallbacks } from "../../apis/callbacks.js";
  import * as apis from "../../apis/index.js";
  import { api as apiVersionsV3 } from "../../apis/metadata/api-versions-v3.js";
  import { baseApisChannel, baseMetadataChannel, createDiagnosticContext, notifyCreation } from "../../diagnostic.js";
@@ -49,6 +49,7 @@ export class Base extends EventEmitter {
  #inflightDeduplications;
  constructor(options) {
  super();
+ this.setMaxListeners(0);
  this[kClientType] = 'base';
  this[kInstance] = currentInstance++;
  this[kApis] = [];
@@ -97,6 +98,7 @@ export class Base extends EventEmitter {
  callback = createPromisifiedCallback();
  }
  this[kClosed] = true;
+ this.emitWithDebug('client', 'close');
  this[kConnections].close(callback);
  return callback[kCallbackPromise];
  }
@@ -119,6 +121,57 @@ export class Base extends EventEmitter {
  baseMetadataChannel.traceCallback(this[kMetadata], 1, createDiagnosticContext({ client: this, operation: 'metadata' }), this, options, callback);
  return callback[kCallbackPromise];
  }
+ connectToBrokers(nodeIds, callback) {
+ if (!callback) {
+ callback = createPromisifiedCallback();
+ }
+ // Fetch the metadata
+ this[kMetadata]({ topics: [] }, (error, metadata) => {
+ if (error) {
+ callback(error, undefined);
+ return;
+ }
+ let nodes = [];
+ if (nodeIds?.length) {
+ for (const node of nodeIds) {
+ if (metadata.brokers.has(node)) {
+ nodes.push(node);
+ }
+ }
+ }
+ else {
+ nodes = Array.from(metadata.brokers.keys());
+ }
+ runConcurrentCallbacks('Connecting to brokers failed.', nodes, (nodeId, concurrentCallback) => {
+ this[kGetConnection](metadata.brokers.get(nodeId), (error, connection) => {
+ if (error) {
+ concurrentCallback(error, undefined);
+ return;
+ }
+ concurrentCallback(null, [nodeId, connection]);
+ });
+ }, (error, connections) => {
+ if (error) {
+ callback(error, undefined);
+ return;
+ }
+ return callback(null, new Map(connections));
+ });
+ });
+ return callback[kCallbackPromise];
+ }
+ isActive() {
+ if (this[kClosed]) {
+ return false;
+ }
+ return true;
+ }
+ isConnected() {
+ if (this[kClosed]) {
+ return false;
+ }
+ return this[kConnections].isConnected();
+ }
  [kCreateConnectionPool]() {
  const pool = new ConnectionPool(this[kClientId], {
  ownerId: this[kInstance],
@@ -183,11 +236,17 @@ export class Base extends EventEmitter {
  retryCallback(error, undefined);
  return;
  }
- api(connection, options.topics, autocreateTopics, true, retryCallback);
+ api(connection, topicsToFetch, autocreateTopics, true, retryCallback);
  });
  });
  }, (error, metadata) => {
  if (error) {
+ const hasStaleMetadata = error.findBy('hasStaleMetadata', true);
+ // Stale metadata, we need to fetch everything again
+ if (hasStaleMetadata) {
+ this[kClearMetadata]();
+ topicsToFetch = options.topics;
+ }
  deduplicateCallback(error, undefined);
  return;
  }
@@ -268,9 +327,17 @@ export class Base extends EventEmitter {
  const retriable = genericError.findBy?.('code', NetworkError.code) || genericError.findBy?.('canRetry', true);
  errors.push(error);
  if (attempt < retries && retriable && !shouldSkipRetry?.(error)) {
- setTimeout(() => {
+ this.emitWithDebug('client', 'performWithRetry:retry', operationId, attempt, retries);
+ function onClose() {
+ clearTimeout(timeout);
+ errors.push(new UserError(`Client closed while retrying ${operationId}.`));
+ callback(new MultipleErrors(`${operationId} failed ${attempt + 1} times.`, errors), undefined);
+ }
+ const timeout = setTimeout(() => {
+ this.removeListener('client:close', onClose);
  this[kPerformWithRetry](operationId, operation, callback, attempt + 1, errors, shouldSkipRetry);
  }, this[kOptions].retryDelay);
+ this.once('client:close', onClose);
  }
  else {
  if (attempt === 0) {
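
The retry scheduler above now races the delay timer against the new `client:close` event, so closing a client fails pending retries immediately instead of leaving timers running. The same pattern in isolation (a standalone sketch, not library code):

```ts
import { EventEmitter } from 'node:events'

// Schedule a retry that is cancelled as soon as the emitter announces close.
function retryLater (emitter: EventEmitter, delay: number, retry: () => void, abort: (err: Error) => void): void {
  function onClose (): void {
    clearTimeout(timer)
    abort(new Error('Closed while retrying.'))
  }

  const timer = setTimeout(() => {
    emitter.removeListener('client:close', onClose)
    retry()
  }, delay)

  emitter.once('client:close', onClose)
}
```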

package/dist/clients/base/options.d.ts CHANGED
@@ -88,7 +88,7 @@ export declare const baseOptionsSchema: {
  properties: {
  mechanism: {
  type: string;
- enum: readonly ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"];
+ enum: readonly ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512", "OAUTHBEARER"];
  };
  username: {
  type: string;
@@ -96,6 +96,9 @@ export declare const baseOptionsSchema: {
  password: {
  type: string;
  };
+ token: {
+ type: string;
+ };
  };
  required: string[];
  additionalProperties: boolean;

package/dist/clients/base/options.js CHANGED
@@ -1,10 +1,9 @@
- import { readFileSync } from 'node:fs';
  import { SASLMechanisms } from "../../apis/enumerations.js";
  import { ajv } from "../../utils.js";
- const packageJson = JSON.parse(readFileSync(new URL('../../../package.json', import.meta.url), 'utf-8'));
+ import { version } from "../../version.js";
  // Note: clientSoftwareName can only contain alphanumeric characters, hyphens and dots
  export const clientSoftwareName = 'platformatic-kafka';
- export const clientSoftwareVersion = packageJson.version;
+ export const clientSoftwareVersion = version;
  export const idProperty = { type: 'string', pattern: '^\\S+$' };
  export const topicWithPartitionAndOffsetProperties = {
  topic: idProperty,
@@ -39,9 +38,10 @@ export const baseOptionsSchema = {
  properties: {
  mechanism: { type: 'string', enum: SASLMechanisms },
  username: { type: 'string' },
- password: { type: 'string' }
+ password: { type: 'string' },
+ token: { type: 'string' }
  },
- required: ['mechanism', 'username', 'password'],
+ required: ['mechanism'],
  additionalProperties: false
  },
  metadataMaxAge: { type: 'number', minimum: 0 },
@@ -72,7 +72,7 @@ export const defaultBaseOptions = {
  timeout: 5000,
  retries: 3,
  retryDelay: 1000,
- metadataMaxAge: 5000, // 5 minutes
+ metadataMaxAge: 5000, // 5 seconds
  autocreateTopics: false,
  strict: false
  };
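
With `OAUTHBEARER` in the mechanism list and `token` in the schema, `username` and `password` are no longer unconditionally required; only `mechanism` is, plus whatever credentials the chosen mechanism needs. A hedged configuration sketch:

```ts
import { Consumer } from '@platformatic/kafka'

const consumer = new Consumer({
  clientId: 'my-consumer',
  groupId: 'my-group',
  bootstrapBrokers: ['broker:9092'],
  // The token is whatever bearer token your broker's OAuth setup expects
  sasl: { mechanism: 'OAUTHBEARER', token: process.env.KAFKA_TOKEN! }
})
```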

package/dist/clients/consumer/consumer.d.ts CHANGED
@@ -15,8 +15,10 @@ export declare class Consumer<Key = Buffer, Value = Buffer, HeaderKey = Buffer,
  [kFetchConnections]: ConnectionPool;
  constructor(options: ConsumerOptions<Key, Value, HeaderKey, HeaderValue>);
  get streamsCount(): number;
+ get lastHeartbeat(): Date | null;
  close(force: boolean | CallbackWithPromise<void>, callback?: CallbackWithPromise<void>): void;
  close(force?: boolean): Promise<void>;
+ isActive(): boolean;
  consume(options: ConsumeOptions<Key, Value, HeaderKey, HeaderValue>, callback: CallbackWithPromise<MessagesStream<Key, Value, HeaderKey, HeaderValue>>): void;
  consume(options: ConsumeOptions<Key, Value, HeaderKey, HeaderValue>): Promise<MessagesStream<Key, Value, HeaderKey, HeaderValue>>;
  fetch(options: FetchOptions<Key, Value, HeaderKey, HeaderValue>, callback: CallbackWithPromise<FetchResponse>): void;
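
`lastHeartbeat` records when the group coordinator last acknowledged a heartbeat, and `isActive()` now also checks group membership; together they make a simple liveness probe. A hedged sketch:

```ts
import { type Consumer } from '@platformatic/kafka'

// Consider the consumer healthy if it is active and heartbeated recently.
function isHealthy (consumer: Consumer, maxAgeMs = 30_000): boolean {
  if (!consumer.isActive()) return false
  const last = consumer.lastHeartbeat
  return last !== null && Date.now() - last.getTime() < maxAgeMs
}
```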

package/dist/clients/consumer/consumer.js CHANGED
@@ -4,7 +4,7 @@ import { consumerCommitsChannel, consumerConsumesChannel, consumerFetchesChannel
  import { UserError } from "../../errors.js";
  import { Reader } from "../../protocol/reader.js";
  import { Writer } from "../../protocol/writer.js";
- import { Base, kAfterCreate, kCheckNotClosed, kClosed, kCreateConnectionPool, kFetchConnections, kFormatValidationErrors, kGetApi, kGetBootstrapConnection, kGetConnection, kMetadata, kOptions, kPerformDeduplicated, kPerformWithRetry, kPrometheus, kValidateOptions } from "../base/base.js";
+ import { Base, kAfterCreate, kCheckNotClosed, kClearMetadata, kClosed, kCreateConnectionPool, kFetchConnections, kFormatValidationErrors, kGetApi, kGetBootstrapConnection, kGetConnection, kMetadata, kOptions, kPerformDeduplicated, kPerformWithRetry, kPrometheus, kValidateOptions } from "../base/base.js";
  import { defaultBaseOptions } from "../base/options.js";
  import { ensureMetric } from "../metrics.js";
  import { MessagesStream } from "./messages-stream.js";
@@ -23,6 +23,7 @@ export class Consumer extends Base {
  #protocol;
  #coordinatorId;
  #heartbeatInterval;
+ #lastHeartbeat;
  #streams;
  #partitionsAssigner;
  /*
@@ -57,6 +58,7 @@ export class Consumer extends Base {
  this.#protocol = null;
  this.#coordinatorId = null;
  this.#heartbeatInterval = null;
+ this.#lastHeartbeat = null;
  this.#streams = new Set();
  this.#partitionsAssigner = this[kOptions].partitionAssigner ?? roundRobinAssigner;
  this.#validateGroupOptions(this[kOptions], groupIdAndOptionsValidator);
@@ -72,6 +74,9 @@ export class Consumer extends Base {
  get streamsCount() {
  return this.#streams.size;
  }
+ get lastHeartbeat() {
+ return this.#lastHeartbeat;
+ }
  close(force, callback) {
  if (typeof force === 'function') {
  callback = force;
@@ -118,6 +123,14 @@ export class Consumer extends Base {
  });
  return callback[kCallbackPromise];
  }
+ isActive() {
+ const baseReady = super.isActive();
+ if (!baseReady) {
+ return false;
+ }
+ // We consider the group ready if we have a groupId, a memberId and heartbeat interval
+ return this.#membershipActive && Boolean(this.groupId) && Boolean(this.memberId) && this.#heartbeatInterval !== null;
+ }
  consume(options, callback) {
  if (!callback) {
  callback = createPromisifiedCallback();
@@ -265,6 +278,7 @@ export class Consumer extends Base {
  callback(error);
  return;
  }
+ this.#lastHeartbeat = null;
  callback(null);
  });
  return callback[kCallbackPromise];
@@ -336,10 +350,10 @@ export class Consumer extends Base {
  const requests = new Map();
  for (const name of options.topics) {
  const topic = metadata.topics.get(name);
- const toInclude = options.partitions?.[name] ?? [];
- const hasPartitionsFilter = toInclude.length > 0;
+ const toInclude = new Set(options.partitions?.[name] ?? []);
+ const hasPartitionsFilter = toInclude.size > 0;
  for (let i = 0; i < topic.partitionsCount; i++) {
- if (hasPartitionsFilter && !toInclude.includes(i)) {
+ if (hasPartitionsFilter && !toInclude.delete(i)) {
  continue;
  }
  const partition = topic.partitions[i];
@@ -360,6 +374,10 @@ export class Consumer extends Base {
  timestamp: options.timestamp ?? -1n
  });
  }
+ if (toInclude.size > 0) {
+ callback(new UserError(`Specified partition(s) not found in topic ${name}`), undefined);
+ return;
+ }
  }
  runConcurrentCallbacks('Listing offsets failed.', requests, ([leader, requests], concurrentCallback) => {
  this[kPerformWithRetry]('listOffsets', retryCallback => {
@@ -379,7 +397,7 @@ export class Consumer extends Base {
  }, concurrentCallback, 0);
  }, (error, responses) => {
  if (error) {
- callback(error, undefined);
+ callback(this.#handleMetadataError(error), undefined);
  return;
  }
  let offsets = new Map();
@@ -434,7 +452,7 @@ export class Consumer extends Base {
  });
  }, (error, response) => {
  if (error) {
- callback(error, undefined);
+ callback(this.#handleMetadataError(error), undefined);
  return;
  }
  const committed = new Map();
@@ -506,6 +524,7 @@ export class Consumer extends Base {
  // Note that here we purposely do not return, since it was not a group related problem we schedule another heartbeat
  }
  else {
+ this.#lastHeartbeat = new Date();
  this.emitWithDebug('consumer:heartbeat', 'end', eventPayload);
  }
  this.#heartbeatInterval?.refresh();
@@ -542,6 +561,7 @@ export class Consumer extends Base {
  this.#metricActiveStreams?.inc();
  stream.once('close', () => {
  this.#metricActiveStreams?.dec();
+ this.topics.untrackAll(...options.topics);
  this.#streams.delete(stream);
  });
  callback(null, stream);
@@ -722,7 +742,7 @@ export class Consumer extends Base {
  }
  this[kMetadata]({ topics: Array.from(topicsSubscriptions.keys()) }, (error, metadata) => {
  if (error) {
- callback(error, undefined);
+ callback(this.#handleMetadataError(error), undefined);
  return;
  }
  this.#performSyncGroup(this.#createAssignments(metadata), callback);
@@ -751,15 +771,7 @@ export class Consumer extends Base {
  callback(error, undefined);
  return;
  }
- // Read the assignment back
- const reader = Reader.from(response.assignment);
- const assignments = reader.readArray(r => {
- return {
- topic: r.readString(),
- partitions: r.readArray(r => r.readInt32(), true, false)
- };
- }, true, false);
- callback(error, assignments);
+ callback(error, this.#decodeProtocolAssignment(response.assignment));
  });
  }
  #performDeduplicateGroupOperaton(operationId, operation, callback) {
@@ -775,7 +787,7 @@ export class Consumer extends Base {
  }
  this[kMetadata]({ topics: this.topics.current }, (error, metadata) => {
  if (error) {
- callback(error, undefined);
+ callback(this.#handleMetadataError(error), undefined);
  return;
  }
  this[kPerformWithRetry](operationId, retryCallback => {
@@ -818,13 +830,26 @@ export class Consumer extends Base {
  };
  }
  /*
- This follows:
+ The following two methods follow:
  https://github.com/apache/kafka/blob/trunk/clients/src/main/resources/common/message/ConsumerProtocolAssignment.json
  */
  #encodeProtocolAssignment(assignments) {
- return Writer.create().appendArray(assignments, (w, { topic, partitions }) => {
- w.appendString(topic).appendArray(partitions, (w, a) => w.appendInt32(a), true, false);
- }, true, false).buffer;
+ return Writer.create()
+ .appendInt16(0) // Version information
+ .appendArray(assignments, (w, { topic, partitions }) => {
+ w.appendString(topic, false).appendArray(partitions, (w, a) => w.appendInt32(a), false, false);
+ }, false, false)
+ .appendInt32(0).buffer; // No user data
+ }
+ #decodeProtocolAssignment(buffer) {
+ const reader = Reader.from(buffer);
+ reader.skip(2); // Ignore Version information
+ return reader.readArray(r => {
+ return {
+ topic: r.readString(false),
+ partitions: r.readArray(r => r.readInt32(), false, false)
+ };
+ }, false, false);
  }
  #createAssignments(metadata) {
  const partitionTracker = new Map();
@@ -875,4 +900,10 @@ export class Consumer extends Base {
  }
  return protocolError;
  }
+ #handleMetadataError(error) {
+ if (error && error?.findBy('hasStaleMetadata', true)) {
+ this[kClearMetadata]();
+ }
+ return error;
+ }
  }
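
The rewritten codec matches the ConsumerProtocolAssignment schema linked above: a version prefix and a trailing user-data field wrap a non-compact array of topic assignments. Spelled out, the buffer produced by `#encodeProtocolAssignment` looks like this (a sketch of the version 0 layout):

```text
INT16  version            -> 0
INT32  assignment count   (non-compact array)
  STRING topic            (INT16 length + UTF-8 bytes)
  INT32  partition count
    INT32 partition
INT32  user data length   -> 0 (empty)
```

The decoder skips the `INT16` version, reads the same layout back, and ignores the user data, which should make assignments interoperable with clients that follow the reference schema.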

package/dist/clients/consumer/messages-stream.d.ts CHANGED
@@ -9,8 +9,10 @@ export declare function defaultCorruptedMessageHandler(): boolean;
  export declare class MessagesStream<Key, Value, HeaderKey, HeaderValue> extends Readable {
  #private;
  constructor(consumer: Consumer<Key, Value, HeaderKey, HeaderValue>, options: ConsumeOptions<Key, Value, HeaderKey, HeaderValue>);
- close(callback?: CallbackWithPromise<void>): void;
+ close(callback: CallbackWithPromise<void>): void;
  close(): Promise<void>;
+ isActive(): boolean;
+ isConnected(): boolean;
  addListener(event: 'autocommit', listener: (err: Error, offsets: CommitOptionsPartition[]) => void): this;
  addListener(event: 'data', listener: (message: Message<Key, Value, HeaderKey, HeaderValue>) => void): this;
  addListener(event: 'close', listener: () => void): this;

package/dist/clients/consumer/messages-stream.js CHANGED
@@ -20,6 +20,8 @@ export class MessagesStream extends Readable {
  #consumer;
  #mode;
  #fallbackMode;
+ #fetches;
+ #maxFetches;
  #options;
  #topics;
  #offsetsToFetch;
@@ -37,7 +39,7 @@ export class MessagesStream extends Readable {
  #metricsConsumedMessages;
  #corruptedMessageHandler;
  constructor(consumer, options) {
- const { autocommit, mode, fallbackMode, offsets, deserializers, onCorruptedMessage, ...otherOptions } = options;
+ const { autocommit, mode, fallbackMode, maxFetches, offsets, deserializers, onCorruptedMessage, ...otherOptions } = options;
  if (offsets && mode !== MessagesStreamModes.MANUAL) {
  throw new UserError('Cannot specify offsets when the stream mode is not MANUAL.');
  }
@@ -50,6 +52,8 @@ export class MessagesStream extends Readable {
  this.#mode = mode ?? MessagesStreamModes.LATEST;
  this.#fallbackMode = fallbackMode ?? MessagesStreamFallbackModes.LATEST;
  this.#offsetsToCommit = new Map();
+ this.#fetches = 0;
+ this.#maxFetches = maxFetches ?? 0;
  this.#topics = structuredClone(options.topics);
  this.#inflightNodes = new Set();
  this.#keyDeserializer = deserializers?.key ?? noopDeserializer;
@@ -138,6 +142,18 @@ export class MessagesStream extends Readable {
  });
  return callback[kCallbackPromise];
  }
+ isActive() {
+ if (this.#shouldClose || this.closed || this.destroyed) {
+ return false;
+ }
+ return this.#consumer.isActive();
+ }
+ isConnected() {
+ if (this.#shouldClose || this.closed || this.destroyed) {
+ return false;
+ }
+ return this.#consumer.isConnected();
+ }
  /* c8 ignore next 3 - Only forwards to Node.js implementation - Inserted here to please Typescript */
  addListener(event, listener) {
  return super.addListener(event, listener);
@@ -183,7 +199,7 @@ export class MessagesStream extends Readable {
  if (error) {
  // The stream has been closed, ignore any error
  /* c8 ignore next 4 - Hard to test */
- if (this.#shouldClose) {
+ if (this.#shouldClose || this.closed || this.destroyed) {
  this.push(null);
  return;
  }
@@ -241,7 +257,7 @@ export class MessagesStream extends Readable {
  if (error) {
  // The stream has been closed, ignore the error
  /* c8 ignore next 4 - Hard to test */
- if (this.#shouldClose) {
+ if (this.#shouldClose || this.closed || this.destroyed) {
  this.push(null);
  return;
  }
@@ -257,6 +273,9 @@ export class MessagesStream extends Readable {
  return;
  }
  this.#pushRecords(metadata, topicIds, response, requestedOffsets);
+ if (this.#maxFetches > 0 && ++this.#fetches >= this.#maxFetches) {
+ this.push(null);
+ }
  });
  }
  });
@@ -272,70 +291,74 @@ export class MessagesStream extends Readable {
  // Parse results
  for (const topicResponse of response.responses) {
  const topic = topicIds.get(topicResponse.topicId);
- for (const { records, partitionIndex: partition } of topicResponse.partitions) {
- if (!records) {
+ for (const { records: recordsBatches, partitionIndex: partition } of topicResponse.partitions) {
+ if (!recordsBatches) {
  continue;
  }
- const firstTimestamp = records.firstTimestamp;
- const firstOffset = records.firstOffset;
- const leaderEpoch = metadata.topics.get(topic).partitions[partition].leaderEpoch;
- for (const record of records.records) {
- const offset = records.firstOffset + BigInt(record.offsetDelta);
- if (offset < requestedOffsets.get(`${topic}:${partition}`)) {
- // Thi is a duplicate message, ignore it
- continue;
- }
- diagnosticContext = createDiagnosticContext({
- client: this.#consumer,
- stream: this,
- operation: 'receive',
- raw: record
- });
- consumerReceivesChannel.start.publish(diagnosticContext);
- const commit = autocommit ? noopCallback : this.#commit.bind(this, topic, partition, offset, leaderEpoch);
- try {
- const headers = new Map();
- for (const [headerKey, headerValue] of record.headers) {
- headers.set(headerKeyDeserializer(headerKey), headerValueDeserializer(headerValue));
+ for (const batch of recordsBatches) {
+ const firstTimestamp = batch.firstTimestamp;
+ const firstOffset = batch.firstOffset;
+ const leaderEpoch = metadata.topics.get(topic).partitions[partition].leaderEpoch;
+ for (const record of batch.records) {
+ const offset = batch.firstOffset + BigInt(record.offsetDelta);
+ if (offset < requestedOffsets.get(`${topic}:${partition}`)) {
+ // Thi is a duplicate message, ignore it
+ continue;
  }
- const key = keyDeserializer(record.key, headers);
- const value = valueDeserializer(record.value, headers);
- this.#metricsConsumedMessages?.inc();
- const message = {
- key,
- value,
- headers,
- topic,
- partition,
- timestamp: firstTimestamp + record.timestampDelta,
- offset,
- commit
- };
- diagnosticContext.result = message;
- consumerReceivesChannel.asyncStart.publish(diagnosticContext);
- canPush = this.push(message);
- consumerReceivesChannel.asyncEnd.publish(diagnosticContext);
- }
- catch (error) {
- const shouldDestroy = this.#corruptedMessageHandler(record, topic, partition, firstTimestamp, firstOffset, commit);
- if (shouldDestroy) {
- diagnosticContext.error = error;
- consumerReceivesChannel.error.publish(diagnosticContext);
- this.destroy(new UserError('Failed to deserialize a message.', { cause: error }));
- return;
+ diagnosticContext = createDiagnosticContext({
+ client: this.#consumer,
+ stream: this,
+ operation: 'receive',
+ raw: record
+ });
+ consumerReceivesChannel.start.publish(diagnosticContext);
+ const commit = autocommit ? noopCallback : this.#commit.bind(this, topic, partition, offset, leaderEpoch);
+ try {
+ const headers = new Map();
+ for (const [headerKey, headerValue] of record.headers) {
+ headers.set(headerKeyDeserializer(headerKey), headerValueDeserializer(headerValue));
+ }
+ const key = keyDeserializer(record.key, headers);
+ const value = valueDeserializer(record.value, headers);
+ this.#metricsConsumedMessages?.inc();
+ const message = {
+ key,
+ value,
+ headers,
+ topic,
+ partition,
+ timestamp: firstTimestamp + record.timestampDelta,
+ offset,
+ commit
+ };
+ diagnosticContext.result = message;
+ consumerReceivesChannel.asyncStart.publish(diagnosticContext);
+ canPush = this.push(message);
+ consumerReceivesChannel.asyncEnd.publish(diagnosticContext);
+ }
+ catch (error) {
+ const shouldDestroy = this.#corruptedMessageHandler(record, topic, partition, firstTimestamp, firstOffset, commit);
+ if (shouldDestroy) {
+ diagnosticContext.error = error;
+ consumerReceivesChannel.error.publish(diagnosticContext);
+ this.destroy(new UserError('Failed to deserialize a message.', { cause: error }));
+ return;
+ }
+ }
+ finally {
+ consumerReceivesChannel.end.publish(diagnosticContext);
  }
  }
- finally {
- consumerReceivesChannel.end.publish(diagnosticContext);
+ if (batch === recordsBatches[recordsBatches.length - 1]) {
+ // Track the last read offset
+ const lastOffset = batch.firstOffset + BigInt(batch.lastOffsetDelta);
+ this.#offsetsToFetch.set(`${topic}:${partition}`, lastOffset + 1n);
+ // Autocommit if needed
+ if (autocommit) {
+ this.#offsetsToCommit.set(`${topic}:${partition}`, { topic, partition, offset: lastOffset, leaderEpoch });
+ }
  }
  }
- // Track the last read offset
- const lastOffset = records.firstOffset + BigInt(records.lastOffsetDelta);
- this.#offsetsToFetch.set(`${topic}:${partition}`, lastOffset + 1n);
- // Autocommit if needed
- if (autocommit) {
- this.#offsetsToCommit.set(`${topic}:${partition}`, { topic, partition, offset: lastOffset, leaderEpoch });
- }
  }
  }
  if (this.#autocommitEnabled && !this.#autocommitInterval) {
@@ -387,6 +410,11 @@ export class MessagesStream extends Readable {
  : ListOffsetTimestamps.LATEST
  }, (error, offsets) => {
  if (error) {
+ /* c8 ignore next 4 - Hard to test */
+ if (this.#shouldClose || this.closed || this.destroyed) {
+ callback(null);
+ return;
+ }
  callback(error);
  return;
  }
@@ -409,6 +437,11 @@ export class MessagesStream extends Readable {
  }
  this.#consumer.listCommittedOffsets({ topics }, (error, commits) => {
  if (error) {
+ /* c8 ignore next 4 - Hard to test */
+ if (this.#shouldClose || this.closed || this.destroyed) {
+ callback(null);
+ return;
+ }
  callback(error);
  return;
  }

package/dist/clients/consumer/options.d.ts CHANGED
@@ -281,6 +281,11 @@ export declare const consumeOptionsSchema: {
  type: string;
  enum: ("latest" | "earliest" | "fail")[];
  };
+ maxFetches: {
+ type: string;
+ minimum: number;
+ default: number;
+ };
  offsets: {
  type: string;
  items: {

package/dist/clients/consumer/options.js CHANGED
@@ -71,6 +71,7 @@ export const consumeOptionsSchema = {
  topics: { type: 'array', items: idProperty },
  mode: { type: 'string', enum: Object.values(MessagesStreamModes) },
  fallbackMode: { type: 'string', enum: Object.values(MessagesStreamFallbackModes) },
+ maxFetches: { type: 'number', minimum: 0, default: 0 },
  offsets: {
  type: 'array',
  items: {

package/dist/clients/consumer/types.d.ts CHANGED
@@ -62,6 +62,7 @@ export interface StreamOptions {
  topics: string[];
  mode?: MessagesStreamModeValue;
  fallbackMode?: MessagesStreamFallbackModeValue;
+ maxFetches?: number;
  offsets?: TopicWithPartitionAndOffset[];
  onCorruptedMessage?: CorruptedMessageHandler;
  }
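
`maxFetches` bounds how many fetch round-trips a stream performs before ending (`0`, the default, keeps it unbounded), which is convenient for batch jobs and tests. A hedged sketch:

```ts
const stream = await consumer.consume({
  topics: ['events'],
  maxFetches: 1 // end the stream after a single fetch cycle
})

for await (const message of stream) {
  console.log(message.topic, message.partition, message.offset)
}
```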

package/dist/clients/producer/producer.js CHANGED
@@ -198,7 +198,7 @@ export class Producer extends Base {
  }
  else {
  // Use the roundrobin
- partition = this.#partitionsRoundRobin.postIncrement(topic, 1, -1);
+ partition = this.#partitionsRoundRobin.postIncrement(topic, 1, 0);
  }
  }
  else {

package/dist/errors.d.ts CHANGED
@@ -35,6 +35,7 @@ export declare class NetworkError extends GenericError {
  constructor(message: string, properties?: ErrorProperties);
  }
  export declare class ProtocolError extends GenericError {
+ static code: ErrorCode;
  constructor(codeOrId: string | number, properties?: ErrorProperties, response?: unknown);
  }
  export declare class ResponseError extends MultipleErrors {

package/dist/errors.js CHANGED
@@ -76,9 +76,13 @@ export class MultipleErrors extends AggregateError {
  return this;
  }
  for (const error of this.errors) {
- if (error[kGenericError] ? error.findBy(property, value) : error[property] === value) {
+ if (error[property] === value) {
  return error;
  }
+ const found = error[kGenericError] ? error.findBy(property, value) : undefined;
+ if (found) {
+ return found;
+ }
  }
  return null;
  }
@@ -97,9 +101,10 @@ export class NetworkError extends GenericError {
  }
  }
  export class ProtocolError extends GenericError {
+ static code = 'PLT_KFK_PROTOCOL';
  constructor(codeOrId, properties = {}, response = undefined) {
  const { id, code, message, canRetry } = protocolErrors[typeof codeOrId === 'number' ? protocolErrorsCodesById[codeOrId] : codeOrId];
- super('PLT_KFK_PROTOCOL', message, {
+ super(ProtocolError.code, message, {
  apiId: id,
  apiCode: code,
  canRetry,
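
`findBy` previously returned the aggregate error that contained a match; it now returns the matching nested error itself, recursing through wrapped `MultipleErrors`. Together with the new static `ProtocolError.code`, errors can be classified without string literals. A hedged sketch:

```ts
import { ProtocolError, type Producer } from '@platformatic/kafka' // assumption: both re-exported from the root

async function sendAndClassify (producer: Producer) {
  try {
    await producer.send({ messages: [{ topic: 'events', value: Buffer.from('hello') }] })
  } catch (error: any) {
    // Recursively searches error.errors and returns the first nested match
    const protocolError = error.findBy?.('code', ProtocolError.code)
    if (protocolError) console.error(protocolError.apiId, protocolError.message)
  }
}
```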

package/dist/network/connection-pool.d.ts CHANGED
@@ -11,4 +11,6 @@ export declare class ConnectionPool extends EventEmitter {
  getFirstAvailable(brokers: Broker[]): Promise<Connection>;
  close(callback: CallbackWithPromise<void>): void;
  close(): Promise<void>;
+ isActive(): boolean;
+ isConnected(): boolean;
  }

package/dist/network/connection-pool.js CHANGED
@@ -7,12 +7,14 @@ let currentInstance = 0;
  export class ConnectionPool extends EventEmitter {
  #instanceId;
  #clientId;
+ #closed;
  // @ts-ignore This is used just for debugging
  #ownerId;
  #connections;
  #connectionOptions;
  constructor(clientId, connectionOptions = {}) {
  super();
+ this.#closed = false;
  this.#instanceId = currentInstance++;
  this.#clientId = clientId;
  this.#ownerId = connectionOptions.ownerId;
@@ -41,17 +43,40 @@ export class ConnectionPool extends EventEmitter {
  if (!callback) {
  callback = createPromisifiedCallback();
  }
- if (this.#connections.size === 0) {
+ if (this.#closed || this.#connections.size === 0) {
+ this.#closed = true;
  callback(null);
  return callback[kCallbackPromise];
  }
+ this.#closed = true;
  runConcurrentCallbacks('Closing connections failed.', this.#connections, ([key, connection], cb) => {
  connection.close(cb);
  this.#connections.delete(key);
  }, error => callback(error));
  return callback[kCallbackPromise];
  }
+ isActive() {
+ if (this.#connections.size === 0) {
+ return false;
+ }
+ return true;
+ }
+ isConnected() {
+ if (this.#connections.size === 0) {
+ return false;
+ }
+ for (const connection of this.#connections.values()) {
+ if (!connection.isConnected()) {
+ return false;
+ }
+ }
+ return true;
+ }
  #get(broker, callback) {
+ if (this.#closed) {
+ callback(new Error('Connection pool is closed.'), undefined);
+ return;
+ }
  const key = `${broker.host}:${broker.port}`;
  const existing = this.#connections.get(key);
  if (existing) {

package/dist/network/connection.d.ts CHANGED
@@ -11,8 +11,9 @@ export interface Broker {
  }
  export interface SASLOptions {
  mechanism: SASLMechanism;
- username: string;
- password: string;
+ username?: string;
+ password?: string;
+ token?: string;
  }
  export interface ConnectionOptions {
  connectTimeout?: number;
@@ -53,6 +54,7 @@ export declare class Connection extends EventEmitter {
  get instanceId(): number;
  get status(): ConnectionStatusValue;
  get socket(): Socket;
+ isConnected(): boolean;
  connect(host: string, port: number, callback?: CallbackWithPromise<void>): void | Promise<void>;
  ready(callback: CallbackWithPromise<void>): void;
  ready(): Promise<void>;

package/dist/network/connection.js CHANGED
@@ -10,7 +10,7 @@ import { AuthenticationError, NetworkError, TimeoutError, UnexpectedCorrelationI
  import { protocolAPIsById } from "../protocol/apis.js";
  import { EMPTY_OR_SINGLE_COMPACT_LENGTH_SIZE, INT32_SIZE } from "../protocol/definitions.js";
  import { DynamicBuffer } from "../protocol/dynamic-buffer.js";
- import { saslPlain, saslScramSha } from "../protocol/index.js";
+ import { saslOAuthBearer, saslPlain, saslScramSha } from "../protocol/index.js";
  import { Reader } from "../protocol/reader.js";
  import { defaultCrypto } from "../protocol/sasl/scram-sha.js";
  import { Writer } from "../protocol/writer.js";
@@ -80,6 +80,9 @@ export class Connection extends EventEmitter {
  get socket() {
  return this.#socket;
  }
+ isConnected() {
+ return this.#status === ConnectionStatuses.CONNECTED;
+ }
  connect(host, port, callback) {
  if (!callback) {
  callback = createPromisifiedCallback();
@@ -229,7 +232,7 @@
  }
  #authenticate(host, port, diagnosticContext) {
  this.#status = ConnectionStatuses.AUTHENTICATING;
- const { mechanism, username, password } = this.#options.sasl;
+ const { mechanism, username, password, token } = this.#options.sasl;
  if (!SASLMechanisms.includes(mechanism)) {
  this.#onConnectionError(host, port, diagnosticContext, new UserError(`SASL mechanism ${mechanism} not supported.`));
  return;
@@ -243,6 +246,9 @@
  if (mechanism === 'PLAIN') {
  saslPlain.authenticate(saslAuthenticateV2.api, this, username, password, this.#onSaslAuthenticate.bind(this, host, port, diagnosticContext));
  }
+ else if (mechanism === 'OAUTHBEARER') {
+ saslOAuthBearer.authenticate(saslAuthenticateV2.api, this, token, this.#onSaslAuthenticate.bind(this, host, port, diagnosticContext));
+ }
  else {
  saslScramSha.authenticate(saslAuthenticateV2.api, this, mechanism.substring(6), username, password, defaultCrypto, this.#onSaslAuthenticate.bind(this, host, port, diagnosticContext));
  }
@@ -263,46 +269,36 @@
  request.callback(new NetworkError('Connection closed'), undefined);
  return false;
  }
- let canWrite = true;
- const { correlationId, apiKey, apiVersion, payload: payloadFn, hasRequestHeaderTaggedFields } = request;
- const writer = Writer.create()
- .appendInt16(apiKey)
- .appendInt16(apiVersion)
- .appendInt32(correlationId)
+ const writer = Writer.create();
+ writer
+ .appendInt16(request.apiKey)
+ .appendInt16(request.apiVersion)
+ .appendInt32(request.correlationId)
  .appendString(this.#clientId, false);
- if (hasRequestHeaderTaggedFields) {
+ if (request.hasRequestHeaderTaggedFields) {
  writer.appendTaggedFields();
  }
- const payload = payloadFn();
- writer.appendFrom(payload);
- writer.prependLength();
- // Write the header
- this.#socket.cork();
- if (!payload.context.noResponse) {
- this.#inflightRequests.set(correlationId, request);
- }
- loggers.protocol('Sending request.', { apiKey: protocolAPIsById[apiKey], correlationId, request });
- for (const buf of writer.buffers) {
- if (!this.#socket.write(buf)) {
- canWrite = false;
- }
- }
- if (!canWrite) {
+ const payload = request.payload();
+ writer.appendFrom(payload).prependLength();
+ const expectResponse = !payload.context.noResponse;
+ if (expectResponse)
+ this.#inflightRequests.set(request.correlationId, request);
+ const canWrite = this.#socket.write(writer.buffer);
+ if (!canWrite)
  this.#socketMustBeDrained = true;
- }
- this.#socket.uncork();
- if (payload.context.noResponse) {
+ if (!expectResponse)
  request.callback(null, canWrite);
- }
- // debugDump(Date.now() % 100000, 'send', { owner: this.#ownerId, apiKey: protocolAPIsById[apiKey], correlationId })
+ loggers.protocol('Sending request.', {
+ apiKey: protocolAPIsById[request.apiKey],
+ correlationId: request.correlationId,
+ request
+ });
  return canWrite;
  }
- catch (error) {
- request.diagnostic.error = error;
+ catch (err) {
+ request.diagnostic.error = err;
  connectionsApiChannel.error.publish(request.diagnostic);
- connectionsApiChannel.end.publish(request.diagnostic);
- throw error;
- /* c8 ignore next 3 - C8 does not detect these as covered */
+ throw err;
  }
  finally {
  connectionsApiChannel.end.publish(request.diagnostic);

package/dist/protocol/crc32c.d.ts CHANGED
@@ -1,2 +1,2 @@
  import { DynamicBuffer } from './dynamic-buffer.ts';
- export declare function crc32c(data: Buffer | DynamicBuffer): number;
+ export declare function crc32c(data: Buffer | Uint8Array | DynamicBuffer): number;

package/dist/protocol/crc32c.js CHANGED
@@ -68,16 +68,12 @@ const CRC = [
  0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351
  ];
  export function crc32c(data) {
- let crc = 0 ^ -1;
- if (DynamicBuffer.isDynamicBuffer(data)) {
- for (let i = 0; i < data.length; i++) {
- crc = CRC[(crc ^ data.get(i)) & 0xff] ^ (crc >>> 8);
- }
+ const bytes = DynamicBuffer.isDynamicBuffer(data)
+ ? data.buffer
+ : new Uint8Array(data);
+ let crc = 0xffffffff;
+ for (let i = 0, len = bytes.length; i < len; ++i) {
+ crc = CRC[(crc ^ bytes[i]) & 0xff] ^ (crc >>> 8);
  }
- else {
- for (let i = 0; i < data.length; i++) {
- crc = CRC[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
- }
- }
- return Uint32Array.from([(crc ^ -1) >>> 0])[0];
+ return (crc ^ 0xffffffff) >>> 0;
  }
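
The rewritten `crc32c` normalizes every input to a `Uint8Array` and drops the `Uint32Array` round-trip on the way out. A quick sanity check against the well-known CRC-32C (Castagnoli) test vector; the deep import path below is an assumption based on the dist layout shown above:

```ts
import { crc32c } from '@platformatic/kafka/dist/protocol/crc32c.js'

// The CRC-32C check value for the ASCII string "123456789" is 0xe3069283.
console.log(crc32c(Buffer.from('123456789')).toString(16)) // 'e3069283'
```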

package/dist/protocol/index.d.ts CHANGED
@@ -8,6 +8,7 @@ export * from './index.ts';
  export * from './murmur2.ts';
  export * from './reader.ts';
  export * from './records.ts';
+ export * as saslOAuthBearer from './sasl/oauth-bearer.ts';
  export * as saslPlain from './sasl/plain.ts';
  export * as saslScramSha from './sasl/scram-sha.ts';
  export * from './varint.ts';

package/dist/protocol/index.js CHANGED
@@ -8,6 +8,7 @@ export * from "./index.js";
  export * from "./murmur2.js";
  export * from "./reader.js";
  export * from "./records.js";
+ export * as saslOAuthBearer from "./sasl/oauth-bearer.js";
  export * as saslPlain from "./sasl/plain.js";
  export * as saslScramSha from "./sasl/scram-sha.js";
  export * from "./varint.js";

package/dist/protocol/records.js CHANGED
@@ -60,15 +60,16 @@ export function readRecord(reader) {
  }
  export function createRecordsBatch(messages, options = {}) {
  const now = BigInt(Date.now());
- const timestamps = [];
- for (let i = 0; i < messages.length; i++) {
- timestamps.push(messages[i].timestamp ?? now);
- }
- messages.sort();
- const firstTimestamp = timestamps[0];
- const maxTimestamp = timestamps[timestamps.length - 1];
+ const firstTimestamp = messages[0].timestamp ?? now;
+ let maxTimestamp = firstTimestamp;
  let buffer = new DynamicBuffer();
  for (let i = 0; i < messages.length; i++) {
+ let ts = messages[i].timestamp ?? now;
+ if (typeof ts === 'number')
+ ts = BigInt(ts);
+ messages[i].timestamp = ts;
+ if (ts > maxTimestamp)
+ maxTimestamp = ts;
  const record = createRecord(messages[i], i, firstTimestamp);
  buffer.appendFrom(record.dynamicBuffer);
  }

package/dist/protocol/sasl/oauth-bearer.d.ts ADDED
@@ -0,0 +1,5 @@
+ import { type CallbackWithPromise } from '../../apis/callbacks.ts';
+ import { type SASLAuthenticationAPI, type SaslAuthenticateResponse } from '../../apis/security/sasl-authenticate-v2.ts';
+ import { type Connection } from '../../network/connection.ts';
+ export declare function authenticate(authenticateAPI: SASLAuthenticationAPI, connection: Connection, token: string, callback: CallbackWithPromise<SaslAuthenticateResponse>): void;
+ export declare function authenticate(authenticateAPI: SASLAuthenticationAPI, connection: Connection, token: string): Promise<SaslAuthenticateResponse>;

package/dist/protocol/sasl/oauth-bearer.js ADDED
@@ -0,0 +1,8 @@
+ import { createPromisifiedCallback, kCallbackPromise } from "../../apis/callbacks.js";
+ export function authenticate(authenticateAPI, connection, token, callback) {
+ if (!callback) {
+ callback = createPromisifiedCallback();
+ }
+ authenticateAPI(connection, Buffer.from(`n,,\x01auth=Bearer ${token}\x01\x01`), callback);
+ return callback[kCallbackPromise];
+ }
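
The payload is the OAUTHBEARER initial client response from RFC 7628: a GS2 header (`n,,`) followed by an `auth=Bearer <token>` key/value pair, delimited by `0x01` bytes. What ends up on the wire:

```ts
// Reproduction of the buffer built by saslOAuthBearer.authenticate();
// the token itself is opaque to the client and validated by the broker.
const token = 'eyJhbGciOiJSUzI1NiJ9...' // any bearer token
const payload = Buffer.from(`n,,\x01auth=Bearer ${token}\x01\x01`)

console.log(payload.subarray(0, 4)) // <Buffer 6e 2c 2c 01> -> "n,," plus 0x01
```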

package/dist/protocol/writer.js CHANGED
@@ -152,7 +152,7 @@ export class Writer {
  }
  appendArray(value, entryWriter, compact = true, appendTrailingTaggedFields = true) {
  if (value == null) {
- return compact ? this.appendUnsignedVarInt(0) : this.appendInt32(0);
+ return compact ? this.appendUnsignedVarInt(0) : this.appendInt32(-1);
  }
  const length = value.length;
  if (compact) {
@@ -171,7 +171,7 @@ export class Writer {
  }
  appendMap(value, entryWriter, compact = true, appendTrailingTaggedFields = true) {
  if (value == null) {
- return compact ? this.appendUnsignedVarInt(0) : this.appendInt32(0);
+ return compact ? this.appendUnsignedVarInt(0) : this.appendInt32(-1);
  }
  const length = value.size;
  if (compact) {
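
This aligns the non-compact encoders with the Kafka wire protocol, where a nullable array or map encodes `null` as an `INT32` length of `-1`; a length of `0` means an empty collection, which is a different value. A standalone illustration:

```ts
// null array: INT32 -1          empty array: INT32 0
const nullArray = Buffer.alloc(4)
nullArray.writeInt32BE(-1, 0)   // <Buffer ff ff ff ff>

const emptyArray = Buffer.alloc(4)
emptyArray.writeInt32BE(0, 0)   // <Buffer 00 00 00 00>
```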
package/dist/utils.d.ts CHANGED
@@ -25,7 +25,7 @@ export declare class NumericMap extends Map<string, number> {
  export declare function niceJoin(array: string[], lastSeparator?: string, separator?: string): string;
  export declare function listErrorMessage(type: string[]): string;
  export declare function enumErrorMessage(type: Record<string, unknown>, keysOnly?: boolean): string;
- export declare function groupByProperty<Key, Value>(entries: Value[], property: keyof Value): [Key, Value[]][];
+ export declare function groupByProperty<Key extends PropertyKey, Value>(entries: readonly Value[], property: keyof Value): [Key, Value[]][];
  export declare function humanize(label: string, buffer: Buffer | DynamicBuffer): string;
  export declare function setDebugDumpLogger(logger: DebugDumpLogger): void;
  export declare function debugDump(...values: unknown[]): void;

package/dist/utils.js CHANGED
@@ -121,19 +121,13 @@ export function enumErrorMessage(type, keysOnly = false) {
  return `should be one of ${niceJoin(Object.entries(type).map(([k, v]) => `${v} (${k})`), ' or ')}`;
  }
  export function groupByProperty(entries, property) {
- const grouped = new Map();
- const result = [];
- for (const entry of entries) {
- const value = entry[property];
- let values = grouped.get(value);
- if (!values) {
- values = [];
- grouped.set(value, values);
- result.push([value, values]);
- }
- values.push(entry);
+ const buckets = Object.create(null);
+ for (let i = 0, len = entries.length; i < len; ++i) {
+ const e = entries[i];
+ const key = e[property];
+ (buckets[key] ||= []).push(e);
  }
- return result;
+ return Object.entries(buckets);
  }
  export function humanize(label, buffer) {
  const formatted = buffer

package/dist/version.d.ts ADDED
@@ -0,0 +1,2 @@
+ export declare const name: any;
+ export declare const version: any;

package/dist/version.js ADDED
@@ -0,0 +1,2 @@
+ export const name = "@platformatic/kafka";
+ export const version = "1.10.0";

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@platformatic/kafka",
- "version": "1.8.0",
+ "version": "1.10.0",
  "description": "Modern and performant client for Apache Kafka",
  "homepage": "https://github.com/platformatic/kafka",
  "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",
@@ -46,6 +46,7 @@
  "cleaner-spec-reporter": "^0.5.0",
  "cronometro": "^5.3.0",
  "eslint": "^9.21.0",
+ "fast-jwt": "^6.0.2",
  "hwp": "^0.4.1",
  "json5": "^2.2.3",
  "kafkajs": "^2.2.4",
@@ -64,11 +65,14 @@
  },
  "scripts": {
  "build": "rm -rf dist && tsc -p tsconfig.base.json",
+ "postbuild": "node --experimental-strip-types scripts/postbuild.ts",
  "lint": "eslint --cache",
  "typecheck": "tsc -p . --noEmit",
  "format": "prettier -w benchmarks playground src test",
  "test": "c8 -c test/config/c8-local.json node --env-file=test/config/env --no-warnings --test --test-timeout=300000 'test/**/*.test.ts'",
  "test:ci": "c8 -c test/config/c8-ci.json node --env-file=test/config/env --no-warnings --test --test-timeout=300000 'test/**/*.test.ts'",
+ "test:docker:up": "node scripts/docker.ts up -d --wait",
+ "test:docker:down": "node scripts/docker.ts down",
  "ci": "npm run build && npm run lint && npm run test:ci",
  "generate:apis": "node --experimental-strip-types scripts/generate-apis.ts",
  "generate:errors": "node --experimental-strip-types scripts/generate-errors.ts",