@platformatic/kafka 1.17.0 → 1.17.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clients/consumer/consumer.js +12 -9
- package/dist/version.js +1 -1
- package/package.json +1 -1
package/dist/clients/consumer/consumer.js
CHANGED

@@ -2,6 +2,7 @@ import { createPromisifiedCallback, kCallbackPromise, runConcurrentCallbacks } f
 import { FetchIsolationLevels, FindCoordinatorKeyTypes } from "../../apis/enumerations.js";
 import { consumerCommitsChannel, consumerConsumesChannel, consumerFetchesChannel, consumerGroupChannel, consumerHeartbeatChannel, consumerOffsetsChannel, createDiagnosticContext } from "../../diagnostic.js";
 import { UserError } from "../../errors.js";
+import { INT32_SIZE } from "../../protocol/definitions.js";
 import { Reader } from "../../protocol/reader.js";
 import { Writer } from "../../protocol/writer.js";
 import { Base, kAfterCreate, kCheckNotClosed, kClearMetadata, kClosed, kCreateConnectionPool, kFetchConnections, kFormatValidationErrors, kGetApi, kGetBootstrapConnection, kGetConnection, kMetadata, kOptions, kPerformDeduplicated, kPerformWithRetry, kPrometheus, kValidateOptions } from "../base/base.js";
@@ -25,7 +26,6 @@ export class Consumer extends Base {
 #heartbeatInterval;
 #lastHeartbeat;
 #streams;
-#partitionsAssigner;
 /*
 The following requests are blocking in Kafka:
 
@@ -60,7 +60,6 @@ export class Consumer extends Base {
 this.#heartbeatInterval = null;
 this.#lastHeartbeat = null;
 this.#streams = new Set();
-this.#partitionsAssigner = this[kOptions].partitionAssigner ?? roundRobinAssigner;
 this.#validateGroupOptions(this[kOptions], groupIdAndOptionsValidator);
 // Initialize connection pool
 this[kFetchConnections] = this[kCreateConnectionPool]();
@@ -482,8 +481,8 @@ export class Consumer extends Base {
 #leaveGroup(force, callback) {
 consumerGroupChannel.traceCallback(this.#performLeaveGroup, 1, createDiagnosticContext({ client: this, operation: 'leaveGroup', force }), this, force, callback);
 }
-#syncGroup(callback) {
-consumerGroupChannel.traceCallback(this.#performSyncGroup,
+#syncGroup(partitionsAssigner, callback) {
+consumerGroupChannel.traceCallback(this.#performSyncGroup, 2, createDiagnosticContext({ client: this, operation: 'syncGroup' }), this, partitionsAssigner, null, callback);
 }
 #heartbeat(options) {
 const eventPayload = { groupId: this.groupId, memberId: this.memberId, generationId: this.generationId };
@@ -635,7 +634,7 @@ export class Consumer extends Base {
 this.#members.set(member.memberId, this.#decodeProtocolSubscriptionMetadata(member.memberId, member.metadata));
 }
 // Send a syncGroup request
-this.#syncGroup((error, response) => {
+this.#syncGroup(options.partitionAssigner, (error, response) => {
 if (!this.#membershipActive) {
 callback(null, undefined);
 return;
@@ -721,7 +720,7 @@ export class Consumer extends Base {
 callback(null);
 });
 }
-#performSyncGroup(assignments, callback) {
+#performSyncGroup(partitionsAssigner, assignments, callback) {
 if (!this.#membershipActive) {
 callback(null, []);
 return;
@@ -745,7 +744,7 @@ export class Consumer extends Base {
 callback(this.#handleMetadataError(error), undefined);
 return;
 }
-this.#performSyncGroup(this.#createAssignments(metadata), callback);
+this.#performSyncGroup(partitionsAssigner, this.#createAssignments(partitionsAssigner, metadata), callback);
 });
 return;
 }
@@ -844,6 +843,9 @@ export class Consumer extends Base {
 #decodeProtocolAssignment(buffer) {
 const reader = Reader.from(buffer);
 reader.skip(2); // Ignore Version information
+if (reader.remaining < INT32_SIZE) {
+return [];
+}
 return reader.readArray(r => {
 return {
 topic: r.readString(false),
@@ -851,7 +853,7 @@ export class Consumer extends Base {
 };
 }, false, false);
 }
-#createAssignments(metadata) {
+#createAssignments(partitionsAssigner, metadata) {
 const partitionTracker = new Map();
 // First of all, layout topics-partitions in a list
 for (const [topic, partitions] of metadata.topics) {
@@ -872,7 +874,8 @@ export class Consumer extends Base {
 return [{ memberId: this.memberId, assignment: this.#encodeProtocolAssignment(assignments) }];
 }
 const encodedAssignments = [];
-
+partitionsAssigner ??= this[kOptions].partitionAssigner ?? roundRobinAssigner;
+for (const member of partitionsAssigner(this.memberId, this.#members, new Set(this.topics.current), metadata)) {
 encodedAssignments.push({
 memberId: member.memberId,
 assignment: this.#encodeProtocolAssignment(Array.from(member.assignments.values()))
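Taken together, these consumer.js hunks remove the cached #partitionsAssigner field and instead thread a partitionsAssigner argument through #syncGroup, #performSyncGroup and #createAssignments, resolving it at the point of use with partitionsAssigner ??= this[kOptions].partitionAssigner ?? roundRobinAssigner, so a per-join options.partitionAssigner can override the constructor-level option. The new guard in #decodeProtocolAssignment returns an empty assignment list when fewer than INT32_SIZE bytes remain after the version field, rather than attempting to read an array from an empty buffer. The sketch below shows how a custom assigner might be wired in through the partitionAssigner option; the assigner signature mirrors the call site in #createAssignments, but the other constructor option names and the root-level roundRobinAssigner export are assumptions, not something this diff confirms.

// Hypothetical sketch only: constructor options other than partitionAssigner
// and the roundRobinAssigner export are assumed, not taken from this diff.
import { Consumer, roundRobinAssigner } from '@platformatic/kafka'

// Argument order mirrors the call site in #createAssignments:
//   partitionsAssigner(this.memberId, this.#members, new Set(this.topics.current), metadata)
function loggingAssigner (leaderId, members, topics, metadata) {
  console.log('computing assignments', {
    leaderId,
    members: Array.from(members.keys()),
    topics: Array.from(topics)
  })

  // Delegate the actual balancing to the default assigner named in the diff.
  return roundRobinAssigner(leaderId, members, topics, metadata)
}

const consumer = new Consumer({
  groupId: 'example-group',             // assumed option names
  bootstrapBrokers: ['localhost:9092'],
  partitionAssigner: loggingAssigner    // read as options.partitionAssigner in the patched code
})
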
package/dist/version.js
CHANGED
@@ -1,2 +1,2 @@
 export const name = "@platformatic/kafka";
-export const version = "1.17.0";
+export const version = "1.17.1";
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@platformatic/kafka",
-  "version": "1.17.0",
+  "version": "1.17.1",
   "description": "Modern and performant client for Apache Kafka",
   "homepage": "https://github.com/platformatic/kafka",
   "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",