kafka-ts 0.0.2 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/api/fetch.js +1 -1
- package/dist/api/index.d.ts +0 -2
- package/dist/api/offset-commit.js +1 -1
- package/dist/api/offset-fetch.d.ts +0 -2
- package/dist/api/offset-fetch.js +1 -3
- package/dist/api/produce.js +1 -1
- package/dist/broker.js +1 -1
- package/dist/cluster.test.js +0 -2
- package/dist/consumer/consumer-group.js +0 -2
- package/package.json +1 -1
- package/src/api/fetch.ts +1 -1
- package/src/api/offset-commit.ts +1 -1
- package/src/api/offset-fetch.ts +1 -5
- package/src/api/produce.ts +1 -1
- package/src/broker.ts +1 -1
- package/src/cluster.test.ts +0 -2
- package/src/consumer/consumer-group.ts +0 -2
package/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 **KafkaTS** is a Apache Kafka client library for Node.js. It provides both a low-level API for communicating directly with the Apache Kafka cluster and high-level APIs for publishing and subscribing to Kafka topics.
 
-Supported Kafka versions
+**Supported Kafka versions:** 3.6 and later
 
 ## Installation
 
package/dist/api/fetch.js
CHANGED
@@ -7,7 +7,7 @@ const decoder_1 = require("../utils/decoder");
 const error_1 = require("../utils/error");
 exports.FETCH = (0, api_1.createApi)({
     apiKey: 1,
-    apiVersion:
+    apiVersion: 15,
     request: (encoder, data) => encoder
         .writeUVarInt(0)
         .writeInt32(data.maxWaitMs)
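
All of the `apiVersion` bumps in this release touch definitions built with the same `createApi({ apiKey, apiVersion, request, ... })` pattern, where `request` serializes the payload through a chainable binary encoder (the `.writeUVarInt(0).writeInt32(...)` chains visible above). As a rough illustration of that fluent-encoder idea only — the `Encoder` class below is a stand-in, not kafka-ts's actual `utils/encoder` implementation:

```ts
// Illustrative sketch of a chainable binary encoder in the spirit of the
// chains shown in the diffs; kafka-ts's real Encoder differs in detail.
class Encoder {
    private chunks: Buffer[] = [];

    writeInt32(value: number): this {
        const buf = Buffer.alloc(4);
        buf.writeInt32BE(value);
        this.chunks.push(buf);
        return this; // returning `this` is what makes the chaining work
    }

    writeUVarInt(value: number): this {
        // unsigned varint: 7 bits per byte, high bit = continuation flag
        const bytes: number[] = [];
        do {
            let byte = value & 0x7f;
            value >>>= 7;
            if (value) byte |= 0x80;
            bytes.push(byte);
        } while (value);
        this.chunks.push(Buffer.from(bytes));
        return this;
    }

    value(): Buffer {
        return Buffer.concat(this.chunks);
    }
}

// Usage in the shape the diffs show: a request function receives the encoder
// and the payload, and returns the fully chained encoder.
const encodeExample = (encoder: Encoder, data: { maxWaitMs: number }) =>
    encoder.writeUVarInt(0).writeInt32(data.maxWaitMs);

console.log(encodeExample(new Encoder(), { maxWaitMs: 5000 }).value());
```
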
package/dist/api/index.d.ts
CHANGED
package/dist/api/offset-commit.js
CHANGED
@@ -5,7 +5,7 @@ const api_1 = require("../utils/api");
 const error_1 = require("../utils/error");
 exports.OFFSET_COMMIT = (0, api_1.createApi)({
     apiKey: 8,
-    apiVersion:
+    apiVersion: 8,
     request: (encoder, data) => encoder
         .writeUVarInt(0)
         .writeCompactString(data.groupId)
package/dist/api/offset-fetch.js
CHANGED
@@ -5,13 +5,11 @@ const api_1 = require("../utils/api");
 const error_1 = require("../utils/error");
 exports.OFFSET_FETCH = (0, api_1.createApi)({
     apiKey: 9,
-    apiVersion:
+    apiVersion: 8,
     request: (encoder, data) => encoder
         .writeUVarInt(0)
         .writeCompactArray(data.groups, (encoder, group) => encoder
             .writeCompactString(group.groupId)
-            .writeCompactString(group.memberId)
-            .writeInt32(group.memberEpoch)
             .writeCompactArray(group.topics, (encoder, topic) => encoder
                 .writeCompactString(topic.name)
                 .writeCompactArray(topic.partitionIndexes, (encoder, partitionIndex) => encoder.writeInt32(partitionIndex))
package/dist/api/produce.js
CHANGED
@@ -6,7 +6,7 @@ const encoder_js_1 = require("../utils/encoder.js");
 const error_js_1 = require("../utils/error.js");
 exports.PRODUCE = (0, api_js_1.createApi)({
     apiKey: 0,
-    apiVersion:
+    apiVersion: 9,
    request: (encoder, data) => encoder
         .writeUVarInt(0)
         .writeCompactString(data.transactionalId)
package/dist/broker.js
CHANGED
@@ -38,7 +38,7 @@ class Broker {
             }
             const { apiVersion } = apiByKey[apiKey];
             if (apiVersion < minVersion || apiVersion > maxVersion) {
-                throw new error_1.KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker`);
+                throw new error_1.KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`);
             }
         });
     }
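
The only broker change in this release is a more informative error message: the version range advertised by the broker is now included when the client-side API definition falls outside it. A minimal sketch of that guard, with `apiByKey`, `ApiVersionRange`, and the surrounding function shape assumed for illustration (only the error-message format is taken from the diff):

```ts
// Sketch of the version-range check shown in the broker.js/broker.ts hunks.
class KafkaTSError extends Error {}

type ApiVersionRange = { apiKey: number; minVersion: number; maxVersion: number };

const assertSupportedVersions = (
    apiByKey: Record<number, { apiVersion: number }>,
    brokerRanges: ApiVersionRange[],
) => {
    brokerRanges.forEach(({ apiKey, minVersion, maxVersion }) => {
        if (!apiByKey[apiKey]) {
            return; // API not implemented by this client, nothing to validate
        }
        const { apiVersion } = apiByKey[apiKey];
        if (apiVersion < minVersion || apiVersion > maxVersion) {
            // 0.0.3 appends the (minVersion=..., maxVersion=...) suffix so the
            // failure explains which versions the broker would accept.
            throw new KafkaTSError(
                `API ${apiKey} version ${apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`,
            );
        }
    });
};
```
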
package/dist/cluster.test.js
CHANGED
@@ -130,8 +130,6 @@ class ConsumerGroup extends events_1.default {
             groups: [
                 {
                     groupId,
-                    memberId: this.memberId,
-                    memberEpoch: -1,
                     topics: topics
                         .map((topic) => ({ name: topic, partitionIndexes: assignment[topic] ?? [] }))
                         .filter(({ partitionIndexes }) => partitionIndexes.length),
package/package.json
CHANGED
package/src/api/fetch.ts
CHANGED
package/src/api/offset-commit.ts
CHANGED
package/src/api/offset-fetch.ts
CHANGED
@@ -3,14 +3,12 @@ import { KafkaTSApiError } from '../utils/error';
 
 export const OFFSET_FETCH = createApi({
     apiKey: 9,
-    apiVersion:
+    apiVersion: 8,
     request: (
         encoder,
         data: {
             groups: {
                 groupId: string;
-                memberId: string | null;
-                memberEpoch: number;
                 topics: {
                     name: string;
                     partitionIndexes: number[];
@@ -24,8 +22,6 @@ export const OFFSET_FETCH = createApi({
         .writeCompactArray(data.groups, (encoder, group) =>
             encoder
                 .writeCompactString(group.groupId)
-                .writeCompactString(group.memberId)
-                .writeInt32(group.memberEpoch)
                 .writeCompactArray(group.topics, (encoder, topic) =>
                     encoder
                         .writeCompactString(topic.name)
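
Pinning OFFSET_FETCH to `apiVersion: 8` goes hand in hand with dropping `memberId`/`memberEpoch` from the request, since those fields appear to belong to newer OffsetFetch schema versions; the call sites further down are trimmed to match. For reference, the request shape callers now pass looks like the sketch below — the type is transcribed from the visible part of the hunk above (any fields outside the hunk are omitted), and the literal is a made-up example, not taken from the package:

```ts
// OFFSET_FETCH request payload after 0.0.3 (memberId/memberEpoch removed).
type OffsetFetchRequest = {
    groups: {
        groupId: string;
        topics: {
            name: string;
            partitionIndexes: number[];
        }[];
    }[];
};

// Hypothetical example payload: one group asking for offsets of three partitions.
const example: OffsetFetchRequest = {
    groups: [
        {
            groupId: 'my-consumer-group',
            topics: [{ name: 'my-topic', partitionIndexes: [0, 1, 2] }],
        },
    ],
};
```
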
package/src/api/produce.ts
CHANGED
package/src/broker.ts
CHANGED
@@ -54,7 +54,7 @@ export class Broker {
             }
             const { apiVersion } = apiByKey[apiKey];
             if (apiVersion < minVersion || apiVersion > maxVersion) {
-                throw new KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker`);
+                throw new KafkaTSError(`API ${apiKey} version ${apiVersion} is not supported by the broker (minVersion=${minVersion}, maxVersion=${maxVersion})`);
             }
         });
     }
package/src/cluster.test.ts
CHANGED
@@ -147,8 +147,6 @@ export class ConsumerGroup extends EventEmitter<{ offsetCommit: [] }> {
             groups: [
                 {
                     groupId,
-                    memberId: this.memberId,
-                    memberEpoch: -1,
                     topics: topics
                         .map((topic) => ({ name: topic, partitionIndexes: assignment[topic] ?? [] }))
                         .filter(({ partitionIndexes }) => partitionIndexes.length),