kafka-ts 0.0.17-beta.0 → 0.0.17-beta.2

@@ -1,19 +1,19 @@
 export declare const CREATE_TOPICS: import("../utils/api").Api<{
     topics: {
         name: string;
-        numPartitions: number;
-        replicationFactor: number;
-        assignments: {
+        numPartitions?: number;
+        replicationFactor?: number;
+        assignments?: {
             partitionIndex: number;
             brokerIds: number[];
         }[];
-        configs: {
+        configs?: {
             name: string;
             value: string | null;
         }[];
     }[];
-    timeoutMs: number;
-    validateOnly: boolean;
+    timeoutMs?: number | undefined;
+    validateOnly?: boolean | undefined;
 }, {
     _tag: void;
     throttleTimeMs: number;
@@ -10,16 +10,16 @@ exports.CREATE_TOPICS = (0, api_1.createApi)({
         .writeUVarInt(0)
         .writeCompactArray(data.topics, (encoder, topic) => encoder
         .writeCompactString(topic.name)
-        .writeInt32(topic.numPartitions)
-        .writeInt16(topic.replicationFactor)
-        .writeCompactArray(topic.assignments, (encoder, assignment) => encoder
+        .writeInt32(topic.numPartitions ?? -1)
+        .writeInt16(topic.replicationFactor ?? -1)
+        .writeCompactArray(topic.assignments ?? [], (encoder, assignment) => encoder
         .writeInt32(assignment.partitionIndex)
         .writeCompactArray(assignment.brokerIds, (encoder, brokerId) => encoder.writeInt32(brokerId))
         .writeUVarInt(0))
-        .writeCompactArray(topic.configs, (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0))
+        .writeCompactArray(topic.configs ?? [], (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0))
         .writeUVarInt(0))
-        .writeInt32(data.timeoutMs)
-        .writeBoolean(data.validateOnly)
+        .writeInt32(data.timeoutMs ?? 10_000)
+        .writeBoolean(data.validateOnly ?? false)
         .writeUVarInt(0),
     response: (decoder) => {
         const result = {
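
Taken together, the CREATE_TOPICS changes make every request field except the topic name optional, with the defaults applied at encode time. A rough usage sketch, not taken from the package itself: API.CREATE_TOPICS and sendRequest are names that appear elsewhere in this diff, while the import path and the pre-connected cluster instance are assumptions.

    import { API } from 'kafka-ts/dist/api'; // assumed path; adjust to wherever API is actually exported

    // Stand-in for a connected Cluster instance (see dist/cluster.js in this diff).
    declare const cluster: { sendRequest<TReq, TRes>(api: unknown, body: TReq): Promise<TRes> };

    // Only `name` is required in beta.2. Omitted fields fall back to the new defaults:
    // numPartitions / replicationFactor -> -1 (let the broker decide), assignments / configs -> [],
    // timeoutMs -> 10_000 ms, validateOnly -> false.
    await cluster.sendRequest(API.CREATE_TOPICS, {
        topics: [{ name: 'example-topic' }],
    });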
@@ -3,7 +3,7 @@ export declare const DELETE_TOPICS: import("../utils/api").Api<{
         name: string | null;
         topicId: string | null;
     }[];
-    timeoutMs: number;
+    timeoutMs?: number | undefined;
 }, {
     _tag: void;
     throttleTimeMs: number;
@@ -9,7 +9,7 @@ exports.DELETE_TOPICS = (0, api_1.createApi)({
     request: (encoder, data) => encoder
         .writeUVarInt(0)
         .writeCompactArray(data.topics, (encoder, topic) => encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0))
-        .writeInt32(data.timeoutMs)
+        .writeInt32(data.timeoutMs ?? 10_000)
         .writeUVarInt(0),
     response: (decoder) => {
         const result = {
@@ -13,19 +13,19 @@ export declare const API: {
     CREATE_TOPICS: Api<{
         topics: {
             name: string;
-            numPartitions: number;
-            replicationFactor: number;
-            assignments: {
+            numPartitions?: number | undefined;
+            replicationFactor?: number | undefined;
+            assignments?: {
                 partitionIndex: number;
                 brokerIds: number[];
-            }[];
-            configs: {
+            }[] | undefined;
+            configs?: {
                 name: string;
                 value: string | null;
-            }[];
+            }[] | undefined;
         }[];
-        timeoutMs: number;
-        validateOnly: boolean;
+        timeoutMs?: number | undefined;
+        validateOnly?: boolean | undefined;
     }, {
         _tag: void;
         throttleTimeMs: number;
@@ -53,7 +53,7 @@ export declare const API: {
             name: string | null;
             topicId: string | null;
         }[];
-        timeoutMs: number;
+        timeoutMs?: number | undefined;
     }, {
         _tag: void;
         throttleTimeMs: number;
@@ -266,12 +266,12 @@ export declare const API: {
         _tag2: void;
     }>;
     METADATA: Api<{
-        topics: {
+        topics?: {
             id: string | null;
             name: string;
-        }[] | null;
-        allowTopicAutoCreation: boolean;
-        includeTopicAuthorizedOperations: boolean;
+        }[] | null | undefined;
+        allowTopicAutoCreation?: boolean | undefined;
+        includeTopicAuthorizedOperations?: boolean | undefined;
     }, {
         _tag: void;
         throttleTimeMs: number;
@@ -1,11 +1,11 @@
 export type Metadata = Awaited<ReturnType<(typeof METADATA)['response']>>;
 export declare const METADATA: import("../utils/api").Api<{
-    topics: {
+    topics?: {
         id: string | null;
         name: string;
-    }[] | null;
-    allowTopicAutoCreation: boolean;
-    includeTopicAuthorizedOperations: boolean;
+    }[] | null | undefined;
+    allowTopicAutoCreation?: boolean | undefined;
+    includeTopicAuthorizedOperations?: boolean | undefined;
 }, {
     _tag: void;
     throttleTimeMs: number;
@@ -8,9 +8,9 @@ exports.METADATA = (0, api_1.createApi)({
     apiVersion: 12,
     request: (encoder, data) => encoder
         .writeUVarInt(0)
-        .writeCompactArray(data.topics, (encoder, topic) => encoder.writeUUID(topic.id).writeCompactString(topic.name).writeUVarInt(0))
-        .writeBoolean(data.allowTopicAutoCreation)
-        .writeBoolean(data.includeTopicAuthorizedOperations)
+        .writeCompactArray(data.topics ?? null, (encoder, topic) => encoder.writeUUID(topic.id).writeCompactString(topic.name).writeUVarInt(0))
+        .writeBoolean(data.allowTopicAutoCreation ?? false)
+        .writeBoolean(data.includeTopicAuthorizedOperations ?? false)
         .writeUVarInt(0),
     response: (decoder) => {
         const result = {
package/dist/cluster.js CHANGED
@@ -16,11 +16,7 @@ class Cluster {
     async connect() {
         this.seedBroker = await this.findSeedBroker();
         this.brokerById = {};
-        const metadata = await this.sendRequest(api_1.API.METADATA, {
-            allowTopicAutoCreation: false,
-            includeTopicAuthorizedOperations: false,
-            topics: [],
-        });
+        const metadata = await this.sendRequest(api_1.API.METADATA, { topics: [] });
         this.brokerMetadata = Object.fromEntries(metadata.brokers.map((options) => [options.nodeId, options]));
     }
     async ensureConnected() {
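
connect() can drop the explicit false flags because the METADATA request now fills them in at encode time, but it keeps topics: [] on purpose: in the Kafka Metadata request an empty topic list returns broker/cluster metadata only, whereas an omitted list is encoded as null and asks for every topic. A small illustration of the request shapes, a sketch using only field names from this diff:

    const brokersOnly = { topics: [] };                               // what connect() sends: brokers, no topic metadata
    const allTopics = {};                                             // topics omitted -> encoded as null -> all topics
    const singleTopic = { topics: [{ id: null, name: 'example' }] };  // metadata for one named topic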
@@ -19,6 +19,7 @@ export type ConsumerOptions = {
     partitionMaxBytes?: number;
     allowTopicAutoCreation?: boolean;
     fromBeginning?: boolean;
+    fromTimestamp?: bigint;
     batchGranularity?: BatchGranularity;
     concurrency?: number;
     retrier?: Retrier;
@@ -51,6 +51,7 @@ class Consumer extends events_1.default {
             isolationLevel: options.isolationLevel ?? 0 /* IsolationLevel.READ_UNCOMMITTED */,
             allowTopicAutoCreation: options.allowTopicAutoCreation ?? false,
             fromBeginning: options.fromBeginning ?? false,
+            fromTimestamp: options.fromTimestamp ?? (options.fromBeginning ? -2n : -1n),
             batchGranularity: options.batchGranularity ?? 'broker',
             concurrency: options.concurrency ?? 1,
             retrier: options.retrier ?? retrier_1.defaultRetrier,
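
The new fromTimestamp option flows straight into offset resolution (see the OffsetManager hunks below), so a consumer can start from an arbitrary point in time rather than only from the beginning or the end of a topic. A hypothetical options object: the option names come from this diff, but how the consumer itself is constructed is not shown here and is assumed.

    // fromTimestamp is epoch milliseconds as a bigint; start roughly one hour back.
    const oneHourAgo = BigInt(Date.now() - 60 * 60 * 1000);

    const consumerOptions = {
        topics: ['example-topic'],
        fromTimestamp: oneHourAgo,
        // fromBeginning: true,  // still supported; beta.2 maps it to fromTimestamp -2n when fromTimestamp is unset
    };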
@@ -76,13 +77,13 @@ class Consumer extends events_1.default {
             : undefined;
     }
     async start() {
-        const { topics, allowTopicAutoCreation, fromBeginning } = this.options;
+        const { topics, allowTopicAutoCreation, fromTimestamp } = this.options;
         this.stopHook = undefined;
         try {
             await this.cluster.connect();
             await this.metadata.fetchMetadata({ topics, allowTopicAutoCreation });
             this.metadata.setAssignment(this.metadata.getTopicPartitions());
-            await this.offsetManager.fetchOffsets({ fromBeginning });
+            await this.offsetManager.fetchOffsets({ fromTimestamp });
             await this.consumerGroup?.init();
         }
         catch (error) {
@@ -15,7 +15,7 @@ export declare class OffsetManager {
     resolve(topic: string, partition: number, offset: bigint): void;
     flush(topicPartitions: Record<string, Set<number>>): void;
     fetchOffsets(options: {
-        fromBeginning: boolean;
+        fromTimestamp: bigint;
     }): Promise<void>;
     private listOffsets;
 }
@@ -40,7 +40,7 @@ class OffsetManager {
             nodeAssignment: Object.fromEntries(Object.entries(topicPartitions).map(([topicName, partitions]) => [topicName, Object.keys(partitions).map(Number)])),
         })));
     }
-    async listOffsets({ nodeId, nodeAssignment, fromBeginning, }) {
+    async listOffsets({ nodeId, nodeAssignment, fromTimestamp, }) {
         const { cluster, isolationLevel } = this.options;
         const offsets = await cluster.sendRequestToNode(nodeId)(api_1.API.LIST_OFFSETS, {
             replicaId: -1,
@@ -53,7 +53,7 @@
                 {
                     partitionIndex: partition,
                     currentLeaderEpoch: -1,
-                    timestamp: fromBeginning ? -2n : -1n,
+                    timestamp: fromTimestamp,
                 },
             ],
         })),
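
Passing fromTimestamp through unchanged preserves the old behaviour, because the consumer defaults resolve to the standard ListOffsets sentinels when no timestamp is given; anything else is treated as a real timestamp. A short reference sketch (protocol convention, not code from the package):

    const EARLIEST = -2n; // what fromBeginning: true resolves to
    const LATEST = -1n;   // the default when neither option is set
    // any positive value is epoch milliseconds: "the first offset whose record timestamp is >= this value"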
package/dist/metadata.js CHANGED
@@ -63,7 +63,6 @@ class Metadata {
         const { cluster } = this.options;
         const response = await cluster.sendRequest(api_1.API.METADATA, {
             allowTopicAutoCreation,
-            includeTopicAuthorizedOperations: false,
             topics: topics?.map((name) => ({ id: null, name })) ?? null,
         });
         this.topicPartitions = {
@@ -124,10 +124,13 @@ class Decoder {
         const length = this.readInt32();
         return Array.from({ length }).map(() => {
             const size = this.readVarInt();
+            if (!size) {
+                return null;
+            }
             const child = new Decoder(this.buffer.subarray(this.offset, this.offset + size));
             this.offset += size;
             return callback(child);
-        });
+        }).filter(x => x !== null);
     }
     read(length) {
         const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
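
The Decoder guard covers elements whose varint size reads as zero: previously an empty child slice was still handed to the callback, now such entries are dropped from the result. A standalone sketch of the resulting pattern, with illustrative names rather than the package's own Decoder API:

    function decodeSizedElements<T>(sizes: number[], decodeAt: (index: number) => T): T[] {
        return sizes
            .map((size, index) => {
                if (!size) {
                    return null; // zero-length element: nothing to decode, skip it
                }
                return decodeAt(index);
            })
            .filter((x): x is T => x !== null);
    }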
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "0.0.17-beta.0",
+    "version": "0.0.17-beta.2",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",