kafka-ts 1.3.1-beta.0 → 1.3.1-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/README.md +7 -7
  2. package/dist/api/alter-configs.d.ts +26 -0
  3. package/dist/api/alter-configs.js +33 -0
  4. package/dist/api/api-versions.d.ts +5 -2
  5. package/dist/api/api-versions.js +13 -0
  6. package/dist/api/create-topics.d.ts +14 -12
  7. package/dist/api/create-topics.js +104 -12
  8. package/dist/api/delete-topics.d.ts +10 -8
  9. package/dist/api/delete-topics.js +61 -7
  10. package/dist/api/fetch.d.ts +15 -12
  11. package/dist/api/fetch.js +131 -13
  12. package/dist/api/find-coordinator.d.ts +9 -7
  13. package/dist/api/find-coordinator.js +63 -5
  14. package/dist/api/heartbeat.d.ts +7 -5
  15. package/dist/api/heartbeat.js +42 -4
  16. package/dist/api/index.d.ts +47 -118
  17. package/dist/api/init-producer-id.d.ts +7 -5
  18. package/dist/api/init-producer-id.js +53 -9
  19. package/dist/api/join-group.d.ts +9 -7
  20. package/dist/api/join-group.js +95 -6
  21. package/dist/api/leave-group.d.ts +8 -6
  22. package/dist/api/leave-group.js +49 -6
  23. package/dist/api/list-offsets.d.ts +9 -7
  24. package/dist/api/list-offsets.js +85 -8
  25. package/dist/api/metadata.d.ts +10 -9
  26. package/dist/api/metadata.js +109 -8
  27. package/dist/api/offset-commit.d.ts +10 -8
  28. package/dist/api/offset-commit.js +88 -8
  29. package/dist/api/offset-fetch.d.ts +11 -9
  30. package/dist/api/offset-fetch.js +94 -9
  31. package/dist/api/produce.d.ts +8 -10
  32. package/dist/api/produce.js +132 -38
  33. package/dist/api/sasl-authenticate.d.ts +8 -6
  34. package/dist/api/sasl-authenticate.js +43 -3
  35. package/dist/api/sasl-handshake.d.ts +7 -4
  36. package/dist/api/sasl-handshake.js +10 -0
  37. package/dist/api/sync-group.d.ts +7 -5
  38. package/dist/api/sync-group.js +62 -5
  39. package/dist/auth/oauthbearer.js +3 -1
  40. package/dist/broker.js +6 -5
  41. package/dist/cluster.test.js +17 -14
  42. package/dist/connection.d.ts +11 -1
  43. package/dist/connection.js +27 -2
  44. package/dist/consumer/consumer.js +13 -9
  45. package/dist/consumer/metadata.d.ts +24 -0
  46. package/dist/consumer/metadata.js +64 -0
  47. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  48. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  49. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  50. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  51. package/dist/examples/src/replicator.js +34 -0
  52. package/dist/examples/src/utils/json.js +5 -0
  53. package/dist/request-handler.d.ts +16 -0
  54. package/dist/request-handler.js +67 -0
  55. package/dist/request-handler.test.d.ts +1 -0
  56. package/dist/request-handler.test.js +340 -0
  57. package/dist/src/api/api-versions.js +18 -0
  58. package/dist/src/api/create-topics.js +46 -0
  59. package/dist/src/api/delete-topics.js +26 -0
  60. package/dist/src/api/fetch.js +95 -0
  61. package/dist/src/api/find-coordinator.js +34 -0
  62. package/dist/src/api/heartbeat.js +22 -0
  63. package/dist/src/api/index.js +38 -0
  64. package/dist/src/api/init-producer-id.js +24 -0
  65. package/dist/src/api/join-group.js +48 -0
  66. package/dist/src/api/leave-group.js +30 -0
  67. package/dist/src/api/list-offsets.js +39 -0
  68. package/dist/src/api/metadata.js +47 -0
  69. package/dist/src/api/offset-commit.js +39 -0
  70. package/dist/src/api/offset-fetch.js +44 -0
  71. package/dist/src/api/produce.js +119 -0
  72. package/dist/src/api/sync-group.js +31 -0
  73. package/dist/src/broker.js +35 -0
  74. package/dist/src/connection.js +21 -0
  75. package/dist/src/consumer/consumer-group.js +131 -0
  76. package/dist/src/consumer/consumer.js +103 -0
  77. package/dist/src/consumer/metadata.js +52 -0
  78. package/dist/src/consumer/offset-manager.js +23 -0
  79. package/dist/src/index.js +19 -0
  80. package/dist/src/producer/producer.js +84 -0
  81. package/dist/src/request-handler.js +57 -0
  82. package/dist/src/request-handler.test.js +321 -0
  83. package/dist/src/types.js +2 -0
  84. package/dist/src/utils/api.js +5 -0
  85. package/dist/src/utils/decoder.js +161 -0
  86. package/dist/src/utils/encoder.js +137 -0
  87. package/dist/src/utils/error.js +10 -0
  88. package/dist/utils/api.d.ts +4 -1
  89. package/dist/utils/cached.d.ts +3 -0
  90. package/dist/utils/cached.js +19 -0
  91. package/dist/utils/debug.d.ts +2 -0
  92. package/dist/utils/debug.js +11 -0
  93. package/dist/utils/decoder.d.ts +2 -2
  94. package/dist/utils/decoder.js +14 -1
  95. package/dist/utils/encoder.d.ts +1 -0
  96. package/dist/utils/encoder.js +14 -0
  97. package/dist/utils/lock.d.ts +8 -0
  98. package/dist/utils/lock.js +44 -0
  99. package/dist/utils/memo.d.ts +1 -0
  100. package/dist/utils/memo.js +16 -0
  101. package/dist/utils/mutex.d.ts +3 -0
  102. package/dist/utils/mutex.js +32 -0
  103. package/dist/utils/number.d.ts +1 -0
  104. package/dist/utils/number.js +7 -0
  105. package/package.json +1 -1
package/README.md CHANGED
@@ -127,9 +127,9 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu

  ### New features compared to kafkajs

- - **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
- - **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
- - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.
+ - **Static consumer membership** - Rebalancing during rolling deployments causes delays. Using `groupInstanceId` in addition to `groupId` can avoid rebalancing and continue consuming partitions in the existing assignment.
+ - **Consuming messages without consumer groups** - When you don't need the consumer to track the partition offsets, you can simply create a consumer without groupId and always either start consuming messages from the beginning or from the latest partition offset.
+ - **Low-level API requests** - It's possible to communicate directly with the Kafka cluster using the kafka api protocol.

  ## Configuration

@@ -145,10 +145,10 @@ The existing high-level libraries (e.g. kafkajs) are missing a few crucial featu

  #### Supported SASL mechanisms

- - PLAIN: `saslPlain({ username, password })`
- - SCRAM-SHA-256: `saslScramSha256({ username, password })`
- - SCRAM-SHA-512: `saslScramSha512({ username, password })`
- - OAUTHBEARER: `oAuthBearer(oAuthAuthenticator({ endpoint, clientId, clientSecret }))`
+ - PLAIN: `saslPlain({ username, password })`
+ - SCRAM-SHA-256: `saslScramSha256({ username, password })`
+ - SCRAM-SHA-512: `saslScramSha512({ username, password })`
+ - OAUTHBEARER: `oAuthBearer(oAuthAuthenticator({ endpoint, clientId, clientSecret }))`

  Custom SASL mechanisms can be implemented following the `SASLProvider` interface. See [src/auth](./src/auth) for examples.

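To ground the README bullets above: only the option names `groupId` and `groupInstanceId` and the SASL helpers (`saslPlain`, `saslScramSha256`, `saslScramSha512`, `oAuthBearer`, `oAuthAuthenticator`) come from the README itself. The variable names and the `fromBeginning` flag in the sketch below are illustrative assumptions, not the package's documented API.

```typescript
// Illustrative sketch only -- option shapes implied by the README bullets, not kafka-ts's
// documented setup code. Anything not named in the README above is an assumption.

// Static membership: a stable groupInstanceId per instance lets a rolling restart rejoin
// with its previous partition assignment instead of triggering a group rebalance.
const staticMemberOptions = {
    groupId: 'order-processor',
    groupInstanceId: `order-processor-${process.env.HOSTNAME ?? 'local'}`,
};

// Group-less consumption: with no groupId nothing tracks offsets, so each start explicitly
// picks a position (beginning or latest). The flag name here is assumed for illustration.
const groupLessOptions = {
    topics: ['orders'],
    fromBeginning: true,
};

// SASL is configured with one of the helpers listed above, e.g.
// saslScramSha512({ username, password }) or
// oAuthBearer(oAuthAuthenticator({ endpoint, clientId, clientSecret })).
```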
package/dist/api/alter-configs.d.ts ADDED
@@ -0,0 +1,26 @@
+ export declare const ALTER_CONFIGS: import("../utils/api").Api<{
+ resources: {
+ resourceType: number;
+ resourceName: string;
+ configs: {
+ name: string;
+ value: string | null;
+ }[];
+ }[];
+ validateOnly: boolean;
+ }, {
+ _tag: void;
+ responses: {
+ name: string | null;
+ partitionResponses: {
+ index: number;
+ errorCode: number;
+ baseOffset: bigint;
+ logAppendTimeMs: bigint;
+ _tag: void;
+ }[];
+ _tag: void;
+ }[];
+ throttleTimeMs: number;
+ _tag2: void;
+ }>;
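For orientation, a request payload matching the new `ALTER_CONFIGS` declaration above could look like the sketch below. The topic and config values are made up; `resourceType: 2` is the Kafka protocol's ConfigResource code for a topic (4 is a broker).

```typescript
// Hypothetical payload shaped by the ALTER_CONFIGS request type above.
// resourceType 2 = TOPIC, 4 = BROKER (Kafka ConfigResource codes); names/values are examples.
const alterConfigsRequest = {
    resources: [
        {
            resourceType: 2,
            resourceName: 'payments.events',
            configs: [
                { name: 'retention.ms', value: '604800000' }, // keep messages for 7 days
                { name: 'cleanup.policy', value: 'delete' },
            ],
        },
    ],
    validateOnly: false, // set true to dry-run the change on the broker
};
```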
package/dist/api/alter-configs.js ADDED
@@ -0,0 +1,33 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ALTER_CONFIGS = void 0;
+ const api_1 = require("../utils/api");
+ exports.ALTER_CONFIGS = (0, api_1.createApi)({
+ apiKey: 33,
+ apiVersion: 2,
+ request: (encoder, data) => encoder
+ .writeUVarInt(0)
+ .writeCompactArray(data.resources, (encoder, item) => encoder
+ .writeInt8(item.resourceType)
+ .writeCompactString(item.resourceName)
+ .writeCompactArray(item.configs, (encoder, item) => encoder.writeCompactString(item.name).writeCompactString(item.value).writeUVarInt(0))
+ .writeUVarInt(0))
+ .writeBoolean(data.validateOnly)
+ .writeUVarInt(0),
+ response: (decoder) => ({
+ _tag: decoder.readTagBuffer(),
+ responses: decoder.readCompactArray((decoder) => ({
+ name: decoder.readCompactString(),
+ partitionResponses: decoder.readCompactArray((decoder) => ({
+ index: decoder.readInt32(),
+ errorCode: decoder.readInt16(),
+ baseOffset: decoder.readInt64(),
+ logAppendTimeMs: decoder.readInt64(),
+ _tag: decoder.readTagBuffer(),
+ })),
+ _tag: decoder.readTagBuffer(),
+ })),
+ throttleTimeMs: decoder.readInt32(),
+ _tag2: decoder.readTagBuffer(),
+ }),
+ });
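The request encoder above leans on two "flexible version" primitives of the Kafka wire format: compact strings, which store length + 1 as an unsigned varint, and tagged-field blocks, where an empty block is a single varint 0 (which is what the `.writeUVarInt(0)` calls emit). A standalone sketch of those rules, not the package's own Encoder:

```typescript
// Standalone sketch of the wire rules used above; not kafka-ts's Encoder class.
function writeUVarInt(out: number[], value: number): void {
    // Unsigned varint: 7 payload bits per byte, high bit set while more bytes follow.
    while (value >= 0x80) {
        out.push((value & 0x7f) | 0x80);
        value >>>= 7;
    }
    out.push(value);
}

function writeCompactString(out: number[], value: string | null): void {
    // COMPACT_STRING: varint of (byte length + 1), 0 meaning null, then the UTF-8 bytes.
    if (value === null) return writeUVarInt(out, 0);
    const bytes = Buffer.from(value, 'utf8');
    writeUVarInt(out, bytes.length + 1);
    out.push(...bytes);
}

const wire: number[] = [];
writeCompactString(wire, 'retention.ms');
writeUVarInt(wire, 0); // an empty tagged-field block is just the single byte 0x00
```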
package/dist/api/api-versions.d.ts CHANGED
@@ -1,4 +1,5 @@
- export declare const API_VERSIONS: import("../utils/api.js").Api<unknown, {
+ type ApiVersionsRequest = {};
+ type ApiVersionsResponse = {
  errorCode: number;
  versions: {
  apiKey: number;
@@ -6,4 +7,6 @@ export declare const API_VERSIONS: import("../utils/api.js").Api<unknown, {
  maxVersion: number;
  }[];
  throttleTimeMs: number;
- }>;
+ };
+ export declare const API_VERSIONS: import("../utils/api.js").Api<ApiVersionsRequest, ApiVersionsResponse>;
+ export {};
package/dist/api/api-versions.js CHANGED
@@ -3,9 +3,22 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.API_VERSIONS = void 0;
  const api_js_1 = require("../utils/api.js");
  const error_js_1 = require("../utils/error.js");
+ /*
+ ApiVersions Request (Version: 2) =>
+
+ ApiVersions Response (Version: 2) => error_code [api_keys] throttle_time_ms
+ error_code => INT16
+ api_keys => api_key min_version max_version
+ api_key => INT16
+ min_version => INT16
+ max_version => INT16
+ throttle_time_ms => INT32
+ */
  exports.API_VERSIONS = (0, api_js_1.createApi)({
  apiKey: 18,
  apiVersion: 2,
+ requestHeaderVersion: 1,
+ responseHeaderVersion: 0,
  request: (encoder) => encoder,
  response: (decoder) => {
  const result = {
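The `requestHeaderVersion`/`responseHeaderVersion` metadata and the broker's ApiVersions response are what make version negotiation possible. The package's own selection logic lives in `dist/request-handler.js` (not shown in this excerpt); the sketch below only illustrates the min/max check that the response enables, and `supportsVersion` is a hypothetical helper name.

```typescript
// Hypothetical helper: checks a broker-advertised version range from the ApiVersions
// response before sending a request at a given version. Not the package's implementation.
type VersionRange = { apiKey: number; minVersion: number; maxVersion: number };

function supportsVersion(versions: VersionRange[], apiKey: number, version: number): boolean {
    const range = versions.find((v) => v.apiKey === apiKey);
    return !!range && version >= range.minVersion && version <= range.maxVersion;
}

// e.g. prefer CreateTopics (apiKey 19) v7 and fall back to the v2 definition otherwise,
// mirroring the `fallback` field introduced in create-topics.js below:
// const api = supportsVersion(versions, 19, 7) ? CREATE_TOPICS : CREATE_TOPICS_V2;
```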
package/dist/api/create-topics.d.ts CHANGED
@@ -1,4 +1,4 @@
- export declare const CREATE_TOPICS: import("../utils/api").Api<{
+ type CreateTopicsRequest = {
  topics: {
  name: string;
  numPartitions?: number;
@@ -14,25 +14,27 @@ export declare const CREATE_TOPICS: import("../utils/api").Api<{
  }[];
  timeoutMs?: number;
  validateOnly?: boolean;
- }, {
- _tag: void;
+ };
+ type CreateTopicsResponse = {
  throttleTimeMs: number;
  topics: {
- name: string | null;
- topicId: string;
+ name: string;
+ _topicId: string;
  errorCode: number;
  errorMessage: string | null;
- numPartitions: number;
- replicationFactor: number;
+ _numPartitions: number;
+ _replicationFactor: number;
  configs: {
- name: string | null;
+ name: string;
  value: string | null;
  readOnly: boolean;
  configSource: number;
  isSensitive: boolean;
- _tag: void;
+ tags: Record<number, Buffer>;
  }[];
- _tag: void;
+ tags: Record<number, Buffer>;
  }[];
- _tag2: void;
- }>;
+ tags: Record<number, Buffer>;
+ };
+ export declare const CREATE_TOPICS: import("../utils/api").Api<CreateTopicsRequest, CreateTopicsResponse>;
+ export {};
package/dist/api/create-topics.js CHANGED
@@ -3,11 +3,104 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.CREATE_TOPICS = void 0;
  const api_1 = require("../utils/api");
  const error_1 = require("../utils/error");
+ /*
+ CreateTopics Request (Version: 2) => [topics] timeout_ms validate_only
+ topics => name num_partitions replication_factor [assignments] [configs]
+ name => STRING
+ num_partitions => INT32
+ replication_factor => INT16
+ assignments => partition_index [broker_ids]
+ partition_index => INT32
+ broker_ids => INT32
+ configs => name value
+ name => STRING
+ value => NULLABLE_STRING
+ timeout_ms => INT32
+ validate_only => BOOLEAN
+
+ CreateTopics Response (Version: 2) => throttle_time_ms [topics]
+ throttle_time_ms => INT32
+ topics => name error_code error_message
+ name => STRING
+ error_code => INT16
+ error_message => NULLABLE_STRING
+ */
+ const CREATE_TOPICS_V2 = (0, api_1.createApi)({
+ apiKey: 19,
+ apiVersion: 2,
+ requestHeaderVersion: 1,
+ responseHeaderVersion: 0,
+ request: (encoder, data) => encoder
+ .writeArray(data.topics, (encoder, topic) => encoder
+ .writeString(topic.name)
+ .writeInt32(topic.numPartitions ?? -1)
+ .writeInt16(topic.replicationFactor ?? -1)
+ .writeArray(topic.assignments ?? [], (encoder, assignment) => encoder
+ .writeInt32(assignment.partitionIndex)
+ .writeArray(assignment.brokerIds, (encoder, brokerId) => encoder.writeInt32(brokerId)))
+ .writeArray(topic.configs ?? [], (encoder, config) => encoder.writeString(config.name).writeString(config.value)))
+ .writeInt32(data.timeoutMs ?? 10_000)
+ .writeBoolean(data.validateOnly ?? false),
+ response: (decoder) => {
+ const result = {
+ throttleTimeMs: decoder.readInt32(),
+ topics: decoder.readArray((topic) => ({
+ name: topic.readString(),
+ _topicId: '', // TopicId not present in v2 response
+ errorCode: topic.readInt16(),
+ errorMessage: topic.readString(),
+ _numPartitions: 0, // Not present in v2 response
+ _replicationFactor: 0, // Not present in v2 response
+ configs: [], // Not present in v2 response
+ tags: {},
+ })),
+ tags: {},
+ };
+ result.topics.forEach((topic) => {
+ if (topic.errorCode)
+ throw new error_1.KafkaTSApiError(topic.errorCode, topic.errorMessage, result);
+ });
+ return result;
+ },
+ });
+ /*
+ CreateTopics Request (Version: 7) => [topics] timeout_ms validate_only _tagged_fields
+ topics => name num_partitions replication_factor [assignments] [configs] _tagged_fields
+ name => COMPACT_STRING
+ num_partitions => INT32
+ replication_factor => INT16
+ assignments => partition_index [broker_ids] _tagged_fields
+ partition_index => INT32
+ broker_ids => INT32
+ configs => name value _tagged_fields
+ name => COMPACT_STRING
+ value => COMPACT_NULLABLE_STRING
+ timeout_ms => INT32
+ validate_only => BOOLEAN
+
+ CreateTopics Response (Version: 7) => throttle_time_ms [topics] _tagged_fields
+ throttle_time_ms => INT32
+ topics => name topic_id error_code error_message num_partitions replication_factor [configs] _tagged_fields
+ name => COMPACT_STRING
+ topic_id => UUID
+ error_code => INT16
+ error_message => COMPACT_NULLABLE_STRING
+ num_partitions => INT32
+ replication_factor => INT16
+ configs => name value read_only config_source is_sensitive _tagged_fields
+ name => COMPACT_STRING
+ value => COMPACT_NULLABLE_STRING
+ read_only => BOOLEAN
+ config_source => INT8
+ is_sensitive => BOOLEAN
+ */
  exports.CREATE_TOPICS = (0, api_1.createApi)({
  apiKey: 19,
  apiVersion: 7,
+ fallback: CREATE_TOPICS_V2,
+ requestHeaderVersion: 2,
+ responseHeaderVersion: 1,
  request: (encoder, data) => encoder
- .writeUVarInt(0)
  .writeCompactArray(data.topics, (encoder, topic) => encoder
  .writeCompactString(topic.name)
  .writeInt32(topic.numPartitions ?? -1)
@@ -15,34 +108,33 @@ exports.CREATE_TOPICS = (0, api_1.createApi)({
  .writeCompactArray(topic.assignments ?? [], (encoder, assignment) => encoder
  .writeInt32(assignment.partitionIndex)
  .writeCompactArray(assignment.brokerIds, (encoder, brokerId) => encoder.writeInt32(brokerId))
- .writeUVarInt(0))
- .writeCompactArray(topic.configs ?? [], (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeUVarInt(0))
- .writeUVarInt(0))
+ .writeTagBuffer())
+ .writeCompactArray(topic.configs ?? [], (encoder, config) => encoder.writeCompactString(config.name).writeCompactString(config.value).writeTagBuffer())
+ .writeTagBuffer())
  .writeInt32(data.timeoutMs ?? 10_000)
  .writeBoolean(data.validateOnly ?? false)
- .writeUVarInt(0),
+ .writeTagBuffer(),
  response: (decoder) => {
  const result = {
- _tag: decoder.readTagBuffer(),
  throttleTimeMs: decoder.readInt32(),
  topics: decoder.readCompactArray((topic) => ({
  name: topic.readCompactString(),
- topicId: topic.readUUID(),
+ _topicId: topic.readUUID(),
  errorCode: topic.readInt16(),
  errorMessage: topic.readCompactString(),
- numPartitions: topic.readInt32(),
- replicationFactor: topic.readInt16(),
+ _numPartitions: topic.readInt32(),
+ _replicationFactor: topic.readInt16(),
  configs: topic.readCompactArray((config) => ({
  name: config.readCompactString(),
  value: config.readCompactString(),
  readOnly: config.readBoolean(),
  configSource: config.readInt8(),
  isSensitive: config.readBoolean(),
- _tag: config.readTagBuffer(),
+ tags: config.readTagBuffer(),
  })),
- _tag: topic.readTagBuffer(),
+ tags: topic.readTagBuffer(),
  })),
- _tag2: decoder.readTagBuffer(),
+ tags: decoder.readTagBuffer(),
  };
  result.topics.forEach((topic) => {
  if (topic.errorCode)
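Where the old decoder discarded tagged fields into `_tag: void`, the updated response types expose them as `tags: Record<number, Buffer>`. The tagged-field block itself (per KIP-482) is an unsigned varint count followed by (tag, size, bytes) triples; a standalone sketch of that read path, not the package's Decoder:

```typescript
// Standalone sketch of decoding a tagged-field block into Record<number, Buffer>;
// follows KIP-482's layout, not kafka-ts's Decoder implementation.
function readUVarInt(buf: Buffer, offset: number): { value: number; offset: number } {
    let value = 0;
    let shift = 0;
    let byte: number;
    do {
        byte = buf[offset++];
        value |= (byte & 0x7f) << shift;
        shift += 7;
    } while (byte & 0x80);
    return { value, offset };
}

function readTagBuffer(buf: Buffer, offset: number): { tags: Record<number, Buffer>; offset: number } {
    const tags: Record<number, Buffer> = {};
    let { value: count, offset: pos } = readUVarInt(buf, offset);
    while (count-- > 0) {
        const tag = readUVarInt(buf, pos);         // field tag
        const size = readUVarInt(buf, tag.offset); // field size in bytes
        tags[tag.value] = buf.subarray(size.offset, size.offset + size.value);
        pos = size.offset + size.value;
    }
    return { tags, offset: pos };
}
```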
package/dist/api/delete-topics.d.ts CHANGED
@@ -1,18 +1,20 @@
- export declare const DELETE_TOPICS: import("../utils/api").Api<{
+ type DeleteTopicsRequest = {
  topics: {
- name: string | null;
+ name: string;
  topicId: string | null;
  }[];
  timeoutMs?: number;
- }, {
- _tag: void;
+ };
+ type DeleteTopicsResponse = {
  throttleTimeMs: number;
  responses: {
  name: string | null;
- topicId: string;
+ _topicId: string;
  errorCode: number;
  errorMessage: string | null;
- _tag: void;
+ tags: Record<number, Buffer>;
  }[];
- _tag2: void;
- }>;
+ tags: Record<number, Buffer>;
+ };
+ export declare const DELETE_TOPICS: import("../utils/api").Api<DeleteTopicsRequest, DeleteTopicsResponse>;
+ export {};
package/dist/api/delete-topics.js CHANGED
@@ -3,26 +3,80 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.DELETE_TOPICS = void 0;
  const api_1 = require("../utils/api");
  const error_1 = require("../utils/error");
+ /*
+ DeleteTopics Request (Version: 1) => [topic_names] timeout_ms
+ topic_names => STRING
+ timeout_ms => INT32
+
+ DeleteTopics Response (Version: 1) => throttle_time_ms [responses]
+ throttle_time_ms => INT32
+ responses => name error_code
+ name => STRING
+ error_code => INT16
+ */
+ const DELETE_TOPICS_V1 = (0, api_1.createApi)({
+ apiKey: 20,
+ apiVersion: 1,
+ requestHeaderVersion: 1,
+ responseHeaderVersion: 0,
+ request: (encoder, data) => encoder
+ .writeArray(data.topics, (encoder, topic) => encoder.writeString(topic.name))
+ .writeInt32(data.timeoutMs ?? 10_000),
+ response: (decoder) => {
+ const result = {
+ throttleTimeMs: decoder.readInt32(),
+ responses: decoder.readArray((decoder) => ({
+ name: decoder.readString(),
+ _topicId: '', // TopicId not present in v1 response
+ errorCode: decoder.readInt16(),
+ errorMessage: null,
+ tags: {},
+ })),
+ tags: {},
+ };
+ result.responses.forEach((response) => {
+ if (response.errorCode)
+ throw new error_1.KafkaTSApiError(response.errorCode, response.errorMessage, result);
+ });
+ return result;
+ },
+ });
+ /*
+ DeleteTopics Request (Version: 6) => [topics] timeout_ms _tagged_fields
+ topics => name topic_id _tagged_fields
+ name => COMPACT_NULLABLE_STRING
+ topic_id => UUID
+ timeout_ms => INT32
+
+ DeleteTopics Response (Version: 6) => throttle_time_ms [responses] _tagged_fields
+ throttle_time_ms => INT32
+ responses => name topic_id error_code error_message _tagged_fields
+ name => COMPACT_NULLABLE_STRING
+ topic_id => UUID
+ error_code => INT16
+ error_message => COMPACT_NULLABLE_STRING
+ */
  exports.DELETE_TOPICS = (0, api_1.createApi)({
  apiKey: 20,
  apiVersion: 6,
+ fallback: DELETE_TOPICS_V1,
+ requestHeaderVersion: 2,
+ responseHeaderVersion: 1,
  request: (encoder, data) => encoder
- .writeUVarInt(0)
- .writeCompactArray(data.topics, (encoder, topic) => encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeUVarInt(0))
+ .writeCompactArray(data.topics, (encoder, topic) => encoder.writeCompactString(topic.name).writeUUID(topic.topicId).writeTagBuffer())
  .writeInt32(data.timeoutMs ?? 10_000)
- .writeUVarInt(0),
+ .writeTagBuffer(),
  response: (decoder) => {
  const result = {
- _tag: decoder.readTagBuffer(),
  throttleTimeMs: decoder.readInt32(),
  responses: decoder.readCompactArray((decoder) => ({
  name: decoder.readCompactString(),
- topicId: decoder.readUUID(),
+ _topicId: decoder.readUUID(),
  errorCode: decoder.readInt16(),
  errorMessage: decoder.readCompactString(),
- _tag: decoder.readTagBuffer(),
+ tags: decoder.readTagBuffer(),
  })),
- _tag2: decoder.readTagBuffer(),
+ tags: decoder.readTagBuffer(),
  };
  result.responses.forEach((response) => {
  if (response.errorCode)
package/dist/api/fetch.d.ts CHANGED
@@ -2,8 +2,7 @@ export declare const enum IsolationLevel {
  READ_UNCOMMITTED = 0,
  READ_COMMITTED = 1
  }
- export type FetchResponse = Awaited<ReturnType<(typeof FETCH)['response']>>;
- export declare const FETCH: import("../utils/api").Api<{
+ type FetchRequest = {
  maxWaitMs: number;
  minBytes: number;
  maxBytes: number;
@@ -12,6 +11,7 @@ export declare const FETCH: import("../utils/api").Api<{
  sessionEpoch: number;
  topics: {
  topicId: string;
+ topicName: string;
  partitions: {
  partition: number;
  currentLeaderEpoch: number;
@@ -23,16 +23,20 @@ export declare const FETCH: import("../utils/api").Api<{
  }[];
  forgottenTopicsData: {
  topicId: string;
+ topicName: string;
  partitions: number[];
  }[];
  rackId: string;
- }, {
- _tag: void;
+ };
+ export type FetchResponse = {
  throttleTimeMs: number;
  errorCode: number;
  sessionId: number;
- responses: {
+ responses: (({
  topicId: string;
+ } | {
+ topicName: string;
+ }) & {
  partitions: {
  partitionIndex: number;
  errorCode: number;
@@ -42,7 +46,6 @@ export declare const FETCH: import("../utils/api").Api<{
  abortedTransactions: {
  producerId: bigint;
  firstOffset: bigint;
- _tag: void;
  }[];
  preferredReadReplica: number;
  records: {
@@ -53,10 +56,11 @@ export declare const FETCH: import("../utils/api").Api<{
  crc: number;
  attributes: number;
  compression: number;
- timestampType: string;
+ timestampType: 'CreateTime' | 'LogAppendTime';
  isTransactional: boolean;
  isControlBatch: boolean;
  hasDeleteHorizonMs: boolean;
+ deleteHorizonMs: bigint | null;
  lastOffsetDelta: number;
  baseTimestamp: bigint;
  maxTimestamp: bigint;
@@ -75,9 +79,8 @@ export declare const FETCH: import("../utils/api").Api<{
  }[];
  }[];
  }[];
- _tag: void;
  }[];
- _tag: void;
- }[];
- _tag2: void;
- }>;
+ })[];
+ };
+ export declare const FETCH: import("../utils/api").Api<FetchRequest, FetchResponse>;
+ export {};
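The newly typed `compression`, `timestampType`, `isTransactional`, `isControlBatch`, and `hasDeleteHorizonMs` fields are all views over the record batch's 16-bit `attributes` value. The sketch below shows that mapping as defined by the Kafka RecordBatch (magic v2) format; it mirrors the field names above but is not the package's own decode path.

```typescript
// Mapping from the raw RecordBatch attributes bitfield to the flags typed above
// (Kafka RecordBatch magic v2 layout); illustrative, not kafka-ts's decoder.
function parseBatchAttributes(attributes: number) {
    return {
        compression: attributes & 0x07,                // bits 0-2: 0=none, 1=gzip, 2=snappy, 3=lz4, 4=zstd
        timestampType: (attributes & 0x08) !== 0       // bit 3
            ? ('LogAppendTime' as const)
            : ('CreateTime' as const),
        isTransactional: (attributes & 0x10) !== 0,    // bit 4
        isControlBatch: (attributes & 0x20) !== 0,     // bit 5
        hasDeleteHorizonMs: (attributes & 0x40) !== 0, // bit 6
    };
}
```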