@platformatic/kafka 1.4.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -13,6 +13,10 @@ A modern, high-performance, pure TypeScript/JavaScript type safe client for Apac
13
13
  - **Connection Management**: Automatic connection pooling and recovery.
14
14
  - **Low Dependencies**: Minimal external dependencies.
15
15
 
16
+ ## Supported Kafka Versions
17
+
18
+ Supported Kafka versions range from **3.5.0** to **4.0.0** (and equivalents), both edges included.
19
+
16
20
  ## Installation
17
21
 
18
22
  ```bash
@@ -0,0 +1,46 @@
1
+ import { Reader } from '../../protocol/reader.ts';
2
+ import { type RecordsBatch } from '../../protocol/records.ts';
3
+ import { Writer } from '../../protocol/writer.ts';
4
+ export interface FetchRequestPartition {
5
+ partition: number;
6
+ currentLeaderEpoch: number;
7
+ fetchOffset: bigint;
8
+ lastFetchedEpoch: number;
9
+ partitionMaxBytes: number;
10
+ }
11
+ export interface FetchRequestTopic {
12
+ topicId: string;
13
+ partitions: FetchRequestPartition[];
14
+ }
15
+ export interface FetchRequestForgottenTopicsData {
16
+ topic: string;
17
+ partitions: number[];
18
+ }
19
+ export type FetchRequest = Parameters<typeof createRequest>;
20
+ export interface FetchResponsePartitionAbortedTransaction {
21
+ producerId: bigint;
22
+ firstOffset: bigint;
23
+ }
24
+ export interface FetchResponsePartition {
25
+ partitionIndex: number;
26
+ errorCode: number;
27
+ highWatermark: bigint;
28
+ lastStableOffset: bigint;
29
+ logStartOffset: bigint;
30
+ abortedTransactions: FetchResponsePartitionAbortedTransaction[];
31
+ preferredReadReplica: number;
32
+ records?: RecordsBatch;
33
+ }
34
+ export interface FetchResponseTopic {
35
+ topicId: string;
36
+ partitions: FetchResponsePartition[];
37
+ }
38
+ export type FetchResponse = {
39
+ throttleTimeMs: number;
40
+ errorCode: number;
41
+ sessionId: number;
42
+ responses: FetchResponseTopic[];
43
+ };
44
+ export declare function createRequest(maxWaitMs: number, minBytes: number, maxBytes: number, isolationLevel: number, sessionId: number, sessionEpoch: number, topics: FetchRequestTopic[], forgottenTopicsData: FetchRequestForgottenTopicsData[], rackId: string): Writer;
45
+ export declare function parseResponse(_correlationId: number, apiKey: number, apiVersion: number, reader: Reader): FetchResponse;
46
+ export declare const api: import("../definitions.ts").API<[maxWaitMs: number, minBytes: number, maxBytes: number, isolationLevel: number, sessionId: number, sessionEpoch: number, topics: FetchRequestTopic[], forgottenTopicsData: FetchRequestForgottenTopicsData[], rackId: string], FetchResponse>;
@@ -0,0 +1,121 @@
1
+ import { ResponseError } from "../../errors.js";
2
+ import { Reader } from "../../protocol/reader.js";
3
+ import { readRecordsBatch } from "../../protocol/records.js";
4
+ import { Writer } from "../../protocol/writer.js";
5
+ import { createAPI } from "../definitions.js";
6
+ /*
7
+ Fetch Request (Version: 15) => max_wait_ms min_bytes max_bytes isolation_level session_id session_epoch [topics] [forgotten_topics_data] rack_id TAG_BUFFER
8
+ max_wait_ms => INT32
9
+ min_bytes => INT32
10
+ max_bytes => INT32
11
+ isolation_level => INT8
12
+ session_id => INT32
13
+ session_epoch => INT32
14
+ topics => topic_id [partitions] TAG_BUFFER
15
+ topic_id => UUID
16
+ partitions => partition current_leader_epoch fetch_offset last_fetched_epoch log_start_offset partition_max_bytes TAG_BUFFER
17
+ partition => INT32
18
+ current_leader_epoch => INT32
19
+ fetch_offset => INT64
20
+ last_fetched_epoch => INT32
21
+ log_start_offset => INT64
22
+ partition_max_bytes => INT32
23
+ forgotten_topics_data => topic_id [partitions] TAG_BUFFER
24
+ topic_id => UUID
25
+ partitions => INT32
26
+ rack_id => COMPACT_STRING
27
+ */
28
+ export function createRequest(maxWaitMs, minBytes, maxBytes, isolationLevel, sessionId, sessionEpoch, topics, forgottenTopicsData, rackId) {
29
+ return Writer.create()
30
+ .appendInt32(maxWaitMs)
31
+ .appendInt32(minBytes)
32
+ .appendInt32(maxBytes)
33
+ .appendInt8(isolationLevel)
34
+ .appendInt32(sessionId)
35
+ .appendInt32(sessionEpoch)
36
+ .appendArray(topics, (w, t) => {
37
+ w.appendUUID(t.topicId).appendArray(t.partitions, (w, p) => {
38
+ w.appendInt32(p.partition)
39
+ .appendInt32(p.currentLeaderEpoch)
40
+ .appendInt64(p.fetchOffset)
41
+ .appendInt32(p.lastFetchedEpoch)
42
+ .appendInt64(-1n)
43
+ .appendInt32(p.partitionMaxBytes);
44
+ });
45
+ })
46
+ .appendArray(forgottenTopicsData, (w, t) => {
47
+ w.appendUUID(t.topic).appendArray(t.partitions, (w, p) => {
48
+ w.appendInt32(p);
49
+ }, true, false);
50
+ })
51
+ .appendString(rackId)
52
+ .appendTaggedFields();
53
+ }
54
+ /*
55
+ Fetch Response (Version: 15) => throttle_time_ms error_code session_id [responses] TAG_BUFFER
56
+ throttle_time_ms => INT32
57
+ error_code => INT16
58
+ session_id => INT32
59
+ responses => topic_id [partitions] TAG_BUFFER
60
+ topic_id => UUID
61
+ partitions => partition_index error_code high_watermark last_stable_offset log_start_offset [aborted_transactions] preferred_read_replica records TAG_BUFFER
62
+ partition_index => INT32
63
+ error_code => INT16
64
+ high_watermark => INT64
65
+ last_stable_offset => INT64
66
+ log_start_offset => INT64
67
+ aborted_transactions => producer_id first_offset TAG_BUFFER
68
+ producer_id => INT64
69
+ first_offset => INT64
70
+ preferred_read_replica => INT32
71
+ records => COMPACT_RECORDS
72
+ */
73
+ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
74
+ const errors = [];
75
+ const throttleTimeMs = reader.readInt32();
76
+ const errorCode = reader.readInt16();
77
+ if (errorCode !== 0) {
78
+ errors.push(['', errorCode]);
79
+ }
80
+ const response = {
81
+ throttleTimeMs,
82
+ errorCode,
83
+ sessionId: reader.readInt32(),
84
+ responses: reader.readArray((r, i) => {
85
+ return {
86
+ topicId: r.readUUID(),
87
+ partitions: r.readArray((r, j) => {
88
+ const partition = {
89
+ partitionIndex: r.readInt32(),
90
+ errorCode: r.readInt16(),
91
+ highWatermark: r.readInt64(),
92
+ lastStableOffset: r.readInt64(),
93
+ logStartOffset: r.readInt64(),
94
+ abortedTransactions: r.readArray(r => {
95
+ return {
96
+ producerId: r.readInt64(),
97
+ firstOffset: r.readInt64()
98
+ };
99
+ }),
100
+ preferredReadReplica: r.readInt32()
101
+ };
102
+ let recordsSize = r.readUnsignedVarInt();
103
+ if (partition.errorCode !== 0) {
104
+ errors.push([`/responses/${i}/partitions/${j}`, partition.errorCode]);
105
+ }
106
+ if (recordsSize > 1) {
107
+ recordsSize--;
108
+ partition.records = readRecordsBatch(Reader.from(r.buffer.subarray(r.position, r.position + recordsSize)));
109
+ r.skip(recordsSize);
110
+ }
111
+ return partition;
112
+ })
113
+ };
114
+ })
115
+ };
116
+ if (errors.length) {
117
+ throw new ResponseError(apiKey, apiVersion, Object.fromEntries(errors), response);
118
+ }
119
+ return response;
120
+ }
121
+ export const api = createAPI(1, 15, createRequest, parseResponse);
@@ -1,4 +1,5 @@
1
1
  export * as consumerGroupHeartbeatV0 from './consumer-group-heartbeat-v0.ts';
2
+ export * as fetchV15 from './fetch-v15.ts';
2
3
  export * as fetchV16 from './fetch-v16.ts';
3
4
  export * as fetchV17 from './fetch-v17.ts';
4
5
  export * as heartbeatV4 from './heartbeat-v4.ts';
@@ -6,6 +7,8 @@ export * as joinGroupV9 from './join-group-v9.ts';
6
7
  export * as leaveGroupV5 from './leave-group-v5.ts';
7
8
  export * as listOffsetsV8 from './list-offsets-v8.ts';
8
9
  export * as listOffsetsV9 from './list-offsets-v9.ts';
10
+ export * as offsetCommitV8 from './offset-commit-v8.ts';
9
11
  export * as offsetCommitV9 from './offset-commit-v9.ts';
12
+ export * as offsetFetchV8 from './offset-fetch-v8.ts';
10
13
  export * as offsetFetchV9 from './offset-fetch-v9.ts';
11
14
  export * as syncGroupV5 from './sync-group-v5.ts';
@@ -1,4 +1,5 @@
1
1
  export * as consumerGroupHeartbeatV0 from "./consumer-group-heartbeat-v0.js";
2
+ export * as fetchV15 from "./fetch-v15.js";
2
3
  export * as fetchV16 from "./fetch-v16.js";
3
4
  export * as fetchV17 from "./fetch-v17.js";
4
5
  export * as heartbeatV4 from "./heartbeat-v4.js";
@@ -6,6 +7,8 @@ export * as joinGroupV9 from "./join-group-v9.js";
6
7
  export * as leaveGroupV5 from "./leave-group-v5.js";
7
8
  export * as listOffsetsV8 from "./list-offsets-v8.js";
8
9
  export * as listOffsetsV9 from "./list-offsets-v9.js";
10
+ export * as offsetCommitV8 from "./offset-commit-v8.js";
9
11
  export * as offsetCommitV9 from "./offset-commit-v9.js";
12
+ export * as offsetFetchV8 from "./offset-fetch-v8.js";
10
13
  export * as offsetFetchV9 from "./offset-fetch-v9.js";
11
14
  export * as syncGroupV5 from "./sync-group-v5.js";
@@ -0,0 +1,29 @@
1
+ import { type NullableString } from '../../protocol/definitions.ts';
2
+ import { type Reader } from '../../protocol/reader.ts';
3
+ import { Writer } from '../../protocol/writer.ts';
4
+ export interface OffsetCommitRequestPartition {
5
+ partitionIndex: number;
6
+ committedOffset: bigint;
7
+ committedLeaderEpoch: number;
8
+ committedMetadata?: NullableString;
9
+ }
10
+ export interface OffsetCommitRequestTopic {
11
+ name: string;
12
+ partitions: OffsetCommitRequestPartition[];
13
+ }
14
+ export type OffsetCommitRequest = Parameters<typeof createRequest>;
15
+ export interface OffsetCommitResponsePartition {
16
+ partitionIndex: number;
17
+ errorCode: number;
18
+ }
19
+ export interface OffsetCommitResponseTopic {
20
+ name: string;
21
+ partitions: OffsetCommitResponsePartition[];
22
+ }
23
+ export interface OffsetCommitResponse {
24
+ throttleTimeMs: number;
25
+ topics: OffsetCommitResponseTopic[];
26
+ }
27
+ export declare function createRequest(groupId: string, generationIdOrMemberEpoch: number, memberId: string, groupInstanceId: NullableString, topics: OffsetCommitRequestTopic[]): Writer;
28
+ export declare function parseResponse(_correlationId: number, apiKey: number, apiVersion: number, reader: Reader): OffsetCommitResponse;
29
+ export declare const api: import("../definitions.ts").API<[groupId: string, generationIdOrMemberEpoch: number, memberId: string, groupInstanceId: NullableString, topics: OffsetCommitRequestTopic[]], OffsetCommitResponse>;
@@ -0,0 +1,68 @@
1
+ import { ResponseError } from "../../errors.js";
2
+ import { Writer } from "../../protocol/writer.js";
3
+ import { createAPI } from "../definitions.js";
4
+ /*
5
+ OffsetCommit Request (Version: 8) => group_id generation_id_or_member_epoch member_id group_instance_id [topics] TAG_BUFFER
6
+ group_id => COMPACT_STRING
7
+ generation_id_or_member_epoch => INT32
8
+ member_id => COMPACT_STRING
9
+ group_instance_id => COMPACT_NULLABLE_STRING
10
+ topics => name [partitions] TAG_BUFFER
11
+ name => COMPACT_STRING
12
+ partitions => partition_index committed_offset committed_leader_epoch committed_metadata TAG_BUFFER
13
+ partition_index => INT32
14
+ committed_offset => INT64
15
+ committed_leader_epoch => INT32
16
+ committed_metadata => COMPACT_NULLABLE_STRING
17
+ */
18
+ export function createRequest(groupId, generationIdOrMemberEpoch, memberId, groupInstanceId, topics) {
19
+ return Writer.create()
20
+ .appendString(groupId)
21
+ .appendInt32(generationIdOrMemberEpoch)
22
+ .appendString(memberId)
23
+ .appendString(groupInstanceId)
24
+ .appendArray(topics, (w, t) => {
25
+ w.appendString(t.name).appendArray(t.partitions, (w, p) => {
26
+ w.appendInt32(p.partitionIndex)
27
+ .appendInt64(p.committedOffset)
28
+ .appendInt32(p.committedLeaderEpoch)
29
+ .appendString(p.committedMetadata);
30
+ });
31
+ })
32
+ .appendTaggedFields();
33
+ }
34
+ /*
35
+ OffsetCommit Response (Version: 8) => throttle_time_ms [topics] TAG_BUFFER
36
+ throttle_time_ms => INT32
37
+ topics => name [partitions] TAG_BUFFER
38
+ name => COMPACT_STRING
39
+ partitions => partition_index error_code TAG_BUFFER
40
+ partition_index => INT32
41
+ error_code => INT16
42
+ */
43
+ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
44
+ const errors = [];
45
+ const response = {
46
+ throttleTimeMs: reader.readInt32(),
47
+ topics: reader.readArray((r, i) => {
48
+ return {
49
+ name: r.readString(),
50
+ partitions: r.readArray((r, j) => {
51
+ const partition = {
52
+ partitionIndex: r.readInt32(),
53
+ errorCode: r.readInt16()
54
+ };
55
+ if (partition.errorCode !== 0) {
56
+ errors.push([`/topics/${i}/partitions/${j}`, partition.errorCode]);
57
+ }
58
+ return partition;
59
+ })
60
+ };
61
+ })
62
+ };
63
+ if (errors.length) {
64
+ throw new ResponseError(apiKey, apiVersion, Object.fromEntries(errors), response);
65
+ }
66
+ return response;
67
+ }
68
+ export const api = createAPI(8, 8, createRequest, parseResponse);
@@ -0,0 +1,37 @@
1
+ import { type NullableString } from '../../protocol/definitions.ts';
2
+ import { type Reader } from '../../protocol/reader.ts';
3
+ import { Writer } from '../../protocol/writer.ts';
4
+ export interface OffsetFetchRequestTopic {
5
+ name: string;
6
+ partitionIndexes: number[];
7
+ }
8
+ export interface OffsetFetchRequestGroup {
9
+ groupId: string;
10
+ memberId?: NullableString;
11
+ memberEpoch: number;
12
+ topics: OffsetFetchRequestTopic[];
13
+ }
14
+ export type OffsetFetchRequest = Parameters<typeof createRequest>;
15
+ export interface OffsetFetchResponsePartition {
16
+ partitionIndex: number;
17
+ committedOffset: bigint;
18
+ committedLeaderEpoch: number;
19
+ metadata: NullableString;
20
+ errorCode: number;
21
+ }
22
+ export interface OffsetFetchResponseTopic {
23
+ name: string;
24
+ partitions: OffsetFetchResponsePartition[];
25
+ }
26
+ export interface OffsetFetchResponseGroup {
27
+ groupId: string;
28
+ topics: OffsetFetchResponseTopic[];
29
+ errorCode: number;
30
+ }
31
+ export interface OffsetFetchResponse {
32
+ throttleTimeMs: number;
33
+ groups: OffsetFetchResponseGroup[];
34
+ }
35
+ export declare function createRequest(groups: OffsetFetchRequestGroup[], requireStable: boolean): Writer;
36
+ export declare function parseResponse(_correlationId: number, apiKey: number, apiVersion: number, reader: Reader): OffsetFetchResponse;
37
+ export declare const api: import("../definitions.ts").API<[groups: OffsetFetchRequestGroup[], requireStable: boolean], OffsetFetchResponse>;
@@ -0,0 +1,78 @@
1
+ import { ResponseError } from "../../errors.js";
2
+ import { Writer } from "../../protocol/writer.js";
3
+ import { createAPI } from "../definitions.js";
4
+ /*
5
+ OffsetFetch Request (Version: 8) => [groups] require_stable TAG_BUFFER
6
+ groups => group_id member_id member_epoch [topics] TAG_BUFFER
7
+ group_id => COMPACT_STRING
8
+ topics => name [partition_indexes] TAG_BUFFER
9
+ name => COMPACT_STRING
10
+ partition_indexes => INT32
11
+ require_stable => BOOLEAN
12
+
13
+ Note that OffsetFetchRequestGroup contains memberId and memberEpoch fields, which are not used in version 8.
14
+ */
15
+ export function createRequest(groups, requireStable) {
16
+ return Writer.create()
17
+ .appendArray(groups, (w, g) => {
18
+ w.appendString(g.groupId).appendArray(g.topics, (w, t) => {
19
+ w.appendString(t.name).appendArray(t.partitionIndexes, (w, i) => w.appendInt32(i), true, false);
20
+ });
21
+ })
22
+ .appendBoolean(requireStable)
23
+ .appendTaggedFields();
24
+ }
25
+ /*
26
+ OffsetFetch Response (Version: 8) => throttle_time_ms [groups] TAG_BUFFER
27
+ throttle_time_ms => INT32
28
+ groups => group_id [topics] error_code TAG_BUFFER
29
+ group_id => COMPACT_STRING
30
+ topics => name [partitions] TAG_BUFFER
31
+ name => COMPACT_STRING
32
+ partitions => partition_index committed_offset committed_leader_epoch metadata error_code TAG_BUFFER
33
+ partition_index => INT32
34
+ committed_offset => INT64
35
+ committed_leader_epoch => INT32
36
+ metadata => COMPACT_NULLABLE_STRING
37
+ error_code => INT16
38
+ error_code => INT16
39
+ */
40
+ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
41
+ const errors = [];
42
+ const response = {
43
+ throttleTimeMs: reader.readInt32(),
44
+ groups: reader.readArray((r, i) => {
45
+ const group = {
46
+ groupId: r.readString(),
47
+ topics: r.readArray((r, j) => {
48
+ return {
49
+ name: r.readString(),
50
+ partitions: r.readArray((r, k) => {
51
+ const partition = {
52
+ partitionIndex: r.readInt32(),
53
+ committedOffset: r.readInt64(),
54
+ committedLeaderEpoch: r.readInt32(),
55
+ metadata: r.readNullableString(),
56
+ errorCode: r.readInt16()
57
+ };
58
+ if (partition.errorCode !== 0) {
59
+ errors.push([`/groups/${i}/topics/${j}/partitions/${k}`, partition.errorCode]);
60
+ }
61
+ return partition;
62
+ })
63
+ };
64
+ }),
65
+ errorCode: r.readInt16()
66
+ };
67
+ if (group.errorCode !== 0) {
68
+ errors.push([`/groups/${i}`, group.errorCode]);
69
+ }
70
+ return group;
71
+ })
72
+ };
73
+ if (errors.length) {
74
+ throw new ResponseError(apiKey, apiVersion, Object.fromEntries(errors), response);
75
+ }
76
+ return response;
77
+ }
78
+ export const api = createAPI(9, 8, createRequest, parseResponse);
@@ -5,4 +5,5 @@ export * as initProducerIdV4 from './init-producer-id-v4.ts';
5
5
  export * as initProducerIdV5 from './init-producer-id-v5.ts';
6
6
  export * as produceV10 from './produce-v10.ts';
7
7
  export * as produceV11 from './produce-v11.ts';
8
+ export * as produceV9 from './produce-v9.ts';
8
9
  export * as txnOffsetCommitV4 from './txn-offset-commit-v4.ts';
@@ -5,4 +5,5 @@ export * as initProducerIdV4 from "./init-producer-id-v4.js";
5
5
  export * as initProducerIdV5 from "./init-producer-id-v5.js";
6
6
  export * as produceV10 from "./produce-v10.js";
7
7
  export * as produceV11 from "./produce-v11.js";
8
+ export * as produceV9 from "./produce-v9.js";
8
9
  export * as txnOffsetCommitV4 from "./txn-offset-commit-v4.js";
@@ -0,0 +1,29 @@
1
+ import { type NullableString } from '../../protocol/definitions.ts';
2
+ import { type Reader } from '../../protocol/reader.ts';
3
+ import { type CreateRecordsBatchOptions, type MessageRecord } from '../../protocol/records.ts';
4
+ import { Writer } from '../../protocol/writer.ts';
5
+ export type ProduceRequest = Parameters<typeof createRequest>;
6
+ export interface ProduceResponsePartitionRecordError {
7
+ batchIndex: number;
8
+ batchIndexErrorMessage: NullableString;
9
+ }
10
+ export interface ProduceResponsePartition {
11
+ index: number;
12
+ errorCode: number;
13
+ baseOffset: bigint;
14
+ logAppendTimeMs: bigint;
15
+ logStartOffset: bigint;
16
+ recordErrors: ProduceResponsePartitionRecordError[];
17
+ errorMessage: NullableString;
18
+ }
19
+ export interface ProduceResponseTopic {
20
+ name: string;
21
+ partitionResponses: ProduceResponsePartition[];
22
+ }
23
+ export interface ProduceResponse {
24
+ responses: ProduceResponseTopic[];
25
+ throttleTimeMs: number;
26
+ }
27
+ export declare function createRequest(acks: number | undefined, timeout: number | undefined, topicData: MessageRecord[], options?: Partial<CreateRecordsBatchOptions>): Writer;
28
+ export declare function parseResponse(_correlationId: number, apiKey: number, apiVersion: number, reader: Reader): ProduceResponse;
29
+ export declare const api: import("../definitions.ts").API<[acks: number | undefined, timeout: number | undefined, topicData: MessageRecord[], options?: Partial<CreateRecordsBatchOptions> | undefined], boolean | ProduceResponse>;
@@ -0,0 +1,104 @@
1
+ import { ResponseError } from "../../errors.js";
2
+ import { createRecordsBatch } from "../../protocol/records.js";
3
+ import { Writer } from "../../protocol/writer.js";
4
+ import { groupByProperty } from "../../utils.js";
5
+ import { createAPI } from "../definitions.js";
6
+ import { ProduceAcks } from "../enumerations.js";
7
+ /*
8
+ Produce Request (Version: 9) => transactional_id acks timeout_ms [topic_data] TAG_BUFFER
9
+ transactional_id => COMPACT_NULLABLE_STRING
10
+ acks => INT16
11
+ timeout_ms => INT32
12
+ topic_data => name [partition_data] TAG_BUFFER
13
+ name => COMPACT_STRING
14
+ partition_data => index records TAG_BUFFER
15
+ index => INT32
16
+ records => COMPACT_RECORDS
17
+ */
18
+ export function createRequest(acks = 1, timeout = 0, topicData, options = {}) {
19
+ // Normalize the messages
20
+ const now = BigInt(Date.now());
21
+ for (const message of topicData) {
22
+ if (typeof message.partition === 'undefined') {
23
+ message.partition = 0;
24
+ }
25
+ if (typeof message.timestamp === 'undefined') {
26
+ message.timestamp = now;
27
+ }
28
+ }
29
+ const writer = Writer.create()
30
+ .appendString(options.transactionalId)
31
+ .appendInt16(acks)
32
+ .appendInt32(timeout)
33
+ .appendArray(groupByProperty(topicData, 'topic'), (w, [topic, messages]) => {
34
+ w.appendString(topic).appendArray(groupByProperty(messages, 'partition'), (w, [partition, messages]) => {
35
+ const records = createRecordsBatch(messages, options);
36
+ w.appendInt32(partition)
37
+ .appendUnsignedVarInt(records.length + 1)
38
+ .appendFrom(records);
39
+ });
40
+ })
41
+ .appendTaggedFields();
42
+ if (acks === ProduceAcks.NO_RESPONSE) {
43
+ writer.context.noResponse = true;
44
+ }
45
+ return writer;
46
+ }
47
+ /*
48
+ Produce Response (Version: 9) => [responses] throttle_time_ms TAG_BUFFER
49
+ responses => name [partition_responses] TAG_BUFFER
50
+ name => COMPACT_STRING
51
+ partition_responses => index error_code base_offset log_append_time_ms log_start_offset [record_errors] error_message TAG_BUFFER
52
+ index => INT32
53
+ error_code => INT16
54
+ base_offset => INT64
55
+ log_append_time_ms => INT64
56
+ log_start_offset => INT64
57
+ record_errors => batch_index batch_index_error_message TAG_BUFFER
58
+ batch_index => INT32
59
+ batch_index_error_message => COMPACT_NULLABLE_STRING
60
+ error_message => COMPACT_NULLABLE_STRING
61
+ throttle_time_ms => INT32
62
+ */
63
+ export function parseResponse(_correlationId, apiKey, apiVersion, reader) {
64
+ const errors = [];
65
+ const response = {
66
+ responses: reader.readArray((r, i) => {
67
+ const topicResponse = {
68
+ name: r.readString(),
69
+ partitionResponses: r.readArray((r, j) => {
70
+ const index = r.readInt32();
71
+ const errorCode = r.readInt16();
72
+ if (errorCode !== 0) {
73
+ errors.push([`/responses/${i}/partition_responses/${j}`, errorCode]);
74
+ }
75
+ return {
76
+ index,
77
+ errorCode,
78
+ baseOffset: r.readInt64(),
79
+ logAppendTimeMs: r.readInt64(),
80
+ logStartOffset: r.readInt64(),
81
+ recordErrors: r.readArray((r, k) => {
82
+ const recordError = {
83
+ batchIndex: r.readInt32(),
84
+ batchIndexErrorMessage: r.readNullableString()
85
+ };
86
+ if (recordError.batchIndexErrorMessage) {
87
+ errors.push([`/responses/${i}/partition_responses/${j}/record_errors/${k}`, -1]);
88
+ }
89
+ return recordError;
90
+ }),
91
+ errorMessage: r.readNullableString()
92
+ };
93
+ })
94
+ };
95
+ return topicResponse;
96
+ }),
97
+ throttleTimeMs: reader.readInt32()
98
+ };
99
+ if (errors.length) {
100
+ throw new ResponseError(apiKey, apiVersion, Object.fromEntries(errors), response);
101
+ }
102
+ return response;
103
+ }
104
+ export const api = createAPI(0, 9, createRequest, parseResponse);
package/dist/utils.d.ts CHANGED
@@ -22,4 +22,3 @@ export declare function groupByProperty<Key, Value>(entries: Value[], property:
22
22
  export declare function humanize(label: string, buffer: Buffer | DynamicBuffer): string;
23
23
  export declare function setDebugDumpLogger(logger: DebugDumpLogger): void;
24
24
  export declare function debugDump(...values: unknown[]): void;
25
- export declare function executeWithTimeout<T = unknown>(promise: Promise<T>, timeout: number, timeoutValue?: string): Promise<T | string>;
package/dist/utils.js CHANGED
@@ -1,8 +1,6 @@
1
- import { Unpromise } from '@watchable/unpromise';
2
1
  import ajvErrors from 'ajv-errors';
3
2
  import { Ajv2020 } from 'ajv/dist/2020.js';
4
3
  import debug from 'debug';
5
- import { setTimeout as sleep } from 'node:timers/promises';
6
4
  import { inspect } from 'node:util';
7
5
  export { setTimeout as sleep } from 'node:timers/promises';
8
6
  export const ajv = new Ajv2020({ allErrors: true, coerceTypes: false, strict: true });
@@ -142,10 +140,3 @@ export function setDebugDumpLogger(logger) {
142
140
  export function debugDump(...values) {
143
141
  debugDumpLogger(new Date().toISOString(), ...values.map(v => (typeof v === 'string' ? v : inspect(v, false, 10))));
144
142
  }
145
- export async function executeWithTimeout(promise, timeout, timeoutValue = 'timeout') {
146
- const ac = new AbortController();
147
- return Unpromise.race([promise, sleep(timeout, timeoutValue, { signal: ac.signal, ref: false })]).then((value) => {
148
- ac.abort();
149
- return value;
150
- });
151
- }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@platformatic/kafka",
3
- "version": "1.4.0",
3
+ "version": "1.5.0",
4
4
  "description": "Modern and performant client for Apache Kafka",
5
5
  "homepage": "https://github.com/platformatic/kafka",
6
6
  "author": "Platformatic Inc. <oss@platformatic.dev> (https://platformatic.dev)",
@@ -25,7 +25,6 @@
25
25
  "exports": "./dist/index.js",
26
26
  "types": "./dist/index.d.ts",
27
27
  "dependencies": {
28
- "@watchable/unpromise": "^1.0.2",
29
28
  "ajv": "^8.17.1",
30
29
  "ajv-errors": "^3.0.0",
31
30
  "debug": "^4.4.0",
@@ -42,6 +41,7 @@
42
41
  "@types/debug": "^4.1.12",
43
42
  "@types/node": "^22.13.5",
44
43
  "@types/semver": "^7.7.0",
44
+ "@watchable/unpromise": "^1.0.2",
45
45
  "c8": "^10.1.3",
46
46
  "cleaner-spec-reporter": "^0.5.0",
47
47
  "cronometro": "^5.3.0",
@@ -55,6 +55,7 @@
55
55
  "prettier": "^3.5.3",
56
56
  "prom-client": "^15.1.3",
57
57
  "semver": "^7.7.1",
58
+ "table": "^6.9.0",
58
59
  "typescript": "^5.7.3"
59
60
  },
60
61
  "engines": {