kafka-ts 0.0.3 → 0.0.5

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (69)
  1. package/README.md +36 -1
  2. package/dist/client.d.ts +1 -2
  3. package/dist/consumer/consumer.d.ts +2 -0
  4. package/dist/consumer/consumer.js +18 -7
  5. package/dist/index.d.ts +1 -0
  6. package/dist/index.js +1 -0
  7. package/dist/utils/retrier.d.ts +3 -4
  8. package/dist/utils/retrier.js +19 -14
  9. package/package.json +1 -1
  10. package/.prettierrc +0 -8
  11. package/src/__snapshots__/cluster.test.ts.snap +0 -1281
  12. package/src/api/api-versions.ts +0 -21
  13. package/src/api/create-topics.ts +0 -78
  14. package/src/api/delete-topics.ts +0 -42
  15. package/src/api/fetch.ts +0 -198
  16. package/src/api/find-coordinator.ts +0 -39
  17. package/src/api/heartbeat.ts +0 -33
  18. package/src/api/index.ts +0 -166
  19. package/src/api/init-producer-id.ts +0 -35
  20. package/src/api/join-group.ts +0 -67
  21. package/src/api/leave-group.ts +0 -48
  22. package/src/api/list-offsets.ts +0 -65
  23. package/src/api/metadata.ts +0 -66
  24. package/src/api/offset-commit.ts +0 -67
  25. package/src/api/offset-fetch.ts +0 -70
  26. package/src/api/produce.ts +0 -170
  27. package/src/api/sasl-authenticate.ts +0 -21
  28. package/src/api/sasl-handshake.ts +0 -16
  29. package/src/api/sync-group.ts +0 -54
  30. package/src/auth/index.ts +0 -2
  31. package/src/auth/plain.ts +0 -10
  32. package/src/auth/scram.ts +0 -52
  33. package/src/broker.ts +0 -72
  34. package/src/client.ts +0 -47
  35. package/src/cluster.test.ts +0 -371
  36. package/src/cluster.ts +0 -85
  37. package/src/codecs/gzip.ts +0 -9
  38. package/src/codecs/index.ts +0 -16
  39. package/src/codecs/none.ts +0 -6
  40. package/src/codecs/types.ts +0 -4
  41. package/src/connection.ts +0 -157
  42. package/src/consumer/consumer-group.ts +0 -229
  43. package/src/consumer/consumer-metadata.ts +0 -14
  44. package/src/consumer/consumer.ts +0 -252
  45. package/src/consumer/fetch-manager.ts +0 -169
  46. package/src/consumer/fetcher.ts +0 -64
  47. package/src/consumer/offset-manager.ts +0 -104
  48. package/src/consumer/processor.ts +0 -53
  49. package/src/distributors/assignments-to-replicas.test.ts +0 -43
  50. package/src/distributors/assignments-to-replicas.ts +0 -83
  51. package/src/distributors/messages-to-topic-partition-leaders.test.ts +0 -32
  52. package/src/distributors/messages-to-topic-partition-leaders.ts +0 -19
  53. package/src/distributors/partitioner.ts +0 -27
  54. package/src/index.ts +0 -9
  55. package/src/metadata.ts +0 -126
  56. package/src/producer/producer.ts +0 -142
  57. package/src/types.ts +0 -11
  58. package/src/utils/api.ts +0 -11
  59. package/src/utils/crypto.ts +0 -15
  60. package/src/utils/decoder.ts +0 -174
  61. package/src/utils/delay.ts +0 -1
  62. package/src/utils/encoder.ts +0 -148
  63. package/src/utils/error.ts +0 -21
  64. package/src/utils/logger.ts +0 -37
  65. package/src/utils/memo.ts +0 -12
  66. package/src/utils/murmur2.ts +0 -44
  67. package/src/utils/retrier.ts +0 -39
  68. package/src/utils/tracer.ts +0 -49
  69. package/tsconfig.json +0 -17
package/src/api/leave-group.ts DELETED
@@ -1,48 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export const LEAVE_GROUP = createApi({
-     apiKey: 13,
-     apiVersion: 5,
-     request: (
-         encoder,
-         body: {
-             groupId: string;
-             members: {
-                 memberId: string;
-                 groupInstanceId: string | null;
-                 reason: string | null;
-             }[];
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactString(body.groupId)
-             .writeCompactArray(body.members, (encoder, member) =>
-                 encoder
-                     .writeCompactString(member.memberId)
-                     .writeCompactString(member.groupInstanceId)
-                     .writeCompactString(member.reason)
-                     .writeUVarInt(0),
-             )
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             errorCode: decoder.readInt16(),
-             members: decoder.readCompactArray((decoder) => ({
-                 memberId: decoder.readCompactString()!,
-                 groupInstanceId: decoder.readCompactString(),
-                 errorCode: decoder.readInt16(),
-                 _tag: decoder.readTagBuffer(),
-             })),
-             _tag2: decoder.readTagBuffer(),
-         };
-         if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-         result.members.forEach((member) => {
-             if (member.errorCode) throw new KafkaTSApiError(member.errorCode, null, result);
-         });
-         return result;
-     },
- });
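Note: every API definition in this diff uses Kafka's flexible-version wire format (KIP-482). Compact strings and compact arrays are length-prefixed with an unsigned varint of length + 1 (so null encodes as 0), and the bare writeUVarInt(0) calls at the start and end of each structure appear to emit empty tagged-field sections. A minimal standalone sketch of those two primitives, assuming nothing about the package's own chainable Encoder (src/utils/encoder.ts, also deleted in this release); the helper names are illustrative:

    // Unsigned varint: 7 data bits per byte, high bit set while more bytes follow.
    const writeUVarInt = (value: number): Buffer => {
        const bytes: number[] = [];
        do {
            let byte = value & 0x7f;
            value >>>= 7;
            if (value !== 0) byte |= 0x80;
            bytes.push(byte);
        } while (value !== 0);
        return Buffer.from(bytes);
    };

    // Compact string: UVarInt(byteLength + 1) followed by the UTF-8 bytes; null encodes as a bare 0.
    const writeCompactString = (value: string | null): Buffer => {
        if (value === null) return writeUVarInt(0);
        const bytes = Buffer.from(value, 'utf-8');
        return Buffer.concat([writeUVarInt(bytes.length + 1), bytes]);
    };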
package/src/api/list-offsets.ts DELETED
@@ -1,65 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
- import { IsolationLevel } from './fetch';
-
- export const LIST_OFFSETS = createApi({
-     apiKey: 2,
-     apiVersion: 8,
-     request: (
-         encoder,
-         data: {
-             replicaId: number;
-             isolationLevel: IsolationLevel;
-             topics: {
-                 name: string;
-                 partitions: {
-                     partitionIndex: number;
-                     currentLeaderEpoch: number;
-                     timestamp: bigint;
-                 }[];
-             }[];
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeInt32(data.replicaId)
-             .writeInt8(data.isolationLevel)
-             .writeCompactArray(data.topics, (encoder, topic) =>
-                 encoder
-                     .writeCompactString(topic.name)
-                     .writeCompactArray(topic.partitions, (encoder, partition) =>
-                         encoder
-                             .writeInt32(partition.partitionIndex)
-                             .writeInt32(partition.currentLeaderEpoch)
-                             .writeInt64(partition.timestamp)
-                             .writeUVarInt(0),
-                     )
-                     .writeUVarInt(0),
-             )
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             topics: decoder.readCompactArray((decoder) => ({
-                 name: decoder.readCompactString()!,
-                 partitions: decoder.readCompactArray((decoder) => ({
-                     partitionIndex: decoder.readInt32(),
-                     errorCode: decoder.readInt16(),
-                     timestamp: decoder.readInt64(),
-                     offset: decoder.readInt64(),
-                     leaderEpoch: decoder.readInt32(),
-                     _tag: decoder.readTagBuffer(),
-                 })),
-                 _tag: decoder.readTagBuffer(),
-             })),
-             _tag2: decoder.readTagBuffer(),
-         };
-         result.topics.forEach((topic) => {
-             topic.partitions.forEach((partition) => {
-                 if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
-             });
-         });
-         return result;
-     },
- });
package/src/api/metadata.ts DELETED
@@ -1,66 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export type Metadata = Awaited<ReturnType<(typeof METADATA)['response']>>;
-
- export const METADATA = createApi({
-     apiKey: 3,
-     apiVersion: 12,
-     request: (
-         encoder,
-         data: {
-             topics: { id: string | null; name: string }[] | null;
-             allowTopicAutoCreation: boolean;
-             includeTopicAuthorizedOperations: boolean;
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactArray(data.topics, (encoder, topic) =>
-                 encoder.writeUUID(topic.id).writeCompactString(topic.name).writeUVarInt(0),
-             )
-             .writeBoolean(data.allowTopicAutoCreation)
-             .writeBoolean(data.includeTopicAuthorizedOperations)
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             brokers: decoder.readCompactArray((broker) => ({
-                 nodeId: broker.readInt32(),
-                 host: broker.readCompactString()!,
-                 port: broker.readInt32(),
-                 rack: broker.readCompactString(),
-                 _tag: broker.readTagBuffer(),
-             })),
-             clusterId: decoder.readCompactString(),
-             controllerId: decoder.readInt32(),
-             topics: decoder.readCompactArray((topic) => ({
-                 errorCode: topic.readInt16(),
-                 name: topic.readCompactString()!,
-                 topicId: topic.readUUID(),
-                 isInternal: topic.readBoolean(),
-                 partitions: topic.readCompactArray((partition) => ({
-                     errorCode: partition.readInt16(),
-                     partitionIndex: partition.readInt32(),
-                     leaderId: partition.readInt32(),
-                     leaderEpoch: partition.readInt32(),
-                     replicaNodes: partition.readCompactArray((node) => node.readInt32()),
-                     isrNodes: partition.readCompactArray((node) => node.readInt32()),
-                     offlineReplicas: partition.readCompactArray((node) => node.readInt32()),
-                     _tag: partition.readTagBuffer(),
-                 })),
-                 topicAuthorizedOperations: topic.readInt32(),
-                 _tag: topic.readTagBuffer(),
-             })),
-             _tag2: decoder.readTagBuffer(),
-         };
-         result.topics.forEach((topic) => {
-             if (topic.errorCode) throw new KafkaTSApiError(topic.errorCode, null, result);
-             topic.partitions.forEach((partition) => {
-                 if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
-             });
-         });
-         return result;
-     },
- });
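Note: the Metadata type above is derived from the decoder via Awaited<ReturnType<...>>, so the response typing stays in lockstep with the decoding code. A hedged usage sketch, assuming a sendRequest of the shape the SASL providers further down receive; the flags and the leader lookup are illustrative, not the package's documented API:

    const metadata: Metadata = await sendRequest(API.METADATA, {
        topics: null, // null requests metadata for all topics
        allowTopicAutoCreation: false,
        includeTopicAuthorizedOperations: false,
    });
    // Map "topic:partition" to the partition leader's node id.
    const leaders = Object.fromEntries(
        metadata.topics.flatMap((topic) =>
            topic.partitions.map((partition) => [`${topic.name}:${partition.partitionIndex}`, partition.leaderId]),
        ),
    );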
package/src/api/offset-commit.ts DELETED
@@ -1,67 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export const OFFSET_COMMIT = createApi({
-     apiKey: 8,
-     apiVersion: 8,
-     request: (
-         encoder,
-         data: {
-             groupId: string;
-             generationIdOrMemberEpoch: number;
-             memberId: string;
-             groupInstanceId: string | null;
-             topics: {
-                 name: string;
-                 partitions: {
-                     partitionIndex: number;
-                     committedOffset: bigint;
-                     committedLeaderEpoch: number;
-                     committedMetadata: string | null;
-                 }[];
-             }[];
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactString(data.groupId)
-             .writeInt32(data.generationIdOrMemberEpoch)
-             .writeCompactString(data.memberId)
-             .writeCompactString(data.groupInstanceId)
-             .writeCompactArray(data.topics, (encoder, topic) =>
-                 encoder
-                     .writeCompactString(topic.name)
-                     .writeCompactArray(topic.partitions, (encoder, partition) =>
-                         encoder
-                             .writeInt32(partition.partitionIndex)
-                             .writeInt64(partition.committedOffset)
-                             .writeInt32(partition.committedLeaderEpoch)
-                             .writeCompactString(partition.committedMetadata)
-                             .writeUVarInt(0),
-                     )
-                     .writeUVarInt(0),
-             )
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             topics: decoder.readCompactArray((decoder) => ({
-                 name: decoder.readCompactString(),
-                 partitions: decoder.readCompactArray((decoder) => ({
-                     partitionIndex: decoder.readInt32(),
-                     errorCode: decoder.readInt16(),
-                     _tag: decoder.readTagBuffer(),
-                 })),
-                 _tag: decoder.readTagBuffer(),
-             })),
-             _tag2: decoder.readTagBuffer(),
-         };
-         result.topics.forEach((topic) => {
-             topic.partitions.forEach((partition) => {
-                 if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
-             });
-         });
-         return result;
-     },
- });
package/src/api/offset-fetch.ts DELETED
@@ -1,70 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export const OFFSET_FETCH = createApi({
-     apiKey: 9,
-     apiVersion: 8,
-     request: (
-         encoder,
-         data: {
-             groups: {
-                 groupId: string;
-                 topics: {
-                     name: string;
-                     partitionIndexes: number[];
-                 }[];
-             }[];
-             requireStable: boolean;
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactArray(data.groups, (encoder, group) =>
-                 encoder
-                     .writeCompactString(group.groupId)
-                     .writeCompactArray(group.topics, (encoder, topic) =>
-                         encoder
-                             .writeCompactString(topic.name)
-                             .writeCompactArray(topic.partitionIndexes, (encoder, partitionIndex) =>
-                                 encoder.writeInt32(partitionIndex),
-                             )
-                             .writeUVarInt(0),
-                     )
-                     .writeUVarInt(0),
-             )
-             .writeBoolean(data.requireStable)
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             groups: decoder.readCompactArray((decoder) => ({
-                 groupId: decoder.readCompactString(),
-                 topics: decoder.readCompactArray((decoder) => ({
-                     name: decoder.readCompactString()!,
-                     partitions: decoder.readCompactArray((decoder) => ({
-                         partitionIndex: decoder.readInt32(),
-                         committedOffset: decoder.readInt64(),
-                         committedLeaderEpoch: decoder.readInt32(),
-                         committedMetadata: decoder.readCompactString(),
-                         errorCode: decoder.readInt16(),
-                         _tag: decoder.readTagBuffer(),
-                     })),
-                     _tag: decoder.readTagBuffer(),
-                 })),
-                 errorCode: decoder.readInt16(),
-                 _tag: decoder.readTagBuffer(),
-             })),
-             _tag2: decoder.readTagBuffer(),
-         };
-         result.groups.forEach((group) => {
-             if (group.errorCode) throw new KafkaTSApiError(group.errorCode, null, result);
-             group.topics.forEach((topic) => {
-                 topic.partitions.forEach((partition) => {
-                     if (partition.errorCode) throw new KafkaTSApiError(partition.errorCode, null, result);
-                 });
-             });
-         });
-         return result;
-     },
- });
package/src/api/produce.ts DELETED
@@ -1,170 +0,0 @@
- import { createApi } from '../utils/api.js';
- import { Encoder } from '../utils/encoder.js';
- import { KafkaTSApiError } from '../utils/error.js';
-
- export const PRODUCE = createApi({
-     apiKey: 0,
-     apiVersion: 9,
-     request: (
-         encoder,
-         data: {
-             transactionalId: string | null;
-             acks: number;
-             timeoutMs: number;
-             topicData: {
-                 name: string;
-                 partitionData: {
-                     index: number;
-                     baseOffset: bigint;
-                     partitionLeaderEpoch: number;
-                     attributes: number;
-                     lastOffsetDelta: number;
-                     baseTimestamp: bigint;
-                     maxTimestamp: bigint;
-                     producerId: bigint;
-                     producerEpoch: number;
-                     baseSequence: number;
-                     records: {
-                         attributes: number;
-                         timestampDelta: bigint;
-                         offsetDelta: number;
-                         key: Buffer | null;
-                         value: Buffer | null;
-                         headers: {
-                             key: Buffer;
-                             value: Buffer;
-                         }[];
-                     }[];
-                 }[];
-             }[];
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactString(data.transactionalId)
-             .writeInt16(data.acks)
-             .writeInt32(data.timeoutMs)
-             .writeCompactArray(data.topicData, (encoder, topic) =>
-                 encoder
-                     .writeCompactString(topic.name)
-                     .writeCompactArray(topic.partitionData, (encoder, partition) => {
-                         const batchBody = new Encoder()
-                             .writeInt16(partition.attributes)
-                             .writeInt32(partition.lastOffsetDelta)
-                             .writeInt64(partition.baseTimestamp)
-                             .writeInt64(partition.maxTimestamp)
-                             .writeInt64(partition.producerId)
-                             .writeInt16(partition.producerEpoch)
-                             .writeInt32(partition.baseSequence)
-                             .writeArray(partition.records, (encoder, record) => {
-                                 const recordBody = new Encoder()
-                                     .writeInt8(record.attributes)
-                                     .writeVarLong(record.timestampDelta)
-                                     .writeVarInt(record.offsetDelta)
-                                     .writeVarIntBuffer(record.key)
-                                     .writeVarIntBuffer(record.value)
-                                     .writeVarIntArray(record.headers, (encoder, header) =>
-                                         encoder.writeVarIntBuffer(header.key).writeVarIntBuffer(header.value),
-                                     );
-
-                                 return encoder.writeVarInt(recordBody.getByteLength()).writeEncoder(recordBody);
-                             })
-                             .value();
-
-                         const batchHeader = new Encoder()
-                             .writeInt32(partition.partitionLeaderEpoch)
-                             .writeInt8(2) // magic byte
-                             .writeUInt32(unsigned(crc32C(batchBody)))
-                             .write(batchBody);
-
-                         const batch = new Encoder()
-                             .writeInt64(partition.baseOffset)
-                             .writeInt32(batchHeader.getByteLength())
-                             .writeEncoder(batchHeader);
-
-                         return encoder
-                             .writeInt32(partition.index)
-                             .writeUVarInt(batch.getByteLength() + 1) // batch size
-                             .writeEncoder(batch)
-                             .writeUVarInt(0);
-                     })
-                     .writeUVarInt(0),
-             )
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             responses: decoder.readCompactArray((response) => ({
-                 name: response.readCompactString(),
-                 partitionResponses: response.readCompactArray((partitionResponse) => ({
-                     index: partitionResponse.readInt32(),
-                     errorCode: partitionResponse.readInt16(),
-                     baseOffset: partitionResponse.readInt64(),
-                     logAppendTime: partitionResponse.readInt64(),
-                     logStartOffset: partitionResponse.readInt64(),
-                     recordErrors: partitionResponse.readCompactArray((recordError) => ({
-                         batchIndex: recordError.readInt32(),
-                         batchIndexError: recordError.readInt16(),
-                         _tag: recordError.readTagBuffer(),
-                     })),
-                     errorMessage: partitionResponse.readCompactString(),
-                     _tag: partitionResponse.readTagBuffer(),
-                 })),
-                 _tag: response.readTagBuffer(),
-             })),
-             throttleTimeMs: decoder.readInt32(),
-             _tag2: decoder.readTagBuffer(),
-         };
-         result.responses.forEach((topic) => {
-             topic.partitionResponses.forEach((partition) => {
-                 if (partition.errorCode !== 0) {
-                     throw new KafkaTSApiError(partition.errorCode, partition.errorMessage, result);
-                 }
-             });
-         });
-         return result;
-     },
- });
-
- const unsigned = (value: number) => Uint32Array.from([value])[0];
-
- const crc32C = (buffer: Buffer) => {
-     let crc = 0 ^ -1;
-     for (let i = 0; i < buffer.length; i++) {
-         crc = T[(crc ^ buffer[i]) & 0xff] ^ (crc >>> 8);
-     }
-
-     return (crc ^ -1) >>> 0;
- };
-
- const T = new Int32Array([
-     0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf,
-     0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c,
-     0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57,
-     0x89d76c54, 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a,
-     0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e,
-     0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad,
-     0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 0x7da08661, 0x8fcb0562, 0x9c9bf696,
-     0x6ef07595, 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957,
-     0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198, 0x5125dad3,
-     0xa34e59d0, 0xb01eaa24, 0x42752927, 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f,
-     0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, 0x61c69362, 0x93ad1061, 0x80fde395,
-     0x72966096, 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859,
-     0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 0xb602c312,
-     0x44694011, 0x5739b3e5, 0xa55230e6, 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de,
-     0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 0x456cac67, 0xb7072f64, 0xa457dc90,
-     0x563c5f93, 0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c,
-     0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 0x1871a4d8,
-     0xea1a27db, 0xf94ad42f, 0x0b21572c, 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5,
-     0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e,
-     0x3bc21e9d, 0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d,
-     0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19,
-     0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8,
-     0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, 0x8ecee914, 0x7ca56a17, 0x6ff599e3,
-     0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540,
-     0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a,
-     0x115b2b19, 0x020bd8ed, 0xf0605bee, 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6,
-     0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, 0xf36e6f75, 0x0105ec76, 0x12551f82,
-     0xe03e9c81, 0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e,
-     0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351,
- ]);
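Note: crc32C above is a table-driven CRC-32C (Castagnoli). In the record-batch layout the checksum covers everything after the CRC field itself, which is why batchBody is assembled separately before batchHeader prepends partitionLeaderEpoch, the magic byte, and the CRC. A quick way to sanity-check any CRC-32C implementation is the standard check value: the CRC-32C of the ASCII string 123456789 is 0xe3069283. A table-free bitwise equivalent for comparison (illustrative only, not part of the package):

    // Bitwise CRC-32C using the reflected Castagnoli polynomial 0x82f63b78.
    // Slower than the table-driven version above, but easy to verify by eye.
    const crc32cBitwise = (buffer: Buffer): number => {
        let crc = 0xffffffff;
        for (const byte of buffer) {
            crc ^= byte;
            for (let bit = 0; bit < 8; bit++) {
                crc = crc & 1 ? (crc >>> 1) ^ 0x82f63b78 : crc >>> 1;
            }
        }
        return (crc ^ 0xffffffff) >>> 0;
    };

    console.assert(crc32cBitwise(Buffer.from('123456789')) === 0xe3069283); // standard check value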
package/src/api/sasl-authenticate.ts DELETED
@@ -1,21 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export const SASL_AUTHENTICATE = createApi({
-     apiKey: 36,
-     apiVersion: 2,
-     request: (encoder, data: { authBytes: Buffer }) =>
-         encoder.writeUVarInt(0).writeCompactBytes(data.authBytes).writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             errorCode: decoder.readInt16(),
-             errorMessage: decoder.readCompactString(),
-             authBytes: decoder.readCompactBytes(),
-             sessionLifetimeMs: decoder.readInt64(),
-             _tag2: decoder.readTagBuffer(),
-         };
-         if (result.errorCode) throw new KafkaTSApiError(result.errorCode, result.errorMessage, result);
-         return result;
-     },
- });
package/src/api/sasl-handshake.ts DELETED
@@ -1,16 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export const SASL_HANDSHAKE = createApi({
-     apiKey: 17,
-     apiVersion: 1,
-     request: (encoder, data: { mechanism: string }) => encoder.writeString(data.mechanism),
-     response: (decoder) => {
-         const result = {
-             errorCode: decoder.readInt16(),
-             mechanisms: decoder.readArray((mechanism) => mechanism.readString()),
-         };
-         if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-         return result;
-     },
- });
package/src/api/sync-group.ts DELETED
@@ -1,54 +0,0 @@
- import { createApi } from '../utils/api';
- import { KafkaTSApiError } from '../utils/error';
-
- export type Assignment = { [topic: string]: number[] };
-
- export type MemberAssignment = {
-     memberId: string;
-     assignment: Assignment;
- };
-
- export const SYNC_GROUP = createApi({
-     apiKey: 14,
-     apiVersion: 5,
-     request: (
-         encoder,
-         data: {
-             groupId: string;
-             generationId: number;
-             memberId: string;
-             groupInstanceId: string | null;
-             protocolType: string | null;
-             protocolName: string | null;
-             assignments: MemberAssignment[];
-         },
-     ) =>
-         encoder
-             .writeUVarInt(0)
-             .writeCompactString(data.groupId)
-             .writeInt32(data.generationId)
-             .writeCompactString(data.memberId)
-             .writeCompactString(data.groupInstanceId)
-             .writeCompactString(data.protocolType)
-             .writeCompactString(data.protocolName)
-             .writeCompactArray(data.assignments, (encoder, assignment) =>
-                 encoder
-                     .writeCompactString(assignment.memberId)
-                     .writeCompactString(JSON.stringify(assignment.assignment))
-                     .writeUVarInt(0),
-             )
-             .writeUVarInt(0),
-     response: (decoder) => {
-         const result = {
-             _tag: decoder.readTagBuffer(),
-             throttleTimeMs: decoder.readInt32(),
-             errorCode: decoder.readInt16(),
-             protocolType: decoder.readCompactString(),
-             protocolName: decoder.readCompactString(),
-             assignments: decoder.readCompactString()!,
-             _tag2: decoder.readTagBuffer(),
-         };
-         if (result.errorCode) throw new KafkaTSApiError(result.errorCode, null, result);
-         return result;
-     },
- });
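Note: SYNC_GROUP serializes each member's assignment as a JSON compact string rather than Kafka's binary ConsumerProtocolAssignment. Brokers treat assignment bytes as opaque, so this appears to work as long as every member of the group is a kafka-ts consumer. The shape being serialized (values illustrative, types from this file):

    const memberAssignment: MemberAssignment = {
        memberId: 'consumer-1',
        assignment: { orders: [0, 1, 2], payments: [3] }, // topic name → partition indexes
    };
    // On the wire: writeCompactString('{"orders":[0,1,2],"payments":[3]}')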
package/src/auth/index.ts DELETED
@@ -1,2 +0,0 @@
- export { saslPlain } from './plain';
- export { saslScramSha256, saslScramSha512 } from './scram';
package/src/auth/plain.ts DELETED
@@ -1,10 +0,0 @@
- import { API } from "../api";
- import { SASLProvider } from "../broker";
-
- export const saslPlain = ({ username, password }: { username: string; password: string }): SASLProvider => ({
-     mechanism: 'PLAIN',
-     authenticate: async ({ sendRequest }) => {
-         const authBytes = [null, username, password].join('\u0000');
-         await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(authBytes) });
-     },
- });
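Note: saslPlain builds the RFC 4616 PLAIN message, authzid NUL authcid NUL password, with the authorization identity left empty (Array.prototype.join renders null as an empty string):

    const authBytes = [null, 'alice', 'secret'].join('\u0000'); // illustrative credentials
    console.log(JSON.stringify(authBytes)); // "\u0000alice\u0000secret"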
package/src/auth/scram.ts DELETED
@@ -1,52 +0,0 @@
- import { API } from '../api';
- import { SASLProvider } from '../broker';
- import { base64Decode, base64Encode, generateNonce, hash, hmac, saltPassword, xor } from '../utils/crypto';
- import { KafkaTSError } from '../utils/error';
-
- const saslScram =
-     ({ mechanism, keyLength, digest }: { mechanism: string; keyLength: number; digest: string }) =>
-     ({ username, password }: { username: string; password: string }): SASLProvider => ({
-         mechanism,
-         authenticate: async ({ sendRequest }) => {
-             const nonce = generateNonce();
-             const firstMessage = `n=${username},r=${nonce}`;
-
-             const { authBytes } = await sendRequest(API.SASL_AUTHENTICATE, {
-                 authBytes: Buffer.from(`n,,${firstMessage}`),
-             });
-             if (!authBytes) {
-                 throw new KafkaTSError('No auth response');
-             }
-
-             const response = Object.fromEntries(
-                 authBytes
-                     .toString()
-                     .split(',')
-                     .map((pair) => pair.split('=')),
-             ) as { r: string; s: string; i: string };
-
-             const rnonce = response.r;
-             if (!rnonce.startsWith(nonce)) {
-                 throw new KafkaTSError('Invalid nonce');
-             }
-             const iterations = parseInt(response.i);
-             const salt = base64Decode(response.s);
-
-             const saltedPassword = await saltPassword(password, salt, iterations, keyLength, digest);
-             const clientKey = hmac(saltedPassword, 'Client Key', digest);
-             const clientKeyHash = hash(clientKey, digest);
-
-             let finalMessage = `c=${base64Encode('n,,')},r=${rnonce}`;
-
-             const fullMessage = `${firstMessage},${authBytes.toString()},${finalMessage}`;
-             const clientSignature = hmac(clientKeyHash, fullMessage, digest);
-             const clientProof = base64Encode(xor(clientKey, clientSignature));
-
-             finalMessage += `,p=${clientProof}`;
-
-             await sendRequest(API.SASL_AUTHENTICATE, { authBytes: Buffer.from(finalMessage) });
-         },
-     });
-
- export const saslScramSha256 = saslScram({ mechanism: 'SCRAM-SHA-256', keyLength: 32, digest: 'sha256' });
- export const saslScramSha512 = saslScram({ mechanism: 'SCRAM-SHA-512', keyLength: 64, digest: 'sha512' });
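Note: the utils/crypto helpers used above (saltPassword, hmac, hash, xor) were removed in the same release. For reference, the equivalent RFC 5802 client-proof computation can be written directly against node:crypto; this is a sketch of the algorithm for the SCRAM-SHA-256 parameters (keyLength 32, digest 'sha256'), not the package's deleted implementation:

    import { createHash, createHmac, pbkdf2Sync } from 'node:crypto';

    // authMessage = client-first-bare + ',' + server-first + ',' + client-final-without-proof,
    // matching the fullMessage assembled in saslScram above.
    const scramClientProof = (password: string, salt: Buffer, iterations: number, authMessage: string): string => {
        const saltedPassword = pbkdf2Sync(password, salt, iterations, 32, 'sha256');
        const clientKey = createHmac('sha256', saltedPassword).update('Client Key').digest();
        const storedKey = createHash('sha256').update(clientKey).digest(); // clientKeyHash above
        const clientSignature = createHmac('sha256', storedKey).update(authMessage).digest();
        const proof = Buffer.alloc(clientKey.length); // proof = ClientKey XOR ClientSignature
        for (let i = 0; i < clientKey.length; i++) proof[i] = clientKey[i] ^ clientSignature[i];
        return proof.toString('base64');
    };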