@venizia/ignis-docs 0.0.7-1 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/wiki/references/helpers/kafka/admin.md +178 -0
- package/wiki/references/helpers/kafka/consumer.md +384 -0
- package/wiki/references/helpers/kafka/examples.md +361 -0
- package/wiki/references/helpers/kafka/index.md +543 -212
- package/wiki/references/helpers/kafka/producer.md +273 -0
- package/wiki/references/helpers/kafka/schema-registry.md +214 -0

# Kafka

Apache Kafka event streaming with producer, consumer, admin, and schema registry helpers. Built on [`@platformatic/kafka`](https://github.com/platformatic/kafka) v1.30.0 — a pure TypeScript Kafka client with zero native dependencies.

## Overview

The Kafka module provides four helper classes:

| Class | Wraps | Use Case |
|-------|-------|----------|
| `KafkaProducerHelper` | `Producer` | Publish messages, transactions |
| `KafkaConsumerHelper` | `Consumer` | Consume messages with consumer groups, lag monitoring |
| `KafkaAdminHelper` | `Admin` | Manage topics, partitions, groups, ACLs, configs |
| `KafkaSchemaRegistryHelper` | `ConfluentSchemaRegistry` | Schema validation and auto ser/deser |

All helpers (except schema registry) extend `BaseKafkaHelper`, which provides:

- **Scoped logging** via `BaseHelper` (Winston with daily rotation)
- **Health tracking** — `isHealthy()`, `isReady()`, `getHealthStatus()`
- **Broker event callbacks** — `onBrokerConnect`, `onBrokerDisconnect`
- **Graceful shutdown** — timeout-based with force fallback
- **Sensible defaults** via `KafkaDefaults` constants
- **Factory pattern** via `newInstance()` static method

Use `getProducer()`, `getConsumer()`, or `getAdmin()` to access the full underlying `@platformatic/kafka` API directly.
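
For example, a minimal escape hatch to the raw client (a sketch assuming a broker on `localhost:9092` and an existing `demo` topic):

```typescript
import { stringSerializers } from '@platformatic/kafka';
import { KafkaProducerHelper } from '@venizia/ignis-helpers/kafka';

const helper = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'overview-demo',
  serializers: stringSerializers,
});

// Full @platformatic/kafka Producer, for anything the wrapper does not re-expose
const raw = helper.getProducer();
await raw.send({ messages: [{ topic: 'demo', key: 'k1', value: 'v1' }] });

await helper.close(); // graceful shutdown with timeout fallback
```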

### Import Path

```typescript
// Helpers & constants
import {
  KafkaProducerHelper,
  KafkaConsumerHelper,
  KafkaAdminHelper,
  KafkaSchemaRegistryHelper,
  BaseKafkaHelper,
  KafkaDefaults,
  KafkaAcks,
  KafkaGroupProtocol,
  KafkaHealthStatuses,
  KafkaClientEvents,
} from '@venizia/ignis-helpers/kafka';

// Types
import type {
  IKafkaConnectionOptions,
  IKafkaProducerOptions,
  IKafkaConsumerOptions,
  IKafkaAdminOptions,
  IKafkaConsumeStartOptions,
  IKafkaSchemaRegistryOptions,
  IKafkaTransactionContext,
  IKafkaBaseOptions,
  TKafkaAcks,
  TKafkaGroupProtocol,
  TKafkaHealthStatus,
  TKafkaBrokerEventCallback,
  TKafkaMessageCallback,
  TKafkaMessageDoneCallback,
  TKafkaMessageErrorCallback,
  TKafkaGroupJoinCallback,
  TKafkaGroupLeaveCallback,
  TKafkaGroupRebalanceCallback,
  TKafkaHeartbeatErrorCallback,
  TKafkaLagCallback,
  TKafkaLagErrorCallback,
  TKafkaTransactionCallback,
} from '@venizia/ignis-helpers/kafka';

// @platformatic/kafka (direct usage)
import {
  Producer, Consumer, Admin, MessagesStream,
  stringSerializers, stringDeserializers,
  stringSerializer, stringDeserializer,
  jsonSerializer, jsonDeserializer,
  serializersFrom, deserializersFrom,
} from '@platformatic/kafka';

import type {
  Message, MessageToProduce,
  SendOptions, ConsumeOptions,
  Serializers, Deserializers,
  SASLOptions, ConnectionOptions,
} from '@platformatic/kafka';
```

### Installation

```bash
bun add @platformatic/kafka
```

## Architecture

### Class Hierarchy

```
BaseHelper (scoped logging, identifier)
└── BaseKafkaHelper<TClient> (health tracking, broker events, graceful shutdown)
    ├── KafkaProducerHelper<K,V,HK,HV>
    ├── KafkaConsumerHelper<K,V,HK,HV>
    └── KafkaAdminHelper

BaseHelper
└── KafkaSchemaRegistryHelper<K,V,HK,HV> (no broker connection)
```

### BaseKafkaHelper

All Kafka helpers (except schema registry) extend `BaseKafkaHelper<TClient>`, which provides:

```typescript
abstract class BaseKafkaHelper<TClient extends Base<BaseOptions>> extends BaseHelper {
  // Health
  isHealthy(): boolean;                  // healthStatus === 'connected'
  isReady(): boolean;                    // healthStatus === 'connected' (consumer overrides: + isActive())
  getHealthStatus(): TKafkaHealthStatus; // 'connected' | 'disconnected' | 'unknown'

  // Shutdown (used by subclasses)
  protected closeClient(): Promise<void>;
  protected gracefulCloseClient(): Promise<void>; // races closeClient vs shutdownTimeout
}
```

Health status transitions automatically via broker events:

- `client:broker:connect` → `'connected'`
- `client:broker:disconnect` → `'disconnected'`
- `client:broker:failed` → `'disconnected'`
- `close()` → `'disconnected'`
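
These transitions make health cheap to poll. A minimal readiness-probe sketch built only on the health API above (the `readiness` function and its result shape are illustrative):

```typescript
import type { TKafkaHealthStatus } from '@venizia/ignis-helpers/kafka';

// Works for any of the three broker-backed helpers.
function readiness(helper: { isReady(): boolean; getHealthStatus(): TKafkaHealthStatus }) {
  return {
    ok: helper.isReady(),            // false until 'connected' (and, for consumers, actively consuming)
    kafka: helper.getHealthStatus(), // 'connected' | 'disconnected' | 'unknown'
  };
}
```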

## Connection Options

All three helpers share a common base interface, `IKafkaConnectionOptions`, which extends `@platformatic/kafka`'s `ConnectionOptions`:

```typescript
interface IKafkaConnectionOptions extends ConnectionOptions {
  bootstrapBrokers: string[];
  clientId: string;
  retries?: number;    // Default: 3
  retryDelay?: number; // Default: 1000ms
}
```

### Full Options Table

| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `bootstrapBrokers` | `string[]` | — | Kafka broker addresses (`host:port`). **Required** |
| `clientId` | `string` | — | Unique client identifier. **Required** |
| `retries` | `number` | `3` | Number of connection retries before failing |
| `retryDelay` | `number` | `1000` | Delay between retries in milliseconds |
| `sasl` | `SASLOptions` | — | SASL authentication configuration |
| `tls` | `TLSConnectionOptions` | — | TLS/SSL connection options |
| `ssl` | `TLSConnectionOptions` | — | Alias for `tls` |
| `connectTimeout` | `number` | — | TCP connection timeout in milliseconds |
| `requestTimeout` | `number` | — | Kafka request timeout in milliseconds |

### Shared Helper Options

These options are available on all three helpers (`IKafkaProducerOptions`, `IKafkaConsumerOptions`, `IKafkaAdminOptions`):

| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `identifier` | `string` | `'kafka-{type}'` | Scoped logging identifier |
| `shutdownTimeout` | `number` | `30000` | Graceful shutdown timeout in ms |
| `onBrokerConnect` | `TKafkaBrokerEventCallback` | — | Called when broker connects |
| `onBrokerDisconnect` | `TKafkaBrokerEventCallback` | — | Called when broker disconnects |
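
Putting the connection and shared options together (values are illustrative):

```typescript
const admin = KafkaAdminHelper.newInstance({
  // IKafkaConnectionOptions
  bootstrapBrokers: ['broker1:9092', 'broker2:9092'],
  clientId: 'ops-admin',
  retries: 5,        // default: 3
  retryDelay: 2_000, // default: 1000 ms
  // Shared helper options
  identifier: 'kafka-admin-ops', // default: 'kafka-admin'
  shutdownTimeout: 10_000,       // default: 30000 ms
  onBrokerConnect: ({ broker }) => console.log(`up: ${broker.host}:${broker.port}`),
  onBrokerDisconnect: ({ broker }) => console.log(`down: ${broker.host}:${broker.port}`),
});
```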

### SASL Authentication

`@platformatic/kafka` supports five SASL mechanisms:

| Mechanism | Use Case |
|-----------|----------|
| `PLAIN` | Simple username/password (use with TLS in production) |
| `SCRAM-SHA-256` | Challenge-response, password never sent in plaintext |
| `SCRAM-SHA-512` | Same as SHA-256 with stronger hash |
| `OAUTHBEARER` | Token-based (Azure Event Hubs, Confluent Cloud) |
| `GSSAPI` | Kerberos authentication |

```typescript
interface SASLOptions {
  mechanism: 'PLAIN' | 'SCRAM-SHA-256' | 'SCRAM-SHA-512' | 'OAUTHBEARER' | 'GSSAPI';
  username?: string | CredentialProvider;
  password?: string | CredentialProvider;
  token?: string | CredentialProvider;
  oauthBearerExtensions?: Record<string, string> | CredentialProvider<Record<string, string>>;
  authenticate?: SASLCustomAuthenticator;
}
```

#### SCRAM-SHA-512 Example

```typescript
const helper = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['broker1:9092', 'broker2:9092', 'broker3:9092'],
  clientId: 'my-consumer',
  groupId: 'my-group',
  sasl: {
    mechanism: 'SCRAM-SHA-512',
    username: 'kafka-user',
    password: 'kafka-password',
  },
  connectTimeout: 30_000,
  requestTimeout: 30_000,
  onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}:${broker.port}`),
});
```

#### OAUTHBEARER Example

```typescript
const helper = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['pkc-xxxxx.us-west-2.aws.confluent.cloud:9092'],
  clientId: 'my-producer',
  sasl: {
    mechanism: 'OAUTHBEARER',
    token: async () => {
      const response = await fetch('https://auth.example.com/token', { method: 'POST' });
      const { access_token } = await response.json();
      return access_token;
    },
  },
  tls: true,
});
```

#### TLS Without SASL

```typescript
import fs from 'node:fs';

const helper = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['broker:9093'],
  clientId: 'my-producer',
  tls: {
    ca: fs.readFileSync('/path/to/ca.pem'),
    cert: fs.readFileSync('/path/to/client-cert.pem'),
    key: fs.readFileSync('/path/to/client-key.pem'),
  },
});
```

## Serialization & Deserialization

`@platformatic/kafka`'s default wire format is `Buffer`. The helpers default their generic types to `string` (matching common usage), but the type parameters alone do not change the wire format; serializers/deserializers must still be provided explicitly.

### Built-in Serializers

| Export | Type | Description |
|--------|------|-------------|
| `stringSerializer` | `Serializer<string>` | `string → Buffer` (UTF-8) |
| `stringDeserializer` | `Deserializer<string>` | `Buffer → string` (UTF-8) |
| `jsonSerializer` | `Serializer<T>` | `object → Buffer` (JSON.stringify + UTF-8) |
| `jsonDeserializer` | `Deserializer<T>` | `Buffer → object` (UTF-8 + JSON.parse) |
| `stringSerializers` | `Serializers<string, string, string, string>` | All four positions as string |
| `stringDeserializers` | `Deserializers<string, string, string, string>` | All four positions as string |

### Helper Functions

| Export | Signature | Description |
|--------|-----------|-------------|
| `serializersFrom(s)` | `<T>(s: Serializer<T>) => Serializers<T, T, T, T>` | Create full serializers from a single serializer |
| `deserializersFrom(d)` | `<T>(d: Deserializer<T>) => Deserializers<T, T, T, T>` | Create full deserializers from a single deserializer |

### String Serialization

```typescript
import { stringSerializers, stringDeserializers } from '@platformatic/kafka';

const producer = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-producer',
  serializers: stringSerializers,
});

const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-consumer',
  groupId: 'my-group',
  deserializers: stringDeserializers,
  onMessage: async ({ message }) => {
    console.log(message.key, message.value); // both strings
  },
});
```

### JSON Serialization

```typescript
import {
  jsonSerializer, jsonDeserializer,
  stringSerializer, stringDeserializer,
  serializersFrom, deserializersFrom,
} from '@platformatic/kafka';

const producer = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-producer',
  serializers: { ...serializersFrom(jsonSerializer), key: stringSerializer },
});

await producer.getProducer().send({
  messages: [{
    topic: 'orders',
    key: 'order-123',
    value: { id: '123', status: 'created', amount: 99 },
  }],
});

const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-consumer',
  groupId: 'my-group',
  deserializers: { ...deserializersFrom(jsonDeserializer), key: stringDeserializer },
  onMessage: async ({ message }) => {
    console.log(message.value.id, message.value.status); // typed object
  },
});
```

### Schema Registry Serialization

For schema-validated serialization (Avro, Protobuf, JSON Schema), use the schema registry helper:

```typescript
const registry = KafkaSchemaRegistryHelper.newInstance({
  url: 'http://localhost:8081',
});

const producer = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-producer',
  registry: registry.getRegistry(),
});

const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-consumer',
  groupId: 'my-group',
  registry: registry.getRegistry(),
  onMessage: async ({ message }) => {
    // message.value is auto-deserialized using the registered schema
  },
});
```

See **[Schema Registry](./schema-registry)** for full documentation.

## Generic Type Parameters

All helpers (and their option interfaces) support generic type parameters controlling the serialization types:

```typescript
class KafkaProducerHelper<
  KeyType = string,
  ValueType = string,
  HeaderKeyType = string,
  HeaderValueType = string,
>
```

| Parameter | Default | Description |
|-----------|---------|-------------|
| `KeyType` | `string` | Message key type after serialization/deserialization |
| `ValueType` | `string` | Message value type after serialization/deserialization |
| `HeaderKeyType` | `string` | Header key type |
| `HeaderValueType` | `string` | Header value type |

> [!NOTE]
> `@platformatic/kafka` defaults to `Buffer` for all four positions. The helpers default to `string`, which is more common for application code. If you don't pass serializers, your messages will still be sent/received as `Buffer`.

```typescript
// Default: string types (most common)
const helper = KafkaProducerHelper.newInstance({ ... });

// Custom: string keys, JSON object values
const helper = KafkaProducerHelper.newInstance<string, MyEvent, string, string>({
  serializers: { ...serializersFrom(jsonSerializer), key: stringSerializer },
  ...
});
```

## Constants

### KafkaDefaults

Centralized default values used by all helpers.

```typescript
import { KafkaDefaults } from '@venizia/ignis-helpers/kafka';
```

| Constant | Value | Scope | Description |
|----------|-------|-------|-------------|
| `RETRIES` | `3` | Shared | Connection retry count |
| `RETRY_DELAY` | `1000` | Shared | Retry delay in ms |
| `SHUTDOWN_TIMEOUT` | `30000` | Shared | Graceful shutdown timeout in ms |
| `STRICT` | `true` | Producer | Fail on unknown topics |
| `AUTOCREATE_TOPICS` | `false` | Producer | Auto-create topics on produce |
| `AUTOCOMMIT` | `false` | Consumer | Auto-commit offsets |
| `SESSION_TIMEOUT` | `30000` | Consumer | Session timeout in ms |
| `HEARTBEAT_INTERVAL` | `3000` | Consumer | Heartbeat interval in ms |
| `HIGH_WATER_MARK` | `1024` | Consumer | Stream buffer size (messages) |
| `MIN_BYTES` | `1` | Consumer | Min bytes per fetch |
| `METADATA_MAX_AGE` | `300000` | Consumer | Metadata cache TTL in ms |
| `GROUP_PROTOCOL` | `'classic'` | Consumer | Default group protocol |
| `CONSUME_MODE` | `'committed'` | Consumer | Default consume mode |
| `CONSUME_FALLBACK_MODE` | `'latest'` | Consumer | Default consume fallback mode |
| `LAG_MONITOR_INTERVAL` | `30000` | Consumer | Lag monitoring poll interval in ms |
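
The constants are plain values, so they can be reused when overriding defaults. A sketch (assuming the consumer option names mirror these defaults; see [Consumer](./consumer) for the authoritative list):

```typescript
import { KafkaDefaults, KafkaConsumerHelper } from '@venizia/ignis-helpers/kafka';

const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'slow-consumer',
  groupId: 'my-group',
  sessionTimeout: KafkaDefaults.SESSION_TIMEOUT * 2, // 60 s instead of the 30 s default
  onMessage: async ({ message }) => console.log(message.value),
});
```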

### KafkaHealthStatuses

Health status values used by all Kafka helpers.

```typescript
import { KafkaHealthStatuses } from '@venizia/ignis-helpers/kafka';
```

| Constant | Value | Description |
|----------|-------|-------------|
| `CONNECTED` | `'connected'` | Broker connection established |
| `DISCONNECTED` | `'disconnected'` | Broker connection lost or closed |
| `UNKNOWN` | `'unknown'` | Initial state before first broker event |
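
Prefer the constants over string literals when branching on health, for example:

```typescript
import { KafkaHealthStatuses, type TKafkaHealthStatus } from '@venizia/ignis-helpers/kafka';

function isUsable(status: TKafkaHealthStatus): boolean {
  return status === KafkaHealthStatuses.CONNECTED;
}
```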

### KafkaClientEvents

Event name constants for `@platformatic/kafka` event emitters.

```typescript
import { KafkaClientEvents } from '@venizia/ignis-helpers/kafka';
```

| Constant | Value | Scope |
|----------|-------|-------|
| `BROKER_CONNECT` | `'client:broker:connect'` | All clients |
| `BROKER_DISCONNECT` | `'client:broker:disconnect'` | All clients |
| `BROKER_FAILED` | `'client:broker:failed'` | All clients |
| `CONSUMER_GROUP_JOIN` | `'consumer:group:join'` | Consumer |
| `CONSUMER_GROUP_LEAVE` | `'consumer:group:leave'` | Consumer |
| `CONSUMER_GROUP_REBALANCE` | `'consumer:group:rebalance'` | Consumer |
| `CONSUMER_HEARTBEAT_ERROR` | `'consumer:heartbeat:error'` | Consumer |
| `CONSUMER_LAG` | `'consumer:lag'` | Consumer |
| `CONSUMER_LAG_ERROR` | `'consumer:lag:error'` | Consumer |
| `STREAM_DATA` | `'data'` | Stream |
| `STREAM_ERROR` | `'error'` | Stream |
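
Since the underlying clients are event emitters, these constants can be passed to `on(...)` directly. A sketch (the listener payload is left untyped here; shapes vary by event):

```typescript
import { KafkaClientEvents } from '@venizia/ignis-helpers/kafka';

// `consumer` is a KafkaConsumerHelper; attach a raw listener
// alongside the helper's own callbacks.
consumer.getConsumer().on(KafkaClientEvents.CONSUMER_GROUP_REBALANCE, (...args: unknown[]) => {
  console.log('rebalance:', ...args);
});
```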

### KafkaAcks

Producer acknowledgment levels.

```typescript
import { KafkaAcks } from '@venizia/ignis-helpers/kafka';
```

| Constant | Value | Description | Trade-off |
|----------|-------|-------------|-----------|
| `NONE` | `0` | No acknowledgment — fire-and-forget | Fastest, no durability guarantee |
| `LEADER` | `1` | Leader broker acknowledges | Fast, leader-durable |
| `ALL` | `-1` | All in-sync replicas acknowledge | Slowest, fully durable |
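
For example, using the constant instead of the raw number (equivalent to the `acks: -1` in the construction example below):

```typescript
import { KafkaAcks, KafkaProducerHelper } from '@venizia/ignis-helpers/kafka';

const producer = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'durable-producer',
  acks: KafkaAcks.ALL, // -1: wait for all in-sync replicas
});
```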

### KafkaGroupProtocol

Consumer group protocol versions.

```typescript
import { KafkaGroupProtocol } from '@venizia/ignis-helpers/kafka';
```

| Constant | Value | Description |
|----------|-------|-------------|
| `CLASSIC` | `'classic'` | Classic consumer group protocol (default, all Kafka versions) |
| `CONSUMER` | `'consumer'` | New consumer group protocol — KIP-848 (Kafka 3.7+) |
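
Opting into the KIP-848 protocol (a sketch; assumes brokers on Kafka 3.7+ with the feature enabled, and that the consumer option is named `groupProtocol` to match the `GROUP_PROTOCOL` default above; see [Consumer](./consumer) for the authoritative name):

```typescript
import { KafkaGroupProtocol, KafkaConsumerHelper } from '@venizia/ignis-helpers/kafka';

const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'kip848-consumer',
  groupId: 'my-group',
  groupProtocol: KafkaGroupProtocol.CONSUMER, // assumed option name
  onMessage: async ({ message }) => console.log(message.value),
});
```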

### Derived Types

```typescript
import type { TKafkaAcks, TKafkaGroupProtocol, TKafkaHealthStatus } from '@venizia/ignis-helpers/kafka';

// TKafkaAcks = 0 | 1 | -1
// TKafkaGroupProtocol = 'classic' | 'consumer'
// TKafkaHealthStatus = 'connected' | 'disconnected' | 'unknown'
```

## Compression

`@platformatic/kafka` supports four compression algorithms (plus `none`, the default):

| Algorithm | Value | Description |
|-----------|-------|-------------|
| None | `'none'` | No compression (default) |
| GZIP | `'gzip'` | Good compression ratio, moderate CPU |
| Snappy | `'snappy'` | Fast compression, moderate ratio |
| LZ4 | `'lz4'` | Very fast, good for high-throughput |
| Zstandard | `'zstd'` | Best ratio, moderate CPU |

```typescript
const helper = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'my-producer',
  serializers: stringSerializers,
  compression: 'zstd',
});

// Override per-send
await helper.getProducer().send({
  messages: [{ topic: 'logs', key: 'l1', value: largePayload }],
  compression: 'lz4',
});
```

## Quick Usage Comparison

### Construction

```typescript
// Admin
const admin = KafkaAdminHelper.newInstance({
  bootstrapBrokers: ['127.0.0.1:29092'],
  clientId: 'my-admin',
  onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}`),
  onBrokerDisconnect: ({ broker }) => console.log(`Disconnected from ${broker.host}`),
});

// Producer
const producer = KafkaProducerHelper.newInstance({
  bootstrapBrokers: ['127.0.0.1:29092'],
  clientId: 'my-producer',
  acks: -1,
  idempotent: true,
  transactionalId: 'my-tx',
  onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}`),
  onBrokerDisconnect: ({ broker }) => console.log(`Disconnected from ${broker.host}`),
});

// Consumer
const consumer = KafkaConsumerHelper.newInstance({
  bootstrapBrokers: ['127.0.0.1:29092'],
  clientId: 'my-consumer',
  groupId: 'my-group',
  onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}`),
  onBrokerDisconnect: ({ broker }) => console.log(`Disconnected from ${broker.host}`),
  onMessage: async ({ message }) => {
    console.log('Received:', message.value);
    await message.commit();
  },
  onMessageDone: ({ message }) => console.log('Done:', message.key),
  onMessageError: ({ error, message }) => console.error('Error:', error),
  onGroupJoin: ({ groupId, memberId }) => console.log(`Joined ${groupId} as ${memberId}`),
  onGroupLeave: ({ groupId }) => console.log(`Left ${groupId}`),
  onGroupRebalance: ({ groupId }) => console.log(`Rebalance ${groupId}`),
  onHeartbeatError: ({ error }) => console.error('Heartbeat:', error),
  onLag: ({ lag }) => console.log('Lag:', lag),
  onLagError: ({ error }) => console.error('Lag error:', error),
});
```

### Core Operations

| Admin | Producer | Consumer |
|-------|----------|----------|
| `admin.getAdmin()` | `producer.getProducer()` | `consumer.getConsumer()` |
| — | `producer.getProducer().send(...)` | `await consumer.start({ topics: ['t1'] })` |
| — | `await producer.runInTransaction(async ({ send, addConsumer, addOffset }) => { ... })` | `consumer.startLagMonitoring({ topics: ['t1'], interval: 10_000 })` |
| — | — | `consumer.stopLagMonitoring()` |
| — | — | `consumer.getStream()` |
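
Topic management on this page goes through the raw client; the helper-level wrappers are documented in [Admin](./admin). A sketch using the `@platformatic/kafka` Admin API with the `admin` helper constructed above:

```typescript
const rawAdmin = admin.getAdmin();

await rawAdmin.createTopics({ topics: ['orders'], partitions: 3, replicas: 1 });
const topics = await rawAdmin.listTopics();
await rawAdmin.deleteTopics({ topics: ['old-topic'] });
```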

### Health Checks

```typescript
// All three — identical API
helper.isHealthy();       // true when broker connected
helper.isReady();         // Admin/Producer: same as isHealthy()
                          // Consumer: isHealthy() + consumer.isActive()
helper.getHealthStatus(); // 'connected' | 'disconnected' | 'unknown'
```

### Shutdown

```typescript
// All three — identical API
await helper.close();                  // graceful (timeout → force fallback)
await helper.close({ isForce: true }); // immediate force close
```

### With Schema Registry

```typescript
const registry = KafkaSchemaRegistryHelper.newInstance({ url: 'http://localhost:8081' });

const producer = KafkaProducerHelper.newInstance({
  ...,
  registry: registry.getRegistry(),
  // or use registry.getSerializers() for manual serializer config
});

const consumer = KafkaConsumerHelper.newInstance({
  ...,
  registry: registry.getRegistry(),
  // or use registry.getDeserializers() for manual deserializer config
});
```

### Transaction (Producer Only)

```typescript
const result = await producer.runInTransaction(async ({ send, addConsumer, addOffset }) => {
  // Send messages within the transaction
  const result = await send({
    messages: [{ topic: 'orders', key: 'o1', value: '{"status":"created"}' }],
  });

  // Optionally tie in a consumer for exactly-once semantics:
  // `message` here is a record previously consumed by that consumer.
  await addConsumer(consumer.getConsumer());
  await addOffset(message);

  return result;
});
```

## Pages

- **[Producer](./producer)** — Producer helper, transactions, and full `@platformatic/kafka` Producer API reference
- **[Consumer](./consumer)** — Consumer helper, message callbacks, lag monitoring, and full Consumer API reference
- **[Admin](./admin)** — Admin helper and full Admin API reference
- **[Schema Registry](./schema-registry)** — Schema registry helper for Avro/Protobuf/JSON Schema validation
- **[Examples & Troubleshooting](./examples)** — Complete examples, IoC integration, and troubleshooting guide

## See Also

- **Other Helpers:**
  - [Queue Helper](../queue/) — BullMQ, MQTT, and in-memory queues
  - [Redis Helper](../redis/) — Redis connection management

- **External Resources:**
  - [@platformatic/kafka](https://github.com/platformatic/kafka) — Underlying Kafka client library
  - [Apache Kafka Documentation](https://kafka.apache.org/documentation/) — Official Kafka docs
  - [KIP-848](https://cwiki.apache.org/confluence/display/KAFKA/KIP-848) — New consumer group protocol