@venizia/ignis-docs 0.0.7-2 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  # Producer
2
2
 
3
- The `KafkaProducerHelper` is a thin wrapper around `@platformatic/kafka`'s `Producer`. It manages creation, logging, and lifecycle.
3
+ The `KafkaProducerHelper` wraps `@platformatic/kafka`'s `Producer` with health tracking, graceful shutdown, broker event callbacks, and a transaction helper.
4
4
 
5
5
  ```typescript
6
6
  class KafkaProducerHelper<
@@ -8,7 +8,7 @@ class KafkaProducerHelper<
8
8
  ValueType = string,
9
9
  HeaderKeyType = string,
10
10
  HeaderValueType = string,
11
- > extends BaseHelper
11
+ > extends BaseKafkaHelper<Producer<KeyType, ValueType, HeaderKeyType, HeaderValueType>>
12
12
  ```
13
13
 
14
14
  ## Helper API
@@ -16,26 +16,34 @@ class KafkaProducerHelper<
16
16
  | Method | Signature | Description |
17
17
  |--------|-----------|-------------|
18
18
  | `newInstance(opts)` | `static newInstance<K,V,HK,HV>(opts): KafkaProducerHelper<K,V,HK,HV>` | Factory method |
19
- | `getProducer()` | `(): Producer<KeyType, ValueType, HeaderKeyType, HeaderValueType>` | Access the underlying `Producer` |
20
- | `close(isForce?)` | `(isForce?: boolean): Promise<void>` | Close the producer. Default: `force=false` |
19
+ | `getProducer()` | `(): Producer<K,V,HK,HV>` | Access the underlying `Producer` |
20
+ | `runInTransaction(cb)` | `<R>(cb: TKafkaTransactionCallback<R,K,V,HK,HV>): Promise<R>` | Execute callback within a Kafka transaction |
21
+ | `isHealthy()` | `(): boolean` | `true` when the broker is connected |
22
+ | `isReady()` | `(): boolean` | Same as `isHealthy()` |
23
+ | `getHealthStatus()` | `(): TKafkaHealthStatus` | `'connected'` \| `'disconnected'` \| `'unknown'` |
24
+ | `close(opts?)` | `(opts?: { isForce?: boolean }): Promise<void>` | Close the producer (default: graceful) |
21
25
 
22
- ## IKafkaProducerOpts
26
+ ## IKafkaProducerOptions
23
27
 
24
28
  ```typescript
25
- interface IKafkaProducerOpts<KeyType, ValueType, HeaderKeyType, HeaderValueType>
29
+ interface IKafkaProducerOptions<KeyType, ValueType, HeaderKeyType, HeaderValueType>
26
30
  extends IKafkaConnectionOptions
27
31
  ```
28
32
 
29
33
  | Option | Type | Default | Description |
30
34
  |--------|------|---------|-------------|
31
35
  | `identifier` | `string` | `'kafka-producer'` | Scoped logging identifier |
32
- | `serializers` | `Partial<Serializers<K,V,HK,HV>>` | — | Key/value/header serializers. **Pass explicitly** |
36
+ | `serializers` | `Partial<Serializers<K,V,HK,HV>>` | — | Key/value/header serializers |
33
37
  | `compression` | `CompressionAlgorithmValue` | — | `'none'`, `'gzip'`, `'snappy'`, `'lz4'`, `'zstd'` |
34
38
  | `acks` | `TKafkaAcks` | — | Acknowledgment level: `0`, `1`, or `-1` |
35
39
  | `idempotent` | `boolean` | — | Enable idempotent producer (exactly-once within partition) |
36
40
  | `transactionalId` | `string` | — | Transactional ID for exactly-once across partitions |
37
41
  | `strict` | `boolean` | `true` | Strict mode — fail on unknown topics |
38
42
  | `autocreateTopics` | `boolean` | `false` | Auto-create topics on first produce |
43
+ | `shutdownTimeout` | `number` | `30000` | Graceful shutdown timeout in ms |
44
+ | `registry` | `SchemaRegistry` | — | Schema registry for auto ser/deser |
45
+ | `onBrokerConnect` | `TKafkaBrokerEventCallback` | — | Called when broker connects |
46
+ | `onBrokerDisconnect` | `TKafkaBrokerEventCallback` | — | Called when broker disconnects |
39
47
 
40
48
  Plus all [Connection Options](./#connection-options).
41
49
 
@@ -51,41 +59,109 @@ const helper = KafkaProducerHelper.newInstance({
51
59
  serializers: stringSerializers,
52
60
  acks: KafkaAcks.ALL,
53
61
  compression: 'gzip',
62
+ onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}:${broker.port}`),
63
+ onBrokerDisconnect: ({ broker }) => console.log(`Disconnected from ${broker.host}`),
54
64
  });
55
65
 
66
+ // Health check
67
+ helper.isHealthy(); // true when connected
68
+ helper.getHealthStatus(); // 'connected' | 'disconnected' | 'unknown'
69
+
70
+ // Send messages via the underlying producer
56
71
  const producer = helper.getProducer();
57
72
 
58
- // Send a single message
59
73
  await producer.send({
60
74
  messages: [
61
75
  { topic: 'orders', key: 'order-123', value: JSON.stringify({ status: 'created' }) },
62
76
  ],
63
77
  });
64
78
 
65
- // Send multiple messages (batched in a single request)
79
+ // Batch send (single request, multiple messages)
66
80
  await producer.send({
67
81
  messages: [
68
82
  { topic: 'orders', key: 'order-124', value: JSON.stringify({ status: 'created' }) },
69
- { topic: 'orders', key: 'order-125', value: JSON.stringify({ status: 'created' }) },
70
83
  { topic: 'inventory', key: 'sku-001', value: JSON.stringify({ delta: -1 }) },
71
84
  ],
72
85
  });
73
86
 
87
+ // Graceful close (waits for in-flight, times out after shutdownTimeout → force)
74
88
  await helper.close();
89
+
90
+ // Or force close immediately
91
+ await helper.close({ isForce: true });
75
92
  ```
76
93
 
77
- ### Generic Types Example
94
+ ## Transactions
95
+
96
+ `runInTransaction()` wraps `beginTransaction()` → callback → `commit()` / `abort()` with automatic logging.
97
+
98
+ > [!NOTE]
99
+ > Requires `transactionalId` and `idempotent: true` in producer options.
78
100
 
79
101
  ```typescript
80
- // Custom: string keys, JSON object values
81
- const helper = KafkaProducerHelper.newInstance<string, MyEvent, string, string>({
102
+ const helper = KafkaProducerHelper.newInstance({
82
103
  bootstrapBrokers: ['localhost:9092'],
83
- clientId: 'typed-producer',
84
- serializers: { ...serializersFrom(jsonSerializer), key: stringSerializer },
104
+ clientId: 'tx-producer',
105
+ serializers: stringSerializers,
106
+ transactionalId: 'my-tx-id',
107
+ idempotent: true,
108
+ });
109
+
110
+ // Simple transaction
111
+ const result = await helper.runInTransaction(async ({ send }) => {
112
+ return send({
113
+ messages: [
114
+ { topic: 'orders', key: 'o1', value: '{"status":"paid"}' },
115
+ { topic: 'inventory', key: 'sku-1', value: '{"delta":-1}' },
116
+ ],
117
+ });
118
+ });
119
+
120
+ // Exactly-once consume-transform-produce (with consumer offset commit)
121
+ const result = await helper.runInTransaction(async ({ send, addConsumer, addOffset }) => {
122
+ // Add consumer to transaction (for exactly-once semantics)
123
+ await addConsumer(consumer.getConsumer());
124
+
125
+ // Add the consumed message offset (will be committed on tx.commit())
126
+ await addOffset(incomingMessage);
127
+
128
+ // Produce transformed result
129
+ return send({
130
+ messages: [{ topic: 'output', key: incomingMessage.key, value: transformed }],
131
+ });
85
132
  });
86
133
  ```
87
134
 
88
- ---
135
+ ### Transaction Context
136
+
137
+ The callback receives an `IKafkaTransactionContext`:
138
+
139
+ | Property | Type | Description |
140
+ |----------|------|-------------|
141
+ | `transaction` | `Transaction` | The underlying platformatic transaction |
142
+ | `send(opts)` | `(opts: SendOptions) => Promise<ProduceResult>` | Send messages within the transaction |
143
+ | `addConsumer(consumer)` | `(consumer: Consumer) => Promise<void>` | Add a consumer for exactly-once |
144
+ | `addOffset(message)` | `(message: Message) => Promise<void>` | Add consumed message offset to transaction |
145
+
146
+ If the callback throws, the transaction is automatically aborted and the error re-thrown.
147
+
148
+ ## Graceful Shutdown
149
+
150
+ `close()` implements a two-phase shutdown:
151
+
152
+ 1. **Graceful** (default): Waits for in-flight requests to complete, with a timeout (`shutdownTimeout`, default 30s)
153
+ 2. **Force fallback**: If graceful times out, automatically force-closes
154
+ 3. **Force** (`{ isForce: true }`): Immediately aborts all in-flight requests
155
+
156
+ ```typescript
157
+ // Graceful (recommended)
158
+ await helper.close();
159
+
160
+ // Force
161
+ await helper.close({ isForce: true });
162
+ ```
163
+
164
+ After `close()`, `healthStatus` is set to `'disconnected'`.
89
165
 
90
166
  ## API Reference (`@platformatic/kafka`)
91
167
 
@@ -114,7 +190,6 @@ interface MessageToProduce<Key, Value, HeaderKey, HeaderValue> {
114
190
  headers?: Map<HeaderKey, HeaderValue> | Record<string, HeaderValue>;
115
191
  }
116
192
 
117
- // Returns
118
193
  interface ProduceResult {
119
194
  offsets?: { topic: string; partition: number; offset: bigint }[];
120
195
  unwritableNodes?: number[];
@@ -124,11 +199,6 @@ interface ProduceResult {
124
199
  **Examples:**
125
200
 
126
201
  ```typescript
127
- // Basic send
128
- await producer.send({
129
- messages: [{ topic: 'events', key: 'user-1', value: '{"action":"login"}' }],
130
- });
131
-
132
202
  // With headers
133
203
  await producer.send({
134
204
  messages: [{
@@ -148,75 +218,25 @@ await producer.send({
148
218
  await producer.send({
149
219
  messages: [{ topic: 'events', key: 'e1', value: 'data', partition: 2 }],
150
220
  });
151
-
152
- // Override compression per-send
153
- await producer.send({
154
- messages: [{ topic: 'logs', key: 'l1', value: largePayload }],
155
- compression: 'zstd',
156
- });
157
221
  ```
158
222
 
159
223
  ### `producer.asStream(options)`
160
224
 
161
225
  Create a `Writable` stream for high-throughput producing with automatic batching.
162
226
 
163
- ```typescript
164
- interface ProducerStreamOptions<Key, Value, HeaderKey, HeaderValue> {
165
- highWaterMark?: number; // Stream buffer size
166
- batchSize?: number; // Messages per batch
167
- batchTime?: number; // Max ms before flushing batch
168
- reportMode?: 'none' | 'batch' | 'message';
169
- // ... plus all SendOptions except messages
170
- }
171
- ```
172
-
173
227
  ```typescript
174
228
  const stream = producer.asStream({ batchSize: 100, batchTime: 1000 });
175
229
 
176
- // Write messages — automatically batched
177
230
  stream.write({ topic: 'events', key: 'e1', value: '{"type":"click"}' });
178
231
  stream.write({ topic: 'events', key: 'e2', value: '{"type":"scroll"}' });
179
232
 
180
- // Listen for batch completion
181
233
  stream.on('data', (report) => {
182
234
  console.log(`Batch ${report.batchId}: ${report.count} messages sent`);
183
235
  });
184
236
 
185
- // Close when done
186
237
  await stream.close();
187
238
  ```
188
239
 
189
- ### `producer.beginTransaction(options?)`
190
-
191
- Start a Kafka transaction for exactly-once semantics across multiple topics/partitions.
192
-
193
- > [!NOTE]
194
- > Requires `transactionalId` in producer options and `idempotent: true`.
195
-
196
- ```typescript
197
- const producer = new Producer({
198
- clientId: 'tx-producer',
199
- bootstrapBrokers: ['localhost:9092'],
200
- transactionalId: 'my-tx-id',
201
- idempotent: true,
202
- serializers: stringSerializers,
203
- });
204
-
205
- const tx = await producer.beginTransaction();
206
- try {
207
- await tx.send({
208
- messages: [
209
- { topic: 'orders', key: 'o1', value: '{"status":"paid"}' },
210
- { topic: 'inventory', key: 'sku-1', value: '{"delta":-1}' },
211
- ],
212
- });
213
- await tx.commit();
214
- } catch (err) {
215
- await tx.abort();
216
- throw err;
217
- }
218
- ```
219
-
220
240
  ### `producer.close(force?)`
221
241
 
222
242
  Close the producer connection.
@@ -234,35 +254,19 @@ Close the producer connection.
234
254
  | `coordinatorId` | `number` | Transaction coordinator broker ID |
235
255
  | `streamsCount` | `number` | Number of active producer streams |
236
256
 
237
- ---
238
-
239
257
  ## Key Partitioning
240
258
 
241
259
  By default, `@platformatic/kafka` uses **murmur2 hashing** on the message key to determine the target partition:
242
260
 
243
- ```
244
- partition = murmur2(key) % numPartitions
245
- ```
246
-
247
261
  - Same key → always same partition → guaranteed ordering per key
248
262
  - `undefined` key → round-robin across partitions
249
263
  - Explicit `partition` field → overrides the partitioner
250
264
 
251
265
  ```typescript
252
- // Key-based routing: all "user-123" messages go to the same partition
253
- await producer.send({
254
- messages: [
255
- { topic: 'events', key: 'user-123', value: '{"action":"login"}' },
256
- { topic: 'events', key: 'user-123', value: '{"action":"click"}' }, // Same partition
257
- { topic: 'events', key: 'user-456', value: '{"action":"login"}' }, // Different partition
258
- ],
259
- });
260
-
261
266
  // Custom partitioner
262
267
  await producer.send({
263
268
  messages: [{ topic: 'events', key: 'e1', value: 'data' }],
264
269
  partitioner: (message) => {
265
- // Route by first character of key
266
270
  return message.key!.charCodeAt(0) % 3;
267
271
  },
268
272
  });
@@ -0,0 +1,214 @@
1
+ # Schema Registry
2
+
3
+ The `KafkaSchemaRegistryHelper` wraps `@platformatic/kafka`'s `ConfluentSchemaRegistry`. It provides a centralized schema registry that auto-serializes/deserializes messages using registered schemas (Avro, Protobuf, JSON Schema).
4
+
5
+ ```typescript
6
+ class KafkaSchemaRegistryHelper<
7
+ KeyType = string,
8
+ ValueType = string,
9
+ HeaderKeyType = string,
10
+ HeaderValueType = string,
11
+ > extends BaseHelper
12
+ ```
13
+
14
+ > [!NOTE]
15
+ > `KafkaSchemaRegistryHelper` extends `BaseHelper` directly (not `BaseKafkaHelper`) — it has no broker connection or health tracking. It's a configuration wrapper, not a client.
16
+
17
+ ## Helper API
18
+
19
+ | Method | Signature | Description |
20
+ |--------|-----------|-------------|
21
+ | `newInstance(opts)` | `static newInstance<K,V,HK,HV>(opts): KafkaSchemaRegistryHelper<K,V,HK,HV>` | Factory method |
22
+ | `getRegistry()` | `(): ConfluentSchemaRegistry<K,V,HK,HV>` | Get the registry instance (pass to producer/consumer) |
23
+ | `getSerializers()` | `(): Serializers<K,V,HK,HV>` | Get schema-based serializers |
24
+ | `getDeserializers()` | `(): Deserializers<K,V,HK,HV>` | Get schema-based deserializers |
25
+
26
+ ## IKafkaSchemaRegistryOptions
27
+
28
+ ```typescript
29
+ interface IKafkaSchemaRegistryOptions extends ConfluentSchemaRegistryOptions {
30
+ identifier?: string; // Default: 'kafka-schema-registry'
31
+ }
32
+ ```
33
+
34
+ | Option | Type | Default | Description |
35
+ |--------|------|---------|-------------|
36
+ | `url` | `string` | — | Schema registry URL. **Required** |
37
+ | `auth` | `{ username: string; password: string }` | — | Basic auth credentials |
38
+ | `protobufTypeMapper` | `ProtobufTypeMapper` | — | Custom Protobuf type mapper |
39
+ | `jsonValidateSend` | `boolean` | — | Validate JSON schema on produce |
40
+ | `identifier` | `string` | `'kafka-schema-registry'` | Scoped logging identifier |
41
+
42
+ ## What Schema Registry Solves
43
+
44
+ Without a schema registry, producers and consumers must agree on message format out-of-band. If the producer changes the shape of `value` (adds/removes fields), consumers break silently at runtime.
45
+
46
+ **Schema Registry** is a centralized server (Confluent Schema Registry) that stores and validates schemas (Avro, Protobuf, JSON Schema). It enforces a contract:
47
+
48
+ ```
49
+ Producer → "I want to send this shape" → Schema Registry validates → Kafka
50
+ Kafka → Consumer → "What shape is this?" → Schema Registry answers → Deserialize
51
+ ```
52
+
53
+ ### Without Schema Registry (raw strings)
54
+
55
+ ```typescript
56
+ // Producer — manually serialize
57
+ const producer = KafkaProducerHelper.newInstance({
58
+ bootstrapBrokers: ['127.0.0.1:29092'],
59
+ clientId: 'order-producer',
60
+ });
61
+
62
+ await producer.getProducer().send({
63
+ messages: [{
64
+ topic: 'orders',
65
+ key: 'order-1',
66
+ value: JSON.stringify({ id: 1, total: 99.99 }), // ← just a string, no validation
67
+ }],
68
+ });
69
+
70
+ // Consumer — manually deserialize, hope the shape is correct
71
+ const consumer = KafkaConsumerHelper.newInstance({
72
+ bootstrapBrokers: ['127.0.0.1:29092'],
73
+ clientId: 'order-consumer',
74
+ groupId: 'order-group',
75
+ onMessage: async ({ message }) => {
76
+ const order = JSON.parse(message.value as string); // ← pray it matches
77
+ console.log(order.id, order.total);
78
+ },
79
+ });
80
+ ```
81
+
82
+ Problem: if the producer adds `{ id: 1, total: 99.99, currency: 'USD' }` or removes `total`, the consumer has no way to know until it crashes.
83
+
84
+ ### With Schema Registry (auto serialize/deserialize)
85
+
86
+ ```typescript
87
+ // 1. Create registry — points to Confluent Schema Registry server
88
+ const registry = KafkaSchemaRegistryHelper.newInstance({
89
+ url: 'http://localhost:8081',
90
+ // auth: { username: 'user', password: 'pass' }, // optional
91
+ });
92
+
93
+ // 2. Producer — pass registry, it auto-serializes values using registered schema
94
+ const producer = KafkaProducerHelper.newInstance({
95
+ bootstrapBrokers: ['127.0.0.1:29092'],
96
+ clientId: 'order-producer',
97
+ registry: registry.getRegistry(), // ← registry handles serialization
98
+ });
99
+
100
+ await producer.getProducer().send({
101
+ messages: [{
102
+ topic: 'orders',
103
+ key: 'order-1',
104
+ value: { id: 1, total: 99.99 }, // ← object, not string! Registry serializes it
105
+ }],
106
+ });
107
+ // If the value doesn't match the registered schema → error BEFORE sending to Kafka
108
+
109
+ // 3. Consumer — pass same registry, it auto-deserializes
110
+ const consumer = KafkaConsumerHelper.newInstance({
111
+ bootstrapBrokers: ['127.0.0.1:29092'],
112
+ clientId: 'order-consumer',
113
+ groupId: 'order-group',
114
+ registry: registry.getRegistry(), // ← registry handles deserialization
115
+ onMessage: async ({ message }) => {
116
+ // message.value is already a typed object, not a raw string
117
+ console.log(message.value.id, message.value.total);
118
+ },
119
+ });
120
+ ```
121
+
122
+ ### Comparison
123
+
124
+ | | Without Registry | With Registry |
125
+ |---|---|---|
126
+ | **Message format** | Raw string, manual `JSON.stringify/parse` | Typed object, auto ser/deser |
127
+ | **Validation** | None — runtime crashes | Schema validated before send |
128
+ | **Schema evolution** | Break consumers silently | Backward/forward compatibility enforced |
129
+ | **Where schemas live** | Nowhere (tribal knowledge) | Centralized server `http://registry:8081` |
130
+
131
+ You only need it when you want **schema enforcement** across producers/consumers. For simple string messages, skip it entirely.
132
+
133
+ ## Basic Usage
134
+
135
+ ```typescript
136
+ import { KafkaSchemaRegistryHelper, KafkaProducerHelper, KafkaConsumerHelper } from '@venizia/ignis-helpers/kafka';
137
+
138
+ // 1. Create registry — points to Confluent Schema Registry server
139
+ const registry = KafkaSchemaRegistryHelper.newInstance({
140
+ url: 'http://localhost:8081',
141
+ });
142
+
143
+ // 2. Producer — registry auto-serializes values using registered schema
144
+ const producer = KafkaProducerHelper.newInstance({
145
+ bootstrapBrokers: ['localhost:9092'],
146
+ clientId: 'order-producer',
147
+ registry: registry.getRegistry(),
148
+ });
149
+
150
+ await producer.getProducer().send({
151
+ messages: [{
152
+ topic: 'orders',
153
+ key: 'order-1',
154
+ value: { id: 1, total: 99.99 }, // object, not string — registry serializes
155
+ }],
156
+ });
157
+ // If value doesn't match the registered schema → error BEFORE sending to Kafka
158
+
159
+ // 3. Consumer — registry auto-deserializes
160
+ const consumer = KafkaConsumerHelper.newInstance({
161
+ bootstrapBrokers: ['localhost:9092'],
162
+ clientId: 'order-consumer',
163
+ groupId: 'order-group',
164
+ registry: registry.getRegistry(),
165
+ onMessage: async ({ message }) => {
166
+ // message.value is already a typed object, not a raw string
167
+ console.log(message.value.id, message.value.total);
168
+ },
169
+ });
170
+
171
+ await consumer.start({ topics: ['orders'] });
172
+ ```
173
+
174
+ ## With Authentication
175
+
176
+ ```typescript
177
+ const registry = KafkaSchemaRegistryHelper.newInstance({
178
+ url: 'https://schema-registry.example.com',
179
+ auth: {
180
+ username: 'registry-user',
181
+ password: 'registry-password',
182
+ },
183
+ });
184
+ ```
185
+
186
+ ## Alternative: Manual Serializers
187
+
188
+ Instead of passing the full registry, you can extract serializers/deserializers for manual use:
189
+
190
+ ```typescript
191
+ const registry = KafkaSchemaRegistryHelper.newInstance({
192
+ url: 'http://localhost:8081',
193
+ });
194
+
195
+ // Use serializers directly (instead of registry)
196
+ const producer = KafkaProducerHelper.newInstance({
197
+ bootstrapBrokers: ['localhost:9092'],
198
+ clientId: 'my-producer',
199
+ serializers: registry.getSerializers(),
200
+ });
201
+
202
+ const consumer = KafkaConsumerHelper.newInstance({
203
+ bootstrapBrokers: ['localhost:9092'],
204
+ clientId: 'my-consumer',
205
+ groupId: 'my-group',
206
+ deserializers: registry.getDeserializers(),
207
+ onMessage: async ({ message }) => { ... },
208
+ });
209
+ ```
210
+
211
+ ## When to Use
212
+
213
+ - **Use schema registry** when you need schema enforcement, validation, and compatibility checks across producers/consumers — especially in multi-team environments
214
+ - **Skip schema registry** for simple string/JSON messages where both sides are controlled by the same team and format changes are coordinated