@venizia/ignis-docs 0.0.7-1 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,273 @@
1
+ # Producer
2
+
3
+ The `KafkaProducerHelper` wraps `@platformatic/kafka`'s `Producer` with health tracking, graceful shutdown, broker event callbacks, and a transaction helper.
4
+
5
+ ```typescript
6
+ class KafkaProducerHelper<
7
+ KeyType = string,
8
+ ValueType = string,
9
+ HeaderKeyType = string,
10
+ HeaderValueType = string,
11
+ > extends BaseKafkaHelper<Producer<KeyType, ValueType, HeaderKeyType, HeaderValueType>>
12
+ ```
13
+
14
+ ## Helper API
15
+
16
+ | Method | Signature | Description |
17
+ |--------|-----------|-------------|
18
+ | `newInstance(opts)` | `static newInstance<K,V,HK,HV>(opts): KafkaProducerHelper<K,V,HK,HV>` | Factory method |
19
+ | `getProducer()` | `(): Producer<K,V,HK,HV>` | Access the underlying `Producer` |
20
+ | `runInTransaction(cb)` | `<R>(cb: TKafkaTransactionCallback<R,K,V,HK,HV>): Promise<R>` | Execute callback within a Kafka transaction |
21
+ | `isHealthy()` | `(): boolean` | `true` when broker connected |
22
+ | `isReady()` | `(): boolean` | Same as `isHealthy()` |
23
+ | `getHealthStatus()` | `(): TKafkaHealthStatus` | `'connected'` \| `'disconnected'` \| `'unknown'` |
24
+ | `close(opts?)` | `(opts?: { isForce?: boolean }): Promise<void>` | Close the producer (default: graceful) |
25
+
26
+ ## IKafkaProducerOptions
27
+
28
+ ```typescript
29
+ interface IKafkaProducerOptions<KeyType, ValueType, HeaderKeyType, HeaderValueType>
30
+ extends IKafkaConnectionOptions
31
+ ```
32
+
33
+ | Option | Type | Default | Description |
34
+ |--------|------|---------|-------------|
35
+ | `identifier` | `string` | `'kafka-producer'` | Scoped logging identifier |
36
+ | `serializers` | `Partial<Serializers<K,V,HK,HV>>` | — | Key/value/header serializers |
37
+ | `compression` | `CompressionAlgorithmValue` | — | `'none'`, `'gzip'`, `'snappy'`, `'lz4'`, `'zstd'` |
38
+ | `acks` | `TKafkaAcks` | — | Acknowledgment level: `0`, `1`, or `-1` |
39
+ | `idempotent` | `boolean` | — | Enable idempotent producer (exactly-once within partition) |
40
+ | `transactionalId` | `string` | — | Transactional ID for exactly-once across partitions |
41
+ | `strict` | `boolean` | `true` | Strict mode — fail on unknown topics |
42
+ | `autocreateTopics` | `boolean` | `false` | Auto-create topics on first produce |
43
+ | `shutdownTimeout` | `number` | `30000` | Graceful shutdown timeout in ms |
44
+ | `registry` | `SchemaRegistry` | — | Schema registry for auto ser/deser |
45
+ | `onBrokerConnect` | `TKafkaBrokerEventCallback` | — | Called when broker connects |
46
+ | `onBrokerDisconnect` | `TKafkaBrokerEventCallback` | — | Called when broker disconnects |
47
+
48
+ Plus all [Connection Options](./#connection-options).
49
+
50
+ ## Basic Example
51
+
52
+ ```typescript
53
+ import { KafkaProducerHelper, KafkaAcks } from '@venizia/ignis-helpers/kafka';
54
+ import { stringSerializers } from '@platformatic/kafka';
55
+
56
+ const helper = KafkaProducerHelper.newInstance({
57
+ bootstrapBrokers: ['localhost:9092'],
58
+ clientId: 'order-producer',
59
+ serializers: stringSerializers,
60
+ acks: KafkaAcks.ALL,
61
+ compression: 'gzip',
62
+ onBrokerConnect: ({ broker }) => console.log(`Connected to ${broker.host}:${broker.port}`),
63
+ onBrokerDisconnect: ({ broker }) => console.log(`Disconnected from ${broker.host}`),
64
+ });
65
+
66
+ // Health check
67
+ helper.isHealthy(); // true when connected
68
+ helper.getHealthStatus(); // 'connected' | 'disconnected' | 'unknown'
69
+
70
+ // Send messages via the underlying producer
71
+ const producer = helper.getProducer();
72
+
73
+ await producer.send({
74
+ messages: [
75
+ { topic: 'orders', key: 'order-123', value: JSON.stringify({ status: 'created' }) },
76
+ ],
77
+ });
78
+
79
+ // Batch send (single request, multiple messages)
80
+ await producer.send({
81
+ messages: [
82
+ { topic: 'orders', key: 'order-124', value: JSON.stringify({ status: 'created' }) },
83
+ { topic: 'inventory', key: 'sku-001', value: JSON.stringify({ delta: -1 }) },
84
+ ],
85
+ });
86
+
87
+ // Graceful close (waits for in-flight, times out after shutdownTimeout → force)
88
+ await helper.close();
89
+
90
+ // Or force close immediately
91
+ await helper.close({ isForce: true });
92
+ ```
93
+
94
+ ## Transactions
95
+
96
+ `runInTransaction()` wraps `beginTransaction()` → callback → `commit()` / `abort()` with automatic logging.
97
+
98
+ > [!NOTE]
99
+ > Requires `transactionalId` and `idempotent: true` in producer options.
100
+
101
+ ```typescript
102
+ const helper = KafkaProducerHelper.newInstance({
103
+ bootstrapBrokers: ['localhost:9092'],
104
+ clientId: 'tx-producer',
105
+ serializers: stringSerializers,
106
+ transactionalId: 'my-tx-id',
107
+ idempotent: true,
108
+ });
109
+
110
+ // Simple transaction
111
+ const result = await helper.runInTransaction(async ({ send }) => {
112
+ return send({
113
+ messages: [
114
+ { topic: 'orders', key: 'o1', value: '{"status":"paid"}' },
115
+ { topic: 'inventory', key: 'sku-1', value: '{"delta":-1}' },
116
+ ],
117
+ });
118
+ });
119
+
120
+ // Exactly-once consume-transform-produce (with consumer offset commit)
121
+ const result = await helper.runInTransaction(async ({ send, addConsumer, addOffset }) => {
122
+ // Add consumer to transaction (for exactly-once semantics)
123
+ await addConsumer(consumer.getConsumer());
124
+
125
+ // Add the consumed message offset (will be committed on tx.commit())
126
+ await addOffset(incomingMessage);
127
+
128
+ // Produce transformed result
129
+ return send({
130
+ messages: [{ topic: 'output', key: incomingMessage.key, value: transformed }],
131
+ });
132
+ });
133
+ ```
134
+
135
+ ### Transaction Context
136
+
137
+ The callback receives an `IKafkaTransactionContext`:
138
+
139
+ | Property | Type | Description |
140
+ |----------|------|-------------|
141
+ | `transaction` | `Transaction` | The underlying platformatic transaction |
142
+ | `send(opts)` | `(opts: SendOptions) => Promise<ProduceResult>` | Send messages within the transaction |
143
+ | `addConsumer(consumer)` | `(consumer: Consumer) => Promise<void>` | Add a consumer for exactly-once |
144
+ | `addOffset(message)` | `(message: Message) => Promise<void>` | Add consumed message offset to transaction |
145
+
146
+ If the callback throws, the transaction is automatically aborted and the error is re-thrown.
147
+
148
+ ## Graceful Shutdown
149
+
150
+ `close()` implements a two-phase shutdown:
151
+
152
+ 1. **Graceful** (default): Waits for in-flight requests to complete, with a timeout (`shutdownTimeout`, default 30s)
153
+ 2. **Force fallback**: If graceful times out, automatically force-closes
154
+ 3. **Force** (`{ isForce: true }`): Immediately aborts all in-flight requests
155
+
156
+ ```typescript
157
+ // Graceful (recommended)
158
+ await helper.close();
159
+
160
+ // Force
161
+ await helper.close({ isForce: true });
162
+ ```
163
+
164
+ After `close()`, `healthStatus` is set to `'disconnected'`.
165
+
166
+ ## API Reference (`@platformatic/kafka`)
167
+
168
+ After calling `helper.getProducer()`, you have full access to the `Producer` class:
169
+
170
+ ### `producer.send(options)`
171
+
172
+ Send messages to one or more topics.
173
+
174
+ ```typescript
175
+ interface SendOptions<Key, Value, HeaderKey, HeaderValue> {
176
+ messages: MessageToProduce<Key, Value, HeaderKey, HeaderValue>[];
177
+ acks?: number;
178
+ compression?: CompressionAlgorithmValue;
179
+ partitioner?: Partitioner<Key, Value, HeaderKey, HeaderValue>;
180
+ idempotent?: boolean;
181
+ autocreateTopics?: boolean;
182
+ }
183
+
184
+ interface MessageToProduce<Key, Value, HeaderKey, HeaderValue> {
185
+ topic: string;
186
+ key?: Key;
187
+ value?: Value;
188
+ partition?: number; // Explicit partition (overrides partitioner)
189
+ timestamp?: bigint; // Message timestamp
190
+ headers?: Map<HeaderKey, HeaderValue> | Record<string, HeaderValue>;
191
+ }
192
+
193
+ interface ProduceResult {
194
+ offsets?: { topic: string; partition: number; offset: bigint }[];
195
+ unwritableNodes?: number[];
196
+ }
197
+ ```
198
+
199
+ **Examples:**
200
+
201
+ ```typescript
202
+ // With headers
203
+ await producer.send({
204
+ messages: [{
205
+ topic: 'events',
206
+ key: 'user-1',
207
+ value: '{"action":"login"}',
208
+ headers: { 'x-trace-id': 'abc123', 'x-source': 'auth-service' },
209
+ }],
210
+ });
211
+
212
+ // Tombstone (delete compacted key)
213
+ await producer.send({
214
+ messages: [{ topic: 'users', key: 'user-deleted-123', value: undefined }],
215
+ });
216
+
217
+ // Explicit partition
218
+ await producer.send({
219
+ messages: [{ topic: 'events', key: 'e1', value: 'data', partition: 2 }],
220
+ });
221
+ ```
222
+
223
+ ### `producer.asStream(options)`
224
+
225
+ Create a `Writable` stream for high-throughput producing with automatic batching.
226
+
227
+ ```typescript
228
+ const stream = producer.asStream({ batchSize: 100, batchTime: 1000 });
229
+
230
+ stream.write({ topic: 'events', key: 'e1', value: '{"type":"click"}' });
231
+ stream.write({ topic: 'events', key: 'e2', value: '{"type":"scroll"}' });
232
+
233
+ stream.on('data', (report) => {
234
+ console.log(`Batch ${report.batchId}: ${report.count} messages sent`);
235
+ });
236
+
237
+ await stream.close();
238
+ ```
239
+
240
+ ### `producer.close(force?)`
241
+
242
+ Close the producer connection.
243
+
244
+ - `force=false` (default): Wait for in-flight requests to complete
245
+ - `force=true`: Abort immediately
246
+
247
+ ### Producer Properties
248
+
249
+ | Property | Type | Description |
250
+ |----------|------|-------------|
251
+ | `producerId` | `bigint \| undefined` | Assigned producer ID (after idempotent init) |
252
+ | `producerEpoch` | `number \| undefined` | Producer epoch (fencing) |
253
+ | `transaction` | `Transaction \| undefined` | Active transaction (if any) |
254
+ | `coordinatorId` | `number` | Transaction coordinator broker ID |
255
+ | `streamsCount` | `number` | Number of active producer streams |
256
+
257
+ ## Key Partitioning
258
+
259
+ By default, `@platformatic/kafka` uses **murmur2 hashing** on the message key to determine the target partition:
260
+
261
+ - Same key → always same partition → guaranteed ordering per key
262
+ - `undefined` key → round-robin across partitions
263
+ - Explicit `partition` field → overrides the partitioner
264
+
265
+ ```typescript
266
+ // Custom partitioner
267
+ await producer.send({
268
+ messages: [{ topic: 'events', key: 'e1', value: 'data' }],
269
+ partitioner: (message) => {
270
+ return message.key!.charCodeAt(0) % 3;
271
+ },
272
+ });
273
+ ```
@@ -0,0 +1,214 @@
1
+ # Schema Registry
2
+
3
+ The `KafkaSchemaRegistryHelper` wraps `@platformatic/kafka`'s `ConfluentSchemaRegistry`. It provides a centralized schema registry that auto-serializes/deserializes messages using registered schemas (Avro, Protobuf, JSON Schema).
4
+
5
+ ```typescript
6
+ class KafkaSchemaRegistryHelper<
7
+ KeyType = string,
8
+ ValueType = string,
9
+ HeaderKeyType = string,
10
+ HeaderValueType = string,
11
+ > extends BaseHelper
12
+ ```
13
+
14
+ > [!NOTE]
15
+ > `KafkaSchemaRegistryHelper` extends `BaseHelper` directly (not `BaseKafkaHelper`) — it has no broker connection or health tracking. It's a configuration wrapper, not a client.
16
+
17
+ ## Helper API
18
+
19
+ | Method | Signature | Description |
20
+ |--------|-----------|-------------|
21
+ | `newInstance(opts)` | `static newInstance<K,V,HK,HV>(opts): KafkaSchemaRegistryHelper<K,V,HK,HV>` | Factory method |
22
+ | `getRegistry()` | `(): ConfluentSchemaRegistry<K,V,HK,HV>` | Get the registry instance (pass to producer/consumer) |
23
+ | `getSerializers()` | `(): Serializers<K,V,HK,HV>` | Get schema-based serializers |
24
+ | `getDeserializers()` | `(): Deserializers<K,V,HK,HV>` | Get schema-based deserializers |
25
+
26
+ ## IKafkaSchemaRegistryOptions
27
+
28
+ ```typescript
29
+ interface IKafkaSchemaRegistryOptions extends ConfluentSchemaRegistryOptions {
30
+ identifier?: string; // Default: 'kafka-schema-registry'
31
+ }
32
+ ```
33
+
34
+ | Option | Type | Default | Description |
35
+ |--------|------|---------|-------------|
36
+ | `url` | `string` | — | Schema registry URL. **Required** |
37
+ | `auth` | `{ username: string; password: string }` | — | Basic auth credentials |
38
+ | `protobufTypeMapper` | `ProtobufTypeMapper` | — | Custom Protobuf type mapper |
39
+ | `jsonValidateSend` | `boolean` | — | Validate JSON schema on produce |
40
+ | `identifier` | `string` | `'kafka-schema-registry'` | Scoped logging identifier |
41
+
42
+ ## What Schema Registry Solves
43
+
44
+ Without a schema registry, producers and consumers must agree on message format out-of-band. If the producer changes the shape of `value` (adds/removes fields), consumers break silently at runtime.
45
+
46
+ **Schema Registry** is a centralized server (Confluent Schema Registry) that stores and validates schemas (Avro, Protobuf, JSON Schema). It enforces a contract:
47
+
48
+ ```
49
+ Producer → "I want to send this shape" → Schema Registry validates → Kafka
50
+ Kafka → Consumer → "What shape is this?" → Schema Registry tells → Deserialize
51
+ ```
52
+
53
+ ### Without Schema Registry (raw strings)
54
+
55
+ ```typescript
56
+ // Producer — manually serialize
57
+ const producer = KafkaProducerHelper.newInstance({
58
+ bootstrapBrokers: ['127.0.0.1:29092'],
59
+ clientId: 'order-producer',
60
+ });
61
+
62
+ await producer.getProducer().send({
63
+ messages: [{
64
+ topic: 'orders',
65
+ key: 'order-1',
66
+ value: JSON.stringify({ id: 1, total: 99.99 }), // ← just a string, no validation
67
+ }],
68
+ });
69
+
70
+ // Consumer — manually deserialize, hope the shape is correct
71
+ const consumer = KafkaConsumerHelper.newInstance({
72
+ bootstrapBrokers: ['127.0.0.1:29092'],
73
+ clientId: 'order-consumer',
74
+ groupId: 'order-group',
75
+ onMessage: async ({ message }) => {
76
+ const order = JSON.parse(message.value as string); // ← pray it matches
77
+ console.log(order.id, order.total);
78
+ },
79
+ });
80
+ ```
81
+
82
+ Problem: if the producer adds `{ id: 1, total: 99.99, currency: 'USD' }` or removes `total`, the consumer has no way to know until it crashes.
83
+
84
+ ### With Schema Registry (auto serialize/deserialize)
85
+
86
+ ```typescript
87
+ // 1. Create registry — points to Confluent Schema Registry server
88
+ const registry = KafkaSchemaRegistryHelper.newInstance({
89
+ url: 'http://localhost:8081',
90
+ // auth: { username: 'user', password: 'pass' }, // optional
91
+ });
92
+
93
+ // 2. Producer — pass registry, it auto-serializes values using registered schema
94
+ const producer = KafkaProducerHelper.newInstance({
95
+ bootstrapBrokers: ['127.0.0.1:29092'],
96
+ clientId: 'order-producer',
97
+ registry: registry.getRegistry(), // ← registry handles serialization
98
+ });
99
+
100
+ await producer.getProducer().send({
101
+ messages: [{
102
+ topic: 'orders',
103
+ key: 'order-1',
104
+ value: { id: 1, total: 99.99 }, // ← object, not string! Registry serializes it
105
+ }],
106
+ });
107
+ // If the value doesn't match the registered schema → error BEFORE sending to Kafka
108
+
109
+ // 3. Consumer — pass same registry, it auto-deserializes
110
+ const consumer = KafkaConsumerHelper.newInstance({
111
+ bootstrapBrokers: ['127.0.0.1:29092'],
112
+ clientId: 'order-consumer',
113
+ groupId: 'order-group',
114
+ registry: registry.getRegistry(), // ← registry handles deserialization
115
+ onMessage: async ({ message }) => {
116
+ // message.value is already a typed object, not a raw string
117
+ console.log(message.value.id, message.value.total);
118
+ },
119
+ });
120
+ ```
121
+
122
+ ### Comparison
123
+
124
+ | | Without Registry | With Registry |
125
+ |---|---|---|
126
+ | **Message format** | Raw string, manual `JSON.stringify/parse` | Typed object, auto ser/deser |
127
+ | **Validation** | None — runtime crashes | Schema validated before send |
128
+ | **Schema evolution** | Break consumers silently | Backward/forward compatibility enforced |
129
+ | **Where schemas live** | Nowhere (tribal knowledge) | Centralized server `http://registry:8081` |
130
+
131
+ You only need a schema registry when you want **schema enforcement** across producers/consumers. For simple string messages, skip it entirely.
132
+
133
+ ## Basic Usage
134
+
135
+ ```typescript
136
+ import { KafkaSchemaRegistryHelper, KafkaProducerHelper, KafkaConsumerHelper } from '@venizia/ignis-helpers/kafka';
137
+
138
+ // 1. Create registry — points to Confluent Schema Registry server
139
+ const registry = KafkaSchemaRegistryHelper.newInstance({
140
+ url: 'http://localhost:8081',
141
+ });
142
+
143
+ // 2. Producer — registry auto-serializes values using registered schema
144
+ const producer = KafkaProducerHelper.newInstance({
145
+ bootstrapBrokers: ['localhost:9092'],
146
+ clientId: 'order-producer',
147
+ registry: registry.getRegistry(),
148
+ });
149
+
150
+ await producer.getProducer().send({
151
+ messages: [{
152
+ topic: 'orders',
153
+ key: 'order-1',
154
+ value: { id: 1, total: 99.99 }, // object, not string — registry serializes
155
+ }],
156
+ });
157
+ // If value doesn't match the registered schema → error BEFORE sending to Kafka
158
+
159
+ // 3. Consumer — registry auto-deserializes
160
+ const consumer = KafkaConsumerHelper.newInstance({
161
+ bootstrapBrokers: ['localhost:9092'],
162
+ clientId: 'order-consumer',
163
+ groupId: 'order-group',
164
+ registry: registry.getRegistry(),
165
+ onMessage: async ({ message }) => {
166
+ // message.value is already a typed object, not a raw string
167
+ console.log(message.value.id, message.value.total);
168
+ },
169
+ });
170
+
171
+ await consumer.start({ topics: ['orders'] });
172
+ ```
173
+
174
+ ## With Authentication
175
+
176
+ ```typescript
177
+ const registry = KafkaSchemaRegistryHelper.newInstance({
178
+ url: 'https://schema-registry.example.com',
179
+ auth: {
180
+ username: 'registry-user',
181
+ password: 'registry-password',
182
+ },
183
+ });
184
+ ```
185
+
186
+ ## Alternative: Manual Serializers
187
+
188
+ Instead of passing the full registry, you can extract serializers/deserializers for manual use:
189
+
190
+ ```typescript
191
+ const registry = KafkaSchemaRegistryHelper.newInstance({
192
+ url: 'http://localhost:8081',
193
+ });
194
+
195
+ // Use serializers directly (instead of registry)
196
+ const producer = KafkaProducerHelper.newInstance({
197
+ bootstrapBrokers: ['localhost:9092'],
198
+ clientId: 'my-producer',
199
+ serializers: registry.getSerializers(),
200
+ });
201
+
202
+ const consumer = KafkaConsumerHelper.newInstance({
203
+ bootstrapBrokers: ['localhost:9092'],
204
+ clientId: 'my-consumer',
205
+ groupId: 'my-group',
206
+ deserializers: registry.getDeserializers(),
207
+ onMessage: async ({ message }) => { ... },
208
+ });
209
+ ```
210
+
211
+ ## When to Use
212
+
213
+ - **Use schema registry** when you need schema enforcement, validation, and compatibility checks across producers/consumers — especially in multi-team environments
214
+ - **Skip schema registry** for simple string/JSON messages where both sides are controlled by the same team and format changes are coordinated