@woovi/kafka 0.1.0

package/README.md ADDED
@@ -0,0 +1,421 @@
+ # @woovi/kafka
+
+ Kafka setup and utilities for Woovi microservices. Production-ready with built-in logging, metrics, graceful shutdown, and test utilities.
+
+ ## Installation
+
+ ```bash
+ pnpm add @woovi/kafka
+ ```
+
+ ## Quick Start
+
+ ```typescript
+ import {
+   createKafkaFromEnv,
+   createProducer,
+   createConsumer,
+   parseMessage,
+ } from '@woovi/kafka';
+
+ // Initialize Kafka client
+ createKafkaFromEnv('my-service');
+
+ // Create producer and publish
+ const producer = createProducer({ defaultTopic: 'user-events' });
+ await producer.publish({ data: { type: 'user.created', userId: '123' } });
+
+ // Create consumer and process messages
+ const consumer = createConsumer({
+   groupId: 'my-service-group',
+   topics: ['user-events'],
+ });
+
+ await consumer.run(async (payload) => {
+   const data = parseMessage<{ type: string; userId: string }>(payload);
+   console.log('Received:', data);
+ });
+ ```
+
+ ## Configuration
+
+ ### Environment Variables
+
+ | Variable | Description | Default |
+ |----------|-------------|---------|
+ | `KAFKA_BROKERS` | Comma-separated broker list | `localhost:9092` |
+ | `KAFKA_SSL` | Enable SSL (`true`/`false`) | `false` |
+ | `KAFKA_SASL_MECHANISM` | SASL mechanism | `plain` |
+ | `KAFKA_SASL_USERNAME` | SASL username | - |
+ | `KAFKA_SASL_PASSWORD` | SASL password | - |
+
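+ For example, a client configured entirely from the environment (the broker addresses and credentials shown are placeholders; unset variables fall back to the defaults above):
+
+ ```typescript
+ import { createKafkaFromEnv } from '@woovi/kafka';
+
+ // Assuming the process was started with, e.g.:
+ //   KAFKA_BROKERS=kafka-1:9092,kafka-2:9092
+ //   KAFKA_SSL=true
+ //   KAFKA_SASL_MECHANISM=scram-sha-512
+ //   KAFKA_SASL_USERNAME=svc-user
+ //   KAFKA_SASL_PASSWORD=secret
+ createKafkaFromEnv('my-service');
+ ```
+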
+ ### Manual Configuration
+
+ ```typescript
+ import { createKafka } from '@woovi/kafka';
+
+ createKafka({
+   clientId: 'my-service',
+   brokers: ['kafka-1:9092', 'kafka-2:9092'],
+   ssl: true,
+   sasl: {
+     mechanism: 'scram-sha-512',
+     username: 'user',
+     password: 'password',
+   },
+   connectionTimeout: 10000,
+   requestTimeout: 30000,
+ });
+ ```
+
+ ## Producer
+
+ ### Basic Usage
+
+ ```typescript
+ import { createProducer } from '@woovi/kafka';
+
+ const producer = createProducer({ defaultTopic: 'events' });
+
+ // Publish single message
+ await producer.publish({
+   data: { type: 'order.created', orderId: '456' },
+ });
+
+ // Publish with options
+ await producer.publish({
+   data: { type: 'order.created', orderId: '456' },
+   key: 'order-456', // Message key for partitioning
+   headers: { source: 'api' },
+   partition: 0, // Specific partition (optional)
+ });
+
+ // Publish to different topic
+ await producer.publish({
+   data: { type: 'notification' },
+   topic: 'notifications',
+ });
+ ```
+
+ ### Batch Publishing
+
+ ```typescript
+ // Batch to same topic
+ await producer.publishBatch({
+   topic: 'events',
+   messages: [
+     { data: { type: 'event1' }, key: 'key1' },
+     { data: { type: 'event2' }, key: 'key2' },
+   ],
+ });
+
+ // Batch to multiple topics
+ await producer.publishToMultipleTopics({
+   messages: [
+     { topic: 'events', data: { type: 'event1' } },
+     { topic: 'notifications', data: { type: 'notify' } },
+   ],
+ });
+ ```
+
+ ### Producer Configuration
+
+ ```typescript
+ const producer = createProducer({
+   defaultTopic: 'events',
+   idempotent: true, // Enabled by default
+   maxInFlightRequests: 5, // Max concurrent requests
+   transactionTimeout: 30000, // Transaction timeout
+ });
+ ```
+
+ ## Consumer
+
+ ### Basic Usage
+
+ ```typescript
+ import { createConsumer, parseMessage, getMessageHeaders } from '@woovi/kafka';
+
+ const consumer = createConsumer({
+   groupId: 'my-service-group',
+   topics: ['events', 'notifications'],
+   fromBeginning: false, // Start from latest (default)
+ });
+
+ await consumer.run(async (payload) => {
+   const data = parseMessage<MyEventType>(payload);
+   const headers = getMessageHeaders(payload);
+
+   if (!data) {
+     console.error('Failed to parse message');
+     return;
+   }
+
+   console.log('Processing:', data, 'Headers:', headers);
+ });
+ ```
+
+ ### Batch Processing
+
+ ```typescript
+ await consumer.runBatch(async (payload) => {
+   for (const message of payload.batch.messages) {
+     const data = JSON.parse(message.value?.toString() || '{}');
+     // Process each message
+   }
+ });
+ ```
+
+ ### Error Handling
+
+ ```typescript
+ const consumer = createConsumer({
+   groupId: 'my-service-group',
+   topics: ['events'],
+   onError: async (error, payload) => {
+     // Send to error tracking (Sentry, etc.)
+     await sendToSentry(error, {
+       topic: payload.topic,
+       partition: payload.partition,
+       offset: payload.message.offset,
+     });
+
+     // Or send to a dead letter queue
+     await dlqProducer.publish({
+       data: {
+         originalTopic: payload.topic,
+         error: error.message,
+         payload: payload.message.value?.toString(),
+       },
+     });
+   },
+ });
+ ```
+
+ ### Consumer Configuration
+
+ ```typescript
+ const consumer = createConsumer({
+   groupId: 'my-service-group',
+   topics: ['events'],
+   fromBeginning: false,
+   sessionTimeout: 30000, // Session timeout (default: 30s)
+   rebalanceTimeout: 60000, // Rebalance timeout (default: 60s)
+   heartbeatInterval: 3000, // Heartbeat interval (default: 3s)
+   maxBytesPerPartition: 1048576, // Max bytes per partition
+   retry: {
+     initialRetryTime: 100,
+     retries: 8,
+   },
+ });
+ ```
+
+ ### Pause and Resume
+
+ ```typescript
+ // Pause all subscribed topics
+ await consumer.pause();
+
+ // Pause specific topics
+ await consumer.pause(['events']);
+
+ // Resume
+ await consumer.resume();
+ await consumer.resume(['events']);
+ ```
+
+ ## Health Check
+
+ ```typescript
+ import { healthCheck } from '@woovi/kafka';
+
+ // For load balancer / kubernetes probes
+ app.get('/health', async (req, res) => {
+   const { healthy, error } = await healthCheck();
+
+   if (healthy) {
+     res.status(200).json({ status: 'ok' });
+   } else {
+     res.status(503).json({ status: 'error', error });
+   }
+ });
+ ```
+
+ ## Graceful Shutdown
+
+ Graceful shutdown is automatic on `SIGTERM` and `SIGINT`. All producers and consumers will disconnect cleanly.
+
+ ```typescript
+ import { onShutdown } from '@woovi/kafka';
+
+ // Add custom shutdown logic
+ onShutdown(async () => {
+   console.log('Custom cleanup before Kafka disconnects');
+   await myCleanupFunction();
+ });
+ ```
+
+ ## Prometheus Metrics
+
+ ### Setup
+
+ ```typescript
+ import { getMetrics, getMetricsContentType, enableDefaultMetrics } from '@woovi/kafka';
+
+ // Optional: enable Node.js default metrics
+ enableDefaultMetrics();
+
+ // Expose /metrics endpoint
+ app.get('/metrics', async (req, res) => {
+   res.set('Content-Type', getMetricsContentType());
+   res.send(await getMetrics());
+ });
+ ```
+
+ ### Available Metrics
+
+ | Metric | Type | Description |
+ |--------|------|-------------|
+ | `kafka_consumer_message_processing_duration_seconds` | Histogram | Message processing latency |
+ | `kafka_consumer_messages_processed_total` | Counter | Total messages processed |
+ | `kafka_consumer_messages_failed_total` | Counter | Failed messages |
+ | `kafka_consumer_batch_processing_duration_seconds` | Histogram | Batch processing latency |
+ | `kafka_consumer_batch_size` | Histogram | Messages per batch |
+ | `kafka_consumer_last_message_timestamp_seconds` | Gauge | Last processed timestamp |
+ | `kafka_producer_send_duration_seconds` | Histogram | Producer send latency |
+ | `kafka_producer_messages_produced_total` | Counter | Total messages produced |
+
+ ### Grafana Queries
+
+ ```promql
+ # P99 message processing latency
+ histogram_quantile(0.99, rate(kafka_consumer_message_processing_duration_seconds_bucket[5m]))
+
+ # Messages processed per second
+ rate(kafka_consumer_messages_processed_total{status="success"}[1m])
+
+ # Error rate percentage
+ 100 * rate(kafka_consumer_messages_failed_total[5m])
+   / rate(kafka_consumer_messages_processed_total[5m])
+
+ # Producer latency P95
+ histogram_quantile(0.95, rate(kafka_producer_send_duration_seconds_bucket[5m]))
+ ```
+
+ ## Testing
+
+ ### Setup Mocks
+
+ ```typescript
+ // Jest
+ jest.mock('kafkajs', () => require('@woovi/kafka/test-utils'));
+
+ // Vitest
+ vi.mock('kafkajs', () => import('@woovi/kafka/test-utils'));
+ ```
+
+ ### Test Utilities
+
+ ```typescript
+ import {
+   kafkaAssert,
+   kafkaAssertLength,
+   getKafkaMessages,
+   clearAllMocks,
+ } from '@woovi/kafka/test-utils';
+
+ describe('MyService', () => {
+   beforeEach(() => {
+     clearAllMocks();
+   });
+
+   it('should publish user created event', async () => {
+     await myService.createUser({ name: 'John' });
+
+     // Assert message was published
+     kafkaAssert({
+       topic: 'user-events',
+       message: { type: 'user.created', name: 'John' },
+     });
+
+     // Assert message count
+     kafkaAssertLength({ topic: 'user-events', length: 1 });
+   });
+
+   it('should access raw messages', async () => {
+     await myService.createUser({ name: 'John' });
+
+     const messages = getKafkaMessages();
+     expect(messages[0].topic).toBe('user-events');
+   });
+ });
+ ```
+
+ ### Available Test Exports
+
+ ```typescript
+ import {
+   // Assertions
+   kafkaAssert,
+   kafkaAssertLength,
+   getKafkaMessages,
+   clearAllMocks,
+
+   // Mocks
+   Kafka,
+   mockProducer,
+   mockConsumer,
+   mockAdmin,
+   mockProducerSend,
+   mockTransactionSend,
+
+   // Error classes
+   KafkaJSNonRetriableError,
+   KafkaJSProtocolError,
+
+   // Constants
+   logLevel,
+   CompressionTypes,
+ } from '@woovi/kafka/test-utils';
+ ```
+
+ ## API Reference
+
+ ### Producer Methods
+
+ | Method | Description |
+ |--------|-------------|
+ | `connect()` | Connect to Kafka (auto-called on publish) |
+ | `disconnect()` | Disconnect from Kafka |
+ | `isConnected()` | Check connection status |
+ | `publish(args)` | Publish single message |
+ | `publishBatch(args)` | Publish multiple messages to one topic |
+ | `publishToMultipleTopics(args)` | Publish to multiple topics |
+ | `getProducer()` | Get underlying kafkajs producer |
+
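+ A short lifecycle sketch (illustrative; `publish` connects lazily, and graceful shutdown already disconnects registered producers):
+
+ ```typescript
+ const producer = createProducer({ defaultTopic: 'events' });
+
+ // Connecting eagerly surfaces broker/auth problems at startup
+ // instead of on the first publish.
+ await producer.connect();
+ producer.isConnected(); // true
+
+ await producer.publish({ data: { type: 'service.started' } });
+
+ // Manual disconnect is mainly useful in scripts and tests;
+ // long-running services can rely on the automatic graceful shutdown.
+ await producer.disconnect();
+ ```
+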
+ ### Consumer Methods
+
+ | Method | Description |
+ |--------|-------------|
+ | `connect()` | Connect to Kafka |
+ | `subscribe()` | Subscribe to topics |
+ | `disconnect()` | Disconnect from Kafka |
+ | `isConnected()` | Check connection status |
+ | `run(handler)` | Process messages one by one |
+ | `runBatch(handler)` | Process messages in batches |
+ | `pause(topics?)` | Pause consumption |
+ | `resume(topics?)` | Resume consumption |
+ | `getConsumer()` | Get underlying kafkajs consumer |
+
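+ `getConsumer()` exposes the underlying kafkajs consumer for anything the wrapper does not cover, for example its instrumentation events. A sketch (the event wiring below is plain kafkajs, not part of this package's API, and assumes the kafkajs consumer exists before `run()` is called):
+
+ ```typescript
+ const consumer = createConsumer({
+   groupId: 'my-service-group',
+   topics: ['events'],
+ });
+
+ // Attach a kafkajs instrumentation listener before starting the run loop
+ const raw = consumer.getConsumer();
+ raw.on(raw.events.CRASH, (event) => {
+   console.error('Consumer crashed:', event.payload.error);
+ });
+
+ await consumer.run(async (payload) => {
+   // ...
+ });
+ ```
+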
+ ### Utility Functions
+
+ | Function | Description |
+ |----------|-------------|
+ | `parseMessage<T>(payload)` | Parse message value as JSON |
+ | `getMessageHeaders(payload)` | Extract message headers |
+ | `healthCheck()` | Check Kafka connectivity |
+ | `onShutdown(callback)` | Register shutdown callback |
+ | `getMetrics()` | Get Prometheus metrics string |
+ | `getMetricsContentType()` | Get metrics content type |
+
+ ## License
+
+ ISC
@@ -0,0 +1,25 @@
+ import type { Consumer, EachMessagePayload } from 'kafkajs';
+ import type { WooviConsumerConfig, MessageHandler, BatchHandler } from './types.js';
+ export declare class WooviConsumer {
+     private consumer;
+     private topics;
+     private fromBeginning;
+     private connected;
+     private subscribed;
+     private groupId;
+     private onError?;
+     constructor(config: WooviConsumerConfig);
+     connect(): Promise<void>;
+     subscribe(): Promise<void>;
+     disconnect(): Promise<void>;
+     isConnected(): boolean;
+     run(handler: MessageHandler): Promise<void>;
+     runBatch(handler: BatchHandler): Promise<void>;
+     pause(topics?: string[]): Promise<void>;
+     resume(topics?: string[]): Promise<void>;
+     getConsumer(): Consumer;
+ }
+ export declare function createConsumer(config: WooviConsumerConfig): WooviConsumer;
+ export declare function parseMessage<T>(payload: EachMessagePayload): T | null;
+ export declare function getMessageHeaders(payload: EachMessagePayload): Record<string, string>;
+ //# sourceMappingURL=consumer.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"consumer.d.ts","sourceRoot":"","sources":["../src/consumer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,kBAAkB,EAAoB,MAAM,SAAS,CAAC;AAW9E,OAAO,KAAK,EAAE,mBAAmB,EAAE,cAAc,EAAE,YAAY,EAAgB,MAAM,YAAY,CAAC;AAIlG,qBAAa,aAAa;IACxB,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,MAAM,CAAgC;IAC9C,OAAO,CAAC,aAAa,CAAU;IAC/B,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,OAAO,CAAC,CAAe;gBAEnB,MAAM,EAAE,mBAAmB;IA6BjC,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAOxB,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAW1B,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAQjC,WAAW,IAAI,OAAO;IAIhB,GAAG,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;IAoD3C,QAAQ,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC;IA8C9C,KAAK,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IASvC,MAAM,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAS9C,WAAW,IAAI,QAAQ;CAGxB;AAED,wBAAgB,cAAc,CAAC,MAAM,EAAE,mBAAmB,GAAG,aAAa,CAEzE;AAED,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,kBAAkB,GAAG,CAAC,GAAG,IAAI,CAuBrE;AAED,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,kBAAkB,GAAG,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAUrF"}
@@ -0,0 +1,6 @@
+ export { createKafka, createKafkaFromEnv, getKafka, onShutdown, healthCheck, Kafka, logLevel } from './kafka.js';
+ export { WooviProducer, createProducer } from './producer.js';
+ export { WooviConsumer, createConsumer, parseMessage, getMessageHeaders, } from './consumer.js';
+ export { kafkaRegistry, getMetrics, getMetricsContentType, resetMetrics, enableDefaultMetrics, messageProcessingDuration, messagesProcessedTotal, messagesFailedTotal, batchProcessingDuration, batchSize, consumerLag, lastMessageTimestamp, messagesProducedTotal, produceLatency, } from './metrics.js';
+ export type { KafkaConfig, WooviProducerConfig, WooviConsumerConfig, PublishArgs, PublishBatchArgs, PublishMultiTopicArgs, MessageHandler, BatchHandler, ErrorHandler, Message, RecordMetadata, EachMessagePayload, EachBatchPayload, } from './types.js';
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,kBAAkB,EAAE,QAAQ,EAAE,UAAU,EAAE,WAAW,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAGjH,OAAO,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AAG9D,OAAO,EACL,aAAa,EACb,cAAc,EACd,YAAY,EACZ,iBAAiB,GAClB,MAAM,eAAe,CAAC;AAGvB,OAAO,EACL,aAAa,EACb,UAAU,EACV,qBAAqB,EACrB,YAAY,EACZ,oBAAoB,EAEpB,yBAAyB,EACzB,sBAAsB,EACtB,mBAAmB,EACnB,uBAAuB,EACvB,SAAS,EACT,WAAW,EACX,oBAAoB,EACpB,qBAAqB,EACrB,cAAc,GACf,MAAM,cAAc,CAAC;AAGtB,YAAY,EACV,WAAW,EACX,mBAAmB,EACnB,mBAAmB,EACnB,WAAW,EACX,gBAAgB,EAChB,qBAAqB,EACrB,cAAc,EACd,YAAY,EACZ,YAAY,EACZ,OAAO,EACP,cAAc,EACd,kBAAkB,EAClB,gBAAgB,GACjB,MAAM,YAAY,CAAC"}