chub-dev 0.1.0 → 0.1.2-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139) hide show
  1. package/README.md +55 -0
  2. package/bin/chub-mcp +2 -0
  3. package/dist/airtable/docs/database/javascript/DOC.md +1437 -0
  4. package/dist/airtable/docs/database/python/DOC.md +1735 -0
  5. package/dist/amplitude/docs/analytics/javascript/DOC.md +1282 -0
  6. package/dist/amplitude/docs/analytics/python/DOC.md +1199 -0
  7. package/dist/anthropic/docs/claude-api/javascript/DOC.md +503 -0
  8. package/dist/anthropic/docs/claude-api/python/DOC.md +389 -0
  9. package/dist/asana/docs/tasks/DOC.md +1396 -0
  10. package/dist/assemblyai/docs/transcription/DOC.md +1043 -0
  11. package/dist/atlassian/docs/confluence/javascript/DOC.md +1347 -0
  12. package/dist/atlassian/docs/confluence/python/DOC.md +1604 -0
  13. package/dist/auth0/docs/identity/javascript/DOC.md +968 -0
  14. package/dist/auth0/docs/identity/python/DOC.md +1199 -0
  15. package/dist/aws/docs/s3/javascript/DOC.md +1773 -0
  16. package/dist/aws/docs/s3/python/DOC.md +1807 -0
  17. package/dist/binance/docs/trading/javascript/DOC.md +1315 -0
  18. package/dist/binance/docs/trading/python/DOC.md +1454 -0
  19. package/dist/braintree/docs/gateway/javascript/DOC.md +1278 -0
  20. package/dist/braintree/docs/gateway/python/DOC.md +1179 -0
  21. package/dist/chromadb/docs/embeddings-db/javascript/DOC.md +1263 -0
  22. package/dist/chromadb/docs/embeddings-db/python/DOC.md +1707 -0
  23. package/dist/clerk/docs/auth/javascript/DOC.md +1220 -0
  24. package/dist/clerk/docs/auth/python/DOC.md +274 -0
  25. package/dist/cloudflare/docs/workers/javascript/DOC.md +918 -0
  26. package/dist/cloudflare/docs/workers/python/DOC.md +994 -0
  27. package/dist/cockroachdb/docs/distributed-db/DOC.md +1500 -0
  28. package/dist/cohere/docs/llm/DOC.md +1335 -0
  29. package/dist/datadog/docs/monitoring/javascript/DOC.md +1740 -0
  30. package/dist/datadog/docs/monitoring/python/DOC.md +1815 -0
  31. package/dist/deepgram/docs/speech/javascript/DOC.md +885 -0
  32. package/dist/deepgram/docs/speech/python/DOC.md +685 -0
  33. package/dist/deepl/docs/translation/javascript/DOC.md +887 -0
  34. package/dist/deepl/docs/translation/python/DOC.md +944 -0
  35. package/dist/deepseek/docs/llm/DOC.md +1220 -0
  36. package/dist/directus/docs/headless-cms/javascript/DOC.md +1128 -0
  37. package/dist/directus/docs/headless-cms/python/DOC.md +1276 -0
  38. package/dist/discord/docs/bot/javascript/DOC.md +1090 -0
  39. package/dist/discord/docs/bot/python/DOC.md +1130 -0
  40. package/dist/elasticsearch/docs/search/DOC.md +1634 -0
  41. package/dist/elevenlabs/docs/text-to-speech/javascript/DOC.md +336 -0
  42. package/dist/elevenlabs/docs/text-to-speech/python/DOC.md +552 -0
  43. package/dist/firebase/docs/auth/DOC.md +1015 -0
  44. package/dist/gemini/docs/genai/javascript/DOC.md +691 -0
  45. package/dist/gemini/docs/genai/python/DOC.md +555 -0
  46. package/dist/github/docs/octokit/DOC.md +1560 -0
  47. package/dist/google/docs/bigquery/javascript/DOC.md +1688 -0
  48. package/dist/google/docs/bigquery/python/DOC.md +1503 -0
  49. package/dist/hubspot/docs/crm/javascript/DOC.md +1805 -0
  50. package/dist/hubspot/docs/crm/python/DOC.md +2033 -0
  51. package/dist/huggingface/docs/transformers/DOC.md +948 -0
  52. package/dist/intercom/docs/messaging/javascript/DOC.md +1844 -0
  53. package/dist/intercom/docs/messaging/python/DOC.md +1797 -0
  54. package/dist/jira/docs/issues/javascript/DOC.md +1420 -0
  55. package/dist/jira/docs/issues/python/DOC.md +1492 -0
  56. package/dist/kafka/docs/streaming/javascript/DOC.md +1671 -0
  57. package/dist/kafka/docs/streaming/python/DOC.md +1464 -0
  58. package/dist/landingai-ade/docs/api/DOC.md +620 -0
  59. package/dist/landingai-ade/docs/sdk/python/DOC.md +489 -0
  60. package/dist/landingai-ade/docs/sdk/typescript/DOC.md +542 -0
  61. package/dist/landingai-ade/skills/SKILL.md +489 -0
  62. package/dist/launchdarkly/docs/feature-flags/javascript/DOC.md +1191 -0
  63. package/dist/launchdarkly/docs/feature-flags/python/DOC.md +1671 -0
  64. package/dist/linear/docs/tracker/DOC.md +1554 -0
  65. package/dist/livekit/docs/realtime/javascript/DOC.md +303 -0
  66. package/dist/livekit/docs/realtime/python/DOC.md +163 -0
  67. package/dist/mailchimp/docs/marketing/DOC.md +1420 -0
  68. package/dist/meilisearch/docs/search/DOC.md +1241 -0
  69. package/dist/microsoft/docs/onedrive/javascript/DOC.md +1421 -0
  70. package/dist/microsoft/docs/onedrive/python/DOC.md +1549 -0
  71. package/dist/mongodb/docs/atlas/DOC.md +2041 -0
  72. package/dist/notion/docs/workspace-api/javascript/DOC.md +1435 -0
  73. package/dist/notion/docs/workspace-api/python/DOC.md +1400 -0
  74. package/dist/okta/docs/identity/javascript/DOC.md +1171 -0
  75. package/dist/okta/docs/identity/python/DOC.md +1401 -0
  76. package/dist/openai/docs/chat/javascript/DOC.md +407 -0
  77. package/dist/openai/docs/chat/python/DOC.md +568 -0
  78. package/dist/paypal/docs/checkout/DOC.md +278 -0
  79. package/dist/pinecone/docs/sdk/javascript/DOC.md +984 -0
  80. package/dist/pinecone/docs/sdk/python/DOC.md +1395 -0
  81. package/dist/plaid/docs/banking/javascript/DOC.md +1163 -0
  82. package/dist/plaid/docs/banking/python/DOC.md +1203 -0
  83. package/dist/playwright-community/skills/login-flows/SKILL.md +108 -0
  84. package/dist/postmark/docs/transactional-email/DOC.md +1168 -0
  85. package/dist/prisma/docs/orm/javascript/DOC.md +1419 -0
  86. package/dist/prisma/docs/orm/python/DOC.md +1317 -0
  87. package/dist/qdrant/docs/vector-search/javascript/DOC.md +1221 -0
  88. package/dist/qdrant/docs/vector-search/python/DOC.md +1653 -0
  89. package/dist/rabbitmq/docs/message-queue/javascript/DOC.md +1193 -0
  90. package/dist/rabbitmq/docs/message-queue/python/DOC.md +1243 -0
  91. package/dist/razorpay/docs/payments/javascript/DOC.md +1219 -0
  92. package/dist/razorpay/docs/payments/python/DOC.md +1330 -0
  93. package/dist/redis/docs/key-value/javascript/DOC.md +1851 -0
  94. package/dist/redis/docs/key-value/python/DOC.md +2054 -0
  95. package/dist/registry.json +2817 -0
  96. package/dist/replicate/docs/model-hosting/DOC.md +1318 -0
  97. package/dist/resend/docs/email/DOC.md +1271 -0
  98. package/dist/salesforce/docs/crm/javascript/DOC.md +1241 -0
  99. package/dist/salesforce/docs/crm/python/DOC.md +1183 -0
  100. package/dist/search-index.json +1 -0
  101. package/dist/sendgrid/docs/email-api/javascript/DOC.md +371 -0
  102. package/dist/sendgrid/docs/email-api/python/DOC.md +656 -0
  103. package/dist/sentry/docs/error-tracking/javascript/DOC.md +1073 -0
  104. package/dist/sentry/docs/error-tracking/python/DOC.md +1309 -0
  105. package/dist/shopify/docs/storefront/DOC.md +457 -0
  106. package/dist/slack/docs/workspace/javascript/DOC.md +933 -0
  107. package/dist/slack/docs/workspace/python/DOC.md +271 -0
  108. package/dist/square/docs/payments/javascript/DOC.md +1855 -0
  109. package/dist/square/docs/payments/python/DOC.md +1728 -0
  110. package/dist/stripe/docs/api/DOC.md +1727 -0
  111. package/dist/stripe/docs/payments/DOC.md +1726 -0
  112. package/dist/stytch/docs/auth/javascript/DOC.md +1813 -0
  113. package/dist/stytch/docs/auth/python/DOC.md +1962 -0
  114. package/dist/supabase/docs/client/DOC.md +1606 -0
  115. package/dist/twilio/docs/messaging/python/DOC.md +469 -0
  116. package/dist/twilio/docs/messaging/typescript/DOC.md +946 -0
  117. package/dist/vercel/docs/platform/DOC.md +1940 -0
  118. package/dist/weaviate/docs/vector-db/javascript/DOC.md +1268 -0
  119. package/dist/weaviate/docs/vector-db/python/DOC.md +1388 -0
  120. package/dist/zendesk/docs/support/javascript/DOC.md +2150 -0
  121. package/dist/zendesk/docs/support/python/DOC.md +2297 -0
  122. package/package.json +22 -6
  123. package/skills/get-api-docs/SKILL.md +84 -0
  124. package/src/commands/annotate.js +83 -0
  125. package/src/commands/build.js +12 -1
  126. package/src/commands/feedback.js +150 -0
  127. package/src/commands/get.js +83 -42
  128. package/src/commands/search.js +7 -0
  129. package/src/index.js +43 -17
  130. package/src/lib/analytics.js +90 -0
  131. package/src/lib/annotations.js +57 -0
  132. package/src/lib/bm25.js +170 -0
  133. package/src/lib/cache.js +69 -6
  134. package/src/lib/config.js +8 -3
  135. package/src/lib/identity.js +99 -0
  136. package/src/lib/registry.js +103 -20
  137. package/src/lib/telemetry.js +86 -0
  138. package/src/mcp/server.js +177 -0
  139. package/src/mcp/tools.js +251 -0
@@ -0,0 +1,1671 @@
1
+ ---
2
+ name: streaming
3
+ description: "KafkaJS - Apache Kafka client for Node.js streaming and messaging"
4
+ metadata:
5
+ languages: "javascript"
6
+ versions: "2.2.4"
7
+ updated-on: "2026-03-02"
8
+ source: maintainer
9
+ tags: "kafka,streaming,messaging,queue,events"
10
+ ---
11
+
12
+ # KafkaJS - Apache Kafka Client for Node.js
13
+
14
+ ## Golden Rule
15
+
16
+ **ALWAYS use `kafkajs` version 2.2.4 or later.**
17
+
18
+ ```bash
19
+ npm install kafkajs
20
+ ```
21
+
22
+ **DO NOT use:**
23
+ - `kafka-node` (deprecated)
24
+ - `no-kafka` (deprecated)
25
+ - `node-rdkafka` (different use case - C++ binding)
26
+
27
+ KafkaJS is the modern, pure JavaScript Apache Kafka client for Node.js. It provides a complete implementation of the Kafka protocol with support for producers, consumers, admin operations, and transactions.
28
+
29
+ ## Installation
30
+
31
+ ### Basic Installation
32
+
33
+ ```bash
34
+ npm install kafkajs
35
+ ```
36
+
37
+ ### With TypeScript
38
+
39
+ ```bash
40
+ npm install kafkajs
41
+ npm install --save-dev @types/node
42
+ ```
43
+
44
+ ### Environment Variables
45
+
46
+ Create a `.env` file:
47
+
48
+ ```bash
49
+ KAFKA_BROKERS=localhost:9092
50
+ KAFKA_CLIENT_ID=my-app
51
+ KAFKA_USERNAME=your-username
52
+ KAFKA_PASSWORD=your-password
53
+ ```
54
+
55
+ Load environment variables in your application:
56
+
57
+ ```javascript
58
+ require('dotenv').config()
59
+
60
+ const brokers = process.env.KAFKA_BROKERS.split(',')
61
+ const clientId = process.env.KAFKA_CLIENT_ID
62
+ ```
63
+
64
+ ## Initialization
65
+
66
+ ### Basic Client
67
+
68
+ ```javascript
69
+ const { Kafka } = require('kafkajs')
70
+
71
+ const kafka = new Kafka({
72
+ clientId: 'my-app',
73
+ brokers: ['localhost:9092']
74
+ })
75
+ ```
76
+
77
+ ### With Environment Variables
78
+
79
+ ```javascript
80
+ const { Kafka } = require('kafkajs')
81
+
82
+ const kafka = new Kafka({
83
+ clientId: process.env.KAFKA_CLIENT_ID || 'my-app',
84
+ brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
85
+ })
86
+ ```
87
+
88
+ ### With SSL/TLS
89
+
90
+ ```javascript
91
+ const fs = require('fs')
92
+ const { Kafka } = require('kafkajs')
93
+
94
+ const kafka = new Kafka({
95
+ clientId: 'my-app',
96
+ brokers: ['kafka1:9093', 'kafka2:9093'],
97
+ ssl: {
98
+ rejectUnauthorized: false, // WARNING: disables certificate verification — use only for local testing, never in production
99
+ ca: [fs.readFileSync('/path/to/ca-cert', 'utf-8')],
100
+ key: fs.readFileSync('/path/to/client-key', 'utf-8'),
101
+ cert: fs.readFileSync('/path/to/client-cert', 'utf-8')
102
+ }
103
+ })
104
+ ```
105
+
106
+ For basic SSL without custom certificates:
107
+
108
+ ```javascript
109
+ const kafka = new Kafka({
110
+ clientId: 'my-app',
111
+ brokers: ['kafka1:9093'],
112
+ ssl: true
113
+ })
114
+ ```
115
+
116
+ ### With SASL Authentication
117
+
118
+ #### PLAIN
119
+
120
+ ```javascript
121
+ const kafka = new Kafka({
122
+ clientId: 'my-app',
123
+ brokers: ['kafka1:9092'],
124
+ sasl: {
125
+ mechanism: 'plain',
126
+ username: process.env.KAFKA_USERNAME,
127
+ password: process.env.KAFKA_PASSWORD
128
+ }
129
+ })
130
+ ```
131
+
132
+ #### SCRAM-SHA-256
133
+
134
+ ```javascript
135
+ const kafka = new Kafka({
136
+ clientId: 'my-app',
137
+ brokers: ['kafka1:9092'],
138
+ sasl: {
139
+ mechanism: 'scram-sha-256',
140
+ username: process.env.KAFKA_USERNAME,
141
+ password: process.env.KAFKA_PASSWORD
142
+ }
143
+ })
144
+ ```
145
+
146
+ #### SCRAM-SHA-512
147
+
148
+ ```javascript
149
+ const kafka = new Kafka({
150
+ clientId: 'my-app',
151
+ brokers: ['kafka1:9092'],
152
+ sasl: {
153
+ mechanism: 'scram-sha-512',
154
+ username: process.env.KAFKA_USERNAME,
155
+ password: process.env.KAFKA_PASSWORD
156
+ }
157
+ })
158
+ ```
159
+
160
+ #### AWS IAM
161
+
162
+ ```javascript
163
+ const kafka = new Kafka({
164
+ clientId: 'my-app',
165
+ brokers: ['kafka1:9092'],
166
+ sasl: {
167
+ mechanism: 'aws',
168
+ authorizationIdentity: 'AIDAIOSFODNN7EXAMPLE',
169
+ accessKeyId: process.env.AWS_ACCESS_KEY_ID,
170
+ secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
171
+ sessionToken: process.env.AWS_SESSION_TOKEN // optional
172
+ }
173
+ })
174
+ ```
175
+
176
+ #### OAUTHBEARER
177
+
178
+ ```javascript
179
+ const kafka = new Kafka({
180
+ clientId: 'my-app',
181
+ brokers: ['kafka1:9092'],
182
+ sasl: {
183
+ mechanism: 'oauthbearer',
184
+ oauthBearerProvider: async () => {
185
+ const token = await getOAuthToken() // Your token fetching logic
186
+ return {
187
+ value: token
188
+ }
189
+ }
190
+ }
191
+ })
192
+ ```
193
+
194
+ ### With SSL and SASL Combined
195
+
196
+ ```javascript
197
+ const kafka = new Kafka({
198
+ clientId: 'my-app',
199
+ brokers: ['kafka1:9093'],
200
+ ssl: true,
201
+ sasl: {
202
+ mechanism: 'plain',
203
+ username: process.env.KAFKA_USERNAME,
204
+ password: process.env.KAFKA_PASSWORD
205
+ }
206
+ })
207
+ ```
208
+
209
+ ### Connection Configuration Options
210
+
211
+ ```javascript
212
+ const kafka = new Kafka({
213
+ clientId: 'my-app',
214
+ brokers: ['kafka1:9092', 'kafka2:9092'],
215
+ connectionTimeout: 1000, // Time in ms to wait for successful connection
216
+ requestTimeout: 30000, // Time in ms to wait for successful requests
217
+ enforceRequestTimeout: true, // Enforce request timeout
218
+ retry: {
219
+ initialRetryTime: 300, // Initial retry time in ms
220
+ retries: 5, // Max number of retries
221
+ maxRetryTime: 30000, // Max retry time in ms
222
+ multiplier: 2, // Exponential backoff multiplier
223
+ factor: 0.2 // Randomization factor
224
+ }
225
+ })
226
+ ```
227
+
228
+ ### Dynamic Broker Discovery
229
+
230
+ ```javascript
231
+ const kafka = new Kafka({
232
+ clientId: 'my-app',
233
+ brokers: async () => {
234
+ // Fetch broker list dynamically
235
+ const response = await fetch('http://service-discovery/kafka-brokers')
236
+ const data = await response.json()
237
+ return data.brokers
238
+ }
239
+ })
240
+ ```
241
+
242
+ ## Producer
243
+
244
+ ### Creating a Producer
245
+
246
+ ```javascript
247
+ const producer = kafka.producer()
248
+ ```
249
+
250
+ ### Producer Configuration Options
251
+
252
+ ```javascript
253
+ const producer = kafka.producer({
254
+ allowAutoTopicCreation: true, // Allow auto topic creation
255
+ transactionTimeout: 60000, // Transaction timeout in ms
256
+ idempotent: false, // Enable idempotent producer
257
+ maxInFlightRequests: null, // Max concurrent requests
258
+ metadataMaxAge: 300000 // Metadata refresh interval in ms
259
+ })
260
+ ```
261
+
262
+ ### Connecting and Disconnecting
263
+
264
+ ```javascript
265
+ await producer.connect()
266
+ // ... produce messages
267
+ await producer.disconnect()
268
+ ```
269
+
270
+ ### Sending Messages - Basic
271
+
272
+ ```javascript
273
+ const producer = kafka.producer()
274
+ await producer.connect()
275
+
276
+ await producer.send({
277
+ topic: 'my-topic',
278
+ messages: [
279
+ { value: 'Hello Kafka' }
280
+ ]
281
+ })
282
+
283
+ await producer.disconnect()
284
+ ```
285
+
286
+ ### Sending Messages with Keys
287
+
288
+ ```javascript
289
+ await producer.send({
290
+ topic: 'user-events',
291
+ messages: [
292
+ { key: 'user-123', value: 'User logged in' },
293
+ { key: 'user-456', value: 'User logged out' }
294
+ ]
295
+ })
296
+ ```
297
+
298
+ ### Sending Messages with Headers
299
+
300
+ ```javascript
301
+ await producer.send({
302
+ topic: 'my-topic',
303
+ messages: [
304
+ {
305
+ key: 'key1',
306
+ value: 'Hello World',
307
+ headers: {
308
+ 'correlation-id': '2bfb68bb-893a-423b-a7fa-7b568cad5b67',
309
+ 'user-id': 'user-123'
310
+ }
311
+ }
312
+ ]
313
+ })
314
+ ```
315
+
316
+ ### Sending to Specific Partition
317
+
318
+ ```javascript
319
+ await producer.send({
320
+ topic: 'my-topic',
321
+ messages: [
322
+ {
323
+ partition: 0,
324
+ value: 'Message to partition 0'
325
+ }
326
+ ]
327
+ })
328
+ ```
329
+
330
+ ### Sending with Timestamp
331
+
332
+ ```javascript
333
+ await producer.send({
334
+ topic: 'my-topic',
335
+ messages: [
336
+ {
337
+ value: 'Hello Kafka',
338
+ timestamp: Date.now()
339
+ }
340
+ ]
341
+ })
342
+ ```
343
+
344
+ ### Sending Multiple Messages
345
+
346
+ ```javascript
347
+ const messages = [
348
+ { key: 'key1', value: 'Message 1' },
349
+ { key: 'key2', value: 'Message 2' },
350
+ { key: 'key3', value: 'Message 3' }
351
+ ]
352
+
353
+ await producer.send({
354
+ topic: 'my-topic',
355
+ messages: messages
356
+ })
357
+ ```
358
+
359
+ ### Send with Acknowledgment Control
360
+
361
+ ```javascript
362
+ // Wait for all in-sync replicas
363
+ await producer.send({
364
+ topic: 'my-topic',
365
+ messages: [{ value: 'Important message' }],
366
+ acks: -1 // -1 = all replicas, 0 = no acks, 1 = leader only
367
+ })
368
+
369
+ // No acknowledgment
370
+ await producer.send({
371
+ topic: 'my-topic',
372
+ messages: [{ value: 'Fire and forget' }],
373
+ acks: 0
374
+ })
375
+
376
+ // Leader acknowledgment only
377
+ await producer.send({
378
+ topic: 'my-topic',
379
+ messages: [{ value: 'Quick send' }],
380
+ acks: 1
381
+ })
382
+ ```
383
+
384
+ ### Send with Timeout
385
+
386
+ ```javascript
387
+ await producer.send({
388
+ topic: 'my-topic',
389
+ messages: [{ value: 'Hello' }],
390
+ timeout: 5000 // Timeout in milliseconds
391
+ })
392
+ ```
393
+
394
+ ### Send with Compression
395
+
396
+ ```javascript
397
+ const { CompressionTypes } = require('kafkajs')
398
+
399
+ await producer.send({
400
+ topic: 'my-topic',
401
+ compression: CompressionTypes.GZIP,
402
+ messages: [
403
+ { value: 'Compressed message 1' },
404
+ { value: 'Compressed message 2' }
405
+ ]
406
+ })
407
+ ```
408
+
409
+ Available compression types:
410
+ - `CompressionTypes.None`
411
+ - `CompressionTypes.GZIP`
412
+ - `CompressionTypes.Snappy` (requires `kafkajs-snappy` package)
413
+ - `CompressionTypes.LZ4` (requires `kafkajs-lz4` package)
414
+ - `CompressionTypes.ZSTD` (requires `@kafkajs/zstd` package)
415
+
416
+ ### Batch Sending to Multiple Topics
417
+
418
+ ```javascript
419
+ await producer.sendBatch({
420
+ topicMessages: [
421
+ {
422
+ topic: 'topic-a',
423
+ messages: [
424
+ { key: 'key1', value: 'Message for topic A' }
425
+ ]
426
+ },
427
+ {
428
+ topic: 'topic-b',
429
+ messages: [
430
+ { key: 'key2', value: 'Message for topic B' }
431
+ ]
432
+ }
433
+ ]
434
+ })
435
+ ```
436
+
437
+ ### Batch Sending with Compression
438
+
439
+ ```javascript
440
+ const { CompressionTypes } = require('kafkajs')
441
+
442
+ await producer.sendBatch({
443
+ compression: CompressionTypes.GZIP,
444
+ topicMessages: [
445
+ {
446
+ topic: 'topic-a',
447
+ messages: [{ value: 'Message A' }]
448
+ },
449
+ {
450
+ topic: 'topic-b',
451
+ messages: [{ value: 'Message B' }]
452
+ }
453
+ ]
454
+ })
455
+ ```
456
+
457
+ ### Custom Partitioner
458
+
459
+ ```javascript
460
+ const MyPartitioner = () => {
461
+ return ({ topic, partitionMetadata, message }) => {
462
+ // Custom partitioning logic
463
+ const numPartitions = partitionMetadata.length
464
+ const partition = Math.abs(hashCode(message.key)) % numPartitions
465
+ return partition
466
+ }
467
+ }
468
+
469
+ const producer = kafka.producer({
470
+ createPartitioner: MyPartitioner
471
+ })
472
+
473
+ function hashCode(str) {
474
+ let hash = 0
475
+ for (let i = 0; i < str.length; i++) {
476
+ const char = str.charCodeAt(i)
477
+ hash = ((hash << 5) - hash) + char
478
+ hash = hash & hash
479
+ }
480
+ return hash
481
+ }
482
+ ```
483
+
484
+ ### Using Legacy Partitioner
485
+
486
+ ```javascript
487
+ const { Partitioners } = require('kafkajs')
488
+
489
+ const producer = kafka.producer({
490
+ createPartitioner: Partitioners.LegacyPartitioner
491
+ })
492
+ ```
493
+
494
+ ### Sending JSON Data
495
+
496
+ ```javascript
497
+ await producer.send({
498
+ topic: 'user-events',
499
+ messages: [
500
+ {
501
+ key: 'user-123',
502
+ value: JSON.stringify({
503
+ userId: 'user-123',
504
+ action: 'login',
505
+ timestamp: Date.now()
506
+ })
507
+ }
508
+ ]
509
+ })
510
+ ```
511
+
512
+ ### Error Handling
513
+
514
+ ```javascript
515
+ try {
516
+ await producer.send({
517
+ topic: 'my-topic',
518
+ messages: [{ value: 'Hello Kafka' }]
519
+ })
520
+ } catch (error) {
521
+ console.error('Error sending message:', error)
522
+ // Handle specific errors
523
+ if (error.type === 'TOPIC_AUTHORIZATION_FAILED') {
524
+ console.error('Not authorized to produce to topic')
525
+ }
526
+ }
527
+ ```
528
+
529
+ ## Consumer
530
+
531
+ ### Creating a Consumer
532
+
533
+ ```javascript
534
+ const consumer = kafka.consumer({ groupId: 'my-group' })
535
+ ```
536
+
537
+ ### Consumer Configuration Options
538
+
539
+ ```javascript
540
+ const consumer = kafka.consumer({
541
+ groupId: 'my-group',
542
+ sessionTimeout: 30000, // Session timeout in ms
543
+ heartbeatInterval: 3000, // Heartbeat interval in ms
544
+ maxBytesPerPartition: 1048576, // Max bytes per partition (1MB)
545
+ maxBytes: 10485760, // Max bytes per fetch (10MB)
546
+ maxWaitTimeInMs: 5000, // Max wait time for fetch
547
+ retry: {
548
+ retries: 5
549
+ },
550
+ allowAutoTopicCreation: true, // Allow auto topic creation
551
+ maxInFlightRequests: null, // Max concurrent requests
552
+ readUncommitted: false, // Read uncommitted messages
553
+ rackId: null // Enable follower fetching
554
+ })
555
+ ```
556
+
557
+ ### Connecting and Subscribing
558
+
559
+ ```javascript
560
+ const consumer = kafka.consumer({ groupId: 'my-group' })
561
+ await consumer.connect()
562
+ await consumer.subscribe({ topics: ['my-topic'] })
563
+ ```
564
+
565
+ ### Subscribe to Multiple Topics
566
+
567
+ ```javascript
568
+ await consumer.subscribe({ topics: ['topic-a', 'topic-b', 'topic-c'] })
569
+ ```
570
+
571
+ ### Subscribe with Regex Pattern
572
+
573
+ ```javascript
574
+ await consumer.subscribe({ topics: [/topic-(eu|us)-.*/i] })
575
+ ```
576
+
577
+ ### Subscribe from Beginning
578
+
579
+ ```javascript
580
+ await consumer.subscribe({
581
+ topics: ['my-topic'],
582
+ fromBeginning: true
583
+ })
584
+ ```
585
+
586
+ ### Consuming Messages - eachMessage
587
+
588
+ ```javascript
589
+ await consumer.run({
590
+ eachMessage: async ({ topic, partition, message }) => {
591
+ console.log({
592
+ topic: topic,
593
+ partition: partition,
594
+ offset: message.offset,
595
+ value: message.value.toString(),
596
+ key: message.key?.toString(),
597
+ headers: message.headers
598
+ })
599
+ }
600
+ })
601
+ ```
602
+
603
+ ### Consuming with Heartbeat
604
+
605
+ ```javascript
606
+ await consumer.run({
607
+ eachMessage: async ({ topic, partition, message, heartbeat }) => {
608
+ // For long-running processing
609
+ await processMessage(message)
610
+ await heartbeat()
611
+ }
612
+ })
613
+ ```
614
+
615
+ ### Consuming with Pause/Resume
616
+
617
+ ```javascript
618
+ await consumer.run({
619
+ eachMessage: async ({ topic, partition, message, pause }) => {
620
+ try {
621
+ await processMessage(message)
622
+ } catch (error) {
623
+ if (error.retryable) {
624
+ // Pause for 30 seconds on retryable error
625
+ const resumeThisPartition = pause() // pause() returns a function that resumes this topic-partition
626
+ setTimeout(resumeThisPartition, 30000)
627
+ }
628
+ }
629
+ }
630
+ })
631
+ ```
632
+
633
+ ### Consuming JSON Messages
634
+
635
+ ```javascript
636
+ await consumer.run({
637
+ eachMessage: async ({ topic, partition, message }) => {
638
+ const data = JSON.parse(message.value.toString())
639
+ console.log('Received:', data)
640
+ }
641
+ })
642
+ ```
643
+
644
+ ### Consuming Messages with Headers
645
+
646
+ ```javascript
647
+ await consumer.run({
648
+ eachMessage: async ({ topic, partition, message }) => {
649
+ const correlationId = message.headers['correlation-id']?.toString()
650
+ const userId = message.headers['user-id']?.toString()
651
+
652
+ console.log('Correlation ID:', correlationId)
653
+ console.log('User ID:', userId)
654
+ console.log('Message:', message.value.toString())
655
+ }
656
+ })
657
+ ```
658
+
659
+ ### Batch Processing - eachBatch
660
+
661
+ ```javascript
662
+ await consumer.run({
663
+ eachBatch: async ({ batch, resolveOffset, heartbeat, isRunning, isStale }) => {
664
+ for (let message of batch.messages) {
665
+ if (!isRunning() || isStale()) break
666
+
667
+ console.log({
668
+ topic: batch.topic,
669
+ partition: batch.partition,
670
+ offset: message.offset,
671
+ value: message.value.toString()
672
+ })
673
+
674
+ resolveOffset(message.offset)
675
+ await heartbeat()
676
+ }
677
+ }
678
+ })
679
+ ```
680
+
681
+ ### Batch Processing with Manual Commit
682
+
683
+ ```javascript
684
+ await consumer.run({
685
+ autoCommitInterval: null,
686
+ autoCommitThreshold: null,
687
+ eachBatch: async ({ batch, resolveOffset, commitOffsetsIfNecessary, heartbeat }) => {
688
+ for (let message of batch.messages) {
689
+ await processMessage(message)
690
+ resolveOffset(message.offset)
691
+ }
692
+
693
+ await commitOffsetsIfNecessary()
694
+ await heartbeat()
695
+ }
696
+ })
697
+ ```
698
+
699
+ ### Batch Processing with Auto-Resolve
700
+
701
+ ```javascript
702
+ await consumer.run({
703
+ eachBatchAutoResolve: true,
704
+ eachBatch: async ({ batch, heartbeat }) => {
705
+ console.log(`Received batch of ${batch.messages.length} messages`)
706
+
707
+ for (let message of batch.messages) {
708
+ await processMessage(message)
709
+ }
710
+
711
+ await heartbeat()
712
+ }
713
+ })
714
+ ```
715
+
716
+ ### Concurrent Processing by Partition
717
+
718
+ ```javascript
719
+ await consumer.run({
720
+ partitionsConsumedConcurrently: 3,
721
+ eachMessage: async ({ topic, partition, message }) => {
722
+ // This will process up to 3 partitions concurrently
723
+ // Messages within the same partition are still processed sequentially
724
+ await processMessage(message)
725
+ }
726
+ })
727
+ ```
728
+
729
+ ### Manual Offset Management
730
+
731
+ ```javascript
732
+ const consumer = kafka.consumer({
733
+ groupId: 'my-group'
734
+ })
735
+
736
+ await consumer.connect()
737
+ await consumer.subscribe({ topics: ['my-topic'] })
738
+
739
+ await consumer.run({
740
+ autoCommit: false, // auto-commit is a run() option, not a consumer() option; offsets are committed manually below
741
+ eachMessage: async ({ topic, partition, message }) => {
742
+ await processMessage(message)
743
+
744
+ // Manual commit
745
+ await consumer.commitOffsets([
746
+ {
747
+ topic: topic,
748
+ partition: partition,
749
+ offset: (parseInt(message.offset) + 1).toString()
750
+ }
751
+ ])
752
+ }
753
+ })
754
+ ```
755
+
756
+ ### Auto Commit Configuration
757
+
758
+ ```javascript
759
+ await consumer.run({
760
+ autoCommit: true,
761
+ autoCommitInterval: 5000, // Commit every 5 seconds
762
+ autoCommitThreshold: 100, // Or after 100 messages
763
+ eachMessage: async ({ topic, partition, message }) => {
764
+ await processMessage(message)
765
+ }
766
+ })
767
+ ```
768
+
769
+ ### Seek to Offset
770
+
771
+ ```javascript
772
+ // Seek before running consumer
773
+ await consumer.subscribe({ topics: ['my-topic'] })
774
+
775
+ consumer.on(consumer.events.GROUP_JOIN, async ({ payload }) => {
776
+ // Seek to specific offset
777
+ consumer.seek({ topic: 'my-topic', partition: 0, offset: '12345' }) // offset must be a string
778
+ })
779
+
780
+ await consumer.run({
781
+ eachMessage: async ({ topic, partition, message }) => {
782
+ console.log(`Offset: ${message.offset}`)
783
+ }
784
+ })
785
+ ```
786
+
787
+ ### Seek to Beginning
788
+
789
+ ```javascript
790
+ consumer.on(consumer.events.GROUP_JOIN, async ({ payload }) => {
791
+ const { groupId } = payload
792
+ console.log(`Consumer ${groupId} joined`)
793
+
794
+ // Seek all partitions to beginning
795
+ const assignments = consumer.assignment()
796
+ for (const assignment of assignments) {
797
+ consumer.seek({
798
+ topic: assignment.topic,
799
+ partition: assignment.partition,
800
+ offset: '0'
801
+ })
802
+ }
803
+ })
804
+ ```
805
+
806
+ ### Pause and Resume Consumption
807
+
808
+ ```javascript
809
+ // Pause consumption
810
+ consumer.pause([{ topic: 'my-topic', partitions: [0, 1] }])
811
+
812
+ // Resume after some time
813
+ setTimeout(() => {
814
+ consumer.resume([{ topic: 'my-topic', partitions: [0, 1] }])
815
+ }, 60000)
816
+ ```
817
+
818
+ ### Error Handling
819
+
820
+ ```javascript
821
+ await consumer.run({
822
+ eachMessage: async ({ topic, partition, message }) => {
823
+ try {
824
+ await processMessage(message)
825
+ } catch (error) {
826
+ console.error('Error processing message:', error)
827
+ // Optionally: send to dead letter queue
828
+ await sendToDeadLetterQueue(topic, message, error)
829
+ }
830
+ }
831
+ })
832
+ ```
833
+
834
+ ### Graceful Shutdown
835
+
836
+ ```javascript
837
+ const consumer = kafka.consumer({ groupId: 'my-group' })
838
+
839
+ const shutdown = async () => {
840
+ console.log('Shutting down consumer...')
841
+ await consumer.disconnect()
842
+ process.exit(0)
843
+ }
844
+
845
+ process.on('SIGINT', shutdown)
846
+ process.on('SIGTERM', shutdown)
847
+
848
+ await consumer.connect()
849
+ await consumer.subscribe({ topics: ['my-topic'] })
850
+
851
+ await consumer.run({
852
+ eachMessage: async ({ topic, partition, message }) => {
853
+ await processMessage(message)
854
+ }
855
+ })
856
+ ```
857
+
858
+ ## Admin Operations
859
+
860
+ ### Creating Admin Client
861
+
862
+ ```javascript
863
+ const admin = kafka.admin()
864
+ await admin.connect()
865
+ ```
866
+
867
+ ### List Topics
868
+
869
+ ```javascript
870
+ const topics = await admin.listTopics()
871
+ console.log('Topics:', topics)
872
+ ```
873
+
874
+ ### Create Topic
875
+
876
+ ```javascript
877
+ await admin.createTopics({
878
+ topics: [
879
+ {
880
+ topic: 'my-topic',
881
+ numPartitions: 3,
882
+ replicationFactor: 2
883
+ }
884
+ ]
885
+ })
886
+ ```
887
+
888
+ ### Create Topic with Configuration
889
+
890
+ ```javascript
891
+ await admin.createTopics({
892
+ topics: [
893
+ {
894
+ topic: 'my-topic',
895
+ numPartitions: 3,
896
+ replicationFactor: 2,
897
+ configEntries: [
898
+ { name: 'cleanup.policy', value: 'compact' },
899
+ { name: 'retention.ms', value: '86400000' }
900
+ ]
901
+ }
902
+ ]
903
+ })
904
+ ```
905
+
906
+ ### Create Multiple Topics
907
+
908
+ ```javascript
909
+ await admin.createTopics({
910
+ topics: [
911
+ { topic: 'topic-a', numPartitions: 1 },
912
+ { topic: 'topic-b', numPartitions: 2 },
913
+ { topic: 'topic-c', numPartitions: 3 }
914
+ ]
915
+ })
916
+ ```
917
+
918
+ ### Delete Topics
919
+
920
+ ```javascript
921
+ await admin.deleteTopics({
922
+ topics: ['topic-to-delete']
923
+ })
924
+ ```
925
+
926
+ ### Fetch Topic Metadata
927
+
928
+ ```javascript
929
+ const metadata = await admin.fetchTopicMetadata({
930
+ topics: ['my-topic']
931
+ })
932
+
933
+ console.log('Topic metadata:', metadata)
934
+ ```
935
+
936
+ ### Fetch Topic Offsets
937
+
938
+ ```javascript
939
+ const offsets = await admin.fetchTopicOffsets('my-topic')
940
+
941
+ console.log('Topic offsets:', offsets)
942
+ // Output: [{ partition: 0, offset: '100', high: '100', low: '0' }]
943
+ ```
944
+
945
+ ### Fetch Offsets by Timestamp
946
+
947
+ ```javascript
948
+ const timestamp = Date.now() - 3600000 // 1 hour ago
949
+
950
+ const offsets = await admin.fetchTopicOffsetsByTimestamp('my-topic', timestamp)
951
+ console.log('Offsets at timestamp:', offsets)
952
+ ```
953
+
954
+ ### Create Partitions
955
+
956
+ ```javascript
957
+ await admin.createPartitions({
958
+ topicPartitions: [
959
+ {
960
+ topic: 'my-topic',
961
+ count: 5 // New total partition count
962
+ }
963
+ ]
964
+ })
965
+ ```
966
+
967
+ ### List Consumer Groups
968
+
969
+ ```javascript
970
+ const groups = await admin.listGroups()
971
+ console.log('Consumer groups:', groups.groups)
972
+ ```
973
+
974
+ ### Describe Consumer Group
975
+
976
+ ```javascript
977
+ const group = await admin.describeGroups(['my-group'])
978
+ console.log('Group details:', group)
979
+ ```
980
+
981
+ ### Fetch Consumer Group Offsets
982
+
983
+ ```javascript
984
+ const offsets = await admin.fetchOffsets({
985
+ groupId: 'my-group',
986
+ topics: ['my-topic']
987
+ })
988
+
989
+ console.log('Consumer group offsets:', offsets)
990
+ ```
991
+
992
+ ### Reset Consumer Group Offsets
993
+
994
+ ```javascript
995
+ // Reset to earliest
996
+ await admin.resetOffsets({
997
+ groupId: 'my-group',
998
+ topic: 'my-topic',
999
+ earliest: true
1000
+ })
1001
+
1002
+ // Reset to latest
1003
+ await admin.resetOffsets({
1004
+ groupId: 'my-group',
1005
+ topic: 'my-topic'
1006
+ })
1007
+ ```
1008
+
1009
+ ### Set Consumer Group Offsets
1010
+
1011
+ ```javascript
1012
+ await admin.setOffsets({
1013
+ groupId: 'my-group',
1014
+ topic: 'my-topic',
1015
+ partitions: [
1016
+ { partition: 0, offset: '100' },
1017
+ { partition: 1, offset: '200' }
1018
+ ]
1019
+ })
1020
+ ```
1021
+
1022
+ ### Delete Consumer Group
1023
+
1024
+ ```javascript
1025
+ await admin.deleteGroups(['my-group'])
1026
+ ```
1027
+
1028
+ ### Describe Cluster
1029
+
1030
+ ```javascript
1031
+ const cluster = await admin.describeCluster()
1032
+
1033
+ console.log('Cluster ID:', cluster.clusterId)
1034
+ console.log('Controller:', cluster.controller)
1035
+ console.log('Brokers:', cluster.brokers)
1036
+ ```
1037
+
1038
+ ### Describe Configs
1039
+
1040
+ ```javascript
1041
+ const configs = await admin.describeConfigs({
1042
+ resources: [
1043
+ {
1044
+ type: 2, // TOPIC
1045
+ name: 'my-topic'
1046
+ }
1047
+ ]
1048
+ })
1049
+
1050
+ console.log('Topic configs:', configs)
1051
+ ```
1052
+
1053
+ ### Alter Configs
1054
+
1055
+ ```javascript
1056
+ await admin.alterConfigs({
1057
+ resources: [
1058
+ {
1059
+ type: 2, // TOPIC
1060
+ name: 'my-topic',
1061
+ configEntries: [
1062
+ { name: 'retention.ms', value: '604800000' }
1063
+ ]
1064
+ }
1065
+ ]
1066
+ })
1067
+ ```
1068
+
1069
+ ### Delete Topic Records
1070
+
1071
+ ```javascript
1072
+ // Delete records up to offset 100
1073
+ await admin.deleteTopicRecords({
1074
+ topic: 'my-topic',
1075
+ partitions: [
1076
+ { partition: 0, offset: '100' }
1077
+ ]
1078
+ })
1079
+ ```
1080
+
1081
+ ### Disconnect Admin
1082
+
1083
+ ```javascript
1084
+ await admin.disconnect()
1085
+ ```
1086
+
1087
+ ## Transactions
1088
+
1089
+ ### Creating Transactional Producer
1090
+
1091
+ ```javascript
1092
+ const producer = kafka.producer({
1093
+ transactionalId: 'my-transactional-producer',
1094
+ maxInFlightRequests: 1,
1095
+ idempotent: true
1096
+ })
1097
+ ```
1098
+
1099
+ ### Basic Transaction
1100
+
1101
+ ```javascript
1102
+ await producer.connect()
1103
+
1104
+ const transaction = await producer.transaction()
1105
+
1106
+ try {
1107
+ await transaction.send({
1108
+ topic: 'my-topic',
1109
+ messages: [
1110
+ { value: 'Message 1' },
1111
+ { value: 'Message 2' }
1112
+ ]
1113
+ })
1114
+
1115
+ await transaction.commit()
1116
+ } catch (error) {
1117
+ await transaction.abort()
1118
+ throw error
1119
+ }
1120
+ ```
1121
+
1122
+ ### Transaction with Multiple Topics
1123
+
1124
+ ```javascript
1125
+ const transaction = await producer.transaction()
1126
+
1127
+ try {
1128
+ await transaction.send({
1129
+ topic: 'topic-a',
1130
+ messages: [{ value: 'Message A' }]
1131
+ })
1132
+
1133
+ await transaction.send({
1134
+ topic: 'topic-b',
1135
+ messages: [{ value: 'Message B' }]
1136
+ })
1137
+
1138
+ await transaction.commit()
1139
+ } catch (error) {
1140
+ await transaction.abort()
1141
+ }
1142
+ ```
1143
+
1144
+ ### Transaction with sendOffsets
1145
+
1146
+ ```javascript
1147
+ const consumer = kafka.consumer({ groupId: 'my-group' })
1148
+ await consumer.connect()
1149
+ await consumer.subscribe({ topics: ['input-topic'] })
1150
+
1151
+ await consumer.run({
1152
+ eachMessage: async ({ topic, partition, message }) => {
1153
+ const transaction = await producer.transaction()
1154
+
1155
+ try {
1156
+ // Process and send to output topic
1157
+ const outputMessage = processMessage(message)
1158
+
1159
+ await transaction.send({
1160
+ topic: 'output-topic',
1161
+ messages: [{ value: outputMessage }]
1162
+ })
1163
+
1164
+ // Commit consumer offset atomically
1165
+ await transaction.sendOffsets({
1166
+ consumerGroupId: 'my-group',
1167
+ topics: [
1168
+ {
1169
+ topic: topic,
1170
+ partitions: [
1171
+ {
1172
+ partition: partition,
1173
+ offset: (parseInt(message.offset) + 1).toString()
1174
+ }
1175
+ ]
1176
+ }
1177
+ ]
1178
+ })
1179
+
1180
+ await transaction.commit()
1181
+ } catch (error) {
1182
+ await transaction.abort()
1183
+ throw error
1184
+ }
1185
+ }
1186
+ })
1187
+ ```
1188
+
1189
+ ## Logging
1190
+
1191
+ ### Setting Log Level
1192
+
1193
+ ```javascript
1194
+ const { Kafka, logLevel } = require('kafkajs')
1195
+
1196
+ const kafka = new Kafka({
1197
+ clientId: 'my-app',
1198
+ brokers: ['localhost:9092'],
1199
+ logLevel: logLevel.ERROR
1200
+ })
1201
+ ```
1202
+
1203
+ Available log levels:
1204
+ - `logLevel.NOTHING`
1205
+ - `logLevel.ERROR`
1206
+ - `logLevel.WARN`
1207
+ - `logLevel.INFO` (default)
1208
+ - `logLevel.DEBUG`
1209
+
1210
+ ### Custom Logger
1211
+
1212
+ ```javascript
1213
+ const { Kafka, logLevel } = require('kafkajs')
1214
+
1215
+ // logCreator receives the configured log level and returns a single
+ // log function invoked with { namespace, level, label, log }
 1216
+ const customLogger = logLevel => {
 1217
+   return ({ namespace, level, label, log }) => {
 1218
+     const { message, ...extra } = log
 1219
+     console.log(`[${label}] ${namespace} ${message}`, extra)
 1220
+   }
 1221
+ }
 1222
+
1223
+
1224
+ const kafka = new Kafka({
1225
+ clientId: 'my-app',
1226
+ brokers: ['localhost:9092'],
1227
+ logLevel: logLevel.INFO,
1228
+ logCreator: customLogger
1229
+ })
1230
+ ```
1231
+
1232
+ ### Change Log Level at Runtime
1233
+
1234
+ ```javascript
1235
+ kafka.logger().setLogLevel(logLevel.DEBUG)
1236
+ ```
1237
+
1238
+ ### Environment Variable for Logging
1239
+
1240
+ ```bash
1241
+ export KAFKAJS_LOG_LEVEL=DEBUG
1242
+ ```
1243
+
1244
+ ## Error Handling and Retries
1245
+
1246
+ ### Producer Retry Configuration
1247
+
1248
+ ```javascript
1249
+ const producer = kafka.producer({
1250
+ retry: {
1251
+ initialRetryTime: 100,
1252
+ retries: 8,
1253
+ maxRetryTime: 30000,
1254
+ multiplier: 2,
1255
+ factor: 0.2
1256
+ }
1257
+ })
1258
+ ```
1259
+
1260
+ ### Consumer Retry Configuration
1261
+
1262
+ ```javascript
1263
+ const consumer = kafka.consumer({
1264
+ groupId: 'my-group',
1265
+ retry: {
1266
+ initialRetryTime: 100,
1267
+ retries: 8
1268
+ }
1269
+ })
1270
+ ```
1271
+
1272
+ ### Custom Restart on Failure
1273
+
1274
+ ```javascript
1275
+ const kafka = new Kafka({
 1276
+   clientId: 'my-app',
 1277
+   brokers: ['localhost:9092'],
 1278
+   retry: {
 1279
+     retries: 5,
 1280
+     restartOnFailure: async (error) => {
 1281
+       console.error('Error occurred:', error)
 1282
+       // Return true to restart the consumer, false to stop
 1283
+       return error.type !== 'FATAL_ERROR'
 1284
+     }
 1285
+   }
 1286
+ })
1287
+ ```
1288
+
1289
+ ## Advanced Configuration
1290
+
1291
+ ### Custom Socket Factory
1292
+
1293
+ ```javascript
1294
+ const net = require('net')
1295
+ const tls = require('tls')
1296
+
1297
+ const kafka = new Kafka({
1298
+ clientId: 'my-app',
1299
+ brokers: ['localhost:9092'],
1300
+ socketFactory: ({ host, port, ssl, onConnect }) => {
1301
+ const socket = ssl
1302
+ ? tls.connect({ host, port, servername: host }, onConnect)
1303
+ : net.connect({ host, port }, onConnect)
1304
+
1305
+ socket.setKeepAlive(true, 60000)
1306
+ return socket
1307
+ }
1308
+ })
1309
+ ```
1310
+
1311
+ ### Rack Awareness for Follower Fetching
1312
+
1313
+ ```javascript
1314
+ const consumer = kafka.consumer({
1315
+ groupId: 'my-group',
1316
+ rackId: 'us-east-1a'
1317
+ })
1318
+ ```
1319
+
1320
+ ### Authentication Timeout
1321
+
1322
+ ```javascript
1323
+ const kafka = new Kafka({
1324
+ clientId: 'my-app',
1325
+ brokers: ['localhost:9092'],
1326
+ sasl: {
1327
+ mechanism: 'plain',
1328
+ username: process.env.KAFKA_USERNAME,
1329
+ password: process.env.KAFKA_PASSWORD
1330
+ },
1331
+ authenticationTimeout: 10000
1332
+ })
1333
+ ```
1334
+
1335
+ ### Reauthentication
1336
+
1337
+ ```javascript
1338
+ const kafka = new Kafka({
1339
+ clientId: 'my-app',
1340
+ brokers: ['localhost:9092'],
1341
+ sasl: {
1342
+ mechanism: 'oauthbearer',
1343
+ oauthBearerProvider: async () => ({ value: await getToken() })
1344
+ },
1345
+ reauthenticationThreshold: 10000
1346
+ })
1347
+ ```
1348
+
1349
+ ## Events
1350
+
1351
+ ### Producer Events
1352
+
1353
+ ```javascript
1354
+ const producer = kafka.producer()
1355
+
1356
+ producer.on(producer.events.CONNECT, () => {
1357
+ console.log('Producer connected')
1358
+ })
1359
+
1360
+ producer.on(producer.events.DISCONNECT, () => {
1361
+ console.log('Producer disconnected')
1362
+ })
1363
+
1364
+ producer.on(producer.events.REQUEST_TIMEOUT, ({ payload }) => {
1365
+ console.log('Request timeout:', payload)
1366
+ })
1367
+ ```
1368
+
1369
+ ### Consumer Events
1370
+
1371
+ ```javascript
1372
+ const consumer = kafka.consumer({ groupId: 'my-group' })
1373
+
1374
+ consumer.on(consumer.events.GROUP_JOIN, ({ payload }) => {
1375
+ console.log('Consumer joined group:', payload)
1376
+ })
1377
+
1378
+ consumer.on(consumer.events.CRASH, ({ payload }) => {
1379
+ console.error('Consumer crashed:', payload.error)
1380
+ })
1381
+
1382
+ consumer.on(consumer.events.REBALANCING, ({ payload }) => {
1383
+ console.log('Consumer rebalancing')
1384
+ })
1385
+
1386
+ consumer.on(consumer.events.STOP, () => {
1387
+ console.log('Consumer stopped')
1388
+ })
1389
+
1390
+ consumer.on(consumer.events.CONNECT, () => {
1391
+ console.log('Consumer connected')
1392
+ })
1393
+
1394
+ consumer.on(consumer.events.DISCONNECT, () => {
1395
+ console.log('Consumer disconnected')
1396
+ })
1397
+ ```
1398
+
1399
+ ## Complete Examples
1400
+
1401
+ ### Complete Producer Example
1402
+
1403
+ ```javascript
1404
+ const { Kafka, CompressionTypes, logLevel } = require('kafkajs')
1405
+
1406
+ const kafka = new Kafka({
1407
+ clientId: 'order-service',
1408
+ brokers: [process.env.KAFKA_BROKER || 'localhost:9092'],
1409
+ logLevel: logLevel.INFO,
1410
+ retry: {
1411
+ retries: 5
1412
+ }
1413
+ })
1414
+
1415
+ const producer = kafka.producer({
1416
+ allowAutoTopicCreation: false,
1417
+ idempotent: true
1418
+ })
1419
+
1420
+ async function sendOrders() {
1421
+ await producer.connect()
1422
+
1423
+ try {
1424
+ const orders = [
1425
+ { orderId: '123', amount: 99.99 },
1426
+ { orderId: '124', amount: 149.99 }
1427
+ ]
1428
+
1429
+ for (const order of orders) {
1430
+ await producer.send({
1431
+ topic: 'orders',
1432
+ compression: CompressionTypes.GZIP,
1433
+ messages: [
1434
+ {
1435
+ key: order.orderId,
1436
+ value: JSON.stringify(order),
1437
+ headers: {
1438
+ 'correlation-id': generateId(),
1439
+ 'timestamp': Date.now().toString()
1440
+ }
1441
+ }
1442
+ ]
1443
+ })
1444
+
1445
+ console.log(`Sent order ${order.orderId}`)
1446
+ }
1447
+ } catch (error) {
1448
+ console.error('Error sending orders:', error)
1449
+ throw error
1450
+ } finally {
1451
+ await producer.disconnect()
1452
+ }
1453
+ }
1454
+
1455
+ sendOrders().catch(console.error)
1456
+
1457
+ function generateId() {
1458
+ return Math.random().toString(36).substring(7)
1459
+ }
1460
+ ```
1461
+
1462
+ ### Complete Consumer Example
1463
+
1464
+ ```javascript
1465
+ const { Kafka, logLevel } = require('kafkajs')
1466
+
1467
+ const kafka = new Kafka({
1468
+ clientId: 'order-processor',
1469
+ brokers: [process.env.KAFKA_BROKER || 'localhost:9092'],
1470
+ logLevel: logLevel.INFO
1471
+ })
1472
+
1473
+ const consumer = kafka.consumer({
1474
+ groupId: 'order-processing-group',
1475
+ sessionTimeout: 30000,
1476
+ heartbeatInterval: 3000
1477
+ })
1478
+
1479
+ async function processOrders() {
1480
+ await consumer.connect()
1481
+ await consumer.subscribe({
1482
+ topics: ['orders'],
1483
+ fromBeginning: true
1484
+ })
1485
+
1486
+ await consumer.run({
1487
+ autoCommit: true,
1488
+ autoCommitInterval: 5000,
1489
+ eachMessage: async ({ topic, partition, message }) => {
1490
+ const order = JSON.parse(message.value.toString())
1491
+ const correlationId = message.headers['correlation-id']?.toString()
1492
+
1493
+ console.log(`Processing order ${order.orderId}`, {
1494
+ partition,
1495
+ offset: message.offset,
1496
+ correlationId
1497
+ })
1498
+
1499
+ try {
1500
+ await processOrder(order)
1501
+ console.log(`Order ${order.orderId} processed successfully`)
1502
+ } catch (error) {
1503
+ console.error(`Error processing order ${order.orderId}:`, error)
1504
+ // Send to DLQ or handle error
1505
+ }
1506
+ }
1507
+ })
1508
+ }
1509
+
1510
+ async function processOrder(order) {
1511
+ // Simulate order processing
1512
+ await new Promise(resolve => setTimeout(resolve, 100))
1513
+ }
1514
+
1515
+ const shutdown = async () => {
1516
+ console.log('Shutting down...')
1517
+ await consumer.disconnect()
1518
+ process.exit(0)
1519
+ }
1520
+
1521
+ process.on('SIGINT', shutdown)
1522
+ process.on('SIGTERM', shutdown)
1523
+
1524
+ processOrders().catch(console.error)
1525
+ ```
1526
+
1527
+ ### Complete Transaction Example
1528
+
1529
+ ```javascript
1530
+ const { Kafka } = require('kafkajs')
1531
+
1532
+ const kafka = new Kafka({
1533
+ clientId: 'transaction-app',
1534
+ brokers: ['localhost:9092']
1535
+ })
1536
+
1537
+ const producer = kafka.producer({
1538
+ transactionalId: 'my-transactional-id',
1539
+ maxInFlightRequests: 1,
1540
+ idempotent: true
1541
+ })
1542
+
1543
+ const consumer = kafka.consumer({
1544
+ groupId: 'transaction-group'
1545
+ })
1546
+
1547
+ async function runTransactionalProcessing() {
1548
+ await producer.connect()
1549
+ await consumer.connect()
1550
+
1551
+ await consumer.subscribe({ topics: ['input-topic'] })
1552
+
1553
+ await consumer.run({
1554
+ autoCommit: false,
1555
+ eachMessage: async ({ topic, partition, message }) => {
1556
+ const transaction = await producer.transaction()
1557
+
1558
+ try {
1559
+ // Parse input message
1560
+ const input = JSON.parse(message.value.toString())
1561
+
1562
+ // Process the message
1563
+ const result = {
1564
+ original: input,
1565
+ processed: true,
1566
+ timestamp: Date.now()
1567
+ }
1568
+
1569
+ // Send to output topic
1570
+ await transaction.send({
1571
+ topic: 'output-topic',
1572
+ messages: [
1573
+ {
1574
+ key: message.key,
1575
+ value: JSON.stringify(result)
1576
+ }
1577
+ ]
1578
+ })
1579
+
1580
+ // Commit offset atomically
1581
+ await transaction.sendOffsets({
1582
+ consumerGroupId: 'transaction-group',
1583
+ topics: [
1584
+ {
1585
+ topic: topic,
1586
+ partitions: [
1587
+ {
1588
+ partition: partition,
1589
+ offset: (parseInt(message.offset) + 1).toString()
1590
+ }
1591
+ ]
1592
+ }
1593
+ ]
1594
+ })
1595
+
1596
+ await transaction.commit()
1597
+ console.log(`Processed message at offset ${message.offset}`)
1598
+ } catch (error) {
1599
+ console.error('Transaction failed:', error)
1600
+ await transaction.abort()
1601
+ }
1602
+ }
1603
+ })
1604
+ }
1605
+
1606
+ runTransactionalProcessing().catch(console.error)
1607
+ ```
1608
+
1609
+ ### Complete Admin Example
1610
+
1611
+ ```javascript
1612
+ const { Kafka } = require('kafkajs')
1613
+
1614
+ const kafka = new Kafka({
1615
+ clientId: 'admin-client',
1616
+ brokers: ['localhost:9092']
1617
+ })
1618
+
1619
+ const admin = kafka.admin()
1620
+
1621
+ async function manageTopics() {
1622
+ await admin.connect()
1623
+
1624
+ try {
1625
+ // List existing topics
1626
+ const existingTopics = await admin.listTopics()
1627
+ console.log('Existing topics:', existingTopics)
1628
+
1629
+ // Create new topic
1630
+ const topicName = 'my-new-topic'
1631
+
1632
+ if (!existingTopics.includes(topicName)) {
1633
+ await admin.createTopics({
1634
+ topics: [
1635
+ {
1636
+ topic: topicName,
1637
+ numPartitions: 3,
1638
+ replicationFactor: 1,
1639
+ configEntries: [
1640
+ { name: 'retention.ms', value: '86400000' },
1641
+ { name: 'cleanup.policy', value: 'delete' }
1642
+ ]
1643
+ }
1644
+ ]
1645
+ })
1646
+ console.log(`Topic ${topicName} created`)
1647
+ }
1648
+
1649
+ // Fetch metadata
1650
+ const metadata = await admin.fetchTopicMetadata({
1651
+ topics: [topicName]
1652
+ })
1653
+ console.log('Topic metadata:', JSON.stringify(metadata, null, 2))
1654
+
1655
+ // Fetch offsets
1656
+ const offsets = await admin.fetchTopicOffsets(topicName)
1657
+ console.log('Topic offsets:', offsets)
1658
+
1659
+ // List consumer groups
1660
+ const groups = await admin.listGroups()
1661
+ console.log('Consumer groups:', groups.groups)
1662
+
1663
+ } catch (error) {
1664
+ console.error('Error managing topics:', error)
1665
+ } finally {
1666
+ await admin.disconnect()
1667
+ }
1668
+ }
1669
+
1670
+ manageTopics().catch(console.error)
1671
+ ```