n8n-nodes-kafka-batch-consumer 1.0.7

@@ -0,0 +1,331 @@
+ /// <reference types="node" />
+
+ import {
+   IExecuteFunctions,
+   INodeExecutionData,
+   INodeType,
+   INodeTypeDescription,
+   NodeOperationError,
+ } from 'n8n-workflow';
+
+ import { Kafka, Consumer, EachMessagePayload } from 'kafkajs';
+
+ /**
+  * Step 1: Node Interface
+  * Implements INodeType with the complete node description
+  * Defines all Kafka configuration properties
+  * Includes an optional credentials reference for authentication
+  */
+ export class KafkaBatchConsumer implements INodeType {
+   description: INodeTypeDescription = {
+     displayName: 'Kafka Batch Consumer',
+     name: 'kafkaBatchConsumer',
+     icon: 'file:kafka.svg',
+     group: ['transform'],
+     version: 1,
+     description: 'Consume messages from Kafka in batches',
+     defaults: {
+       name: 'Kafka Batch Consumer',
+     },
+     inputs: ['main'],
+     outputs: ['main'],
+     // Credentials reference - same as the Kafka Trigger and Producer nodes
+     // Optional: allows unauthenticated connections
+     credentials: [
+       {
+         name: 'kafka',
+         required: false,
+       },
+     ],
+     // Define all Kafka configuration properties
+     properties: [
+       {
+         displayName: 'Brokers',
+         name: 'brokers',
+         type: 'string',
+         default: 'localhost:9092',
+         required: true,
+         description: 'Comma-separated list of Kafka broker addresses',
+       },
+       {
+         displayName: 'Client ID',
+         name: 'clientId',
+         type: 'string',
+         default: 'n8n-kafka-batch-consumer',
+         required: true,
+         description: 'Unique identifier for this Kafka client',
+       },
+       {
+         displayName: 'Group ID',
+         name: 'groupId',
+         type: 'string',
+         default: 'n8n-consumer-group',
+         required: true,
+         description: 'Consumer group identifier',
+       },
+       {
+         displayName: 'Topic',
+         name: 'topic',
+         type: 'string',
+         default: '',
+         required: true,
+         description: 'Kafka topic to consume from',
+       },
+       {
+         displayName: 'Batch Size',
+         name: 'batchSize',
+         type: 'number',
+         default: 10,
+         required: true,
+         description: 'Number of messages to consume in a batch',
+       },
+       {
+         displayName: 'From Beginning',
+         name: 'fromBeginning',
+         type: 'boolean',
+         default: false,
+         description: 'Whether to read from the beginning of the topic',
+       },
+       {
+         displayName: 'Session Timeout',
+         name: 'sessionTimeout',
+         type: 'number',
+         default: 30000,
+         description: 'Session timeout in milliseconds',
+       },
+       {
+         displayName: 'Options',
+         name: 'options',
+         type: 'collection',
+         placeholder: 'Add Option',
+         default: {},
+         options: [
+           {
+             displayName: 'Read Timeout',
+             name: 'readTimeout',
+             type: 'number',
+             default: 60000,
+             description: 'Maximum time to wait for messages in milliseconds',
+           },
+           {
+             displayName: 'Parse JSON',
+             name: 'parseJson',
+             type: 'boolean',
+             default: true,
+             description: 'Whether to parse message values as JSON',
+           },
+         ],
+       },
+     ],
+   };
+
+   /**
+    * Main execution method
+    * Handles the complete workflow: credentials, connection, consumption, and error handling
+    */
+   async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
+     const items = this.getInputData();
+     const returnData: INodeExecutionData[] = [];
+
+     // Get all node parameters from the n8n configuration
+     const brokers = this.getNodeParameter('brokers', 0) as string;
+     const clientId = this.getNodeParameter('clientId', 0) as string;
+     const groupId = this.getNodeParameter('groupId', 0) as string;
+     const topic = this.getNodeParameter('topic', 0) as string;
+     const batchSize = this.getNodeParameter('batchSize', 0) as number;
+     const fromBeginning = this.getNodeParameter('fromBeginning', 0) as boolean;
+     const sessionTimeout = this.getNodeParameter('sessionTimeout', 0) as number;
+     const options = this.getNodeParameter('options', 0) as {
+       readTimeout?: number;
+       parseJson?: boolean;
+     };
+
+     const readTimeout = options.readTimeout || 60000;
+     const parseJson = options.parseJson !== undefined ? options.parseJson : true;
+
+     // Parse the comma-separated brokers string into an array
+     const brokerList = brokers.split(',').map((b) => b.trim());
+
+     /**
+      * Step 2: Credentials Retrieval and Kafka Configuration
+      * Build the KafkaJS configuration with optional authentication
+      * Supports SASL (PLAIN, SCRAM-SHA-256, SCRAM-SHA-512) and SSL/TLS
+      */
+     // Build base Kafka configuration
+     const kafkaConfig: any = {
+       clientId,
+       brokers: brokerList,
+     };
+
+     // Attempt to retrieve optional Kafka credentials
+     let credentials: any = null;
+     try {
+       credentials = await this.getCredentials('kafka');
+     } catch (error) {
+       // Credentials are optional; continue without them for unauthenticated connections
+     }
+
+     // Map n8n credential fields to the KafkaJS authentication format
+     // Add authentication if credentials are provided
+     if (credentials) {
+       // Add SASL authentication for secure connections
+       // Supported mechanisms: plain, scram-sha-256, scram-sha-512
+       if (credentials.authentication) {
+         kafkaConfig.sasl = {
+           mechanism: credentials.authentication, // 'plain', 'scram-sha-256', or 'scram-sha-512' (KafkaJS expects lowercase)
+           username: credentials.username,
+           password: credentials.password,
+         };
+       }
+
+       // Add SSL/TLS configuration for encrypted connections
+       if (credentials.ssl !== undefined) {
+         kafkaConfig.ssl = {
+           rejectUnauthorized: credentials.ssl, // Validate server certificates
+         };
+
+         // Add optional SSL certificates for mutual TLS authentication
+         if (credentials.ca) {
+           kafkaConfig.ssl.ca = credentials.ca; // Certificate Authority
+         }
+         if (credentials.cert) {
+           kafkaConfig.ssl.cert = credentials.cert; // Client certificate
+         }
+         if (credentials.key) {
+           kafkaConfig.ssl.key = credentials.key; // Client private key
+         }
+       }
+     }
+
+     /**
+      * Step 3: Consumer Setup
+      * Initialize the Kafka client and consumer with the configuration
+      * Connect to the brokers and subscribe to the topic
+      */
+     // Create Kafka instance with complete configuration
+     const kafka = new Kafka(kafkaConfig);
+     // Create consumer with group ID and session timeout
+     const consumer: Consumer = kafka.consumer({
+       groupId, // Consumer group for load balancing and offset management
+       sessionTimeout, // Session timeout in milliseconds
+     });
+
+     // Track connection state for proper cleanup
+     let consumerConnected = false;
+
+     try {
+       // Establish connection to Kafka brokers
+       await consumer.connect();
+       consumerConnected = true;
+
+       // Subscribe to the specified topic
+       // fromBeginning: if true, read from start; if false, read from latest
+       await consumer.subscribe({ topic, fromBeginning });
+
+       /**
+        * Step 4: Message Collection
+        * Collect messages into a batch with timeout support
+        * Stop when the batch size is reached or the timeout fires
+        */
+       // Initialize message collection array
+       const messages: INodeExecutionData[] = [];
+       let timeoutHandle: NodeJS.Timeout | null = null;
+       let resolvePromise: ((value: void) => void) | null = null;
+
+       const collectionPromise = new Promise<void>((resolve) => {
+         resolvePromise = resolve;
+       });
+
+       // Set the maximum wait time for message collection
+       timeoutHandle = setTimeout(() => {
+         if (resolvePromise) {
+           resolvePromise(); // Resolve with a partial batch on timeout
+         }
+       }, readTimeout);
+
+       /**
+        * Start message consumption
+        * The eachMessage callback processes messages one by one,
+        * collecting them until the batch size or timeout is reached
+        * Note: consumer.run() starts the consumer but does not block
+        */
+       consumer.run({
+         eachMessage: async ({ topic, partition, message }: EachMessagePayload) => {
+           // Ignore messages delivered after the batch is already full:
+           // the consumer keeps running until disconnect() completes
+           if (messages.length >= batchSize) {
+             return;
+           }
+
+           /**
+            * Step 6: Output Format
+            * Process each message and format it for n8n output
+            * Parse JSON if configured, preserve metadata
+            */
+           // Decode the message value from Buffer to string
+           let value: any = message.value?.toString() || '';
+
+           // Attempt JSON parsing if configured
+           if (parseJson && value) {
+             try {
+               value = JSON.parse(value); // Parse valid JSON to an object
+             } catch (error) {
+               // Keep as string if JSON parsing fails (invalid JSON)
+             }
+           }
+
+           // Build n8n execution data with complete Kafka message metadata
+           const messageData: INodeExecutionData = {
+             json: {
+               topic,
+               partition,
+               offset: message.offset,
+               key: message.key?.toString() || null,
+               value,
+               timestamp: message.timestamp,
+               headers: message.headers || {},
+             },
+           };
+
+           messages.push(messageData);
+
+           // Check if the batch size has been reached
+           if (messages.length >= batchSize) {
+             if (timeoutHandle) {
+               clearTimeout(timeoutHandle); // Cancel the timeout
+             }
+             if (resolvePromise) {
+               resolvePromise(); // Complete batch collection
+             }
+           }
+         },
+       });
+
+       /**
+        * Wait for collection to complete
+        * Completes when the batch size is reached OR the timeout fires
+        * Partial batches are valid on timeout
+        */
+       await collectionPromise;
+
+       // Gracefully disconnect the consumer and clean up resources
+       await consumer.disconnect();
+       consumerConnected = false;
+
+       // Add collected messages to the return data
+       returnData.push(...messages);
+     } catch (error) {
+       // Ensure the consumer is disconnected
+       if (consumerConnected) {
+         try {
+           await consumer.disconnect();
+         } catch (disconnectError) {
+           // Ignore disconnect errors
+         }
+       }
+
+       const errorMessage = error instanceof Error ? error.message : String(error);
+       throw new NodeOperationError(
+         this.getNode(),
+         `Kafka error: ${errorMessage}`,
+         { description: errorMessage }
+       );
+     }
+
+     return [returnData];
+   }
+ }
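
Each item the node emits wraps the decoded value together with the Kafka metadata built in the "Step 6" block above. As a rough illustration (all field values below are hypothetical, not taken from this package), a single collected message with Parse JSON enabled would surface in n8n roughly as:

{
  "topic": "orders",
  "partition": 0,
  "offset": "42",
  "key": "order-123",
  "value": { "orderId": 123, "status": "created" },
  "timestamp": "1700000000000",
  "headers": {}
}

Note that KafkaJS reports offset and timestamp as strings, which is why they appear quoted; the timeout-versus-batch-size race simply decides how many such items end up in the output array.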
@@ -0,0 +1,3 @@
+ <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24">
+   <path fill="currentColor" d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zm-1-13h2v6h-2zm0 8h2v2h-2z"/>
+ </svg>
package/tsconfig.json ADDED
@@ -0,0 +1,20 @@
+ {
+   "compilerOptions": {
+     "target": "ES2020",
+     "module": "commonjs",
+     "lib": ["ES2020"],
+     "outDir": "./dist",
+     "rootDir": "./src",
+     "strict": true,
+     "esModuleInterop": true,
+     "skipLibCheck": true,
+     "forceConsistentCasingInFileNames": true,
+     "declaration": true,
+     "declarationMap": true,
+     "sourceMap": true,
+     "resolveJsonModule": true,
+     "moduleResolution": "node"
+   },
+   "include": ["src/**/*"],
+   "exclude": ["node_modules", "dist", "**/*.test.ts"]
+ }
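
The diff shown here does not include the package manifest, but for n8n to load the compiled node, the package.json must register it under the n8n key, pointing at the build output that the tsconfig above compiles into dist/. A minimal sketch, assuming the source file sits at src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts (the path and the exact manifest contents are assumptions, not taken from this diff):

{
  "name": "n8n-nodes-kafka-batch-consumer",
  "version": "1.0.7",
  "main": "index.js",
  "n8n": {
    "n8nNodesApiVersion": 1,
    "nodes": [
      "dist/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.js"
    ]
  },
  "peerDependencies": {
    "n8n-workflow": "*"
  }
}

Because the node reuses the built-in 'kafka' credentials (see the credentials block in the node description), no entry under n8n.credentials should be needed.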