n8n-nodes-kafka-batch-consumer 1.0.10 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,314 +0,0 @@
1
- /// <reference types="node" />
2
-
3
- import {
4
- IExecuteFunctions,
5
- INodeExecutionData,
6
- INodeType,
7
- INodeTypeDescription,
8
- NodeOperationError,
9
- } from 'n8n-workflow';
10
-
11
- import { Kafka, Consumer, EachMessagePayload } from 'kafkajs';
12
-
13
- /**
14
- * Step 1: Node Interface
15
- * Implements INodeType with complete node description
16
- * Defines all Kafka configuration properties
17
- * Includes optional credentials reference for authentication
18
- */
19
- export class KafkaBatchConsumer implements INodeType {
20
- description: INodeTypeDescription = {
21
- displayName: 'Kafka Batch Consumer',
22
- name: 'kafkaBatchConsumer',
23
- icon: 'file:kafka.svg',
24
- group: ['transform'],
25
- version: 1,
26
- description: 'Consume messages from Kafka in batches',
27
- defaults: {
28
- name: 'Kafka Batch Consumer',
29
- },
30
- inputs: ['main'],
31
- outputs: ['main'],
32
- // Credentials reference - required for brokers and clientId configuration
33
- credentials: [
34
- {
35
- name: 'kafka',
36
- required: true,
37
- },
38
- ],
39
- // Define all Kafka configuration properties
40
- properties: [
41
- {
42
- displayName: 'Group ID',
43
- name: 'groupId',
44
- type: 'string',
45
- default: 'n8n-consumer-group',
46
- required: true,
47
- description: 'Consumer group identifier',
48
- },
49
- {
50
- displayName: 'Topic',
51
- name: 'topic',
52
- type: 'string',
53
- default: '',
54
- required: true,
55
- description: 'Kafka topic to consume from',
56
- },
57
- {
58
- displayName: 'Batch Size',
59
- name: 'batchSize',
60
- type: 'number',
61
- default: 10,
62
- required: true,
63
- description: 'Number of messages to consume in a batch',
64
- },
65
- {
66
- displayName: 'From Beginning',
67
- name: 'fromBeginning',
68
- type: 'boolean',
69
- default: false,
70
- description: 'Whether to read from the beginning of the topic',
71
- },
72
- {
73
- displayName: 'Session Timeout',
74
- name: 'sessionTimeout',
75
- type: 'number',
76
- default: 30000,
77
- description: 'Session timeout in milliseconds',
78
- },
79
- {
80
- displayName: 'Options',
81
- name: 'options',
82
- type: 'collection',
83
- placeholder: 'Add Option',
84
- default: {},
85
- options: [
86
- {
87
- displayName: 'Read Timeout',
88
- name: 'readTimeout',
89
- type: 'number',
90
- default: 60000,
91
- description: 'Maximum time to wait for messages in milliseconds',
92
- },
93
- {
94
- displayName: 'Parse JSON',
95
- name: 'parseJson',
96
- type: 'boolean',
97
- default: true,
98
- description: 'Whether to parse message values as JSON',
99
- },
100
- ],
101
- },
102
- ],
103
- };
104
-
105
- /**
106
- * Main execution method
107
- * Handles the complete workflow: credentials, connection, consumption, and error handling
108
- */
109
- async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
110
- const returnData: INodeExecutionData[] = [];
111
-
112
- // Get all node parameters from N8N configuration
113
- const groupId = this.getNodeParameter('groupId', 0) as string;
114
- const topic = this.getNodeParameter('topic', 0) as string;
115
- const batchSize = this.getNodeParameter('batchSize', 0) as number;
116
- const fromBeginning = this.getNodeParameter('fromBeginning', 0) as boolean;
117
- const sessionTimeout = this.getNodeParameter('sessionTimeout', 0) as number;
118
- const options = this.getNodeParameter('options', 0) as {
119
- readTimeout?: number;
120
- parseJson?: boolean;
121
- };
122
-
123
- const readTimeout = options.readTimeout || 60000;
124
- const parseJson = options.parseJson !== undefined ? options.parseJson : true;
125
-
126
- /**
127
- * Step 2: Credentials Retrieval and Kafka Configuration
128
- * Build KafkaJS configuration with optional authentication
129
- * Supports SASL (PLAIN, SCRAM-SHA-256, SCRAM-SHA-512) and SSL/TLS
130
- * Brokers and clientId are now taken from credentials
131
- */
132
-
133
- // Attempt to retrieve Kafka credentials (required for brokers and clientId)
134
- let credentials: any = null;
135
- try {
136
- credentials = await this.getCredentials('kafka');
137
- } catch (error) {
138
- throw new NodeOperationError(
139
- this.getNode(),
140
- 'Kafka credentials are required to get brokers and clientId configuration'
141
- );
142
- }
143
-
144
- // Build base Kafka configuration from credentials
145
- const kafkaConfig: any = {
146
- clientId: credentials.clientId || 'n8n-kafka-batch-consumer',
147
- brokers: credentials.brokers ?
148
- (typeof credentials.brokers === 'string' ?
149
- credentials.brokers.split(',').map((b: string) => b.trim()) :
150
- credentials.brokers) :
151
- ['localhost:9092'],
152
- };
153
-
154
- // Map N8N credential fields to KafkaJS authentication format
155
- // Add SASL authentication if provided
156
- if (credentials.authentication) {
157
- kafkaConfig.sasl = {
158
- mechanism: credentials.authentication, // PLAIN, SCRAM-SHA-256, or SCRAM-SHA-512
159
- username: credentials.username,
160
- password: credentials.password,
161
- };
162
- }
163
-
164
- // Add SSL/TLS configuration for encrypted connections
165
- // Only enable SSL if explicitly set to true or if SSL certificates are provided
166
- if (credentials.ssl === true || credentials.ca || credentials.cert || credentials.key) {
167
- kafkaConfig.ssl = {
168
- rejectUnauthorized: credentials.ssl !== false, // Default to true if SSL is enabled
169
- };
170
-
171
- // Add optional SSL certificates for mutual TLS authentication
172
- if (credentials.ca) {
173
- kafkaConfig.ssl.ca = credentials.ca; // Certificate Authority
174
- }
175
- if (credentials.cert) {
176
- kafkaConfig.ssl.cert = credentials.cert; // Client certificate
177
- }
178
- if (credentials.key) {
179
- kafkaConfig.ssl.key = credentials.key; // Client private key
180
- }
181
- }
182
-
183
- /**
184
- * Step 3: Consumer Setup
185
- * Initialize Kafka client and consumer with configuration
186
- * Connect to brokers and subscribe to topic
187
- */
188
- // Create Kafka instance with complete configuration
189
- const kafka = new Kafka(kafkaConfig);
190
- // Create consumer with group ID and session timeout
191
- const consumer: Consumer = kafka.consumer({
192
- groupId, // Consumer group for load balancing and offset management
193
- sessionTimeout, // Session timeout in milliseconds
194
- });
195
-
196
- // Track connection state for proper cleanup
197
- let consumerConnected = false;
198
-
199
- try {
200
- // Establish connection to Kafka brokers
201
- await consumer.connect();
202
- consumerConnected = true;
203
-
204
- // Subscribe to the specified topic
205
- // fromBeginning: if true, read from start; if false, read from latest
206
- await consumer.subscribe({ topic, fromBeginning });
207
-
208
- /**
209
- * Step 4: Message Collection
210
- * Collect messages in batch with timeout support
211
- * Stop when batch size reached or timeout occurs
212
- */
213
- // Initialize message collection array
214
- const messages: INodeExecutionData[] = [];
215
- let timeoutHandle: NodeJS.Timeout | null = null;
216
- let resolvePromise: ((value: void) => void) | null = null;
217
-
218
- const collectionPromise = new Promise<void>((resolve) => {
219
- resolvePromise = resolve;
220
- });
221
-
222
- // Set maximum wait time for message collection
223
- timeoutHandle = setTimeout(() => {
224
- if (resolvePromise) {
225
- resolvePromise(); // Resolve with partial batch on timeout
226
- }
227
- }, readTimeout);
228
-
229
- /**
230
- * Start message consumption
231
- * eachMessage callback processes messages one by one
232
- * Collects until batch size or timeout reached
233
- * Note: consumer.run() starts the consumer but doesn't block
234
- */
235
- consumer.run({
236
- eachMessage: async ({ topic, partition, message }: EachMessagePayload) => {
237
- /**
238
- * Step 6: Output Format
239
- * Process each message and format for N8N output
240
- * Parse JSON if configured, preserve metadata
241
- */
242
- // Parse message value from Buffer to string
243
- let value: any = message.value?.toString() || '';
244
-
245
- // Attempt JSON parsing if configured
246
- if (parseJson && value) {
247
- try {
248
- value = JSON.parse(value); // Parse valid JSON to object
249
- } catch (error) {
250
- // Keep as string if JSON parsing fails (invalid JSON)
251
- }
252
- }
253
-
254
- // Build N8N execution data with complete Kafka message metadata
255
- const messageData: INodeExecutionData = {
256
- json: {
257
- topic,
258
- partition,
259
- offset: message.offset,
260
- key: message.key?.toString() || null,
261
- value,
262
- timestamp: message.timestamp,
263
- headers: message.headers || {},
264
- },
265
- };
266
-
267
- messages.push(messageData);
268
-
269
- // Check if batch size reached
270
- if (messages.length >= batchSize) {
271
- if (timeoutHandle) {
272
- clearTimeout(timeoutHandle); // Cancel timeout
273
- }
274
- if (resolvePromise) {
275
- resolvePromise(); // Complete batch collection
276
- }
277
- }
278
- },
279
- });
280
-
281
- /**
282
- * Wait for collection to complete
283
- * Completes when: batch size reached OR timeout occurs
284
- * Partial batches are valid on timeout
285
- */
286
- await collectionPromise;
287
-
288
- // Gracefully disconnect consumer and cleanup resources
289
- await consumer.disconnect();
290
- consumerConnected = false;
291
-
292
- // Add collected messages to return data
293
- returnData.push(...messages);
294
- } catch (error) {
295
- // Ensure consumer is disconnected
296
- if (consumerConnected) {
297
- try {
298
- await consumer.disconnect();
299
- } catch (disconnectError) {
300
- // Ignore disconnect errors
301
- }
302
- }
303
-
304
- const errorMessage = error instanceof Error ? error.message : String(error);
305
- throw new NodeOperationError(
306
- this.getNode(),
307
- `Kafka error: ${errorMessage}`,
308
- { description: errorMessage }
309
- );
310
- }
311
-
312
- return [returnData];
313
- }
314
- }
@@ -1,3 +0,0 @@
1
- <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24">
2
- <path fill="currentColor" d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zm-1-13h2v6h-2zm0 8h2v2h-2z"/>
3
- </svg>
package/tsconfig.json DELETED
@@ -1,20 +0,0 @@
1
- {
2
- "compilerOptions": {
3
- "target": "ES2020",
4
- "module": "commonjs",
5
- "lib": ["ES2020"],
6
- "outDir": "./dist",
7
- "rootDir": "./src",
8
- "strict": true,
9
- "esModuleInterop": true,
10
- "skipLibCheck": true,
11
- "forceConsistentCasingInFileNames": true,
12
- "declaration": true,
13
- "declarationMap": true,
14
- "sourceMap": true,
15
- "resolveJsonModule": true,
16
- "moduleResolution": "node"
17
- },
18
- "include": ["src/**/*"],
19
- "exclude": ["node_modules", "dist", "**/*.test.ts"]
20
- }