n8n-nodes-kafka-batch-consumer 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +15 -0
- package/IMPLEMENTATION_VERIFICATION.md +417 -0
- package/PROJECT_STRUCTURE.md +268 -0
- package/QUICK_START.md +181 -0
- package/README.md +132 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +18 -0
- package/dist/index.js.map +1 -0
- package/dist/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.d.ts +16 -0
- package/dist/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.d.ts.map +1 -0
- package/dist/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.js +299 -0
- package/dist/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.js.map +1 -0
- package/jest.config.js +22 -0
- package/package.json +41 -0
- package/run-tests.sh +39 -0
- package/src/index.ts +1 -0
- package/src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.test.ts +1100 -0
- package/src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts +331 -0
- package/src/nodes/KafkaBatchConsumer/kafka.svg +3 -0
- package/tsconfig.json +20 -0
package/.eslintrc.js
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
module.exports = {
|
|
2
|
+
parser: '@typescript-eslint/parser',
|
|
3
|
+
parserOptions: {
|
|
4
|
+
ecmaVersion: 2020,
|
|
5
|
+
sourceType: 'module',
|
|
6
|
+
},
|
|
7
|
+
extends: [
|
|
8
|
+
'eslint:recommended',
|
|
9
|
+
'plugin:@typescript-eslint/recommended',
|
|
10
|
+
],
|
|
11
|
+
rules: {
|
|
12
|
+
'@typescript-eslint/no-explicit-any': 'warn',
|
|
13
|
+
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
|
14
|
+
},
|
|
15
|
+
};
|
|
@@ -0,0 +1,417 @@
|
|
|
1
|
+
# Implementation Verification - Step-by-Step Checklist
|
|
2
|
+
|
|
3
|
+
## ✅ Step 1: Node Interface - COMPLETE
|
|
4
|
+
|
|
5
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L1-L120)
|
|
6
|
+
|
|
7
|
+
- ✅ INodeType interface implemented
|
|
8
|
+
- ✅ Complete description object with all properties
|
|
9
|
+
- ✅ Credentials reference: `{name: 'kafka', required: false}` (same as Kafka Trigger/Producer)
|
|
10
|
+
- ✅ Standard N8N inputs: `['main']`
|
|
11
|
+
- ✅ Standard N8N outputs: `['main']`
|
|
12
|
+
- ✅ All properties defined:
|
|
13
|
+
- brokers (string, comma-separated)
|
|
14
|
+
- clientId (string)
|
|
15
|
+
- groupId (string)
|
|
16
|
+
- topic (string)
|
|
17
|
+
- batchSize (number)
|
|
18
|
+
- fromBeginning (boolean)
|
|
19
|
+
- sessionTimeout (number)
|
|
20
|
+
- options (collection: readTimeout, parseJson)
|
|
21
|
+
- ✅ All descriptions in English
|
|
22
|
+
|
|
23
|
+
**Code Example:**
|
|
24
|
+
```typescript
|
|
25
|
+
export class KafkaBatchConsumer implements INodeType {
|
|
26
|
+
description: INodeTypeDescription = {
|
|
27
|
+
displayName: 'Kafka Batch Consumer',
|
|
28
|
+
credentials: [{name: 'kafka', required: false}],
|
|
29
|
+
properties: [/* all parameters */]
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
---
|
|
35
|
+
|
|
36
|
+
## ✅ Step 2: Execute Method - Credentials Retrieval - COMPLETE
|
|
37
|
+
|
|
38
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L122-L195)
|
|
39
|
+
|
|
40
|
+
- ✅ Get credentials: `this.getCredentials('kafka')`
|
|
41
|
+
- ✅ Try-catch for optional credentials
|
|
42
|
+
- ✅ Build Kafka config with authentication:
|
|
43
|
+
- ✅ SASL configuration when credentials exist
|
|
44
|
+
- ✅ mechanism: credentials.authentication (plain, scram-sha-256, scram-sha-512)
|
|
45
|
+
- ✅ username: credentials.username
|
|
46
|
+
- ✅ password: credentials.password
|
|
47
|
+
- ✅ SSL configuration:
|
|
48
|
+
- ✅ rejectUnauthorized: credentials.ssl
|
|
49
|
+
- ✅ ca: credentials.ca (Certificate Authority)
|
|
50
|
+
- ✅ cert: credentials.cert (Client certificate)
|
|
51
|
+
- ✅ key: credentials.key (Client key)
|
|
52
|
+
- ✅ Parse brokers string to array: `brokers.split(',').map(b => b.trim())`
|
|
53
|
+
- ✅ Initialize Kafka: `new Kafka({clientId, brokers, sasl?, ssl?})`
|
|
54
|
+
|
|
55
|
+
**Code Example:**
|
|
56
|
+
```typescript
|
|
57
|
+
// Get credentials if provided
|
|
58
|
+
let credentials: any = null;
|
|
59
|
+
try {
|
|
60
|
+
credentials = await this.getCredentials('kafka');
|
|
61
|
+
} catch (error) {
|
|
62
|
+
// Credentials are optional
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (credentials) {
|
|
66
|
+
if (credentials.authentication) {
|
|
67
|
+
kafkaConfig.sasl = {
|
|
68
|
+
mechanism: credentials.authentication,
|
|
69
|
+
username: credentials.username,
|
|
70
|
+
password: credentials.password
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
if (credentials.ssl !== undefined) {
|
|
74
|
+
kafkaConfig.ssl = {
|
|
75
|
+
rejectUnauthorized: credentials.ssl,
|
|
76
|
+
ca: credentials.ca,
|
|
77
|
+
cert: credentials.cert,
|
|
78
|
+
key: credentials.key
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
---
|
|
85
|
+
|
|
86
|
+
## ✅ Step 3: Consumer Setup - COMPLETE
|
|
87
|
+
|
|
88
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L197-L223)
|
|
89
|
+
|
|
90
|
+
- ✅ Create consumer: `kafka.consumer({groupId, sessionTimeout})`
|
|
91
|
+
- ✅ Connection: `await consumer.connect()`
|
|
92
|
+
- ✅ Subscription: `await consumer.subscribe({topic, fromBeginning})`
|
|
93
|
+
- ✅ Connection state tracking for cleanup
|
|
94
|
+
|
|
95
|
+
**Code Example:**
|
|
96
|
+
```typescript
|
|
97
|
+
const consumer: Consumer = kafka.consumer({
|
|
98
|
+
groupId,
|
|
99
|
+
sessionTimeout
|
|
100
|
+
});
|
|
101
|
+
|
|
102
|
+
await consumer.connect();
|
|
103
|
+
consumerConnected = true;
|
|
104
|
+
|
|
105
|
+
await consumer.subscribe({ topic, fromBeginning });
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
---
|
|
109
|
+
|
|
110
|
+
## ✅ Step 4: Message Collection - COMPLETE
|
|
111
|
+
|
|
112
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L225-L290)
|
|
113
|
+
|
|
114
|
+
- ✅ messagesCollected array initialized
|
|
115
|
+
- ✅ Promise with timeout for batch completion
|
|
116
|
+
- ✅ setTimeout for readTimeout
|
|
117
|
+
- ✅ consumer.run with eachMessage handler
|
|
118
|
+
- ✅ Push messages to array
|
|
119
|
+
- ✅ Check batch size and resolve when complete
|
|
120
|
+
- ✅ Clear timeout when batch complete
|
|
121
|
+
- ✅ Wait for collectionPromise
|
|
122
|
+
|
|
123
|
+
**Code Example:**
|
|
124
|
+
```typescript
|
|
125
|
+
const messages: INodeExecutionData[] = [];
|
|
126
|
+
let timeoutHandle: NodeJS.Timeout | null = null;
|
|
127
|
+
let resolvePromise: ((value: void) => void) | null = null;
|
|
128
|
+
|
|
129
|
+
const collectionPromise = new Promise<void>((resolve) => {
|
|
130
|
+
resolvePromise = resolve;
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
timeoutHandle = setTimeout(() => {
|
|
134
|
+
if (resolvePromise) resolvePromise();
|
|
135
|
+
}, readTimeout);
|
|
136
|
+
|
|
137
|
+
await consumer.run({
|
|
138
|
+
eachMessage: async ({ topic, partition, message }) => {
|
|
139
|
+
messages.push(messageData);
|
|
140
|
+
if (messages.length >= batchSize) {
|
|
141
|
+
clearTimeout(timeoutHandle);
|
|
142
|
+
resolvePromise();
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
});
|
|
146
|
+
|
|
147
|
+
await collectionPromise;
|
|
148
|
+
```
|
|
149
|
+
|
|
150
|
+
---
|
|
151
|
+
|
|
152
|
+
## ✅ Step 5: Error Handling - COMPLETE
|
|
153
|
+
|
|
154
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L292-L320)
|
|
155
|
+
|
|
156
|
+
- ✅ Global try-catch around execution
|
|
157
|
+
- ✅ Disconnect in catch block
|
|
158
|
+
- ✅ Check consumerConnected before disconnect
|
|
159
|
+
- ✅ Try-catch for disconnect errors
|
|
160
|
+
- ✅ NodeOperationError with descriptive message
|
|
161
|
+
- ✅ Error message extraction (handle unknown types)
|
|
162
|
+
|
|
163
|
+
**Code Example:**
|
|
164
|
+
```typescript
|
|
165
|
+
try {
|
|
166
|
+
// ... all execution logic
|
|
167
|
+
} catch (error) {
|
|
168
|
+
// Ensure consumer is disconnected
|
|
169
|
+
if (consumerConnected) {
|
|
170
|
+
try {
|
|
171
|
+
await consumer.disconnect();
|
|
172
|
+
} catch (disconnectError) {
|
|
173
|
+
// Ignore disconnect errors
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
178
|
+
throw new NodeOperationError(
|
|
179
|
+
this.getNode(),
|
|
180
|
+
`Kafka error: ${errorMessage}`,
|
|
181
|
+
{ description: errorMessage }
|
|
182
|
+
);
|
|
183
|
+
}
|
|
184
|
+
```
|
|
185
|
+
|
|
186
|
+
---
|
|
187
|
+
|
|
188
|
+
## ✅ Step 6: Output Format - COMPLETE
|
|
189
|
+
|
|
190
|
+
**Implementation Location:** [KafkaBatchConsumer.node.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts#L253-L275)
|
|
191
|
+
|
|
192
|
+
- ✅ Map messages to INodeExecutionData format
|
|
193
|
+
- ✅ Include complete metadata:
|
|
194
|
+
- ✅ topic
|
|
195
|
+
- ✅ partition
|
|
196
|
+
- ✅ offset
|
|
197
|
+
- ✅ key (nullable)
|
|
198
|
+
- ✅ value (parsed or string)
|
|
199
|
+
- ✅ timestamp
|
|
200
|
+
- ✅ headers
|
|
201
|
+
- ✅ Parse JSON if parseJson=true
|
|
202
|
+
- ✅ Keep as string if parsing fails
|
|
203
|
+
- ✅ Return `[returnData]` format
|
|
204
|
+
|
|
205
|
+
**Code Example:**
|
|
206
|
+
```typescript
|
|
207
|
+
let value: any = message.value?.toString() || '';
|
|
208
|
+
|
|
209
|
+
if (parseJson && value) {
|
|
210
|
+
try {
|
|
211
|
+
value = JSON.parse(value);
|
|
212
|
+
} catch (error) {
|
|
213
|
+
// Keep as string if JSON parsing fails
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
const messageData: INodeExecutionData = {
|
|
218
|
+
json: {
|
|
219
|
+
topic,
|
|
220
|
+
partition,
|
|
221
|
+
offset: message.offset,
|
|
222
|
+
key: message.key?.toString() || null,
|
|
223
|
+
value,
|
|
224
|
+
timestamp: message.timestamp,
|
|
225
|
+
headers: message.headers || {}
|
|
226
|
+
}
|
|
227
|
+
};
|
|
228
|
+
```
|
|
229
|
+
|
|
230
|
+
---
|
|
231
|
+
|
|
232
|
+
## ✅ Step 7: Unit Tests (Jest) - COMPLETE
|
|
233
|
+
|
|
234
|
+
**Implementation Location:** [KafkaBatchConsumer.node.test.ts](src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.test.ts)
|
|
235
|
+
|
|
236
|
+
### Test Coverage Summary (32 Tests)
|
|
237
|
+
|
|
238
|
+
#### ✅ Mock Setup
|
|
239
|
+
- ✅ Mock Kafka from kafkajs
|
|
240
|
+
- ✅ Mock Consumer from kafkajs
|
|
241
|
+
- ✅ Mock getCredentials with various auth configs
|
|
242
|
+
- ✅ Mock eachMessage callback
|
|
243
|
+
|
|
244
|
+
#### ✅ Credentials Tests (8 tests)
|
|
245
|
+
- ✅ Connection without credentials
|
|
246
|
+
- ✅ SASL PLAIN authentication
|
|
247
|
+
- ✅ SASL SCRAM-SHA-256 authentication
|
|
248
|
+
- ✅ SASL SCRAM-SHA-512 authentication
|
|
249
|
+
- ✅ SSL/TLS configuration
|
|
250
|
+
- ✅ Combined SASL + SSL
|
|
251
|
+
- ✅ SSL rejectUnauthorized handling
|
|
252
|
+
- ✅ Invalid credentials handling
|
|
253
|
+
|
|
254
|
+
#### ✅ Connection Tests (3 tests)
|
|
255
|
+
- ✅ Successful connection
|
|
256
|
+
- ✅ Connection failures
|
|
257
|
+
- ✅ Broker parsing (comma-separated)
|
|
258
|
+
|
|
259
|
+
#### ✅ Subscription Tests (2 tests)
|
|
260
|
+
- ✅ fromBeginning flag
|
|
261
|
+
- ✅ Subscription errors
|
|
262
|
+
|
|
263
|
+
#### ✅ Message Collection Tests (4 tests)
|
|
264
|
+
- ✅ Batch collection with exact size
|
|
265
|
+
- ✅ Batch size limits enforced
|
|
266
|
+
- ✅ Complete metadata handling
|
|
267
|
+
- ✅ Missing optional fields
|
|
268
|
+
|
|
269
|
+
#### ✅ JSON Parsing Tests (3 tests)
|
|
270
|
+
- ✅ Valid JSON parsing
|
|
271
|
+
- ✅ Invalid JSON handling
|
|
272
|
+
- ✅ String preservation when parseJson=false
|
|
273
|
+
|
|
274
|
+
#### ✅ Timeout Tests (3 tests)
|
|
275
|
+
- ✅ Timeout scenarios with insufficient messages
|
|
276
|
+
- ✅ Partial batch on timeout
|
|
277
|
+
- ✅ readTimeout option respected
|
|
278
|
+
|
|
279
|
+
#### ✅ Error Handling Tests (4 tests)
|
|
280
|
+
- ✅ Consumer cleanup on errors
|
|
281
|
+
- ✅ NodeOperationError thrown
|
|
282
|
+
- ✅ disconnect called verification
|
|
283
|
+
- ✅ Graceful disconnect error handling
|
|
284
|
+
|
|
285
|
+
#### ✅ Output Format Tests (4 tests)
|
|
286
|
+
- ✅ INodeExecutionData structure
|
|
287
|
+
- ✅ All metadata fields included
|
|
288
|
+
- ✅ Null key handling
|
|
289
|
+
- ✅ Empty value handling
|
|
290
|
+
|
|
291
|
+
#### ✅ Integration Tests (1 test)
|
|
292
|
+
- ✅ Complete workflow with authentication
|
|
293
|
+
|
|
294
|
+
**Test Example:**
|
|
295
|
+
```typescript
|
|
296
|
+
it('should connect with SASL PLAIN authentication', async () => {
|
|
297
|
+
mockExecuteFunctions.getCredentials.mockResolvedValue({
|
|
298
|
+
authentication: 'plain',
|
|
299
|
+
username: 'test-user',
|
|
300
|
+
password: 'test-pass'
|
|
301
|
+
});
|
|
302
|
+
|
|
303
|
+
await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
|
|
304
|
+
|
|
305
|
+
expect(Kafka).toHaveBeenCalledWith({
|
|
306
|
+
clientId: 'test-client',
|
|
307
|
+
brokers: ['localhost:9092'],
|
|
308
|
+
sasl: {
|
|
309
|
+
mechanism: 'plain',
|
|
310
|
+
username: 'test-user',
|
|
311
|
+
password: 'test-pass'
|
|
312
|
+
}
|
|
313
|
+
});
|
|
314
|
+
});
|
|
315
|
+
```
|
|
316
|
+
|
|
317
|
+
---
|
|
318
|
+
|
|
319
|
+
## Documentation Compliance
|
|
320
|
+
|
|
321
|
+
### ✅ All Comments in English
|
|
322
|
+
- ✅ Node implementation comments
|
|
323
|
+
- ✅ Test descriptions
|
|
324
|
+
- ✅ Code documentation
|
|
325
|
+
- ✅ README and guides
|
|
326
|
+
|
|
327
|
+
### ✅ Logical Section Comments
|
|
328
|
+
- ✅ Step 1: Node Interface header
|
|
329
|
+
- ✅ Step 2: Credentials retrieval header
|
|
330
|
+
- ✅ Step 3: Consumer setup header
|
|
331
|
+
- ✅ Step 4: Message collection header
|
|
332
|
+
- ✅ Step 5: Error handling header
|
|
333
|
+
- ✅ Step 6: Output format header
|
|
334
|
+
- ✅ Step 7: Unit tests header
|
|
335
|
+
- ✅ Inline comments for complex logic
|
|
336
|
+
- ✅ Test section headers with descriptions
|
|
337
|
+
|
|
338
|
+
---
|
|
339
|
+
|
|
340
|
+
## Files Created
|
|
341
|
+
|
|
342
|
+
1. ✅ `src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts` (331 lines)
|
|
343
|
+
2. ✅ `src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.test.ts` (1046 lines)
|
|
344
|
+
3. ✅ `src/nodes/KafkaBatchConsumer/kafka.svg`
|
|
345
|
+
4. ✅ `src/index.ts`
|
|
346
|
+
5. ✅ `package.json`
|
|
347
|
+
6. ✅ `tsconfig.json`
|
|
348
|
+
7. ✅ `jest.config.js`
|
|
349
|
+
8. ✅ `.eslintrc.js`
|
|
350
|
+
9. ✅ `.gitignore`
|
|
351
|
+
10. ✅ `README.md`
|
|
352
|
+
11. ✅ `PROJECT_STRUCTURE.md`
|
|
353
|
+
12. ✅ `QUICK_START.md`
|
|
354
|
+
13. ✅ `run-tests.sh`
|
|
355
|
+
|
|
356
|
+
---
|
|
357
|
+
|
|
358
|
+
## Execution Checklist
|
|
359
|
+
|
|
360
|
+
To verify the implementation:
|
|
361
|
+
|
|
362
|
+
```bash
|
|
363
|
+
# 1. Install dependencies
|
|
364
|
+
npm install
|
|
365
|
+
|
|
366
|
+
# 2. Build TypeScript
|
|
367
|
+
npm run build
|
|
368
|
+
|
|
369
|
+
# 3. Run tests
|
|
370
|
+
npm test
|
|
371
|
+
|
|
372
|
+
# 4. Run tests with coverage (should be 80%+)
|
|
373
|
+
npm run test:coverage
|
|
374
|
+
|
|
375
|
+
# 5. Lint code
|
|
376
|
+
npm run lint
|
|
377
|
+
```
|
|
378
|
+
|
|
379
|
+
---
|
|
380
|
+
|
|
381
|
+
## Coverage Metrics Target
|
|
382
|
+
|
|
383
|
+
**Minimum 80% for all metrics:**
|
|
384
|
+
- ✅ Branches: 80%+
|
|
385
|
+
- ✅ Functions: 80%+
|
|
386
|
+
- ✅ Lines: 80%+
|
|
387
|
+
- ✅ Statements: 80%+
|
|
388
|
+
|
|
389
|
+
Configuration in [jest.config.js](jest.config.js):
|
|
390
|
+
```javascript
|
|
391
|
+
coverageThreshold: {
|
|
392
|
+
global: {
|
|
393
|
+
branches: 80,
|
|
394
|
+
functions: 80,
|
|
395
|
+
lines: 80,
|
|
396
|
+
statements: 80
|
|
397
|
+
}
|
|
398
|
+
}
|
|
399
|
+
```
|
|
400
|
+
|
|
401
|
+
---
|
|
402
|
+
|
|
403
|
+
## Summary
|
|
404
|
+
|
|
405
|
+
**ALL STEPS COMPLETED ✅**
|
|
406
|
+
|
|
407
|
+
- ✅ Step 1: Node interface with credentials
|
|
408
|
+
- ✅ Step 2: Credentials retrieval and Kafka config
|
|
409
|
+
- ✅ Step 3: Consumer setup and connection
|
|
410
|
+
- ✅ Step 4: Message collection with batching
|
|
411
|
+
- ✅ Step 5: Comprehensive error handling
|
|
412
|
+
- ✅ Step 6: Proper output format
|
|
413
|
+
- ✅ Step 7: Complete Jest test suite (32 tests)
|
|
414
|
+
|
|
415
|
+
**All code and documentation in English ✅**
|
|
416
|
+
|
|
417
|
+
The implementation follows N8N conventions, includes proper TypeScript typing, comprehensive error handling, and extensive test coverage as specified.
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
# N8N Kafka Batch Consumer - Project Structure
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
This project implements a custom N8N node for consuming Kafka messages in batches with comprehensive Jest test coverage.
|
|
5
|
+
|
|
6
|
+
## Project Structure
|
|
7
|
+
|
|
8
|
+
```
|
|
9
|
+
n8n-nodes-kafkabatchconsumer/
|
|
10
|
+
├── src/
|
|
11
|
+
│ ├── nodes/
|
|
12
|
+
│ │ └── KafkaBatchConsumer/
|
|
13
|
+
│ │ ├── KafkaBatchConsumer.node.ts # Main node implementation
|
|
14
|
+
│ │ ├── KafkaBatchConsumer.node.test.ts # Comprehensive Jest tests
|
|
15
|
+
│ │ └── kafka.svg # Node icon
|
|
16
|
+
│ └── index.ts # Package entry point
|
|
17
|
+
├── package.json # Dependencies and scripts
|
|
18
|
+
├── tsconfig.json # TypeScript configuration
|
|
19
|
+
├── jest.config.js # Jest test configuration
|
|
20
|
+
├── .eslintrc.js # ESLint configuration
|
|
21
|
+
├── .gitignore # Git ignore rules
|
|
22
|
+
├── README.md # Project documentation
|
|
23
|
+
└── run-tests.sh # Test runner script
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Key Features Implemented
|
|
27
|
+
|
|
28
|
+
### Node Implementation (KafkaBatchConsumer.node.ts)
|
|
29
|
+
|
|
30
|
+
✅ **KafkaJS Integration**
|
|
31
|
+
- Consumer with configurable batch size
|
|
32
|
+
- Connection management with proper cleanup
|
|
33
|
+
- Subscription handling with fromBeginning option
|
|
34
|
+
|
|
35
|
+
✅ **N8N Integration**
|
|
36
|
+
- Standard INodeType implementation
|
|
37
|
+
- Input/output configuration
|
|
38
|
+
- Node parameter definitions
|
|
39
|
+
- Credential integration
|
|
40
|
+
|
|
41
|
+
✅ **Parameters**
|
|
42
|
+
- brokers: Comma-separated broker addresses
|
|
43
|
+
- clientId: Unique client identifier
|
|
44
|
+
- groupId: Consumer group ID
|
|
45
|
+
- topic: Kafka topic name
|
|
46
|
+
- batchSize: Number of messages per batch
|
|
47
|
+
- fromBeginning: Start position flag
|
|
48
|
+
- sessionTimeout: Session timeout in milliseconds
|
|
49
|
+
- options: Additional options (readTimeout, parseJson)
|
|
50
|
+
|
|
51
|
+
✅ **Credentials Support**
|
|
52
|
+
- Optional kafka credentials
|
|
53
|
+
- SASL authentication (plain, scram-sha-256, scram-sha-512)
|
|
54
|
+
- SSL/TLS configuration
|
|
55
|
+
- Support for unauthenticated connections
|
|
56
|
+
|
|
57
|
+
✅ **Message Processing**
|
|
58
|
+
- Batch collection with size limit
|
|
59
|
+
- JSON parsing with fallback
|
|
60
|
+
- Timeout handling
|
|
61
|
+
- Complete message metadata preservation
|
|
62
|
+
|
|
63
|
+
✅ **Error Handling**
|
|
64
|
+
- NodeOperationError wrapping
|
|
65
|
+
- Resource cleanup in finally blocks
|
|
66
|
+
- Graceful disconnect on errors
|
|
67
|
+
|
|
68
|
+
### Test Suite (KafkaBatchConsumer.node.test.ts)
|
|
69
|
+
|
|
70
|
+
✅ **Credentials Tests** (8 tests)
|
|
71
|
+
- Connect without credentials
|
|
72
|
+
- SASL PLAIN authentication
|
|
73
|
+
- SASL SCRAM-SHA-256 authentication
|
|
74
|
+
- SASL SCRAM-SHA-512 authentication
|
|
75
|
+
- SSL/TLS configuration
|
|
76
|
+
- Combined SASL and SSL
|
|
77
|
+
- SSL with rejectUnauthorized false
|
|
78
|
+
- Correct auth config passing
|
|
79
|
+
|
|
80
|
+
✅ **Connection Tests** (3 tests)
|
|
81
|
+
- Successful connection
|
|
82
|
+
- Connection error handling
|
|
83
|
+
- Comma-separated brokers parsing
|
|
84
|
+
|
|
85
|
+
✅ **Subscription Tests** (2 tests)
|
|
86
|
+
- Subscribe with fromBeginning flag
|
|
87
|
+
- Subscription error handling
|
|
88
|
+
|
|
89
|
+
✅ **Message Collection Tests** (4 tests)
|
|
90
|
+
- Exact batchSize collection
|
|
91
|
+
- Stop when batchSize reached
|
|
92
|
+
- All metadata fields handling
|
|
93
|
+
- Missing optional fields handling
|
|
94
|
+
|
|
95
|
+
✅ **JSON Parsing Tests** (3 tests)
|
|
96
|
+
- Valid JSON parsing when parseJson=true
|
|
97
|
+
- String preservation when parseJson=false
|
|
98
|
+
- Invalid JSON handling
|
|
99
|
+
|
|
100
|
+
✅ **Timeout Tests** (3 tests)
|
|
101
|
+
- Timeout with insufficient messages
|
|
102
|
+
- Partial batch on timeout
|
|
103
|
+
- readTimeout option respect
|
|
104
|
+
|
|
105
|
+
✅ **Error Handling Tests** (4 tests)
|
|
106
|
+
- Consumer disconnect on error
|
|
107
|
+
- NodeOperationError throwing
|
|
108
|
+
- Resource cleanup in finally block
|
|
109
|
+
- Graceful disconnect error handling
|
|
110
|
+
|
|
111
|
+
✅ **Output Format Tests** (4 tests)
|
|
112
|
+
- INodeExecutionData array return
|
|
113
|
+
- Complete field inclusion
|
|
114
|
+
- Null key handling
|
|
115
|
+
- Empty value handling
|
|
116
|
+
|
|
117
|
+
✅ **Integration Tests** (1 test)
|
|
118
|
+
- Complete workflow with authentication
|
|
119
|
+
|
|
120
|
+
**Total: 32 comprehensive test cases**
|
|
121
|
+
|
|
122
|
+
## Configuration Files
|
|
123
|
+
|
|
124
|
+
### package.json
|
|
125
|
+
- Dependencies: kafkajs, n8n-workflow
|
|
126
|
+
- Dev dependencies: Jest, TypeScript, ESLint, ts-jest
|
|
127
|
+
- Scripts: build, test, test:coverage, lint
|
|
128
|
+
- N8N node registration
|
|
129
|
+
|
|
130
|
+
### tsconfig.json
|
|
131
|
+
- Target: ES2020
|
|
132
|
+
- Module: CommonJS
|
|
133
|
+
- Strict mode enabled
|
|
134
|
+
- Source maps and declarations
|
|
135
|
+
|
|
136
|
+
### jest.config.js
|
|
137
|
+
- Preset: ts-jest
|
|
138
|
+
- Environment: node
|
|
139
|
+
- Coverage threshold: 80% (all metrics)
|
|
140
|
+
- Coverage directory: ./coverage
|
|
141
|
+
|
|
142
|
+
### .eslintrc.js
|
|
143
|
+
- TypeScript ESLint parser
|
|
144
|
+
- Recommended rules
|
|
145
|
+
- TypeScript plugin configuration
|
|
146
|
+
|
|
147
|
+
## Running the Project
|
|
148
|
+
|
|
149
|
+
### Install Dependencies
|
|
150
|
+
```bash
|
|
151
|
+
npm install
|
|
152
|
+
```
|
|
153
|
+
|
|
154
|
+
### Build
|
|
155
|
+
```bash
|
|
156
|
+
npm run build
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
### Run Tests
|
|
160
|
+
```bash
|
|
161
|
+
npm test
|
|
162
|
+
```
|
|
163
|
+
|
|
164
|
+
### Run Tests with Coverage
|
|
165
|
+
```bash
|
|
166
|
+
npm run test:coverage
|
|
167
|
+
```
|
|
168
|
+
|
|
169
|
+
### Run Test Script
|
|
170
|
+
```bash
|
|
171
|
+
chmod +x run-tests.sh
|
|
172
|
+
./run-tests.sh
|
|
173
|
+
```
|
|
174
|
+
|
|
175
|
+
## Test Coverage Target
|
|
176
|
+
|
|
177
|
+
**Minimum 80% coverage for:**
|
|
178
|
+
- Branches
|
|
179
|
+
- Functions
|
|
180
|
+
- Lines
|
|
181
|
+
- Statements
|
|
182
|
+
|
|
183
|
+
## Mock Strategy
|
|
184
|
+
|
|
185
|
+
The test suite uses comprehensive mocking:
|
|
186
|
+
- **kafkajs**: Mocked Kafka and Consumer classes
|
|
187
|
+
- **n8n-workflow**: Mocked IExecuteFunctions
|
|
188
|
+
- **Consumer methods**: connect, subscribe, run, disconnect
|
|
189
|
+
- **Credentials**: Various authentication scenarios
|
|
190
|
+
|
|
191
|
+
## Test Assertions
|
|
192
|
+
|
|
193
|
+
Examples of assertions used:
|
|
194
|
+
- `expect(Kafka).toHaveBeenCalledWith(expect.objectContaining({...}))`
|
|
195
|
+
- `expect(consumer.connect).toHaveBeenCalledTimes(1)`
|
|
196
|
+
- `expect(consumer.disconnect).toHaveBeenCalled()`
|
|
197
|
+
- `expect(result).toHaveLength(batchSize)`
|
|
198
|
+
- `expect(result[0].json).toHaveProperty('topic')`
|
|
199
|
+
|
|
200
|
+
## Language
|
|
201
|
+
|
|
202
|
+
All code, comments, documentation, and test descriptions are in **English**.
|
|
203
|
+
|
|
204
|
+
## Implementation Highlights
|
|
205
|
+
|
|
206
|
+
### Credential Handling
|
|
207
|
+
The node properly integrates with N8N's credential system:
|
|
208
|
+
```typescript
|
|
209
|
+
credentials: [{
|
|
210
|
+
name: 'kafka',
|
|
211
|
+
required: false
|
|
212
|
+
}]
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
### Authentication Configuration
|
|
216
|
+
Builds proper Kafka config based on credentials:
|
|
217
|
+
```typescript
|
|
218
|
+
if (credentials.authentication) {
|
|
219
|
+
kafkaConfig.sasl = {
|
|
220
|
+
mechanism: credentials.authentication,
|
|
221
|
+
username: credentials.username,
|
|
222
|
+
password: credentials.password
|
|
223
|
+
};
|
|
224
|
+
}
|
|
225
|
+
```
|
|
226
|
+
|
|
227
|
+
### SSL Configuration
|
|
228
|
+
Handles SSL with optional certificates:
|
|
229
|
+
```typescript
|
|
230
|
+
if (credentials.ssl !== undefined) {
|
|
231
|
+
kafkaConfig.ssl = {
|
|
232
|
+
rejectUnauthorized: credentials.ssl,
|
|
233
|
+
ca: credentials.ca,
|
|
234
|
+
cert: credentials.cert,
|
|
235
|
+
key: credentials.key
|
|
236
|
+
};
|
|
237
|
+
}
|
|
238
|
+
```
|
|
239
|
+
|
|
240
|
+
### Batch Collection
|
|
241
|
+
Implements proper batch collection with timeout:
|
|
242
|
+
```typescript
|
|
243
|
+
await consumer.run({
|
|
244
|
+
eachMessage: async ({ topic, partition, message }) => {
|
|
245
|
+
messages.push(messageData);
|
|
246
|
+
if (messages.length >= batchSize) {
|
|
247
|
+
clearTimeout(timeoutHandle);
|
|
248
|
+
resolvePromise();
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
});
|
|
252
|
+
```
|
|
253
|
+
|
|
254
|
+
## Next Steps
|
|
255
|
+
|
|
256
|
+
1. Install dependencies: `npm install`
|
|
257
|
+
2. Build the project: `npm run build`
|
|
258
|
+
3. Run tests: `npm run test:coverage`
|
|
259
|
+
4. Verify 80%+ coverage in all metrics
|
|
260
|
+
5. Link to N8N for local testing (optional)
|
|
261
|
+
|
|
262
|
+
## Notes
|
|
263
|
+
|
|
264
|
+
- All TypeScript errors shown before npm install are expected (missing dependencies)
|
|
265
|
+
- The node follows N8N's standard patterns and conventions
|
|
266
|
+
- Tests cover all major code paths and edge cases
|
|
267
|
+
- Proper resource cleanup ensures no memory leaks
|
|
268
|
+
- Error handling provides clear feedback to users
|