n8n-nodes-kafka-batch-consumer 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1100 @@
+ /**
+  * Step 7: Unit Tests (Jest)
+  * Comprehensive test suite for the Kafka Batch Consumer node
+  *
+  * Test Coverage:
+  * - Credentials handling (SASL PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, SSL/TLS)
+  * - Connection management and error handling
+  * - Message batch collection and size limits
+  * - JSON parsing (valid/invalid)
+  * - Timeout scenarios with partial batches
+  * - Consumer cleanup and resource management
+  * - Output format and metadata preservation
+  * - Integration scenarios
+  */
+
+ import { IExecuteFunctions, NodeOperationError } from 'n8n-workflow';
+ import { KafkaBatchConsumer } from './KafkaBatchConsumer.node';
+ import { Kafka, Consumer } from 'kafkajs';
+
+ // Mock the KafkaJS library so the suite runs without a real broker
+ jest.mock('kafkajs');
+
+ describe('KafkaBatchConsumer', () => {
+   let kafkaBatchConsumer: KafkaBatchConsumer;
+   let mockExecuteFunctions: jest.Mocked<IExecuteFunctions>;
+   let mockConsumer: jest.Mocked<Consumer>;
+   let mockKafka: jest.Mocked<Kafka>;
+
+   beforeEach(() => {
+     // Reset all mocks before each test for isolation
+     jest.clearAllMocks();
+
+     // Create fresh node instance
+     kafkaBatchConsumer = new KafkaBatchConsumer();
+
+     /**
+      * Mock Kafka Consumer
+      * Provides test implementations for all consumer methods
+      */
+     mockConsumer = {
+       connect: jest.fn().mockResolvedValue(undefined),
+       subscribe: jest.fn().mockResolvedValue(undefined),
+       run: jest.fn().mockResolvedValue(undefined),
+       disconnect: jest.fn().mockResolvedValue(undefined),
+       stop: jest.fn().mockResolvedValue(undefined),
+       pause: jest.fn().mockResolvedValue(undefined),
+       resume: jest.fn().mockResolvedValue(undefined),
+       seek: jest.fn(),
+       describeGroup: jest.fn().mockResolvedValue(undefined),
+       commitOffsets: jest.fn().mockResolvedValue(undefined),
+       on: jest.fn(),
+       events: {} as any,
+     } as any;
+
+     // Mock Kafka
+     mockKafka = {
+       consumer: jest.fn().mockReturnValue(mockConsumer),
+       producer: jest.fn(),
+       admin: jest.fn(),
+       logger: jest.fn(),
+     } as any;
+
+     // Mock Kafka constructor to return mocked instance
+     (Kafka as jest.MockedClass<typeof Kafka>).mockImplementation(() => mockKafka);
+
+     /**
+      * Mock N8N Execute Functions
+      * Simulates N8N runtime environment
+      */
+     mockExecuteFunctions = {
+       getInputData: jest.fn().mockReturnValue([{ json: {} }]),
+       getNodeParameter: jest.fn(),
+       getCredentials: jest.fn(),
+       getNode: jest.fn().mockReturnValue({ name: 'Kafka Batch Consumer' }),
+       helpers: {} as any,
+       continueOnFail: jest.fn().mockReturnValue(false),
+     } as any;
+   });
+
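Aside: the wiring above (the mocked Kafka constructor returning mockKafka, whose consumer() factory returns mockConsumer) only makes sense if the node's execute() builds its client through those exact calls. For orientation, here is a reconstruction of the control flow the assertions in this suite imply; the helper name consumeBatch and its signature are invented, and this is not the package source:

    import { Kafka, KafkaConfig } from 'kafkajs';

    // Reconstructed flow implied by the mocks; not the package source.
    async function consumeBatch(
      config: KafkaConfig,
      groupId: string,
      topicName: string,
      batchSize: number,
    ): Promise<object[]> {
      const kafka = new Kafka(config); // under jest.mock this returns mockKafka
      const consumer = kafka.consumer({ groupId }); // and this returns mockConsumer
      const batch: object[] = [];
      try {
        await consumer.connect();
        await consumer.subscribe({ topic: topicName, fromBeginning: false });
        await new Promise<void>((resolve) => {
          void consumer.run({
            eachMessage: async ({ topic, partition, message }) => {
              batch.push({ topic, partition, offset: message.offset });
              if (batch.length >= batchSize) resolve(); // batch complete
            },
          });
        });
      } finally {
        await consumer.disconnect().catch(() => {}); // cleanup always runs
      }
      return batch;
    }

As written this sketch would wait forever on a short batch; the read-timeout handling the node needs for that case is sketched under the timeout tests below.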
+   // ======================
+   // Setup and basic tests
+   // ======================
+
+   describe('Node description', () => {
+     it('should have correct node properties', () => {
+       expect(kafkaBatchConsumer.description.displayName).toBe('Kafka Batch Consumer');
+       expect(kafkaBatchConsumer.description.name).toBe('kafkaBatchConsumer');
+       expect(kafkaBatchConsumer.description.group).toContain('transform');
+       expect(kafkaBatchConsumer.description.inputs).toEqual(['main']);
+       expect(kafkaBatchConsumer.description.outputs).toEqual(['main']);
+     });
+
+     it('should have kafka credentials configured', () => {
+       const credentials = kafkaBatchConsumer.description.credentials;
+       expect(credentials).toBeDefined();
+       expect(credentials).toHaveLength(1);
+       expect(credentials![0].name).toBe('kafka');
+       expect(credentials![0].required).toBe(false);
+     });
+
+     it('should have all required parameters', () => {
+       const properties = kafkaBatchConsumer.description.properties;
+       const paramNames = properties.map((p: any) => p.name);
+
+       expect(paramNames).toContain('brokers');
+       expect(paramNames).toContain('clientId');
+       expect(paramNames).toContain('groupId');
+       expect(paramNames).toContain('topic');
+       expect(paramNames).toContain('batchSize');
+       expect(paramNames).toContain('fromBeginning');
+       expect(paramNames).toContain('sessionTimeout');
+       expect(paramNames).toContain('options');
+     });
+   });
+
+   // ======================
+   // Credentials tests
+   // Test SASL authentication mechanisms and SSL/TLS configurations
+   // Verify proper mapping from N8N credentials to KafkaJS format
+   // ======================
+
+   describe('Credentials handling', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+     });
+
+     /**
+      * Test unauthenticated connection
+      * Verifies that the node works without credentials for local/unsecured brokers
+      */
+     it('should connect without credentials', async () => {
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+
+       mockConsumer.run.mockImplementation(async () => {
+         // Don't deliver any messages; let the read timeout elapse
+         // The mocked run() never settles, so the node must finish via its read timeout
+         return new Promise(() => {});
+       });
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       // Verify Kafka was initialized without SASL or SSL config
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+       });
+     });
+
+     /**
+      * Test SASL PLAIN authentication
+      * Most basic SASL mechanism for username/password authentication
+      */
+     it('should connect with SASL PLAIN authentication', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'plain',
+         username: 'test-user',
+         password: 'test-pass',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       // Verify the SASL PLAIN config was passed to Kafka
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         sasl: {
+           mechanism: 'plain',
+           username: 'test-user',
+           password: 'test-pass',
+         },
+       });
+     });
+
+     it('should connect with SASL SCRAM-SHA-256 authentication', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'scram-sha-256',
+         username: 'test-user',
+         password: 'test-pass',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         sasl: {
+           mechanism: 'scram-sha-256',
+           username: 'test-user',
+           password: 'test-pass',
+         },
+       });
+     });
+
+     it('should connect with SASL SCRAM-SHA-512 authentication', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'scram-sha-512',
+         username: 'test-user',
+         password: 'test-pass',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         sasl: {
+           mechanism: 'scram-sha-512',
+           username: 'test-user',
+           password: 'test-pass',
+         },
+       });
+     });
+
+     it('should connect with SSL/TLS configuration', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         ssl: true,
+         ca: 'ca-cert',
+         cert: 'client-cert',
+         key: 'client-key',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         ssl: {
+           rejectUnauthorized: true,
+           ca: 'ca-cert',
+           cert: 'client-cert',
+           key: 'client-key',
+         },
+       });
+     });
+
+     it('should connect with both SASL and SSL', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'plain',
+         username: 'test-user',
+         password: 'test-pass',
+         ssl: true,
+         ca: 'ca-cert',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         sasl: {
+           mechanism: 'plain',
+           username: 'test-user',
+           password: 'test-pass',
+         },
+         ssl: {
+           rejectUnauthorized: true,
+           ca: 'ca-cert',
+         },
+       });
+     });
+
+     it('should handle SSL with rejectUnauthorized false', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         ssl: false,
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['localhost:9092'],
+         ssl: {
+           rejectUnauthorized: false,
+         },
+       });
+     });
+
+     it('should pass correct auth config to Kafka client', async () => {
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'scram-sha-256',
+         username: 'user123',
+         password: 'pass456',
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       const kafkaCall = (Kafka as jest.MockedClass<typeof Kafka>).mock.calls[0][0];
+       expect(kafkaCall.sasl).toEqual({
+         mechanism: 'scram-sha-256',
+         username: 'user123',
+         password: 'pass456',
+       });
+     });
+   });
+
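The credential tests above pin down a mapping from the n8n kafka credential fields to the KafkaJS client config: authentication selects the SASL mechanism, and the presence of an ssl field yields an ssl block whose rejectUnauthorized mirrors the boolean. A minimal sketch consistent with those expectations (buildKafkaConfig is an invented name; the node may structure this differently):

    import { KafkaConfig, SASLOptions } from 'kafkajs';

    function buildKafkaConfig(
      clientId: string,
      brokers: string[],
      credentials?: Record<string, any>, // undefined when getCredentials() rejects
    ): KafkaConfig {
      const config: KafkaConfig = { clientId, brokers };
      if (credentials?.authentication) {
        config.sasl = {
          mechanism: credentials.authentication, // 'plain' | 'scram-sha-256' | 'scram-sha-512'
          username: credentials.username,
          password: credentials.password,
        } as SASLOptions;
      }
      if (credentials && 'ssl' in credentials) {
        config.ssl = {
          rejectUnauthorized: credentials.ssl === true,
          ...(credentials.ca && { ca: credentials.ca }),
          ...(credentials.cert && { cert: credentials.cert }),
          ...(credentials.key && { key: credentials.key }),
        };
      }
      return config;
    }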
+   // ======================
+   // Connection tests
+   // Test broker connection, error handling, and broker list parsing
+   // ======================
+
+   describe('Connection handling', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     it('should connect to Kafka brokers successfully', async () => {
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(mockConsumer.connect).toHaveBeenCalledTimes(1);
+       expect(mockConsumer.subscribe).toHaveBeenCalledTimes(1);
+       expect(mockConsumer.run).toHaveBeenCalledTimes(1);
+     });
+
+     it('should handle connection errors', async () => {
+       mockConsumer.connect.mockRejectedValue(new Error('Connection failed'));
+
+       await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
+         NodeOperationError
+       );
+     });
+
+     it('should parse comma-separated brokers correctly', async () => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         if (paramName === 'brokers') return 'broker1:9092, broker2:9092, broker3:9092';
+         const params: Record<string, any> = {
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'test-client',
+         brokers: ['broker1:9092', 'broker2:9092', 'broker3:9092'],
+       });
+     });
+   });
+
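The broker-parsing test implies nothing fancier than comma-splitting with whitespace trimming, along the lines of:

    const brokerString = 'broker1:9092, broker2:9092, broker3:9092';
    const brokers = brokerString
      .split(',')
      .map((b) => b.trim())
      .filter((b) => b.length > 0);
    // -> ['broker1:9092', 'broker2:9092', 'broker3:9092']

The trailing filter guards against stray commas; that part is a nicety the tests do not actually verify.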
+   // ======================
+   // Subscription tests
+   // Test topic subscription with fromBeginning flag
+   // ======================
+
+   describe('Topic subscription', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     it('should subscribe to topic with fromBeginning flag', async () => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'my-topic',
+           batchSize: 5,
+           fromBeginning: true,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+
+       mockConsumer.run.mockImplementation(async () => Promise.resolve());
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(mockConsumer.subscribe).toHaveBeenCalledWith({
+         topic: 'my-topic',
+         fromBeginning: true,
+       });
+     });
+
+     it('should handle subscription errors', async () => {
+       mockConsumer.subscribe.mockRejectedValue(new Error('Subscription failed'));
+
+       await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
+         NodeOperationError
+       );
+     });
+   });
+
+   // ======================
+   // Message collection tests
+   // Test batch size limits, message metadata, and field handling
+   // ======================
+
+   describe('Message collection', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: { parseJson: true },
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     /**
+      * Test exact batch size collection
+      * Verifies that the node collects exactly batchSize messages before stopping
+      */
+     it('should collect exact batchSize messages', async () => {
+       const batchSize = 5;
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         if (paramName === 'batchSize') return batchSize;
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: { parseJson: true },
+         };
+         return params[paramName];
+       });
+
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         // Simulate receiving exactly batchSize messages
+         for (let i = 0; i < batchSize; i++) {
+           await eachMessage({
+             topic: 'test-topic',
+             partition: 0,
+             message: {
+               offset: String(i),
+               key: Buffer.from(`key${i}`),
+               value: Buffer.from(JSON.stringify({ id: i, data: `message${i}` })),
+               timestamp: String(Date.now()),
+               headers: {},
+             },
+           });
+         }
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0]).toHaveLength(batchSize);
+     });
+
+     it('should stop collecting when batchSize reached', async () => {
+       let messageCount = 0;
+
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         // Deliver up to 10 messages, but stop once the batch size (5) is reached
+         for (let i = 0; i < 10; i++) {
+           await eachMessage({
+             topic: 'test-topic',
+             partition: 0,
+             message: {
+               offset: String(i),
+               key: Buffer.from(`key${i}`),
+               value: Buffer.from(JSON.stringify({ id: i })),
+               timestamp: String(Date.now()),
+               headers: {},
+             },
+           });
+           messageCount++;
+
+           // Stop delivering once the batch size has been reached
+           if (messageCount >= 5) {
+             break;
+           }
+         }
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0]).toHaveLength(5);
+     });
+
+     it('should handle messages with all metadata fields', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 2,
+           message: {
+             offset: '12345',
+             key: Buffer.from('message-key'),
+             value: Buffer.from(JSON.stringify({ data: 'test' })),
+             timestamp: '1234567890000',
+             headers: { 'custom-header': Buffer.from('header-value') },
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0]).toHaveLength(1);
+       expect(result[0][0].json).toHaveProperty('topic', 'test-topic');
+       expect(result[0][0].json).toHaveProperty('partition', 2);
+       expect(result[0][0].json).toHaveProperty('offset', '12345');
+       expect(result[0][0].json).toHaveProperty('key', 'message-key');
+       expect(result[0][0].json).toHaveProperty('value');
+       expect(result[0][0].json).toHaveProperty('timestamp', '1234567890000');
+       expect(result[0][0].json).toHaveProperty('headers');
+     });
+
+     it('should handle messages with missing optional fields', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '100',
+             key: null,
+             value: Buffer.from('simple message'),
+             timestamp: '1234567890000',
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0]).toHaveLength(1);
+       expect(result[0][0].json.key).toBeNull();
+     });
+   });
+
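Note that 'should stop collecting when batchSize reached' has the mock itself break out of its loop after five deliveries, so it exercises the happy path rather than forcing the node to discard surplus messages. A collector that genuinely caps the batch could look like this sketch (names invented):

    // Collects at most batchSize items and signals once the batch is full.
    function makeBatchCollector<T>(batchSize: number, onFull: () => void) {
      const batch: T[] = [];
      return {
        batch,
        add(item: T): void {
          if (batch.length >= batchSize) return; // drop extras once full
          batch.push(item);
          if (batch.length === batchSize) onFull();
        },
      };
    }

    // Usage: resolve a "batch ready" promise from inside eachMessage.
    const batchReady = new Promise<void>((resolve) => {
      const collector = makeBatchCollector<string>(5, resolve);
      ['a', 'b', 'c', 'd', 'e', 'f'].forEach((m) => collector.add(m));
      // collector.batch holds 'a' through 'e'; 'f' was dropped and resolve() has fired
    });

A follow-up test that sends more messages than batchSize without breaking early would close that gap.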
+   // ======================
+   // JSON parsing tests
+   // Test valid JSON parsing, string preservation, and invalid JSON handling
+   // ======================
+
+   describe('JSON parsing', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: { parseJson: true },
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     /**
+      * Test valid JSON parsing
+      * When parseJson=true, valid JSON strings should be parsed to objects
+      */
+     it('should parse valid JSON when parseJson=true', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: Buffer.from('key1'),
+             value: Buffer.from(JSON.stringify({ name: 'test', count: 42 })),
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       // Verify JSON was parsed to object
+       expect(result[0][0].json.value).toEqual({ name: 'test', count: 42 });
+     });
+
+     it('should keep string when parseJson=false', async () => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         if (paramName === 'options') return { parseJson: false };
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+         };
+         return params[paramName];
+       });
+
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: Buffer.from('key1'),
+             value: Buffer.from(JSON.stringify({ name: 'test' })),
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(typeof result[0][0].json.value).toBe('string');
+       expect(result[0][0].json.value).toBe('{"name":"test"}');
+     });
+
+     it('should keep string when JSON invalid and parseJson=true', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: Buffer.from('key1'),
+             value: Buffer.from('not valid json {'),
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(typeof result[0][0].json.value).toBe('string');
+       expect(result[0][0].json.value).toBe('not valid json {');
+     });
+   });
+
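Taken together with the null-value case in the output-format section, these parsing tests pin the decode step down to something equivalent to the following (decodeValue is an invented name):

    // JSON.parse with a string fallback; a null value becomes ''.
    function decodeValue(value: Buffer | null, parseJson: boolean): unknown {
      const text = value === null ? '' : value.toString('utf8');
      if (!parseJson) return text;
      try {
        return JSON.parse(text);
      } catch {
        return text; // invalid JSON is passed through as the raw string
      }
    }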
+   // ======================
+   // Timeout tests
+   // Test readTimeout option and partial batch collection on timeout
+   // ======================
+
+   describe('Timeout handling', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 10,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: { readTimeout: 100, parseJson: true },
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     it('should timeout when not enough messages', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         // Send only 3 messages when batch size is 10
+         for (let i = 0; i < 3; i++) {
+           await eachMessage({
+             topic: 'test-topic',
+             partition: 0,
+             message: {
+               offset: String(i),
+               key: Buffer.from(`key${i}`),
+               value: Buffer.from(JSON.stringify({ id: i })),
+               timestamp: String(Date.now()),
+               headers: {},
+             },
+           });
+         }
+
+         // Wait for timeout
+         await new Promise((resolve) => setTimeout(resolve, 150));
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0]).toHaveLength(3);
+     });
+
+     it('should collect partial batch on timeout', async () => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         if (paramName === 'batchSize') return 10;
+         if (paramName === 'options') return { readTimeout: 100 };
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           fromBeginning: false,
+           sessionTimeout: 30000,
+         };
+         return params[paramName];
+       });
+
+       let messagesSent = 0;
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         // Send 5 messages
+         for (let i = 0; i < 5; i++) {
+           await eachMessage({
+             topic: 'test-topic',
+             partition: 0,
+             message: {
+               offset: String(i),
+               key: Buffer.from(`key${i}`),
+               value: Buffer.from(`message${i}`),
+               timestamp: String(Date.now()),
+               headers: {},
+             },
+           });
+           messagesSent++;
+         }
+
+         // Wait for timeout
+         await new Promise((resolve) => setTimeout(resolve, 150));
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0].length).toBeLessThan(10);
+       expect(result[0].length).toBe(5);
+     });
+
+     it('should respect readTimeout option', async () => {
+       const readTimeout = 200;
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         if (paramName === 'options') return { readTimeout };
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 10,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+         };
+         return params[paramName];
+       });
+
+       const startTime = Date.now();
+
+       mockConsumer.run.mockImplementation(async () => {
+         // Don't send any messages, just wait for timeout
+         await new Promise((resolve) => setTimeout(resolve, 250));
+       });
+
+       await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       const duration = Date.now() - startTime;
+
+       // Should timeout around readTimeout value (with some tolerance)
+       expect(duration).toBeGreaterThanOrEqual(readTimeout);
+       expect(duration).toBeLessThan(readTimeout + 100);
+     });
+   });
+
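The duration assertions (at least readTimeout, under readTimeout + 100) suggest the node races batch completion against a timer roughly as follows; this is a sketch of one workable shape, not necessarily how the package does it:

    // Resolve when the batch fills or readTimeout elapses, whichever comes first.
    async function waitForBatchOrTimeout(
      batchReady: Promise<void>,
      readTimeout: number,
    ): Promise<void> {
      let timer: ReturnType<typeof setTimeout> | undefined;
      const timedOut = new Promise<void>((resolve) => {
        // resolve rather than reject: a partial batch is still valid output
        timer = setTimeout(resolve, readTimeout);
      });
      try {
        await Promise.race([batchReady, timedOut]);
      } finally {
        clearTimeout(timer); // don't leave a dangling timer holding the process open
      }
    }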
+   // ======================
+   // Error handling tests
+   // Test consumer cleanup, NodeOperationError wrapping, and resource management
+   // ======================
+
+   describe('Error handling', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 5,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: {},
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     /**
+      * Test consumer cleanup on error
+      * Verifies that the consumer is always disconnected, even when errors occur
+      */
+     it('should disconnect consumer on error', async () => {
+       mockConsumer.run.mockRejectedValue(new Error('Kafka error'));
+
+       await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow();
+
+       // Verify disconnect was called for cleanup
+       expect(mockConsumer.disconnect).toHaveBeenCalled();
+     });
+
+     it('should throw NodeOperationError on Kafka errors', async () => {
+       mockConsumer.connect.mockRejectedValue(new Error('Connection failed'));
+
+       await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
+         NodeOperationError
+       );
+     });
+
+     it('should cleanup resources in finally block', async () => {
+       mockConsumer.run.mockRejectedValue(new Error('Run error'));
+
+       try {
+         await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+       } catch (error) {
+         // Expected error
+       }
+
+       expect(mockConsumer.disconnect).toHaveBeenCalled();
+     });
+
+     it('should handle disconnect errors gracefully', async () => {
+       mockConsumer.run.mockRejectedValue(new Error('Run error'));
+       mockConsumer.disconnect.mockRejectedValue(new Error('Disconnect error'));
+
+       await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
+         NodeOperationError
+       );
+
+       // Should still attempt disconnect
+       expect(mockConsumer.disconnect).toHaveBeenCalled();
+     });
+   });
+
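The four error tests jointly enforce a cleanup contract: failures surface as NodeOperationError, disconnect always runs, and a failing disconnect never masks the original error. A compact shape that satisfies all three (runGuarded is an invented name):

    import { Consumer } from 'kafkajs';
    import { NodeOperationError } from 'n8n-workflow';
    import type { INode } from 'n8n-workflow';

    async function runGuarded<T>(node: INode, consumer: Consumer, work: () => Promise<T>): Promise<T> {
      try {
        return await work();
      } catch (error) {
        throw new NodeOperationError(node, error as Error); // the error type these tests expect
      } finally {
        await consumer.disconnect().catch(() => {}); // swallow disconnect failures
      }
    }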
+   // ======================
+   // Output format tests
+   // Test INodeExecutionData format, field inclusion, and edge cases
+   // ======================
+
+   describe('Output format', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'localhost:9092',
+           clientId: 'test-client',
+           groupId: 'test-group',
+           topic: 'test-topic',
+           batchSize: 3,
+           fromBeginning: false,
+           sessionTimeout: 30000,
+           options: { parseJson: true },
+         };
+         return params[paramName];
+       });
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     it('should return INodeExecutionData array', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: Buffer.from('key1'),
+             value: Buffer.from(JSON.stringify({ data: 'test' })),
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(Array.isArray(result)).toBe(true);
+       expect(Array.isArray(result[0])).toBe(true);
+       expect(result[0][0]).toHaveProperty('json');
+     });
+
+     it('should include topic, partition, offset, key, value, timestamp, headers', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'my-topic',
+           partition: 3,
+           message: {
+             offset: '999',
+             key: Buffer.from('my-key'),
+             value: Buffer.from(JSON.stringify({ field: 'value' })),
+             timestamp: '9876543210',
+             headers: { header1: Buffer.from('value1') },
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       const message = result[0][0].json;
+       expect(message.topic).toBe('my-topic');
+       expect(message.partition).toBe(3);
+       expect(message.offset).toBe('999');
+       expect(message.key).toBe('my-key');
+       expect(message.value).toEqual({ field: 'value' });
+       expect(message.timestamp).toBe('9876543210');
+       expect(message.headers).toEqual({ header1: Buffer.from('value1') });
+     });
+
+     it('should handle null message keys', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: null,
+             value: Buffer.from('test message'),
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0][0].json.key).toBeNull();
+     });
+
+     it('should handle empty message value', async () => {
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         await eachMessage({
+           topic: 'test-topic',
+           partition: 0,
+           message: {
+             offset: '1',
+             key: Buffer.from('key1'),
+             value: null,
+             timestamp: String(Date.now()),
+             headers: {},
+           },
+         });
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       expect(result[0][0].json.value).toBe('');
+     });
+   });
+
+   // ======================
+   // Integration tests
+   // Test complete workflow with authentication, subscription, and message collection
+   // ======================
+
+   describe('Integration scenarios', () => {
+     beforeEach(() => {
+       mockExecuteFunctions.getCredentials.mockRejectedValue(new Error('No credentials'));
+     });
+
+     /**
+      * Integration test: Complete workflow
+      * Tests full flow from credentials to message collection with all features:
+      * - Multiple brokers
+      * - SCRAM-SHA-256 authentication
+      * - SSL configuration
+      * - fromBeginning subscription
+      * - Batch collection
+      * - JSON parsing
+      * - Proper cleanup
+      */
+     it('should handle complete workflow with authentication', async () => {
+       mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
+         const params: Record<string, any> = {
+           brokers: 'broker1:9092,broker2:9092',
+           clientId: 'integration-client',
+           groupId: 'integration-group',
+           topic: 'integration-topic',
+           batchSize: 3,
+           fromBeginning: true,
+           sessionTimeout: 25000,
+           options: { readTimeout: 5000, parseJson: true },
+         };
+         return params[paramName];
+       });
+
+       mockExecuteFunctions.getCredentials.mockResolvedValue({
+         authentication: 'scram-sha-256',
+         username: 'integration-user',
+         password: 'integration-pass',
+         ssl: true,
+       });
+
+       mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
+         for (let i = 0; i < 3; i++) {
+           await eachMessage({
+             topic: 'integration-topic',
+             partition: i,
+             message: {
+               offset: String(i * 100),
+               key: Buffer.from(`key-${i}`),
+               value: Buffer.from(JSON.stringify({ id: i, name: `item-${i}` })),
+               timestamp: String(Date.now()),
+               headers: { source: Buffer.from('integration-test') },
+             },
+           });
+         }
+       });
+
+       const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);
+
+       // Verify Kafka configuration
+       expect(Kafka).toHaveBeenCalledWith({
+         clientId: 'integration-client',
+         brokers: ['broker1:9092', 'broker2:9092'],
+         sasl: {
+           mechanism: 'scram-sha-256',
+           username: 'integration-user',
+           password: 'integration-pass',
+         },
+         ssl: {
+           rejectUnauthorized: true,
+         },
+       });
+
+       // Verify consumer setup
+       expect(mockKafka.consumer).toHaveBeenCalledWith({
+         groupId: 'integration-group',
+         sessionTimeout: 25000,
+       });
+
+       // Verify subscription
+       expect(mockConsumer.subscribe).toHaveBeenCalledWith({
+         topic: 'integration-topic',
+         fromBeginning: true,
+       });
+
+       // Verify results
+       expect(result[0]).toHaveLength(3);
+       expect(result[0][0].json.value).toEqual({ id: 0, name: 'item-0' });
+       expect(result[0][1].json.value).toEqual({ id: 1, name: 'item-1' });
+       expect(result[0][2].json.value).toEqual({ id: 2, name: 'item-2' });
+
+       // Verify cleanup
+       expect(mockConsumer.disconnect).toHaveBeenCalled();
+     });
+   });
+ });
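The diff does not show the Jest configuration this suite runs under. A typical setup for a TypeScript n8n community node would look roughly like the following, assuming ts-jest; none of this is taken from the package:

    // jest.config.ts (assumed, not part of this diff)
    import type { Config } from 'jest';

    const config: Config = {
      preset: 'ts-jest', // compile .ts test files on the fly
      testEnvironment: 'node',
      testMatch: ['**/*.test.ts'],
    };

    export default config;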