n8n-nodes-kafka-batch-consumer 1.0.11 → 1.0.13
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- package/package.json +1 -1
- package/.eslintrc.js +0 -15
- package/IMPLEMENTATION_VERIFICATION.md +0 -417
- package/PROJECT_STRUCTURE.md +0 -268
- package/QUICK_START.md +0 -181
- package/jest.config.js +0 -22
- package/src/index.ts +0 -1
- package/src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.test.ts +0 -1113
- package/src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.ts +0 -323
- package/src/nodes/KafkaBatchConsumer/kafka.svg +0 -3
- package/tsconfig.json +0 -20
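
The diff body below is the removed 1,113-line Jest suite for the node. For orientation while reading it, here is a minimal sketch of the credentials-to-KafkaJS mapping the suite pins down (the exact config shapes asserted in its `Credentials handling` tests). It is reconstructed from the test assertions alone, not from the node's source, which is not part of this diff; `buildKafkaConfig` and the `KafkaCredentials` interface are illustrative names, not the package's actual API:

```typescript
import { KafkaConfig, SASLOptions } from 'kafkajs';

// Hypothetical credential shape, inferred from what the deleted tests
// feed into getCredentials(); the real credential type is not in this diff.
interface KafkaCredentials {
  brokers: string; // comma-separated, e.g. 'broker1:9092, broker2:9092'
  clientId: string;
  authentication?: 'plain' | 'scram-sha-256' | 'scram-sha-512';
  username?: string;
  password?: string;
  ssl?: boolean;
  ca?: string;
  cert?: string;
  key?: string;
}

function buildKafkaConfig(credentials: KafkaCredentials): KafkaConfig {
  const config: KafkaConfig = {
    clientId: credentials.clientId,
    // 'broker1:9092, broker2:9092' -> ['broker1:9092', 'broker2:9092'],
    // as asserted by 'should parse comma-separated brokers correctly'
    brokers: credentials.brokers.split(',').map((b) => b.trim()),
  };

  // SASL is attached only when a mechanism is selected; otherwise the tests
  // expect a bare { clientId, brokers } config.
  if (credentials.authentication) {
    config.sasl = {
      mechanism: credentials.authentication,
      username: credentials.username,
      password: credentials.password,
    } as SASLOptions;
  }

  // SSL is attached only when explicitly enabled; optional PEM material is
  // passed through only when present (see 'should connect with both SASL and SSL').
  if (credentials.ssl) {
    config.ssl = {
      rejectUnauthorized: true,
      ...(credentials.ca ? { ca: credentials.ca } : {}),
      ...(credentials.cert ? { cert: credentials.cert } : {}),
      ...(credentials.key ? { key: credentials.key } : {}),
    };
  }

  return config;
}
```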
package/src/nodes/KafkaBatchConsumer/KafkaBatchConsumer.node.test.ts
@@ -1,1113 +0,0 @@
/**
 * Step 7: Unit Tests (Jest)
 * Comprehensive test suite for Kafka Batch Consumer node
 *
 * Test Coverage:
 * - Credentials handling (SASL PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, SSL/TLS)
 * - Connection management and error handling
 * - Message batch collection and size limits
 * - JSON parsing (valid/invalid)
 * - Timeout scenarios with partial batches
 * - Consumer cleanup and resource management
 * - Output format and metadata preservation
 * - Integration scenarios
 */

import { IExecuteFunctions, INodeExecutionData, NodeOperationError } from 'n8n-workflow';
import { KafkaBatchConsumer } from './KafkaBatchConsumer.node';
import { Kafka, Consumer, EachMessagePayload } from 'kafkajs';

// Mock KafkaJS library for testing
jest.mock('kafkajs');

describe('KafkaBatchConsumer', () => {
  let kafkaBatchConsumer: KafkaBatchConsumer;
  let mockExecuteFunctions: jest.Mocked<IExecuteFunctions>;
  let mockConsumer: jest.Mocked<Consumer>;
  let mockKafka: jest.Mocked<Kafka>;

  beforeEach(() => {
    // Reset all mocks before each test for isolation
    jest.clearAllMocks();

    // Create fresh node instance
    kafkaBatchConsumer = new KafkaBatchConsumer();

    /**
     * Mock Kafka Consumer
     * Provides test implementations for all consumer methods
     */
    mockConsumer = {
      connect: jest.fn().mockResolvedValue(undefined),
      subscribe: jest.fn().mockResolvedValue(undefined),
      run: jest.fn().mockResolvedValue(undefined),
      disconnect: jest.fn().mockResolvedValue(undefined),
      stop: jest.fn().mockResolvedValue(undefined),
      pause: jest.fn().mockResolvedValue(undefined),
      resume: jest.fn().mockResolvedValue(undefined),
      seek: jest.fn(),
      describeGroup: jest.fn().mockResolvedValue(undefined),
      commitOffsets: jest.fn().mockResolvedValue(undefined),
      on: jest.fn(),
      events: {} as any,
    } as any;

    // Mock Kafka
    mockKafka = {
      consumer: jest.fn().mockReturnValue(mockConsumer),
      producer: jest.fn(),
      admin: jest.fn(),
      logger: jest.fn(),
    } as any;

    // Mock Kafka constructor to return mocked instance
    (Kafka as jest.MockedClass<typeof Kafka>).mockImplementation(() => mockKafka);

    /**
     * Mock N8N Execute Functions
     * Simulates N8N runtime environment
     */
    mockExecuteFunctions = {
      getInputData: jest.fn().mockReturnValue([{ json: {} }]),
      getNodeParameter: jest.fn(),
      getCredentials: jest.fn(),
      getNode: jest.fn().mockReturnValue({ name: 'Kafka Batch Consumer' }),
      helpers: {} as any,
      continueOnFail: jest.fn().mockReturnValue(false),
    } as any;
  });

  // ======================
  // Setup and basic tests
  // ======================

  describe('Node description', () => {
    it('should have correct node properties', () => {
      expect(kafkaBatchConsumer.description.displayName).toBe('Kafka Batch Consumer');
      expect(kafkaBatchConsumer.description.name).toBe('kafkaBatchConsumer');
      expect(kafkaBatchConsumer.description.group).toContain('transform');
      expect(kafkaBatchConsumer.description.inputs).toEqual(['main']);
      expect(kafkaBatchConsumer.description.outputs).toEqual(['main']);
    });

    it('should have kafka credentials configured', () => {
      const credentials = kafkaBatchConsumer.description.credentials;
      expect(credentials).toBeDefined();
      expect(credentials).toHaveLength(1);
      expect(credentials![0].name).toBe('kafka');
      expect(credentials![0].required).toBe(true); // Now required for brokers and clientId
    });

    it('should have all required parameters', () => {
      const properties = kafkaBatchConsumer.description.properties;
      const paramNames = properties.map((p: any) => p.name);

      expect(paramNames).toContain('groupId');
      expect(paramNames).toContain('topic');
      expect(paramNames).toContain('batchSize');
      expect(paramNames).toContain('fromBeginning');
      expect(paramNames).toContain('sessionTimeout');
      expect(paramNames).toContain('options');
    });
  });

  // ======================
  // Credentials tests
  // Test SASL authentication mechanisms and SSL/TLS configurations
  // Verify proper mapping from N8N credentials to KafkaJS format
  // ======================

  describe('Credentials handling', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
    });

    /**
     * Test unauthenticated connection
     * Verifies that node works without credentials for local/unsecured brokers
     */
    it('should connect without credentials', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });

      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        // Don't send messages, let timeout occur
        // consumer.run never resolves - it runs forever
        return new Promise(() => {});
      });

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      // Verify Kafka initialized without SASL or SSL config
      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
      });
    });

    /**
     * Test SASL PLAIN authentication
     * Most basic SASL mechanism for username/password authentication
     */
    it('should connect with SASL PLAIN authentication', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        authentication: 'plain',
        username: 'test-user',
        password: 'test-pass',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      // Verify SASL PLAIN config passed to Kafka
      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
        sasl: {
          mechanism: 'plain',
          username: 'test-user',
          password: 'test-pass',
        },
      });
    });

    it('should connect with SASL SCRAM-SHA-256 authentication', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        authentication: 'scram-sha-256',
        username: 'test-user',
        password: 'test-pass',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
        sasl: {
          mechanism: 'scram-sha-256',
          username: 'test-user',
          password: 'test-pass',
        },
      });
    });

    it('should connect with SASL SCRAM-SHA-512 authentication', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        authentication: 'scram-sha-512',
        username: 'test-user',
        password: 'test-pass',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
        sasl: {
          mechanism: 'scram-sha-512',
          username: 'test-user',
          password: 'test-pass',
        },
      });
    });

    it('should connect with SSL/TLS configuration', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        ssl: true,
        ca: 'ca-cert',
        cert: 'client-cert',
        key: 'client-key',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
        ssl: {
          rejectUnauthorized: true,
          ca: 'ca-cert',
          cert: 'client-cert',
          key: 'client-key',
        },
      });
    });

    it('should connect with both SASL and SSL', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        authentication: 'plain',
        username: 'test-user',
        password: 'test-pass',
        ssl: true,
        ca: 'ca-cert',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
        sasl: {
          mechanism: 'plain',
          username: 'test-user',
          password: 'test-pass',
        },
        ssl: {
          rejectUnauthorized: true,
          ca: 'ca-cert',
        },
      });
    });

    /**
     * Test plaintext connection (no SSL)
     * When ssl is false or undefined, no SSL configuration should be added
     */
    it('should handle plaintext connection without SSL', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        ssl: false, // Explicitly disable SSL
      });

      mockConsumer.run.mockImplementation(async () => new Promise(() => {}));

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      // Should NOT include ssl config when ssl: false
      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['localhost:9092'],
      });
    });

    it('should pass correct auth config to Kafka client', async () => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
        authentication: 'scram-sha-256',
        username: 'user123',
        password: 'pass456',
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      const kafkaCall = (Kafka as jest.MockedClass<typeof Kafka>).mock.calls[0][0];
      expect(kafkaCall.sasl).toEqual({
        mechanism: 'scram-sha-256',
        username: 'user123',
        password: 'pass456',
      });
    });
  });

  // ======================
  // Connection tests
  // Test broker connection, error handling, and broker list parsing
  // ======================

  describe('Connection handling', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    it('should connect to Kafka brokers successfully', async () => {
      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(mockConsumer.connect).toHaveBeenCalledTimes(1);
      expect(mockConsumer.subscribe).toHaveBeenCalledTimes(1);
      expect(mockConsumer.run).toHaveBeenCalledTimes(1);
    });

    it('should handle connection errors', async () => {
      mockConsumer.connect.mockRejectedValue(new Error('Connection failed'));

      await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
        NodeOperationError
      );
    });

    it('should parse comma-separated brokers correctly', async () => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });

      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'broker1:9092, broker2:9092, broker3:9092',
        clientId: 'test-client',
      });

      mockConsumer.run.mockImplementation(async () => new Promise(() => {}));

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'test-client',
        brokers: ['broker1:9092', 'broker2:9092', 'broker3:9092'],
      });
    });
  });

  // ======================
  // Subscription tests
  // Test topic subscription with fromBeginning flag
  // ======================

  describe('Topic subscription', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    it('should subscribe to topic with fromBeginning flag', async () => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'my-topic',
          batchSize: 5,
          fromBeginning: true,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });

      mockConsumer.run.mockImplementation(async () => Promise.resolve());

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(mockConsumer.subscribe).toHaveBeenCalledWith({
        topic: 'my-topic',
        fromBeginning: true,
      });
    });

    it('should handle subscription errors', async () => {
      mockConsumer.subscribe.mockRejectedValue(new Error('Subscription failed'));

      await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
        NodeOperationError
      );
    });
  });

  // ======================
  // Message collection tests
  // Test batch size limits, message metadata, and field handling
  // ======================

  describe('Message collection', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    /**
     * Test exact batch size collection
     * Verifies that node collects exactly batchSize messages before stopping
     */
    it('should collect exact batchSize messages', async () => {
      const batchSize = 5;
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        if (paramName === 'batchSize') return batchSize;
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });

      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        // Simulate receiving exactly batchSize messages
        for (let i = 0; i < batchSize; i++) {
          await eachMessage({
            topic: 'test-topic',
            partition: 0,
            message: {
              offset: String(i),
              key: Buffer.from(`key${i}`),
              value: Buffer.from(JSON.stringify({ id: i, data: `message${i}` })),
              timestamp: String(Date.now()),
              headers: {},
            },
          });
        }
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0]).toHaveLength(batchSize);
    });

    it('should stop collecting when batchSize reached', async () => {
      let messageCount = 0;

      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        // Try to send 10 messages but batch size is 5
        for (let i = 0; i < 10; i++) {
          await eachMessage({
            topic: 'test-topic',
            partition: 0,
            message: {
              offset: String(i),
              key: Buffer.from(`key${i}`),
              value: Buffer.from(JSON.stringify({ id: i })),
              timestamp: String(Date.now()),
              headers: {},
            },
          });
          messageCount++;

          // Stop if we've reached batch size
          if (messageCount >= 5) {
            break;
          }
        }
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0]).toHaveLength(5);
    });

    it('should handle messages with all metadata fields', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 2,
          message: {
            offset: '12345',
            key: Buffer.from('message-key'),
            value: Buffer.from(JSON.stringify({ data: 'test' })),
            timestamp: '1234567890000',
            headers: { 'custom-header': Buffer.from('header-value') },
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0]).toHaveLength(1);
      expect(result[0][0].json).toHaveProperty('topic', 'test-topic');
      expect(result[0][0].json).toHaveProperty('partition', 2);
      expect(result[0][0].json).toHaveProperty('offset', '12345');
      expect(result[0][0].json).toHaveProperty('key', 'message-key');
      expect(result[0][0].json).toHaveProperty('value');
      expect(result[0][0].json).toHaveProperty('timestamp', '1234567890000');
      expect(result[0][0].json).toHaveProperty('headers');
    });

    it('should handle messages with missing optional fields', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '100',
            key: null,
            value: Buffer.from('simple message'),
            timestamp: '1234567890000',
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0]).toHaveLength(1);
      expect(result[0][0].json.key).toBeNull();
    });
  });

  // ======================
  // JSON parsing tests
  // Test valid JSON parsing, string preservation, and invalid JSON handling
  // ======================

  describe('JSON parsing', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    /**
     * Test valid JSON parsing
     * When parseJson=true, valid JSON strings should be parsed to objects
     */
    it('should parse valid JSON when parseJson=true', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: Buffer.from('key1'),
            value: Buffer.from(JSON.stringify({ name: 'test', count: 42 })),
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      // Verify JSON was parsed to object
      expect(result[0][0].json.value).toEqual({ name: 'test', count: 42 });
    });

    it('should keep string when parseJson=false', async () => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        if (paramName === 'options') return { readTimeout: 100, parseJson: false };
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
        };
        return params[paramName];
      });

      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: Buffer.from('key1'),
            value: Buffer.from(JSON.stringify({ name: 'test' })),
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(typeof result[0][0].json.value).toBe('string');
      expect(result[0][0].json.value).toBe('{"name":"test"}');
    });

    it('should keep string when JSON invalid and parseJson=true', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: Buffer.from('key1'),
            value: Buffer.from('not valid json {'),
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(typeof result[0][0].json.value).toBe('string');
      expect(result[0][0].json.value).toBe('not valid json {');
    });
  });

  // ======================
  // Timeout tests
  // Test readTimeout option and partial batch collection on timeout
  // ======================

  describe('Timeout handling', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 10,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    it('should timeout when not enough messages', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        // Send only 3 messages when batch size is 10
        for (let i = 0; i < 3; i++) {
          await eachMessage({
            topic: 'test-topic',
            partition: 0,
            message: {
              offset: String(i),
              key: Buffer.from(`key${i}`),
              value: Buffer.from(JSON.stringify({ id: i })),
              timestamp: String(Date.now()),
              headers: {},
            },
          });
        }

        // Wait for timeout
        await new Promise((resolve) => setTimeout(resolve, 150));
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0]).toHaveLength(3);
    });

    it('should collect partial batch on timeout', async () => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        if (paramName === 'batchSize') return 10;
        if (paramName === 'options') return { readTimeout: 100 };
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          fromBeginning: false,
          sessionTimeout: 30000,
        };
        return params[paramName];
      });

      let messagesSent = 0;
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        // Send 5 messages
        for (let i = 0; i < 5; i++) {
          await eachMessage({
            topic: 'test-topic',
            partition: 0,
            message: {
              offset: String(i),
              key: Buffer.from(`key${i}`),
              value: Buffer.from(`message${i}`),
              timestamp: String(Date.now()),
              headers: {},
            },
          });
          messagesSent++;
        }

        // Wait for timeout
        await new Promise((resolve) => setTimeout(resolve, 150));
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0].length).toBeLessThan(10);
      expect(result[0].length).toBe(5);
    });

    it('should respect readTimeout option', async () => {
      const readTimeout = 200;
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        if (paramName === 'options') return { readTimeout };
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 10,
          fromBeginning: false,
          sessionTimeout: 30000,
        };
        return params[paramName];
      });

      const startTime = Date.now();

      mockConsumer.run.mockImplementation(async () => {
        // Don't send any messages, just wait for timeout
        await new Promise((resolve) => setTimeout(resolve, 250));
      });

      await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      const duration = Date.now() - startTime;

      // Should timeout around readTimeout value (with some tolerance)
      expect(duration).toBeGreaterThanOrEqual(readTimeout);
      expect(duration).toBeLessThan(readTimeout + 100);
    });
  });

  // ======================
  // Error handling tests
  // Test consumer cleanup, NodeOperationError wrapping, and resource management
  // ======================

  describe('Error handling', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 5,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    /**
     * Test connection error handling
     * Errors during connection should throw NodeOperationError
     */
    it('should throw NodeOperationError on Kafka errors', async () => {
      mockConsumer.connect.mockRejectedValue(new Error('Connection failed'));

      await expect(kafkaBatchConsumer.execute.call(mockExecuteFunctions)).rejects.toThrow(
        NodeOperationError
      );
    });
  });

  // ======================
  // Output format tests
  // Test INodeExecutionData format, field inclusion, and edge cases
  // ======================

  describe('Output format', () => {
    beforeEach(() => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'localhost:9092',
          clientId: 'test-client',
          groupId: 'test-group',
          topic: 'test-topic',
          batchSize: 3,
          fromBeginning: false,
          sessionTimeout: 30000,
          options: { readTimeout: 100, parseJson: true },
        };
        return params[paramName];
      });
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    it('should return INodeExecutionData array', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: Buffer.from('key1'),
            value: Buffer.from(JSON.stringify({ data: 'test' })),
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(Array.isArray(result)).toBe(true);
      expect(Array.isArray(result[0])).toBe(true);
      expect(result[0][0]).toHaveProperty('json');
    });

    it('should include topic, partition, offset, key, value, timestamp, headers', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'my-topic',
          partition: 3,
          message: {
            offset: '999',
            key: Buffer.from('my-key'),
            value: Buffer.from(JSON.stringify({ field: 'value' })),
            timestamp: '9876543210',
            headers: { header1: Buffer.from('value1') },
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      const message = result[0][0].json;
      expect(message.topic).toBe('my-topic');
      expect(message.partition).toBe(3);
      expect(message.offset).toBe('999');
      expect(message.key).toBe('my-key');
      expect(message.value).toEqual({ field: 'value' });
      expect(message.timestamp).toBe('9876543210');
      expect(message.headers).toEqual({ header1: Buffer.from('value1') });
    });

    it('should handle null message keys', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: null,
            value: Buffer.from('test message'),
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0][0].json.key).toBeNull();
    });

    it('should handle empty message value', async () => {
      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        await eachMessage({
          topic: 'test-topic',
          partition: 0,
          message: {
            offset: '1',
            key: Buffer.from('key1'),
            value: null,
            timestamp: String(Date.now()),
            headers: {},
          },
        });
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      expect(result[0][0].json.value).toBe('');
    });
  });

  // ======================
  // Integration tests
  // Test complete workflow with authentication, subscription, and message collection
  // ======================

  describe('Integration scenarios', () => {
    beforeEach(() => {
      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'localhost:9092',
        clientId: 'test-client',
      });
    });

    /**
     * Integration test: Complete workflow
     * Tests full flow from credentials to message collection with all features:
     * - Multiple brokers
     * - SCRAM-SHA-256 authentication
     * - SSL configuration
     * - fromBeginning subscription
     * - Batch collection
     * - JSON parsing
     * - Proper cleanup
     */
    it('should handle complete workflow with authentication', async () => {
      mockExecuteFunctions.getNodeParameter.mockImplementation((paramName: string) => {
        const params: Record<string, any> = {
          brokers: 'broker1:9092,broker2:9092',
          clientId: 'integration-client',
          groupId: 'integration-group',
          topic: 'integration-topic',
          batchSize: 3,
          fromBeginning: true,
          sessionTimeout: 25000,
          options: { readTimeout: 5000, parseJson: true },
        };
        return params[paramName];
      });

      mockExecuteFunctions.getCredentials.mockResolvedValue({
        brokers: 'broker1:9092,broker2:9092',
        clientId: 'integration-client',
        authentication: 'scram-sha-256',
        username: 'integration-user',
        password: 'integration-pass',
        ssl: true,
      });

      mockConsumer.run.mockImplementation(async ({ eachMessage }: any) => {
        for (let i = 0; i < 3; i++) {
          await eachMessage({
            topic: 'integration-topic',
            partition: i,
            message: {
              offset: String(i * 100),
              key: Buffer.from(`key-${i}`),
              value: Buffer.from(JSON.stringify({ id: i, name: `item-${i}` })),
              timestamp: String(Date.now()),
              headers: { source: Buffer.from('integration-test') },
            },
          });
        }
      });

      const result = await kafkaBatchConsumer.execute.call(mockExecuteFunctions);

      // Verify Kafka configuration
      expect(Kafka).toHaveBeenCalledWith({
        clientId: 'integration-client',
        brokers: ['broker1:9092', 'broker2:9092'],
        sasl: {
          mechanism: 'scram-sha-256',
          username: 'integration-user',
          password: 'integration-pass',
        },
        ssl: {
          rejectUnauthorized: true,
        },
      });

      // Verify consumer setup
      expect(mockKafka.consumer).toHaveBeenCalledWith({
        groupId: 'integration-group',
        sessionTimeout: 25000,
      });

      // Verify subscription
      expect(mockConsumer.subscribe).toHaveBeenCalledWith({
        topic: 'integration-topic',
        fromBeginning: true,
      });

      // Verify results
      expect(result[0]).toHaveLength(3);
      expect(result[0][0].json.value).toEqual({ id: 0, name: 'item-0' });
      expect(result[0][1].json.value).toEqual({ id: 1, name: 'item-1' });
      expect(result[0][2].json.value).toEqual({ id: 2, name: 'item-2' });

      // Verify cleanup
      expect(mockConsumer.disconnect).toHaveBeenCalled();
    });
  });
});
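
Taken together, the removed timeout and batch-size tests specify the collection contract the node is expected to honor: resolve once `batchSize` messages arrive or `readTimeout` elapses, emit whatever was collected (possibly a partial batch), parse values as JSON only when `parseJson` is set (falling back to the raw string on invalid JSON), and always disconnect the consumer. A minimal sketch of that loop against the real kafkajs API, reconstructed from those assertions rather than from the removed node source; `collectBatch` and `CollectedMessage` are illustrative names:

```typescript
import { Consumer } from 'kafkajs';

// Output shape mirroring the fields the suite asserts on each item's json.
interface CollectedMessage {
  topic: string;
  partition: number;
  offset: string;
  key: string | null;
  value: unknown;
  timestamp: string;
  headers: Record<string, unknown>;
}

// Collect up to batchSize messages, or whatever arrived when readTimeout
// elapses, then stop and disconnect.
async function collectBatch(
  consumer: Consumer,
  topic: string,
  batchSize: number,
  readTimeout: number,
  parseJson: boolean,
  fromBeginning = false,
): Promise<CollectedMessage[]> {
  const collected: CollectedMessage[] = [];

  await consumer.connect();
  await consumer.subscribe({ topic, fromBeginning });

  await new Promise<void>((resolve) => {
    // Partial batches are emitted when the timer fires first.
    const timer = setTimeout(resolve, readTimeout);

    // run() sets up the long-lived consume loop; delivery happens via eachMessage.
    void consumer.run({
      eachMessage: async ({ topic, partition, message }) => {
        const raw = message.value ? message.value.toString() : '';
        let value: unknown = raw;
        if (parseJson) {
          try {
            value = JSON.parse(raw);
          } catch {
            // Invalid JSON is kept as the original string.
          }
        }
        collected.push({
          topic,
          partition,
          offset: message.offset,
          key: message.key ? message.key.toString() : null,
          value,
          timestamp: message.timestamp,
          headers: message.headers ?? {},
        });
        // Full batch: stop waiting immediately.
        if (collected.length >= batchSize) {
          clearTimeout(timer);
          resolve();
        }
      },
    });
  });

  // The integration test asserts disconnect() is always called.
  await consumer.disconnect();
  return collected;
}
```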