@devrev/ts-adaas 1.19.4 → 1.19.6-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/attachments-streaming/attachments-streaming-pool.test.js +3 -6
- package/dist/common/event-type-translation.test.d.ts +2 -0
- package/dist/common/event-type-translation.test.d.ts.map +1 -0
- package/dist/common/event-type-translation.test.js +175 -0
- package/dist/common/time-value-resolver.test.js +0 -1
- package/dist/deprecated/uploader/index.d.ts +3 -1
- package/dist/deprecated/uploader/index.d.ts.map +1 -1
- package/dist/deprecated/uploader/index.js +29 -22
- package/dist/multithreading/create-worker.test.js +34 -16
- package/dist/multithreading/process-task.test.d.ts +2 -0
- package/dist/multithreading/process-task.test.d.ts.map +1 -0
- package/dist/multithreading/process-task.test.js +166 -0
- package/dist/multithreading/spawn/spawn.test.d.ts +2 -0
- package/dist/multithreading/spawn/spawn.test.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.test.js +223 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.js +415 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.js +801 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.js +598 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.js +71 -0
- package/dist/repo/repo.test.js +41 -0
- package/dist/state/state.extract-window.test.d.ts +2 -0
- package/dist/state/state.extract-window.test.d.ts.map +1 -0
- package/dist/state/state.extract-window.test.js +163 -0
- package/dist/state/state.pending-boundaries.test.d.ts +2 -0
- package/dist/state/state.pending-boundaries.test.d.ts.map +1 -0
- package/dist/state/state.pending-boundaries.test.js +189 -0
- package/dist/state/state.post-state.test.d.ts +2 -0
- package/dist/state/state.post-state.test.d.ts.map +1 -0
- package/dist/state/state.post-state.test.js +77 -0
- package/dist/state/state.test.js +23 -506
- package/dist/state/state.time-value-resolution.test.d.ts +2 -0
- package/dist/state/state.time-value-resolution.test.d.ts.map +1 -0
- package/dist/state/state.time-value-resolution.test.js +175 -0
- package/dist/types/extraction.d.ts +20 -1
- package/dist/types/extraction.d.ts.map +1 -1
- package/dist/types/extraction.test.js +57 -21
- package/dist/uploader/uploader.helpers.test.js +0 -11
- package/dist/uploader/uploader.test.js +0 -9
- package/package.json +7 -7
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts +0 -2
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts.map +0 -1
- package/dist/multithreading/worker-adapter/worker-adapter.test.js +0 -1243
|
@@ -1,1243 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
const attachments_streaming_pool_1 = require("../../attachments-streaming/attachments-streaming-pool");
|
|
4
|
-
const constants_1 = require("../../common/constants");
|
|
5
|
-
const state_1 = require("../../state/state");
|
|
6
|
-
const jest_setup_1 = require("../../tests/jest.setup");
|
|
7
|
-
const test_utils_1 = require("../../common/test-utils");
|
|
8
|
-
const types_1 = require("../../types");
|
|
9
|
-
const loading_1 = require("../../types/loading");
|
|
10
|
-
const worker_adapter_1 = require("./worker-adapter");
|
|
11
|
-
/* eslint-disable @typescript-eslint/no-require-imports */
|
|
12
|
-
// Mock dependencies
|
|
13
|
-
jest.mock('../../common/control-protocol', () => ({
|
|
14
|
-
emit: jest.fn().mockResolvedValue({}),
|
|
15
|
-
}));
|
|
16
|
-
// const mockPostState = jest.spyOn(State.prototype, 'postState').mockResolvedValue(); // Mock to resolve void
|
|
17
|
-
// const mockFetchState = jest.spyOn(State.prototype, 'fetchState').mockResolvedValue({}); // Mock to resolve a default state
|
|
18
|
-
jest.mock('../../mappers/mappers');
|
|
19
|
-
jest.mock('../../uploader/uploader');
|
|
20
|
-
// jest.mock('../../state/state');
|
|
21
|
-
jest.mock('../../repo/repo');
|
|
22
|
-
jest.mock('node:worker_threads', () => ({
|
|
23
|
-
parentPort: {
|
|
24
|
-
postMessage: jest.fn(),
|
|
25
|
-
},
|
|
26
|
-
}));
|
|
27
|
-
jest.mock('../../attachments-streaming/attachments-streaming-pool', () => {
|
|
28
|
-
return {
|
|
29
|
-
AttachmentsStreamingPool: jest.fn().mockImplementation(() => {
|
|
30
|
-
return {
|
|
31
|
-
streamAll: jest.fn().mockResolvedValue(undefined),
|
|
32
|
-
};
|
|
33
|
-
}),
|
|
34
|
-
};
|
|
35
|
-
});
|
|
36
|
-
describe(worker_adapter_1.WorkerAdapter.name, () => {
|
|
37
|
-
let adapter;
|
|
38
|
-
let mockEvent;
|
|
39
|
-
let mockAdapterState;
|
|
40
|
-
beforeEach(() => {
|
|
41
|
-
// Reset all mocks
|
|
42
|
-
jest.clearAllMocks();
|
|
43
|
-
// Create mock objects
|
|
44
|
-
mockEvent = (0, test_utils_1.createMockEvent)(jest_setup_1.mockServer.baseUrl, {
|
|
45
|
-
payload: { event_type: types_1.EventType.StartExtractingData },
|
|
46
|
-
});
|
|
47
|
-
const initialState = {
|
|
48
|
-
attachments: { completed: false },
|
|
49
|
-
lastSyncStarted: '',
|
|
50
|
-
lastSuccessfulSyncStarted: '',
|
|
51
|
-
snapInVersionId: '',
|
|
52
|
-
toDevRev: {
|
|
53
|
-
attachmentsMetadata: {
|
|
54
|
-
artifactIds: [],
|
|
55
|
-
lastProcessed: 0,
|
|
56
|
-
lastProcessedAttachmentsIdsList: [],
|
|
57
|
-
},
|
|
58
|
-
},
|
|
59
|
-
};
|
|
60
|
-
mockAdapterState = new state_1.State({
|
|
61
|
-
event: mockEvent,
|
|
62
|
-
initialState: initialState,
|
|
63
|
-
});
|
|
64
|
-
// Create the adapter instance
|
|
65
|
-
adapter = new worker_adapter_1.WorkerAdapter({
|
|
66
|
-
event: mockEvent,
|
|
67
|
-
adapterState: mockAdapterState,
|
|
68
|
-
});
|
|
69
|
-
});
|
|
70
|
-
describe(worker_adapter_1.WorkerAdapter.prototype.streamAttachments.name, () => {
|
|
71
|
-
it('should process all artifact batches successfully', async () => {
|
|
72
|
-
const mockStream = jest.fn();
|
|
73
|
-
// Set up adapter state with artifact IDs
|
|
74
|
-
adapter.state.toDevRev = {
|
|
75
|
-
attachmentsMetadata: {
|
|
76
|
-
artifactIds: ['artifact1', 'artifact2'],
|
|
77
|
-
lastProcessed: 0,
|
|
78
|
-
lastProcessedAttachmentsIdsList: [],
|
|
79
|
-
},
|
|
80
|
-
};
|
|
81
|
-
// Mock getting attachments from each artifact
|
|
82
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
83
|
-
.fn()
|
|
84
|
-
.mockResolvedValueOnce({
|
|
85
|
-
attachments: [
|
|
86
|
-
{
|
|
87
|
-
url: 'http://example.com/file1.pdf',
|
|
88
|
-
id: 'attachment1',
|
|
89
|
-
file_name: 'file1.pdf',
|
|
90
|
-
parent_id: 'parent1',
|
|
91
|
-
},
|
|
92
|
-
{
|
|
93
|
-
url: 'http://example.com/file2.pdf',
|
|
94
|
-
id: 'attachment2',
|
|
95
|
-
file_name: 'file2.pdf',
|
|
96
|
-
parent_id: 'parent2',
|
|
97
|
-
},
|
|
98
|
-
],
|
|
99
|
-
})
|
|
100
|
-
.mockResolvedValueOnce({
|
|
101
|
-
attachments: [
|
|
102
|
-
{
|
|
103
|
-
url: 'http://example.com/file3.pdf',
|
|
104
|
-
id: 'attachment3',
|
|
105
|
-
file_name: 'file3.pdf',
|
|
106
|
-
parent_id: 'parent3',
|
|
107
|
-
},
|
|
108
|
-
],
|
|
109
|
-
});
|
|
110
|
-
// Mock the initializeRepos method
|
|
111
|
-
adapter.initializeRepos = jest.fn();
|
|
112
|
-
const result = await adapter.streamAttachments({
|
|
113
|
-
stream: mockStream,
|
|
114
|
-
});
|
|
115
|
-
expect(adapter.initializeRepos).toHaveBeenCalledWith([
|
|
116
|
-
{ itemType: 'ssor_attachment' },
|
|
117
|
-
]);
|
|
118
|
-
expect(adapter.initializeRepos).toHaveBeenCalledTimes(1);
|
|
119
|
-
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(2);
|
|
120
|
-
// Verify state was updated correctly
|
|
121
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
122
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
123
|
-
expect(result).toBeUndefined();
|
|
124
|
-
});
|
|
125
|
-
it('[edge] should handle invalid batch size by using 1 instead', async () => {
|
|
126
|
-
const mockStream = jest.fn();
|
|
127
|
-
// Set up adapter state with artifact IDs
|
|
128
|
-
adapter.state.toDevRev = {
|
|
129
|
-
attachmentsMetadata: {
|
|
130
|
-
artifactIds: ['artifact1'],
|
|
131
|
-
lastProcessed: 0,
|
|
132
|
-
lastProcessedAttachmentsIdsList: [],
|
|
133
|
-
},
|
|
134
|
-
};
|
|
135
|
-
// Mock getting attachments
|
|
136
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
137
|
-
.fn()
|
|
138
|
-
.mockResolvedValue({
|
|
139
|
-
attachments: [
|
|
140
|
-
{
|
|
141
|
-
url: 'http://example.com/file1.pdf',
|
|
142
|
-
id: 'attachment1',
|
|
143
|
-
file_name: 'file1.pdf',
|
|
144
|
-
parent_id: 'parent1',
|
|
145
|
-
},
|
|
146
|
-
],
|
|
147
|
-
});
|
|
148
|
-
adapter.initializeRepos = jest.fn();
|
|
149
|
-
const result = await adapter.streamAttachments({
|
|
150
|
-
stream: mockStream,
|
|
151
|
-
batchSize: 0,
|
|
152
|
-
});
|
|
153
|
-
expect(result).toBeUndefined();
|
|
154
|
-
});
|
|
155
|
-
it('[edge] should cap batch size to 50 when batchSize is greater than 50', async () => {
|
|
156
|
-
const mockStream = jest.fn();
|
|
157
|
-
// Set up adapter state with artifact IDs
|
|
158
|
-
adapter.state.toDevRev = {
|
|
159
|
-
attachmentsMetadata: {
|
|
160
|
-
artifactIds: ['artifact1'],
|
|
161
|
-
lastProcessed: 0,
|
|
162
|
-
lastProcessedAttachmentsIdsList: [],
|
|
163
|
-
},
|
|
164
|
-
};
|
|
165
|
-
// Mock getting attachments
|
|
166
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
167
|
-
.fn()
|
|
168
|
-
.mockResolvedValue({
|
|
169
|
-
attachments: [
|
|
170
|
-
{
|
|
171
|
-
url: 'http://example.com/file1.pdf',
|
|
172
|
-
id: 'attachment1',
|
|
173
|
-
file_name: 'file1.pdf',
|
|
174
|
-
parent_id: 'parent1',
|
|
175
|
-
},
|
|
176
|
-
],
|
|
177
|
-
});
|
|
178
|
-
// Mock the required methods
|
|
179
|
-
adapter.initializeRepos = jest.fn();
|
|
180
|
-
const result = await adapter.streamAttachments({
|
|
181
|
-
stream: mockStream,
|
|
182
|
-
batchSize: 100, // Set batch size greater than 50
|
|
183
|
-
});
|
|
184
|
-
expect(result).toBeUndefined();
|
|
185
|
-
});
|
|
186
|
-
it('[edge] should handle empty attachments metadata artifact IDs', async () => {
|
|
187
|
-
const mockStream = jest.fn();
|
|
188
|
-
// Set up adapter state with no artifact IDs
|
|
189
|
-
adapter.state.toDevRev = {
|
|
190
|
-
attachmentsMetadata: {
|
|
191
|
-
artifactIds: [],
|
|
192
|
-
lastProcessed: 0,
|
|
193
|
-
},
|
|
194
|
-
};
|
|
195
|
-
const result = await adapter.streamAttachments({
|
|
196
|
-
stream: mockStream,
|
|
197
|
-
});
|
|
198
|
-
expect(result).toBeUndefined();
|
|
199
|
-
});
|
|
200
|
-
it('[edge] should handle errors when getting attachments', async () => {
|
|
201
|
-
const mockStream = jest.fn();
|
|
202
|
-
// Set up adapter state with artifact IDs
|
|
203
|
-
adapter.state.toDevRev = {
|
|
204
|
-
attachmentsMetadata: {
|
|
205
|
-
artifactIds: ['artifact1'],
|
|
206
|
-
lastProcessed: 0,
|
|
207
|
-
lastProcessedAttachmentsIdsList: [],
|
|
208
|
-
},
|
|
209
|
-
};
|
|
210
|
-
// Mock error when getting attachments
|
|
211
|
-
const mockError = new Error('Failed to get attachments');
|
|
212
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
213
|
-
.fn()
|
|
214
|
-
.mockResolvedValue({
|
|
215
|
-
error: mockError,
|
|
216
|
-
});
|
|
217
|
-
// Mock methods
|
|
218
|
-
adapter.initializeRepos = jest.fn();
|
|
219
|
-
const result = await adapter.streamAttachments({
|
|
220
|
-
stream: mockStream,
|
|
221
|
-
});
|
|
222
|
-
expect(result).toEqual({
|
|
223
|
-
error: mockError,
|
|
224
|
-
});
|
|
225
|
-
});
|
|
226
|
-
it('[edge] should handle empty attachments array from artifact', async () => {
|
|
227
|
-
const mockStream = jest.fn();
|
|
228
|
-
// Set up adapter state with artifact IDs
|
|
229
|
-
adapter.state.toDevRev = {
|
|
230
|
-
attachmentsMetadata: {
|
|
231
|
-
artifactIds: ['artifact1'],
|
|
232
|
-
lastProcessed: 0,
|
|
233
|
-
lastProcessedAttachmentsIdsList: [],
|
|
234
|
-
},
|
|
235
|
-
};
|
|
236
|
-
// Mock getting empty attachments
|
|
237
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
238
|
-
.fn()
|
|
239
|
-
.mockResolvedValue({
|
|
240
|
-
attachments: [],
|
|
241
|
-
});
|
|
242
|
-
// Mock methods
|
|
243
|
-
adapter.initializeRepos = jest.fn();
|
|
244
|
-
const result = await adapter.streamAttachments({
|
|
245
|
-
stream: mockStream,
|
|
246
|
-
});
|
|
247
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
248
|
-
expect(result).toBeUndefined();
|
|
249
|
-
});
|
|
250
|
-
it('should use custom processors when provided', async () => {
|
|
251
|
-
const mockStream = jest.fn();
|
|
252
|
-
const mockReducer = jest.fn().mockReturnValue(['custom-reduced']);
|
|
253
|
-
const mockIterator = jest.fn().mockResolvedValue({});
|
|
254
|
-
// Set up adapter state with artifact IDs
|
|
255
|
-
adapter.state.toDevRev = {
|
|
256
|
-
attachmentsMetadata: {
|
|
257
|
-
artifactIds: ['artifact1'],
|
|
258
|
-
lastProcessed: 0,
|
|
259
|
-
lastProcessedAttachmentsIdsList: [],
|
|
260
|
-
},
|
|
261
|
-
};
|
|
262
|
-
// Mock getting attachments
|
|
263
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
264
|
-
.fn()
|
|
265
|
-
.mockResolvedValue({
|
|
266
|
-
attachments: [{ id: 'attachment1' }],
|
|
267
|
-
});
|
|
268
|
-
// Mock methods
|
|
269
|
-
adapter.initializeRepos = jest.fn();
|
|
270
|
-
const result = await adapter.streamAttachments({
|
|
271
|
-
stream: mockStream,
|
|
272
|
-
processors: {
|
|
273
|
-
reducer: mockReducer,
|
|
274
|
-
iterator: mockIterator,
|
|
275
|
-
},
|
|
276
|
-
});
|
|
277
|
-
expect(mockReducer).toHaveBeenCalledWith({
|
|
278
|
-
attachments: [{ id: 'attachment1' }],
|
|
279
|
-
adapter: adapter,
|
|
280
|
-
batchSize: 1,
|
|
281
|
-
});
|
|
282
|
-
expect(mockIterator).toHaveBeenCalledWith({
|
|
283
|
-
reducedAttachments: ['custom-reduced'],
|
|
284
|
-
adapter: adapter,
|
|
285
|
-
stream: mockStream,
|
|
286
|
-
});
|
|
287
|
-
expect(result).toBeUndefined();
|
|
288
|
-
});
|
|
289
|
-
it('should handle rate limiting from iterator', async () => {
|
|
290
|
-
const mockStream = jest.fn();
|
|
291
|
-
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => {
|
|
292
|
-
return {
|
|
293
|
-
// Return an object with a `streamAll` method that resolves to your desired value.
|
|
294
|
-
streamAll: jest.fn().mockResolvedValue({ delay: 30 }),
|
|
295
|
-
};
|
|
296
|
-
});
|
|
297
|
-
// Set up adapter state with artifact IDs
|
|
298
|
-
adapter.state.toDevRev = {
|
|
299
|
-
attachmentsMetadata: {
|
|
300
|
-
artifactIds: ['artifact1'],
|
|
301
|
-
lastProcessed: 0,
|
|
302
|
-
lastProcessedAttachmentsIdsList: [],
|
|
303
|
-
},
|
|
304
|
-
};
|
|
305
|
-
// Mock getting attachments
|
|
306
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
307
|
-
.fn()
|
|
308
|
-
.mockResolvedValue({
|
|
309
|
-
attachments: [{ id: 'attachment1' }],
|
|
310
|
-
});
|
|
311
|
-
// Mock methods
|
|
312
|
-
adapter.initializeRepos = jest.fn();
|
|
313
|
-
const result = await adapter.streamAttachments({
|
|
314
|
-
stream: mockStream,
|
|
315
|
-
});
|
|
316
|
-
expect(result).toEqual({
|
|
317
|
-
delay: 30,
|
|
318
|
-
});
|
|
319
|
-
// The artifactIds array should remain unchanged
|
|
320
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
321
|
-
'artifact1',
|
|
322
|
-
]);
|
|
323
|
-
});
|
|
324
|
-
it('should handle error from iterator', async () => {
|
|
325
|
-
const mockStream = jest.fn();
|
|
326
|
-
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => {
|
|
327
|
-
return {
|
|
328
|
-
// Return an object with a `streamAll` method that resolves to your desired value.
|
|
329
|
-
streamAll: jest.fn().mockResolvedValue({
|
|
330
|
-
error: 'Mock error',
|
|
331
|
-
}),
|
|
332
|
-
};
|
|
333
|
-
});
|
|
334
|
-
// Set up adapter state with artifact IDs
|
|
335
|
-
adapter.state.toDevRev = {
|
|
336
|
-
attachmentsMetadata: {
|
|
337
|
-
artifactIds: ['artifact1'],
|
|
338
|
-
lastProcessed: 0,
|
|
339
|
-
lastProcessedAttachmentsIdsList: [],
|
|
340
|
-
},
|
|
341
|
-
};
|
|
342
|
-
// Mock getting attachments
|
|
343
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
344
|
-
.fn()
|
|
345
|
-
.mockResolvedValue({
|
|
346
|
-
attachments: [{ id: 'attachment1' }],
|
|
347
|
-
});
|
|
348
|
-
// Mock methods
|
|
349
|
-
adapter.initializeRepos = jest.fn();
|
|
350
|
-
const result = await adapter.streamAttachments({
|
|
351
|
-
stream: mockStream,
|
|
352
|
-
});
|
|
353
|
-
expect(result).toEqual({
|
|
354
|
-
error: 'Mock error',
|
|
355
|
-
});
|
|
356
|
-
// The artifactIds array should remain unchanged
|
|
357
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
358
|
-
'artifact1',
|
|
359
|
-
]);
|
|
360
|
-
});
|
|
361
|
-
it('should emit progress event and exit process on timeout, preserving state for resumption', async () => {
|
|
362
|
-
const mockStream = jest.fn();
|
|
363
|
-
// Mock process.exit to prevent it from killing the test runner
|
|
364
|
-
const exitSpy = jest
|
|
365
|
-
.spyOn(process, 'exit')
|
|
366
|
-
.mockImplementation(() => undefined);
|
|
367
|
-
// Set up adapter state with multiple artifact IDs
|
|
368
|
-
adapter.state.toDevRev = {
|
|
369
|
-
attachmentsMetadata: {
|
|
370
|
-
artifactIds: ['artifact1', 'artifact2', 'artifact3'],
|
|
371
|
-
lastProcessed: 0,
|
|
372
|
-
lastProcessedAttachmentsIdsList: [],
|
|
373
|
-
},
|
|
374
|
-
};
|
|
375
|
-
// Mock getting attachments for each artifact
|
|
376
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
377
|
-
.fn()
|
|
378
|
-
.mockResolvedValue({
|
|
379
|
-
attachments: [
|
|
380
|
-
{
|
|
381
|
-
url: 'http://example.com/file1.pdf',
|
|
382
|
-
id: 'attachment1',
|
|
383
|
-
file_name: 'file1.pdf',
|
|
384
|
-
parent_id: 'parent1',
|
|
385
|
-
},
|
|
386
|
-
],
|
|
387
|
-
});
|
|
388
|
-
// Mock the pool to simulate timeout happening during the first artifact
|
|
389
|
-
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => {
|
|
390
|
-
return {
|
|
391
|
-
streamAll: jest.fn().mockImplementation(() => {
|
|
392
|
-
adapter.isTimeout = true;
|
|
393
|
-
return {};
|
|
394
|
-
}),
|
|
395
|
-
};
|
|
396
|
-
});
|
|
397
|
-
adapter.initializeRepos = jest.fn();
|
|
398
|
-
// Mock emit to verify it's called with progress event
|
|
399
|
-
const emitSpy = jest.spyOn(adapter, 'emit').mockResolvedValue();
|
|
400
|
-
await adapter.streamAttachments({
|
|
401
|
-
stream: mockStream,
|
|
402
|
-
});
|
|
403
|
-
// Should have emitted progress event
|
|
404
|
-
expect(emitSpy).toHaveBeenCalledWith(types_1.ExtractorEventType.AttachmentExtractionProgress);
|
|
405
|
-
// Should have called process.exit(0)
|
|
406
|
-
expect(exitSpy).toHaveBeenCalledWith(0);
|
|
407
|
-
// The current artifact should NOT be removed from the list
|
|
408
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
409
|
-
'artifact1',
|
|
410
|
-
'artifact2',
|
|
411
|
-
'artifact3',
|
|
412
|
-
]);
|
|
413
|
-
// Only the first artifact should have been fetched
|
|
414
|
-
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(1);
|
|
415
|
-
exitSpy.mockRestore();
|
|
416
|
-
});
|
|
417
|
-
it('should reset lastProcessed and attachment IDs list after processing all artifacts', async () => {
|
|
418
|
-
const mockStream = jest.fn();
|
|
419
|
-
adapter.state.toDevRev = {
|
|
420
|
-
attachmentsMetadata: {
|
|
421
|
-
artifactIds: ['artifact1'],
|
|
422
|
-
lastProcessed: 0,
|
|
423
|
-
lastProcessedAttachmentsIdsList: [],
|
|
424
|
-
},
|
|
425
|
-
};
|
|
426
|
-
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
427
|
-
.fn()
|
|
428
|
-
.mockResolvedValueOnce({
|
|
429
|
-
attachments: [
|
|
430
|
-
{
|
|
431
|
-
url: 'http://example.com/file1.pdf',
|
|
432
|
-
id: 'attachment1',
|
|
433
|
-
file_name: 'file1.pdf',
|
|
434
|
-
parent_id: 'parent1',
|
|
435
|
-
},
|
|
436
|
-
{
|
|
437
|
-
url: 'http://example.com/file2.pdf',
|
|
438
|
-
id: 'attachment2',
|
|
439
|
-
file_name: 'file2.pdf',
|
|
440
|
-
parent_id: 'parent2',
|
|
441
|
-
},
|
|
442
|
-
{
|
|
443
|
-
url: 'http://example.com/file3.pdf',
|
|
444
|
-
id: 'attachment3',
|
|
445
|
-
file_name: 'file3.pdf',
|
|
446
|
-
parent_id: 'parent3',
|
|
447
|
-
},
|
|
448
|
-
],
|
|
449
|
-
});
|
|
450
|
-
adapter.processAttachment = jest.fn().mockResolvedValue(null);
|
|
451
|
-
await adapter.streamAttachments({
|
|
452
|
-
stream: mockStream,
|
|
453
|
-
});
|
|
454
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toHaveLength(0);
|
|
455
|
-
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
456
|
-
});
|
|
457
|
-
});
|
|
458
|
-
describe(worker_adapter_1.WorkerAdapter.prototype.processAttachment.name, () => {
|
|
459
|
-
const createMockHttpStream = (headers = {}) => ({
|
|
460
|
-
headers,
|
|
461
|
-
data: { destroy: jest.fn() },
|
|
462
|
-
});
|
|
463
|
-
beforeEach(() => {
|
|
464
|
-
adapter.initializeRepos([{ itemType: 'ssor_attachment' }]);
|
|
465
|
-
const mockRepo = { push: jest.fn().mockResolvedValue(undefined) };
|
|
466
|
-
adapter.getRepo = jest.fn().mockReturnValue(mockRepo);
|
|
467
|
-
});
|
|
468
|
-
it('should use attachment.content_type when provided, ignoring HTTP header', async () => {
|
|
469
|
-
const mockStream = jest.fn().mockResolvedValue({
|
|
470
|
-
httpStream: createMockHttpStream({
|
|
471
|
-
'content-type': 'text/plain',
|
|
472
|
-
'content-length': '100',
|
|
473
|
-
}),
|
|
474
|
-
});
|
|
475
|
-
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
476
|
-
response: {
|
|
477
|
-
artifact_id: 'art_1',
|
|
478
|
-
upload_url: 'https://upload',
|
|
479
|
-
form_data: [],
|
|
480
|
-
},
|
|
481
|
-
});
|
|
482
|
-
adapter['uploader'].streamArtifact = jest
|
|
483
|
-
.fn()
|
|
484
|
-
.mockResolvedValue({ response: {} });
|
|
485
|
-
adapter['uploader'].confirmArtifactUpload = jest
|
|
486
|
-
.fn()
|
|
487
|
-
.mockResolvedValue({ response: {} });
|
|
488
|
-
const attachment = {
|
|
489
|
-
id: 'att-1',
|
|
490
|
-
url: 'https://example.com/file.pdf',
|
|
491
|
-
file_name: 'file.pdf',
|
|
492
|
-
parent_id: 'parent-1',
|
|
493
|
-
content_type: 'application/pdf',
|
|
494
|
-
};
|
|
495
|
-
await adapter.processAttachment(attachment, mockStream);
|
|
496
|
-
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('file.pdf', 'application/pdf', 100);
|
|
497
|
-
});
|
|
498
|
-
it('should use HTTP header content-type when attachment.content_type is not set', async () => {
|
|
499
|
-
const mockStream = jest.fn().mockResolvedValue({
|
|
500
|
-
httpStream: createMockHttpStream({
|
|
501
|
-
'content-type': 'image/jpeg',
|
|
502
|
-
'content-length': '200',
|
|
503
|
-
}),
|
|
504
|
-
});
|
|
505
|
-
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
506
|
-
response: {
|
|
507
|
-
artifact_id: 'art_2',
|
|
508
|
-
upload_url: 'https://upload',
|
|
509
|
-
form_data: [],
|
|
510
|
-
},
|
|
511
|
-
});
|
|
512
|
-
adapter['uploader'].streamArtifact = jest
|
|
513
|
-
.fn()
|
|
514
|
-
.mockResolvedValue({ response: {} });
|
|
515
|
-
adapter['uploader'].confirmArtifactUpload = jest
|
|
516
|
-
.fn()
|
|
517
|
-
.mockResolvedValue({ response: {} });
|
|
518
|
-
const attachment = {
|
|
519
|
-
id: 'att-2',
|
|
520
|
-
url: 'https://example.com/photo.jpg',
|
|
521
|
-
file_name: 'photo.jpg',
|
|
522
|
-
parent_id: 'parent-2',
|
|
523
|
-
};
|
|
524
|
-
await adapter.processAttachment(attachment, mockStream);
|
|
525
|
-
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('photo.jpg', 'image/jpeg', 200);
|
|
526
|
-
});
|
|
527
|
-
it('should fall back to application/octet-stream when neither content_type nor HTTP header is set', async () => {
|
|
528
|
-
const mockStream = jest.fn().mockResolvedValue({
|
|
529
|
-
httpStream: createMockHttpStream({}),
|
|
530
|
-
});
|
|
531
|
-
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
532
|
-
response: {
|
|
533
|
-
artifact_id: 'art_3',
|
|
534
|
-
upload_url: 'https://upload',
|
|
535
|
-
form_data: [],
|
|
536
|
-
},
|
|
537
|
-
});
|
|
538
|
-
adapter['uploader'].streamArtifact = jest
|
|
539
|
-
.fn()
|
|
540
|
-
.mockResolvedValue({ response: {} });
|
|
541
|
-
adapter['uploader'].confirmArtifactUpload = jest
|
|
542
|
-
.fn()
|
|
543
|
-
.mockResolvedValue({ response: {} });
|
|
544
|
-
const attachment = {
|
|
545
|
-
id: 'att-3',
|
|
546
|
-
url: 'https://example.com/file.bin',
|
|
547
|
-
file_name: 'file.bin',
|
|
548
|
-
parent_id: 'parent-3',
|
|
549
|
-
};
|
|
550
|
-
await adapter.processAttachment(attachment, mockStream);
|
|
551
|
-
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('file.bin', 'application/octet-stream', undefined);
|
|
552
|
-
});
|
|
553
|
-
});
|
|
554
|
-
describe(worker_adapter_1.WorkerAdapter.prototype.emit.name, () => {
|
|
555
|
-
let counter;
|
|
556
|
-
let mockPostMessage;
|
|
557
|
-
beforeEach(() => {
|
|
558
|
-
counter = { counter: 0 };
|
|
559
|
-
// Import the worker_threads module and spy on parentPort.postMessage
|
|
560
|
-
const workerThreads = require('node:worker_threads');
|
|
561
|
-
mockPostMessage = jest.fn().mockImplementation(() => {
|
|
562
|
-
counter.counter += 1;
|
|
563
|
-
});
|
|
564
|
-
// Spy on the parentPort.postMessage method
|
|
565
|
-
if (workerThreads.parentPort) {
|
|
566
|
-
jest
|
|
567
|
-
.spyOn(workerThreads.parentPort, 'postMessage')
|
|
568
|
-
.mockImplementation(mockPostMessage);
|
|
569
|
-
}
|
|
570
|
-
else {
|
|
571
|
-
// If parentPort is null (not in worker context), create a mock
|
|
572
|
-
workerThreads.parentPort = {
|
|
573
|
-
postMessage: mockPostMessage,
|
|
574
|
-
};
|
|
575
|
-
}
|
|
576
|
-
});
|
|
577
|
-
afterEach(() => {
|
|
578
|
-
// Restore all mocks
|
|
579
|
-
jest.restoreAllMocks();
|
|
580
|
-
});
|
|
581
|
-
it('should emit only one event when multiple events of same type are sent', async () => {
|
|
582
|
-
adapter['adapterState'].postState = jest
|
|
583
|
-
.fn()
|
|
584
|
-
.mockResolvedValue(undefined);
|
|
585
|
-
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
586
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
587
|
-
reports: [],
|
|
588
|
-
processed_files: [],
|
|
589
|
-
});
|
|
590
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
591
|
-
reports: [],
|
|
592
|
-
processed_files: [],
|
|
593
|
-
});
|
|
594
|
-
expect(counter.counter).toBe(1);
|
|
595
|
-
});
|
|
596
|
-
it('should emit event when different event type is sent after previous events', async () => {
|
|
597
|
-
adapter['adapterState'].postState = jest
|
|
598
|
-
.fn()
|
|
599
|
-
.mockResolvedValue(undefined);
|
|
600
|
-
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
601
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
602
|
-
reports: [],
|
|
603
|
-
processed_files: [],
|
|
604
|
-
});
|
|
605
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
606
|
-
reports: [],
|
|
607
|
-
processed_files: [],
|
|
608
|
-
});
|
|
609
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
610
|
-
reports: [],
|
|
611
|
-
processed_files: [],
|
|
612
|
-
});
|
|
613
|
-
expect(counter.counter).toBe(1);
|
|
614
|
-
});
|
|
615
|
-
it('should correctly emit one event even if postState errors', async () => {
|
|
616
|
-
adapter['adapterState'].postState = jest
|
|
617
|
-
.fn()
|
|
618
|
-
.mockRejectedValue(new Error('postState error'));
|
|
619
|
-
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
620
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
621
|
-
reports: [],
|
|
622
|
-
processed_files: [],
|
|
623
|
-
});
|
|
624
|
-
expect(counter.counter).toBe(1);
|
|
625
|
-
});
|
|
626
|
-
it('should correctly emit one event even if uploadAllRepos errors', async () => {
|
|
627
|
-
adapter['adapterState'].postState = jest
|
|
628
|
-
.fn()
|
|
629
|
-
.mockResolvedValue(undefined);
|
|
630
|
-
adapter.uploadAllRepos = jest
|
|
631
|
-
.fn()
|
|
632
|
-
.mockRejectedValue(new Error('uploadAllRepos error'));
|
|
633
|
-
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
634
|
-
reports: [],
|
|
635
|
-
processed_files: [],
|
|
636
|
-
});
|
|
637
|
-
expect(counter.counter).toBe(1);
|
|
638
|
-
});
|
|
639
|
-
it('should include artifacts in data for extraction events', async () => {
|
|
640
|
-
const { emit: mockEmit } = require('../../common/control-protocol');
|
|
641
|
-
adapter['adapterState'].postState = jest
|
|
642
|
-
.fn()
|
|
643
|
-
.mockResolvedValue(undefined);
|
|
644
|
-
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
645
|
-
adapter['_artifacts'] = [
|
|
646
|
-
{ id: 'art-1', item_count: 10, item_type: 'issues' },
|
|
647
|
-
];
|
|
648
|
-
await adapter.emit(types_1.ExtractorEventType.DataExtractionDone);
|
|
649
|
-
expect(mockEmit).toHaveBeenCalledWith(expect.objectContaining({
|
|
650
|
-
data: expect.objectContaining({
|
|
651
|
-
artifacts: expect.arrayContaining([
|
|
652
|
-
expect.objectContaining({ id: 'art-1' }),
|
|
653
|
-
]),
|
|
654
|
-
}),
|
|
655
|
-
}));
|
|
656
|
-
// Should not include loader-specific fields
|
|
657
|
-
const callData = mockEmit.mock.calls[0][0].data;
|
|
658
|
-
expect(callData).not.toHaveProperty('reports');
|
|
659
|
-
expect(callData).not.toHaveProperty('processed_files');
|
|
660
|
-
});
|
|
661
|
-
it('should include reports and processed_files in data for loader events', async () => {
|
|
662
|
-
const { emit: mockEmit } = require('../../common/control-protocol');
|
|
663
|
-
adapter['adapterState'].postState = jest
|
|
664
|
-
.fn()
|
|
665
|
-
.mockResolvedValue(undefined);
|
|
666
|
-
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
667
|
-
adapter['loaderReports'] = [
|
|
668
|
-
{ item_type: 'tasks', [loading_1.ActionType.CREATED]: 5 },
|
|
669
|
-
];
|
|
670
|
-
adapter['_processedFiles'] = ['file-1', 'file-2'];
|
|
671
|
-
await adapter.emit(types_1.LoaderEventType.DataLoadingDone);
|
|
672
|
-
expect(mockEmit).toHaveBeenCalledWith(expect.objectContaining({
|
|
673
|
-
data: expect.objectContaining({
|
|
674
|
-
reports: expect.arrayContaining([
|
|
675
|
-
expect.objectContaining({ item_type: 'tasks' }),
|
|
676
|
-
]),
|
|
677
|
-
processed_files: ['file-1', 'file-2'],
|
|
678
|
-
}),
|
|
679
|
-
}));
|
|
680
|
-
// Should not include extraction-specific fields
|
|
681
|
-
const callData = mockEmit.mock.calls[0][0].data;
|
|
682
|
-
expect(callData).not.toHaveProperty('artifacts');
|
|
683
|
-
});
|
|
684
|
-
it('should not include artifacts, reports, or processed_files for unknown event types', async () => {
  const { emit: mockEmit } = require('../../common/control-protocol');
  // Stub state persistence and repo upload so emit() resolves without I/O.
  adapter['adapterState'].postState = jest.fn().mockResolvedValue(undefined);
  adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
  // Populate every mode-specific field; none of them should be forwarded.
  adapter['_artifacts'] = [
    { id: 'art-1', item_count: 10, item_type: 'issues' },
  ];
  adapter['loaderReports'] = [
    { item_type: 'tasks', [loading_1.ActionType.CREATED]: 5 },
  ];
  adapter['_processedFiles'] = ['file-1'];

  await adapter.emit('SOME_UNKNOWN_EVENT');

  const emittedData = mockEmit.mock.calls[0][0].data;
  expect(emittedData).not.toHaveProperty('artifacts');
  expect(emittedData).not.toHaveProperty('reports');
  expect(emittedData).not.toHaveProperty('processed_files');
});
|
|
703
|
-
it('should include artifacts for all ExtractorEventType values', async () => {
  const { emit: mockEmit } = require('../../common/control-protocol');
  const extractorEvents = [
    types_1.ExtractorEventType.DataExtractionDone,
    types_1.ExtractorEventType.DataExtractionProgress,
    types_1.ExtractorEventType.DataExtractionError,
    types_1.ExtractorEventType.AttachmentExtractionDone,
    types_1.ExtractorEventType.AttachmentExtractionProgress,
  ];
  for (const eventType of extractorEvents) {
    // Fresh mocks and a reset emit guard for each event type.
    jest.clearAllMocks();
    adapter.hasWorkerEmitted = false;
    adapter['adapterState'].postState = jest.fn().mockResolvedValue(undefined);
    adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);

    await adapter.emit(eventType);

    const emittedData = mockEmit.mock.calls[0]?.[0]?.data;
    // Extractor payloads carry artifacts but never loader reports.
    expect(emittedData).toHaveProperty('artifacts');
    expect(emittedData).not.toHaveProperty('reports');
  }
});
|
|
726
|
-
it('should include reports and processed_files for all LoaderEventType values', async () => {
  const { emit: mockEmit } = require('../../common/control-protocol');
  const loaderEvents = [
    types_1.LoaderEventType.DataLoadingDone,
    types_1.LoaderEventType.DataLoadingProgress,
    types_1.LoaderEventType.DataLoadingError,
    types_1.LoaderEventType.AttachmentLoadingDone,
    types_1.LoaderEventType.AttachmentLoadingProgress,
  ];
  for (const eventType of loaderEvents) {
    // Fresh mocks and a reset emit guard for each event type.
    jest.clearAllMocks();
    adapter.hasWorkerEmitted = false;
    adapter['adapterState'].postState = jest.fn().mockResolvedValue(undefined);
    adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);

    await adapter.emit(eventType);

    const emittedData = mockEmit.mock.calls[0]?.[0]?.data;
    // Loader payloads carry reports/processed_files but never artifacts.
    expect(emittedData).toHaveProperty('reports');
    expect(emittedData).toHaveProperty('processed_files');
    expect(emittedData).not.toHaveProperty('artifacts');
  }
});
|
|
750
|
-
});
|
|
751
|
-
describe('workersOldest / workersNewest boundary updates', () => {
  let mockPostMessage;

  beforeEach(() => {
    const workerThreads = require('node:worker_threads');
    mockPostMessage = jest.fn();
    // In a real worker parentPort exists — spy on it; in the test process
    // it is null, so install a stand-in instead.
    if (workerThreads.parentPort) {
      jest
        .spyOn(workerThreads.parentPort, 'postMessage')
        .mockImplementation(mockPostMessage);
    } else {
      workerThreads.parentPort = { postMessage: mockPostMessage };
    }
    adapter['adapterState'].postState = jest.fn().mockResolvedValue(undefined);
    adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
  });

  afterEach(() => {
    jest.restoreAllMocks();
  });

  /**
   * Helper: sets extract_from and extract_to on the event context,
   * resets the emit guard so the adapter can emit again, then emits
   * AttachmentExtractionDone.
   */
  async function emitDone(adapterInstance, extractionStart, extractionEnd) {
    adapterInstance.event.payload.event_context.extract_from = extractionStart;
    adapterInstance.event.payload.event_context.extract_to = extractionEnd;
    // Reset the emit guard so we can emit multiple times in a single test
    adapterInstance['hasWorkerEmitted'] = false;
    await adapterInstance.emit(
      types_1.ExtractorEventType.AttachmentExtractionDone,
      { reports: [], processed_files: [] }
    );
  }

  describe('initial import with UNBOUNDED start', () => {
    it('should set workersOldest to UNBOUNDED_DATE_TIME_VALUE and workersNewest to extraction end', async () => {
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
      expect(adapter.state.workersNewest).toBe('2025-06-01T00:00:00.000Z');
    });
  });

  describe('reconciliation after UNBOUNDED initial import', () => {
    it('should NOT overwrite workersOldest when reconciliation start is later than sentinel', async () => {
      // Initial import: UNBOUNDED start, NOW end
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      // Reconciliation: absolute dates within the range
      await emitDone(adapter, '2025-01-01T00:00:00.000Z', '2025-03-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
      expect(adapter.state.workersNewest).toBe('2025-06-01T00:00:00.000Z');
    });

    it('should NOT overwrite workersOldest even when reconciliation start is very early', async () => {
      // Initial import: UNBOUNDED start, NOW end
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      // Reconciliation with a very old start date — still later than epoch
      await emitDone(adapter, '1980-01-01T00:00:00.000Z', '1990-01-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
      expect(adapter.state.workersNewest).toBe('2025-06-01T00:00:00.000Z');
    });
  });

  describe('forward sync after UNBOUNDED initial import', () => {
    it('should expand workersNewest forward while preserving workersOldest', async () => {
      // Initial import
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      // Forward sync: from workersNewest to now
      await emitDone(adapter, '2025-06-01T00:00:00.000Z', '2025-07-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
      expect(adapter.state.workersNewest).toBe('2025-07-01T00:00:00.000Z');
    });
  });

  describe('reconciliation with end beyond current newest', () => {
    it('should expand workersNewest when reconciliation end is later', async () => {
      // Initial import
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      // Reconciliation with end beyond current newest
      await emitDone(adapter, '2024-01-01T00:00:00.000Z', '2025-08-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
      expect(adapter.state.workersNewest).toBe('2025-08-01T00:00:00.000Z');
    });
  });

  describe('first sync with absolute dates (no UNBOUNDED)', () => {
    it('should set both boundaries from the extraction range', async () => {
      await emitDone(adapter, '2025-01-01T00:00:00.000Z', '2025-03-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe('2025-01-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-03-01T00:00:00.000Z');
    });
  });

  describe('reconciliation after absolute initial sync', () => {
    it('should expand workersOldest backward when reconciliation start is earlier', async () => {
      // Initial sync with absolute dates
      await emitDone(adapter, '2025-01-01T00:00:00.000Z', '2025-03-01T00:00:00.000Z');
      // Reconciliation with earlier start
      await emitDone(adapter, '2024-06-01T00:00:00.000Z', '2025-02-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe('2024-06-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-03-01T00:00:00.000Z');
    });

    it('should NOT change boundaries when reconciliation is within existing range', async () => {
      // Initial sync
      await emitDone(adapter, '2025-01-01T00:00:00.000Z', '2025-03-01T00:00:00.000Z');
      // Reconciliation entirely within existing range
      await emitDone(adapter, '2025-01-15T00:00:00.000Z', '2025-02-15T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe('2025-01-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-03-01T00:00:00.000Z');
    });

    it('should expand both boundaries when reconciliation exceeds both', async () => {
      // Initial sync
      await emitDone(adapter, '2025-01-01T00:00:00.000Z', '2025-03-01T00:00:00.000Z');
      // Reconciliation exceeding both ends
      await emitDone(adapter, '2024-06-01T00:00:00.000Z', '2025-09-01T00:00:00.000Z');
      expect(adapter.state.workersOldest).toBe('2024-06-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-09-01T00:00:00.000Z');
    });
  });

  describe('multiple forward syncs', () => {
    it('should progressively expand workersNewest while preserving workersOldest', async () => {
      // Initial import
      await emitDone(
        adapter,
        constants_1.UNBOUNDED_DATE_TIME_VALUE,
        '2025-06-01T00:00:00.000Z'
      );
      // First forward sync
      await emitDone(adapter, '2025-06-01T00:00:00.000Z', '2025-07-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-07-01T00:00:00.000Z');
      // Second forward sync
      await emitDone(adapter, '2025-07-01T00:00:00.000Z', '2025-08-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-08-01T00:00:00.000Z');
      // workersOldest should remain the sentinel throughout
      expect(adapter.state.workersOldest).toBe(
        constants_1.UNBOUNDED_DATE_TIME_VALUE
      );
    });
  });

  describe('non-AttachmentExtractionDone events should NOT update boundaries', () => {
    // Shared scenario: seed known boundaries, emit the given event with a
    // wider extraction range, and verify the boundaries are untouched.
    const assertBoundariesUnchanged = async (eventType) => {
      adapter.state.workersOldest = '2025-01-01T00:00:00.000Z';
      adapter.state.workersNewest = '2025-03-01T00:00:00.000Z';
      adapter.event.payload.event_context.extract_from =
        '2024-01-01T00:00:00.000Z';
      adapter.event.payload.event_context.extract_to =
        '2025-12-01T00:00:00.000Z';
      await adapter.emit(eventType, { reports: [], processed_files: [] });
      expect(adapter.state.workersOldest).toBe('2025-01-01T00:00:00.000Z');
      expect(adapter.state.workersNewest).toBe('2025-03-01T00:00:00.000Z');
    };

    it('should not update boundaries on DataExtractionDone', () =>
      assertBoundariesUnchanged(types_1.ExtractorEventType.DataExtractionDone));

    it('should not update boundaries on DataExtractionProgress', () =>
      assertBoundariesUnchanged(types_1.ExtractorEventType.DataExtractionProgress));

    it('should not update boundaries on MetadataExtractionError', () =>
      assertBoundariesUnchanged(types_1.ExtractorEventType.MetadataExtractionError));

    it('should not update boundaries on AttachmentExtractionError', () =>
      assertBoundariesUnchanged(types_1.ExtractorEventType.AttachmentExtractionError));
  });
});
|
|
939
|
-
describe('extractionScope', () => {
  it('should return empty object by default', () => {
    expect(adapter.extractionScope).toEqual({});
  });

  it('should return extraction scope from adapter state', () => {
    // Simulate what State.init() does when parsing objects from API
    mockAdapterState._extractionScope = {
      tasks: { extract: true },
      users: { extract: false },
    };
    expect(adapter.extractionScope).toEqual({
      tasks: { extract: true },
      users: { extract: false },
    });
  });
});
|
|
956
|
-
describe('shouldExtract', () => {
  it('should return true when extraction scope is empty', () => {
    // With no scope configured, every item type is extractable.
    expect(adapter.shouldExtract('tasks')).toBe(true);
    expect(adapter.shouldExtract('users')).toBe(true);
  });

  it('should return true when item type is not in scope', () => {
    mockAdapterState._extractionScope = { tasks: { extract: true } };
    // Unlisted item types default to extractable.
    expect(adapter.shouldExtract('users')).toBe(true);
  });

  it('should return true when item type has extract: true', () => {
    mockAdapterState._extractionScope = { tasks: { extract: true } };
    expect(adapter.shouldExtract('tasks')).toBe(true);
  });

  it('should return false when item type has extract: false', () => {
    mockAdapterState._extractionScope = {
      tasks: { extract: false },
      users: { extract: true },
    };
    expect(adapter.shouldExtract('tasks')).toBe(false);
    expect(adapter.shouldExtract('users')).toBe(true);
  });
});
|
|
982
|
-
describe(worker_adapter_1.WorkerAdapter.prototype.loadItemTypes.name, () => {
  let exitSpy;
  let emitSpy;

  // Builds the normalized-item fixture shape used by these tests.
  const makeItem = (n) => ({
    id: { devrev: `dev-${n}`, external: `ext-${n}` },
    created_date: '',
    modified_date: '',
    data: {},
  });

  beforeEach(() => {
    exitSpy = jest.spyOn(process, 'exit').mockImplementation(() => undefined);
    emitSpy = jest.spyOn(adapter, 'emit').mockResolvedValue();
    // Set event type to loading continuation (not StartLoadingData) so we can
    // set fromDevRev state directly without mocking getLoaderBatches
    mockEvent.payload.event_type = types_1.EventType.ContinueLoadingData;
  });

  afterEach(() => {
    exitSpy.mockRestore();
  });

  // Seeds one pending file of `items` and stubs the artifact download.
  function setupFilesToLoad(items) {
    adapter['adapterState'].state.fromDevRev = {
      filesToLoad: [
        {
          id: 'artifact-1',
          file_name: 'file1.json',
          itemType: 'tasks',
          count: items.length,
          lineToProcess: 0,
          completed: false,
        },
      ],
    };
    adapter['uploader'].getJsonObjectByArtifactId = jest
      .fn()
      .mockResolvedValue({ response: items });
  }

  it('should emit DataLoadingProgress and exit on timeout', async () => {
    setupFilesToLoad([makeItem(1), makeItem(2)]);
    // Set timeout before calling loadItemTypes
    adapter.isTimeout = true;
    const itemTypesToLoad = [
      { itemType: 'tasks', create: jest.fn(), update: jest.fn() },
    ];

    await adapter.loadItemTypes({ itemTypesToLoad });

    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.DataLoadingProgress
    );
    expect(exitSpy).toHaveBeenCalledWith(0);
  });

  it('should emit DataLoadingProgress mid-loop when timeout arrives between items', async () => {
    setupFilesToLoad([makeItem(1), makeItem(2), makeItem(3)]);
    // Mock process.exit to throw so it stops execution like a real exit would
    exitSpy.mockRestore();
    exitSpy = jest.spyOn(process, 'exit').mockImplementation(() => {
      throw new Error(`process.exit`);
    });

    let loadItemCallCount = 0;
    // Mock loadItem to set timeout after the first call
    jest.spyOn(adapter, 'loadItem').mockImplementation(async () => {
      loadItemCallCount++;
      if (loadItemCallCount === 1) {
        adapter.isTimeout = true;
      }
      return { report: { item_type: 'tasks', updated: 1 } };
    });

    const itemTypesToLoad = [
      { itemType: 'tasks', create: jest.fn(), update: jest.fn() },
    ];

    // process.exit throws, so this will throw
    await expect(adapter.loadItemTypes({ itemTypesToLoad })).rejects.toThrow(
      'process.exit'
    );
    // First item processed, then timeout detected on second iteration
    expect(loadItemCallCount).toBe(1);
    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.DataLoadingProgress
    );
  });

  it('should emit DataLoadingError and exit(1) on unexpected error', async () => {
    adapter['adapterState'].state.fromDevRev = {
      filesToLoad: [
        {
          id: 'artifact-1',
          file_name: 'file1.json',
          itemType: 'tasks',
          count: 1,
          lineToProcess: 0,
          completed: false,
        },
      ],
    };
    // Make getJsonObjectByArtifactId throw (not return error — throw)
    adapter['uploader'].getJsonObjectByArtifactId = jest
      .fn()
      .mockRejectedValue(new Error('Unexpected network failure'));
    const itemTypesToLoad = [
      { itemType: 'tasks', create: jest.fn(), update: jest.fn() },
    ];

    await adapter.loadItemTypes({ itemTypesToLoad });

    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.DataLoadingError,
      expect.objectContaining({
        error: expect.objectContaining({
          message: expect.stringContaining('Error during data loading'),
        }),
      })
    );
    expect(exitSpy).toHaveBeenCalledWith(1);
  });
});
|
|
1127
|
-
describe(worker_adapter_1.WorkerAdapter.prototype.loadAttachments.name, () => {
  let exitSpy;
  let emitSpy;

  // Builds the attachment fixture shape shared by these tests.
  const makeAttachment = () => ({
    reference_id: 'ref-1',
    parent_type: 'task',
    parent_reference_id: 'parent-1',
    file_name: 'file.pdf',
    file_type: 'application/pdf',
    file_size: 100,
    url: 'https://example.com/file.pdf',
    valid_until: '',
    created_by_id: 'user-1',
    created_date: '',
    modified_by_id: 'user-1',
    modified_date: '',
  });

  beforeEach(() => {
    exitSpy = jest.spyOn(process, 'exit').mockImplementation(() => undefined);
    emitSpy = jest.spyOn(adapter, 'emit').mockResolvedValue();
    // Set event type to continuation so we can set fromDevRev state directly
    mockEvent.payload.event_type = types_1.EventType.ContinueLoadingAttachments;
  });

  afterEach(() => {
    exitSpy.mockRestore();
  });

  // Seeds one pending attachments file and stubs the artifact download.
  function setupFilesToLoad(items) {
    adapter['adapterState'].state.fromDevRev = {
      filesToLoad: [
        {
          id: 'artifact-1',
          file_name: 'attachments.json',
          itemType: 'attachment',
          count: items.length,
          lineToProcess: 0,
          completed: false,
        },
      ],
    };
    adapter['uploader'].getJsonObjectByArtifactId = jest
      .fn()
      .mockResolvedValue({ response: items });
  }

  it('should emit AttachmentLoadingProgress and exit on timeout', async () => {
    setupFilesToLoad([makeAttachment()]);
    adapter.isTimeout = true;

    await adapter.loadAttachments({ create: jest.fn() });

    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.AttachmentLoadingProgress
    );
    expect(exitSpy).toHaveBeenCalledWith(0);
  });

  it('should emit AttachmentLoadingError on transformer file error', async () => {
    adapter['adapterState'].state.fromDevRev = {
      filesToLoad: [
        {
          id: 'bad-artifact',
          file_name: 'attachments.json',
          itemType: 'attachment',
          count: 1,
          lineToProcess: 0,
          completed: false,
        },
      ],
    };
    adapter['uploader'].getJsonObjectByArtifactId = jest.fn().mockResolvedValue({
      response: null,
      error: new Error('Artifact not found'),
    });

    await adapter.loadAttachments({ create: jest.fn() });

    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.AttachmentLoadingError,
      expect.objectContaining({
        error: expect.objectContaining({
          message: expect.stringContaining('Transformer file not found'),
        }),
      })
    );
  });

  it('should emit AttachmentLoadingError and exit(1) on unexpected error', async () => {
    setupFilesToLoad([makeAttachment()]);
    // Make the create function throw
    const mockCreate = jest
      .fn()
      .mockRejectedValue(new Error('Unexpected API failure'));

    await adapter.loadAttachments({ create: mockCreate });

    expect(emitSpy).toHaveBeenCalledWith(
      types_1.LoaderEventType.AttachmentLoadingError,
      expect.objectContaining({
        error: expect.objectContaining({
          message: expect.stringContaining('Error during attachment loading'),
        }),
      })
    );
    expect(exitSpy).toHaveBeenCalledWith(1);
  });
});
|
|
1243
|
-
});
|