@devrev/ts-adaas 1.19.4-beta.0 → 1.19.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/attachments-streaming/attachments-streaming-pool.test.js +3 -6
- package/dist/common/event-type-translation.test.d.ts +2 -0
- package/dist/common/event-type-translation.test.d.ts.map +1 -0
- package/dist/common/event-type-translation.test.js +175 -0
- package/dist/common/time-value-resolver.test.js +0 -1
- package/dist/multithreading/create-worker.test.js +34 -16
- package/dist/multithreading/process-task.test.d.ts +2 -0
- package/dist/multithreading/process-task.test.d.ts.map +1 -0
- package/dist/multithreading/process-task.test.js +166 -0
- package/dist/multithreading/spawn/spawn.test.d.ts +2 -0
- package/dist/multithreading/spawn/spawn.test.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.test.js +223 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.emit.test.js +415 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.extraction.test.js +801 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.loading.test.js +598 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.serialization.test.js +71 -0
- package/dist/repo/repo.test.js +41 -0
- package/dist/state/state.extract-window.test.d.ts +2 -0
- package/dist/state/state.extract-window.test.d.ts.map +1 -0
- package/dist/state/state.extract-window.test.js +163 -0
- package/dist/state/state.pending-boundaries.test.d.ts +2 -0
- package/dist/state/state.pending-boundaries.test.d.ts.map +1 -0
- package/dist/state/state.pending-boundaries.test.js +189 -0
- package/dist/state/state.post-state.test.d.ts +2 -0
- package/dist/state/state.post-state.test.d.ts.map +1 -0
- package/dist/state/state.post-state.test.js +77 -0
- package/dist/state/state.test.js +23 -506
- package/dist/state/state.time-value-resolution.test.d.ts +2 -0
- package/dist/state/state.time-value-resolution.test.d.ts.map +1 -0
- package/dist/state/state.time-value-resolution.test.js +175 -0
- package/dist/types/extraction.test.js +57 -21
- package/dist/uploader/uploader.helpers.test.js +0 -11
- package/dist/uploader/uploader.test.js +0 -9
- package/package.json +7 -6
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts +0 -2
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts.map +0 -1
- package/dist/multithreading/worker-adapter/worker-adapter.test.js +0 -1243
|
@@ -0,0 +1,801 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const attachments_streaming_pool_1 = require("../../attachments-streaming/attachments-streaming-pool");
|
|
4
|
+
const state_1 = require("../../state/state");
|
|
5
|
+
const jest_setup_1 = require("../../tests/jest.setup");
|
|
6
|
+
const test_utils_1 = require("../../common/test-utils");
|
|
7
|
+
const types_1 = require("../../types");
|
|
8
|
+
const worker_adapter_1 = require("./worker-adapter");
|
|
9
|
+
/* eslint-disable @typescript-eslint/no-require-imports */
|
|
10
|
+
jest.mock('../../common/control-protocol', () => ({
|
|
11
|
+
emit: jest.fn().mockResolvedValue({}),
|
|
12
|
+
}));
|
|
13
|
+
jest.mock('../../mappers/mappers');
|
|
14
|
+
jest.mock('../../uploader/uploader');
|
|
15
|
+
jest.mock('../../repo/repo');
|
|
16
|
+
jest.mock('node:worker_threads', () => ({
|
|
17
|
+
parentPort: { postMessage: jest.fn() },
|
|
18
|
+
}));
|
|
19
|
+
jest.mock('../../attachments-streaming/attachments-streaming-pool', () => ({
|
|
20
|
+
AttachmentsStreamingPool: jest.fn().mockImplementation(() => ({
|
|
21
|
+
streamAll: jest.fn().mockResolvedValue(undefined),
|
|
22
|
+
})),
|
|
23
|
+
}));
|
|
24
|
+
function makeAdapter(eventType = types_1.EventType.StartExtractingData) {
|
|
25
|
+
const event = (0, test_utils_1.createMockEvent)(jest_setup_1.mockServer.baseUrl, {
|
|
26
|
+
payload: { event_type: eventType },
|
|
27
|
+
});
|
|
28
|
+
const initialState = {
|
|
29
|
+
attachments: { completed: false },
|
|
30
|
+
lastSyncStarted: '',
|
|
31
|
+
lastSuccessfulSyncStarted: '',
|
|
32
|
+
snapInVersionId: '',
|
|
33
|
+
toDevRev: {
|
|
34
|
+
attachmentsMetadata: {
|
|
35
|
+
artifactIds: [],
|
|
36
|
+
lastProcessed: 0,
|
|
37
|
+
lastProcessedAttachmentsIdsList: [],
|
|
38
|
+
},
|
|
39
|
+
},
|
|
40
|
+
};
|
|
41
|
+
const adapterState = new state_1.State({ event, initialState });
|
|
42
|
+
const adapter = new worker_adapter_1.WorkerAdapter({ event, adapterState });
|
|
43
|
+
return { adapter, event, adapterState };
|
|
44
|
+
}
|
|
45
|
+
describe(`${worker_adapter_1.WorkerAdapter.name}.streamAttachments`, () => {
|
|
46
|
+
let adapter;
|
|
47
|
+
beforeEach(() => {
|
|
48
|
+
jest.clearAllMocks();
|
|
49
|
+
({ adapter } = makeAdapter());
|
|
50
|
+
});
|
|
51
|
+
it('should process all artifact batches successfully', async () => {
|
|
52
|
+
// Arrange
|
|
53
|
+
const mockStream = jest.fn();
|
|
54
|
+
adapter.state.toDevRev = {
|
|
55
|
+
attachmentsMetadata: {
|
|
56
|
+
artifactIds: ['artifact1', 'artifact2'],
|
|
57
|
+
lastProcessed: 0,
|
|
58
|
+
lastProcessedAttachmentsIdsList: [],
|
|
59
|
+
},
|
|
60
|
+
};
|
|
61
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
62
|
+
.fn()
|
|
63
|
+
.mockResolvedValueOnce({
|
|
64
|
+
attachments: [
|
|
65
|
+
{
|
|
66
|
+
url: 'http://example.com/file1.pdf',
|
|
67
|
+
id: 'attachment1',
|
|
68
|
+
file_name: 'file1.pdf',
|
|
69
|
+
parent_id: 'parent1',
|
|
70
|
+
},
|
|
71
|
+
{
|
|
72
|
+
url: 'http://example.com/file2.pdf',
|
|
73
|
+
id: 'attachment2',
|
|
74
|
+
file_name: 'file2.pdf',
|
|
75
|
+
parent_id: 'parent2',
|
|
76
|
+
},
|
|
77
|
+
],
|
|
78
|
+
})
|
|
79
|
+
.mockResolvedValueOnce({
|
|
80
|
+
attachments: [
|
|
81
|
+
{
|
|
82
|
+
url: 'http://example.com/file3.pdf',
|
|
83
|
+
id: 'attachment3',
|
|
84
|
+
file_name: 'file3.pdf',
|
|
85
|
+
parent_id: 'parent3',
|
|
86
|
+
},
|
|
87
|
+
],
|
|
88
|
+
});
|
|
89
|
+
adapter.initializeRepos = jest.fn();
|
|
90
|
+
// Act
|
|
91
|
+
const result = await adapter.streamAttachments({
|
|
92
|
+
stream: mockStream,
|
|
93
|
+
});
|
|
94
|
+
// Assert
|
|
95
|
+
expect(adapter.initializeRepos).toHaveBeenCalledWith([
|
|
96
|
+
{ itemType: 'ssor_attachment' },
|
|
97
|
+
]);
|
|
98
|
+
expect(adapter.initializeRepos).toHaveBeenCalledTimes(1);
|
|
99
|
+
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(2);
|
|
100
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
101
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
102
|
+
expect(result).toBeUndefined();
|
|
103
|
+
});
|
|
104
|
+
it('[edge] should handle invalid batch size by using 1 instead', async () => {
|
|
105
|
+
// Arrange
|
|
106
|
+
const mockStream = jest.fn();
|
|
107
|
+
adapter.state.toDevRev = {
|
|
108
|
+
attachmentsMetadata: {
|
|
109
|
+
artifactIds: ['artifact1'],
|
|
110
|
+
lastProcessed: 0,
|
|
111
|
+
lastProcessedAttachmentsIdsList: [],
|
|
112
|
+
},
|
|
113
|
+
};
|
|
114
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
115
|
+
.fn()
|
|
116
|
+
.mockResolvedValue({
|
|
117
|
+
attachments: [
|
|
118
|
+
{
|
|
119
|
+
url: 'http://example.com/file1.pdf',
|
|
120
|
+
id: 'attachment1',
|
|
121
|
+
file_name: 'file1.pdf',
|
|
122
|
+
parent_id: 'parent1',
|
|
123
|
+
},
|
|
124
|
+
],
|
|
125
|
+
});
|
|
126
|
+
adapter.initializeRepos = jest.fn();
|
|
127
|
+
// Act
|
|
128
|
+
const result = await adapter.streamAttachments({
|
|
129
|
+
stream: mockStream,
|
|
130
|
+
batchSize: 0,
|
|
131
|
+
});
|
|
132
|
+
// Assert
|
|
133
|
+
expect(result).toBeUndefined();
|
|
134
|
+
});
|
|
135
|
+
it('[edge] should cap batch size to 50 when batchSize is greater than 50', async () => {
|
|
136
|
+
// Arrange
|
|
137
|
+
const mockStream = jest.fn();
|
|
138
|
+
adapter.state.toDevRev = {
|
|
139
|
+
attachmentsMetadata: {
|
|
140
|
+
artifactIds: ['artifact1'],
|
|
141
|
+
lastProcessed: 0,
|
|
142
|
+
lastProcessedAttachmentsIdsList: [],
|
|
143
|
+
},
|
|
144
|
+
};
|
|
145
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
146
|
+
.fn()
|
|
147
|
+
.mockResolvedValue({
|
|
148
|
+
attachments: [
|
|
149
|
+
{
|
|
150
|
+
url: 'http://example.com/file1.pdf',
|
|
151
|
+
id: 'attachment1',
|
|
152
|
+
file_name: 'file1.pdf',
|
|
153
|
+
parent_id: 'parent1',
|
|
154
|
+
},
|
|
155
|
+
],
|
|
156
|
+
});
|
|
157
|
+
adapter.initializeRepos = jest.fn();
|
|
158
|
+
// Act
|
|
159
|
+
const result = await adapter.streamAttachments({
|
|
160
|
+
stream: mockStream,
|
|
161
|
+
batchSize: 100,
|
|
162
|
+
});
|
|
163
|
+
// Assert
|
|
164
|
+
expect(result).toBeUndefined();
|
|
165
|
+
});
|
|
166
|
+
it('[edge] should handle empty attachments metadata artifact IDs', async () => {
|
|
167
|
+
// Arrange
|
|
168
|
+
const mockStream = jest.fn();
|
|
169
|
+
adapter.state.toDevRev = {
|
|
170
|
+
attachmentsMetadata: {
|
|
171
|
+
artifactIds: [],
|
|
172
|
+
lastProcessed: 0,
|
|
173
|
+
},
|
|
174
|
+
};
|
|
175
|
+
// Act
|
|
176
|
+
const result = await adapter.streamAttachments({
|
|
177
|
+
stream: mockStream,
|
|
178
|
+
});
|
|
179
|
+
// Assert
|
|
180
|
+
expect(result).toBeUndefined();
|
|
181
|
+
});
|
|
182
|
+
it('[edge] should handle errors when getting attachments', async () => {
|
|
183
|
+
// Arrange
|
|
184
|
+
const mockStream = jest.fn();
|
|
185
|
+
adapter.state.toDevRev = {
|
|
186
|
+
attachmentsMetadata: {
|
|
187
|
+
artifactIds: ['artifact1'],
|
|
188
|
+
lastProcessed: 0,
|
|
189
|
+
lastProcessedAttachmentsIdsList: [],
|
|
190
|
+
},
|
|
191
|
+
};
|
|
192
|
+
const mockError = new Error('Failed to get attachments');
|
|
193
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
194
|
+
.fn()
|
|
195
|
+
.mockResolvedValue({
|
|
196
|
+
error: mockError,
|
|
197
|
+
});
|
|
198
|
+
adapter.initializeRepos = jest.fn();
|
|
199
|
+
// Act
|
|
200
|
+
const result = await adapter.streamAttachments({
|
|
201
|
+
stream: mockStream,
|
|
202
|
+
});
|
|
203
|
+
// Assert
|
|
204
|
+
expect(result).toEqual({
|
|
205
|
+
error: mockError,
|
|
206
|
+
});
|
|
207
|
+
});
|
|
208
|
+
it('[edge] should handle empty attachments array from artifact', async () => {
|
|
209
|
+
// Arrange
|
|
210
|
+
const mockStream = jest.fn();
|
|
211
|
+
adapter.state.toDevRev = {
|
|
212
|
+
attachmentsMetadata: {
|
|
213
|
+
artifactIds: ['artifact1'],
|
|
214
|
+
lastProcessed: 0,
|
|
215
|
+
lastProcessedAttachmentsIdsList: [],
|
|
216
|
+
},
|
|
217
|
+
};
|
|
218
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
219
|
+
.fn()
|
|
220
|
+
.mockResolvedValue({
|
|
221
|
+
attachments: [],
|
|
222
|
+
});
|
|
223
|
+
adapter.initializeRepos = jest.fn();
|
|
224
|
+
// Act
|
|
225
|
+
const result = await adapter.streamAttachments({
|
|
226
|
+
stream: mockStream,
|
|
227
|
+
});
|
|
228
|
+
// Assert
|
|
229
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
230
|
+
expect(result).toBeUndefined();
|
|
231
|
+
});
|
|
232
|
+
it('should use custom processors when provided', async () => {
|
|
233
|
+
// Arrange
|
|
234
|
+
const mockStream = jest.fn();
|
|
235
|
+
const mockReducer = jest.fn().mockReturnValue(['custom-reduced']);
|
|
236
|
+
const mockIterator = jest.fn().mockResolvedValue({});
|
|
237
|
+
adapter.state.toDevRev = {
|
|
238
|
+
attachmentsMetadata: {
|
|
239
|
+
artifactIds: ['artifact1'],
|
|
240
|
+
lastProcessed: 0,
|
|
241
|
+
lastProcessedAttachmentsIdsList: [],
|
|
242
|
+
},
|
|
243
|
+
};
|
|
244
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
245
|
+
.fn()
|
|
246
|
+
.mockResolvedValue({
|
|
247
|
+
attachments: [{ id: 'attachment1' }],
|
|
248
|
+
});
|
|
249
|
+
adapter.initializeRepos = jest.fn();
|
|
250
|
+
// Act
|
|
251
|
+
const result = await adapter.streamAttachments({
|
|
252
|
+
stream: mockStream,
|
|
253
|
+
processors: {
|
|
254
|
+
reducer: mockReducer,
|
|
255
|
+
iterator: mockIterator,
|
|
256
|
+
},
|
|
257
|
+
});
|
|
258
|
+
// Assert
|
|
259
|
+
expect(mockReducer).toHaveBeenCalledWith({
|
|
260
|
+
attachments: [{ id: 'attachment1' }],
|
|
261
|
+
adapter: adapter,
|
|
262
|
+
batchSize: 1,
|
|
263
|
+
});
|
|
264
|
+
expect(mockIterator).toHaveBeenCalledWith({
|
|
265
|
+
reducedAttachments: ['custom-reduced'],
|
|
266
|
+
adapter: adapter,
|
|
267
|
+
stream: mockStream,
|
|
268
|
+
});
|
|
269
|
+
expect(result).toBeUndefined();
|
|
270
|
+
});
|
|
271
|
+
it('should handle rate limiting from iterator', async () => {
|
|
272
|
+
// Arrange
|
|
273
|
+
const mockStream = jest.fn();
|
|
274
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => ({
|
|
275
|
+
streamAll: jest.fn().mockResolvedValue({ delay: 30 }),
|
|
276
|
+
}));
|
|
277
|
+
adapter.state.toDevRev = {
|
|
278
|
+
attachmentsMetadata: {
|
|
279
|
+
artifactIds: ['artifact1'],
|
|
280
|
+
lastProcessed: 0,
|
|
281
|
+
lastProcessedAttachmentsIdsList: [],
|
|
282
|
+
},
|
|
283
|
+
};
|
|
284
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
285
|
+
.fn()
|
|
286
|
+
.mockResolvedValue({
|
|
287
|
+
attachments: [{ id: 'attachment1' }],
|
|
288
|
+
});
|
|
289
|
+
adapter.initializeRepos = jest.fn();
|
|
290
|
+
// Act
|
|
291
|
+
const result = await adapter.streamAttachments({
|
|
292
|
+
stream: mockStream,
|
|
293
|
+
});
|
|
294
|
+
// Assert
|
|
295
|
+
expect(result).toEqual({ delay: 30 });
|
|
296
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
297
|
+
'artifact1',
|
|
298
|
+
]);
|
|
299
|
+
});
|
|
300
|
+
it('should handle error from iterator', async () => {
|
|
301
|
+
// Arrange
|
|
302
|
+
const mockStream = jest.fn();
|
|
303
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => ({
|
|
304
|
+
streamAll: jest.fn().mockResolvedValue({
|
|
305
|
+
error: 'Mock error',
|
|
306
|
+
}),
|
|
307
|
+
}));
|
|
308
|
+
adapter.state.toDevRev = {
|
|
309
|
+
attachmentsMetadata: {
|
|
310
|
+
artifactIds: ['artifact1'],
|
|
311
|
+
lastProcessed: 0,
|
|
312
|
+
lastProcessedAttachmentsIdsList: [],
|
|
313
|
+
},
|
|
314
|
+
};
|
|
315
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
316
|
+
.fn()
|
|
317
|
+
.mockResolvedValue({
|
|
318
|
+
attachments: [{ id: 'attachment1' }],
|
|
319
|
+
});
|
|
320
|
+
adapter.initializeRepos = jest.fn();
|
|
321
|
+
// Act
|
|
322
|
+
const result = await adapter.streamAttachments({
|
|
323
|
+
stream: mockStream,
|
|
324
|
+
});
|
|
325
|
+
// Assert
|
|
326
|
+
expect(result).toEqual({ error: 'Mock error' });
|
|
327
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
328
|
+
'artifact1',
|
|
329
|
+
]);
|
|
330
|
+
});
|
|
331
|
+
it('should emit progress event and exit process on timeout, preserving state for resumption', async () => {
|
|
332
|
+
// Arrange
|
|
333
|
+
const mockStream = jest.fn();
|
|
334
|
+
const exitSpy = jest
|
|
335
|
+
.spyOn(process, 'exit')
|
|
336
|
+
.mockImplementation(() => undefined);
|
|
337
|
+
adapter.state.toDevRev = {
|
|
338
|
+
attachmentsMetadata: {
|
|
339
|
+
artifactIds: ['artifact1', 'artifact2', 'artifact3'],
|
|
340
|
+
lastProcessed: 0,
|
|
341
|
+
lastProcessedAttachmentsIdsList: [],
|
|
342
|
+
},
|
|
343
|
+
};
|
|
344
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
345
|
+
.fn()
|
|
346
|
+
.mockResolvedValue({
|
|
347
|
+
attachments: [
|
|
348
|
+
{
|
|
349
|
+
url: 'http://example.com/file1.pdf',
|
|
350
|
+
id: 'attachment1',
|
|
351
|
+
file_name: 'file1.pdf',
|
|
352
|
+
parent_id: 'parent1',
|
|
353
|
+
},
|
|
354
|
+
],
|
|
355
|
+
});
|
|
356
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => ({
|
|
357
|
+
streamAll: jest.fn().mockImplementation(() => {
|
|
358
|
+
adapter.isTimeout = true;
|
|
359
|
+
return {};
|
|
360
|
+
}),
|
|
361
|
+
}));
|
|
362
|
+
adapter.initializeRepos = jest.fn();
|
|
363
|
+
const emitSpy = jest.spyOn(adapter, 'emit').mockResolvedValue();
|
|
364
|
+
// Act
|
|
365
|
+
await adapter.streamAttachments({
|
|
366
|
+
stream: mockStream,
|
|
367
|
+
});
|
|
368
|
+
// Assert
|
|
369
|
+
expect(emitSpy).toHaveBeenCalledWith(types_1.ExtractorEventType.AttachmentExtractionProgress);
|
|
370
|
+
expect(exitSpy).toHaveBeenCalledWith(0);
|
|
371
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
372
|
+
'artifact1',
|
|
373
|
+
'artifact2',
|
|
374
|
+
'artifact3',
|
|
375
|
+
]);
|
|
376
|
+
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(1);
|
|
377
|
+
exitSpy.mockRestore();
|
|
378
|
+
});
|
|
379
|
+
it('should stop after the timeout flips between batches and preserve unprocessed artifacts for resumption', async () => {
|
|
380
|
+
// Arrange: three artifacts. The first batch's streamAll completes
|
|
381
|
+
// successfully; the second sets isTimeout=true mid-run. The third batch
|
|
382
|
+
// must never be reached.
|
|
383
|
+
const mockStream = jest.fn();
|
|
384
|
+
const exitSpy = jest
|
|
385
|
+
.spyOn(process, 'exit')
|
|
386
|
+
.mockImplementation(() => undefined);
|
|
387
|
+
adapter.state.toDevRev = {
|
|
388
|
+
attachmentsMetadata: {
|
|
389
|
+
artifactIds: ['artifact1', 'artifact2', 'artifact3'],
|
|
390
|
+
lastProcessed: 0,
|
|
391
|
+
lastProcessedAttachmentsIdsList: [],
|
|
392
|
+
},
|
|
393
|
+
};
|
|
394
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
395
|
+
.fn()
|
|
396
|
+
.mockResolvedValue({
|
|
397
|
+
attachments: [
|
|
398
|
+
{
|
|
399
|
+
url: 'http://example.com/file.pdf',
|
|
400
|
+
id: 'attachment-x',
|
|
401
|
+
file_name: 'file.pdf',
|
|
402
|
+
parent_id: 'parent-x',
|
|
403
|
+
},
|
|
404
|
+
],
|
|
405
|
+
});
|
|
406
|
+
// First call: clean streamAll. Second call: flip isTimeout AFTER streaming.
|
|
407
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool
|
|
408
|
+
.mockImplementationOnce(() => ({
|
|
409
|
+
streamAll: jest.fn().mockResolvedValue({}),
|
|
410
|
+
}))
|
|
411
|
+
.mockImplementationOnce(() => ({
|
|
412
|
+
streamAll: jest.fn().mockImplementation(() => {
|
|
413
|
+
adapter.isTimeout = true;
|
|
414
|
+
return {};
|
|
415
|
+
}),
|
|
416
|
+
}));
|
|
417
|
+
adapter.initializeRepos = jest.fn();
|
|
418
|
+
const emitSpy = jest.spyOn(adapter, 'emit').mockResolvedValue();
|
|
419
|
+
// Act
|
|
420
|
+
await adapter.streamAttachments({ stream: mockStream });
|
|
421
|
+
// Assert
|
|
422
|
+
// - Fetched attachments for the first two artifacts only; the third never ran
|
|
423
|
+
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(2);
|
|
424
|
+
// - Progress emitted and process.exit(0) called once the timeout was detected
|
|
425
|
+
expect(emitSpy).toHaveBeenCalledWith(types_1.ExtractorEventType.AttachmentExtractionProgress);
|
|
426
|
+
expect(exitSpy).toHaveBeenCalledWith(0);
|
|
427
|
+
// - Artifact 1 was shifted out cleanly; artifact 2 remains (timeout caught
|
|
428
|
+
// before its shift) along with the untouched artifact 3
|
|
429
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
430
|
+
'artifact2',
|
|
431
|
+
'artifact3',
|
|
432
|
+
]);
|
|
433
|
+
exitSpy.mockRestore();
|
|
434
|
+
});
|
|
435
|
+
it('should reset lastProcessed and attachment IDs list after processing all artifacts', async () => {
|
|
436
|
+
// Arrange
|
|
437
|
+
const mockStream = jest.fn();
|
|
438
|
+
adapter.state.toDevRev = {
|
|
439
|
+
attachmentsMetadata: {
|
|
440
|
+
artifactIds: ['artifact1'],
|
|
441
|
+
lastProcessed: 0,
|
|
442
|
+
lastProcessedAttachmentsIdsList: [],
|
|
443
|
+
},
|
|
444
|
+
};
|
|
445
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
446
|
+
.fn()
|
|
447
|
+
.mockResolvedValueOnce({
|
|
448
|
+
attachments: [
|
|
449
|
+
{
|
|
450
|
+
url: 'http://example.com/file1.pdf',
|
|
451
|
+
id: 'attachment1',
|
|
452
|
+
file_name: 'file1.pdf',
|
|
453
|
+
parent_id: 'parent1',
|
|
454
|
+
},
|
|
455
|
+
{
|
|
456
|
+
url: 'http://example.com/file2.pdf',
|
|
457
|
+
id: 'attachment2',
|
|
458
|
+
file_name: 'file2.pdf',
|
|
459
|
+
parent_id: 'parent2',
|
|
460
|
+
},
|
|
461
|
+
{
|
|
462
|
+
url: 'http://example.com/file3.pdf',
|
|
463
|
+
id: 'attachment3',
|
|
464
|
+
file_name: 'file3.pdf',
|
|
465
|
+
parent_id: 'parent3',
|
|
466
|
+
},
|
|
467
|
+
],
|
|
468
|
+
});
|
|
469
|
+
adapter.processAttachment = jest.fn().mockResolvedValue(null);
|
|
470
|
+
// Act
|
|
471
|
+
await adapter.streamAttachments({
|
|
472
|
+
stream: mockStream,
|
|
473
|
+
});
|
|
474
|
+
// Assert
|
|
475
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toHaveLength(0);
|
|
476
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
477
|
+
});
|
|
478
|
+
});
|
|
479
|
+
describe(`${worker_adapter_1.WorkerAdapter.name}.processAttachment`, () => {
|
|
480
|
+
let adapter;
|
|
481
|
+
beforeEach(() => {
|
|
482
|
+
jest.clearAllMocks();
|
|
483
|
+
({ adapter } = makeAdapter(types_1.EventType.StartExtractingAttachments));
|
|
484
|
+
});
|
|
485
|
+
afterEach(() => {
|
|
486
|
+
jest.restoreAllMocks();
|
|
487
|
+
});
|
|
488
|
+
const createMockHttpStream = (headers = {}) => ({
|
|
489
|
+
headers,
|
|
490
|
+
data: { destroy: jest.fn() },
|
|
491
|
+
});
|
|
492
|
+
const makeAttachment = (overrides = {}) => (Object.assign({ id: 'att-1', url: 'https://example.com/file.pdf', file_name: 'file.pdf', parent_id: 'parent-1', content_type: 'application/pdf' }, overrides));
|
|
493
|
+
function setupUploaderHappyPath() {
|
|
494
|
+
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
495
|
+
response: {
|
|
496
|
+
artifact_id: 'art_1',
|
|
497
|
+
upload_url: 'https://upload',
|
|
498
|
+
form_data: [],
|
|
499
|
+
},
|
|
500
|
+
});
|
|
501
|
+
adapter['uploader'].streamArtifact = jest
|
|
502
|
+
.fn()
|
|
503
|
+
.mockResolvedValue({ response: {} });
|
|
504
|
+
adapter['uploader'].confirmArtifactUpload = jest
|
|
505
|
+
.fn()
|
|
506
|
+
.mockResolvedValue({ response: {} });
|
|
507
|
+
const pushMock = jest.fn().mockResolvedValue(undefined);
|
|
508
|
+
adapter.getRepo = jest.fn().mockReturnValue({ push: pushMock });
|
|
509
|
+
return pushMock;
|
|
510
|
+
}
|
|
511
|
+
// ---- content-type resolution ----
|
|
512
|
+
it('should use attachment.content_type when provided, ignoring HTTP header', async () => {
|
|
513
|
+
// Arrange
|
|
514
|
+
setupUploaderHappyPath();
|
|
515
|
+
const mockStream = jest.fn().mockResolvedValue({
|
|
516
|
+
httpStream: createMockHttpStream({
|
|
517
|
+
'content-type': 'text/plain',
|
|
518
|
+
'content-length': '100',
|
|
519
|
+
}),
|
|
520
|
+
});
|
|
521
|
+
// Act
|
|
522
|
+
await adapter.processAttachment(makeAttachment({ content_type: 'application/pdf' }), mockStream);
|
|
523
|
+
// Assert
|
|
524
|
+
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('file.pdf', 'application/pdf', 100);
|
|
525
|
+
});
|
|
526
|
+
it('should use HTTP header content-type when attachment.content_type is not set', async () => {
|
|
527
|
+
// Arrange
|
|
528
|
+
setupUploaderHappyPath();
|
|
529
|
+
const mockStream = jest.fn().mockResolvedValue({
|
|
530
|
+
httpStream: createMockHttpStream({
|
|
531
|
+
'content-type': 'image/jpeg',
|
|
532
|
+
'content-length': '200',
|
|
533
|
+
}),
|
|
534
|
+
});
|
|
535
|
+
const attachment = {
|
|
536
|
+
id: 'att-2',
|
|
537
|
+
url: 'https://example.com/photo.jpg',
|
|
538
|
+
file_name: 'photo.jpg',
|
|
539
|
+
parent_id: 'parent-2',
|
|
540
|
+
};
|
|
541
|
+
// Act
|
|
542
|
+
await adapter.processAttachment(attachment, mockStream);
|
|
543
|
+
// Assert
|
|
544
|
+
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('photo.jpg', 'image/jpeg', 200);
|
|
545
|
+
});
|
|
546
|
+
it('should fall back to application/octet-stream when neither content_type nor HTTP header is set', async () => {
|
|
547
|
+
// Arrange
|
|
548
|
+
setupUploaderHappyPath();
|
|
549
|
+
const mockStream = jest.fn().mockResolvedValue({
|
|
550
|
+
httpStream: createMockHttpStream({}),
|
|
551
|
+
});
|
|
552
|
+
const attachment = {
|
|
553
|
+
id: 'att-3',
|
|
554
|
+
url: 'https://example.com/file.bin',
|
|
555
|
+
file_name: 'file.bin',
|
|
556
|
+
parent_id: 'parent-3',
|
|
557
|
+
};
|
|
558
|
+
// Act
|
|
559
|
+
await adapter.processAttachment(attachment, mockStream);
|
|
560
|
+
// Assert
|
|
561
|
+
expect(adapter['uploader'].getArtifactUploadUrl).toHaveBeenCalledWith('file.bin', 'application/octet-stream', undefined);
|
|
562
|
+
});
|
|
563
|
+
// ---- error paths ----
|
|
564
|
+
it('should return the stream error directly when the stream function returns an error', async () => {
|
|
565
|
+
// Arrange
|
|
566
|
+
const stream = jest
|
|
567
|
+
.fn()
|
|
568
|
+
.mockResolvedValue({ error: new Error('stream failed') });
|
|
569
|
+
// Act
|
|
570
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
571
|
+
// Assert
|
|
572
|
+
expect(result === null || result === void 0 ? void 0 : result.error).toBeDefined();
|
|
573
|
+
});
|
|
574
|
+
it('should propagate a rate-limit delay from the stream function', async () => {
|
|
575
|
+
// Arrange
|
|
576
|
+
const stream = jest.fn().mockResolvedValue({ delay: 5 });
|
|
577
|
+
// Act
|
|
578
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
579
|
+
// Assert
|
|
580
|
+
expect(result === null || result === void 0 ? void 0 : result.delay).toBe(5);
|
|
581
|
+
});
|
|
582
|
+
it('should return an error containing the attachment ID when getArtifactUploadUrl fails', async () => {
|
|
583
|
+
var _a, _b;
|
|
584
|
+
// Arrange
|
|
585
|
+
const stream = jest
|
|
586
|
+
.fn()
|
|
587
|
+
.mockResolvedValue({ httpStream: createMockHttpStream() });
|
|
588
|
+
adapter['uploader'].getArtifactUploadUrl = jest
|
|
589
|
+
.fn()
|
|
590
|
+
.mockResolvedValue({ error: new Error('upload url failed') });
|
|
591
|
+
// Act
|
|
592
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
593
|
+
// Assert
|
|
594
|
+
expect((_a = result === null || result === void 0 ? void 0 : result.error) === null || _a === void 0 ? void 0 : _a.message).toContain('att-1');
|
|
595
|
+
expect((_b = result === null || result === void 0 ? void 0 : result.error) === null || _b === void 0 ? void 0 : _b.message).toContain('preparing artifact');
|
|
596
|
+
});
|
|
597
|
+
it('should return an error when streamArtifact fails', async () => {
|
|
598
|
+
var _a;
|
|
599
|
+
// Arrange
|
|
600
|
+
const stream = jest
|
|
601
|
+
.fn()
|
|
602
|
+
.mockResolvedValue({ httpStream: createMockHttpStream() });
|
|
603
|
+
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
604
|
+
response: {
|
|
605
|
+
artifact_id: 'art-1',
|
|
606
|
+
upload_url: 'https://upload',
|
|
607
|
+
form_data: [],
|
|
608
|
+
},
|
|
609
|
+
});
|
|
610
|
+
adapter['uploader'].streamArtifact = jest
|
|
611
|
+
.fn()
|
|
612
|
+
.mockResolvedValue({ error: new Error('stream failed') });
|
|
613
|
+
// Act
|
|
614
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
615
|
+
// Assert
|
|
616
|
+
expect((_a = result === null || result === void 0 ? void 0 : result.error) === null || _a === void 0 ? void 0 : _a.message).toContain('streaming to artifact');
|
|
617
|
+
});
|
|
618
|
+
it('should return an error when confirmArtifactUpload fails', async () => {
|
|
619
|
+
var _a;
|
|
620
|
+
// Arrange
|
|
621
|
+
const stream = jest
|
|
622
|
+
.fn()
|
|
623
|
+
.mockResolvedValue({ httpStream: createMockHttpStream() });
|
|
624
|
+
adapter['uploader'].getArtifactUploadUrl = jest.fn().mockResolvedValue({
|
|
625
|
+
response: {
|
|
626
|
+
artifact_id: 'art-1',
|
|
627
|
+
upload_url: 'https://upload',
|
|
628
|
+
form_data: [],
|
|
629
|
+
},
|
|
630
|
+
});
|
|
631
|
+
adapter['uploader'].streamArtifact = jest
|
|
632
|
+
.fn()
|
|
633
|
+
.mockResolvedValue({ response: {} });
|
|
634
|
+
adapter['uploader'].confirmArtifactUpload = jest
|
|
635
|
+
.fn()
|
|
636
|
+
.mockResolvedValue({ error: new Error('confirm failed') });
|
|
637
|
+
// Act
|
|
638
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
639
|
+
// Assert
|
|
640
|
+
expect((_a = result === null || result === void 0 ? void 0 : result.error) === null || _a === void 0 ? void 0 : _a.message).toContain('confirming upload');
|
|
641
|
+
});
|
|
642
|
+
it.each([
|
|
643
|
+
{ inline: true, expected: true },
|
|
644
|
+
{ inline: false, expected: false },
|
|
645
|
+
])('should set inline=$expected on the ssorAttachment when attachment.inline=$inline', async ({ inline, expected }) => {
|
|
646
|
+
// Arrange
|
|
647
|
+
const pushMock = setupUploaderHappyPath();
|
|
648
|
+
const stream = jest
|
|
649
|
+
.fn()
|
|
650
|
+
.mockResolvedValue({ httpStream: createMockHttpStream() });
|
|
651
|
+
// Act
|
|
652
|
+
await adapter.processAttachment(makeAttachment({ inline }), stream);
|
|
653
|
+
// Assert
|
|
654
|
+
const ssorItem = pushMock.mock.calls[0][0][0];
|
|
655
|
+
expect(ssorItem.inline).toBe(expected);
|
|
656
|
+
});
|
|
657
|
+
it('should return a descriptive error when the stream function returns no httpStream', async () => {
|
|
658
|
+
var _a;
|
|
659
|
+
// Arrange
|
|
660
|
+
const stream = jest.fn().mockResolvedValue({ httpStream: null });
|
|
661
|
+
// Act
|
|
662
|
+
const result = await adapter.processAttachment(makeAttachment(), stream);
|
|
663
|
+
// Assert
|
|
664
|
+
expect((_a = result === null || result === void 0 ? void 0 : result.error) === null || _a === void 0 ? void 0 : _a.message).toContain('Error while opening attachment stream');
|
|
665
|
+
});
|
|
666
|
+
});
|
|
667
|
+
describe(`${worker_adapter_1.WorkerAdapter.name}.initializeRepos — event size threshold`, () => {
|
|
668
|
+
it('should set isTimeout=true once the cumulative artifact payload exceeds EVENT_SIZE_THRESHOLD_BYTES', () => {
|
|
669
|
+
// Arrange
|
|
670
|
+
const { adapter } = makeAdapter();
|
|
671
|
+
let capturedOnUpload;
|
|
672
|
+
const { Repo } = require('../../repo/repo');
|
|
673
|
+
Repo.mockImplementationOnce((opts) => {
|
|
674
|
+
capturedOnUpload = opts.onUpload;
|
|
675
|
+
return { itemType: 'issues', upload: jest.fn(), uploadedArtifacts: [] };
|
|
676
|
+
});
|
|
677
|
+
// Act
|
|
678
|
+
adapter.initializeRepos([{ itemType: 'issues' }]);
|
|
679
|
+
expect(capturedOnUpload).toBeDefined();
|
|
680
|
+
capturedOnUpload({
|
|
681
|
+
id: 'artifact-x',
|
|
682
|
+
item_count: 1,
|
|
683
|
+
item_type: 'x'.repeat(200000),
|
|
684
|
+
});
|
|
685
|
+
// Assert
|
|
686
|
+
expect(adapter.isTimeout).toBe(true);
|
|
687
|
+
});
|
|
688
|
+
});
|
|
689
|
+
describe(`${worker_adapter_1.WorkerAdapter.name}.getRepo`, () => {
    it('should return undefined when the requested repo was never initialised', () => {
        // Arrange: a fresh adapter that has had no repos initialised.
        const { adapter } = makeAdapter();
        // Act & Assert: looking up an unknown item type yields undefined.
        expect(adapter.getRepo('non-existent-type')).toBeUndefined();
    });
});
|
|
699
|
+
describe(`${worker_adapter_1.WorkerAdapter.name}.destroyHttpStream`, () => {
    let adapter;
    beforeEach(() => {
        ({ adapter } = makeAdapter());
    });
    afterEach(() => {
        jest.restoreAllMocks();
    });
    // Table of stream payload shapes and which teardown method should fire.
    const teardownCases = [
        {
            label: 'calls destroy() when available',
            data: { destroy: jest.fn(), close: jest.fn() },
            expectDestroy: true,
            expectClose: false,
        },
        {
            label: 'calls close() when destroy is not present',
            data: { close: jest.fn() },
            expectDestroy: false,
            expectClose: true,
        },
        {
            label: 'does not throw when neither method is present',
            data: {},
            expectDestroy: false,
            expectClose: false,
        },
        {
            label: 'does not throw when data is null',
            data: null,
            expectDestroy: false,
            expectClose: false,
        },
    ];
    it.each(teardownCases)('$label', ({ data, expectDestroy, expectClose }) => {
        // Arrange
        const httpStream = { data };
        // Act & Assert: teardown never throws, and the right method is invoked.
        expect(() => adapter['destroyHttpStream'](httpStream)).not.toThrow();
        if (expectDestroy) expect(data.destroy).toHaveBeenCalled();
        if (expectClose) expect(data.close).toHaveBeenCalled();
    });
    it('should not re-throw when destroy() itself throws', () => {
        // Arrange: a stream whose destroy() blows up.
        const failingDestroy = () => {
            throw new Error('stream error');
        };
        const httpStream = { data: { destroy: failingDestroy } };
        // Act & Assert: the error is swallowed by the adapter.
        expect(() => adapter['destroyHttpStream'](httpStream)).not.toThrow();
    });
});
|
|
757
|
+
describe(`${worker_adapter_1.WorkerAdapter.name} — extractionScope`, () => {
    it('should return empty object by default', () => {
        // A freshly-built adapter carries no extraction scope.
        const { adapter } = makeAdapter();
        expect(adapter.extractionScope).toEqual({});
    });
    it('should return extraction scope from adapter state', () => {
        // Arrange: store a scope directly on the adapter's state.
        const { adapter, adapterState } = makeAdapter();
        const scope = {
            tasks: { extract: true },
            users: { extract: false },
        };
        adapterState._extractionScope = scope;
        // Assert: the getter surfaces exactly what is stored in state.
        expect(adapter.extractionScope).toEqual(scope);
    });
});
|
|
772
|
+
describe(`${worker_adapter_1.WorkerAdapter.name} — shouldExtract`, () => {
    it('should return true when extraction scope is empty', () => {
        // With no scope configured, every item type is extracted.
        const { adapter } = makeAdapter();
        expect(adapter.shouldExtract('tasks')).toBe(true);
        expect(adapter.shouldExtract('users')).toBe(true);
    });
    it('should return true when item type is not in scope', () => {
        // Item types absent from the scope default to being extracted.
        const { adapter, adapterState } = makeAdapter();
        const scopeWithTasksOnly = {
            tasks: { extract: true },
        };
        adapterState._extractionScope = scopeWithTasksOnly;
        expect(adapter.shouldExtract('users')).toBe(true);
    });
    it('should return true when item type has extract: true', () => {
        // An explicit extract: true entry is honoured.
        const { adapter, adapterState } = makeAdapter();
        const scopeAllowingTasks = {
            tasks: { extract: true },
        };
        adapterState._extractionScope = scopeAllowingTasks;
        expect(adapter.shouldExtract('tasks')).toBe(true);
    });
    it('should return false when item type has extract: false', () => {
        // An explicit extract: false entry opts that item type out,
        // while others in the scope remain unaffected.
        const { adapter, adapterState } = makeAdapter();
        const mixedScope = {
            tasks: { extract: false },
            users: { extract: true },
        };
        adapterState._extractionScope = mixedScope;
        expect(adapter.shouldExtract('tasks')).toBe(false);
        expect(adapter.shouldExtract('users')).toBe(true);
    });
});
|