@devrev/ts-adaas 1.15.1 → 1.15.2-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts +7 -0
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts.map +1 -1
- package/dist/attachments-streaming/attachments-streaming-pool.js +31 -2
- package/dist/attachments-streaming/attachments-streaming-pool.test.js +58 -4
- package/dist/state/state.interfaces.d.ts +8 -1
- package/dist/state/state.interfaces.d.ts.map +1 -1
- package/package.json +1 -1
package/dist/attachments-streaming/attachments-streaming-pool.d.ts
@@ -10,6 +10,13 @@ export declare class AttachmentsStreamingPool<ConnectorState> {
     private readonly PROGRESS_REPORT_INTERVAL;
     constructor({ adapter, attachments, batchSize, stream, }: AttachmentsStreamingPoolParams<ConnectorState>);
     private updateProgress;
+    /**
+     * Migrates processed attachments from the legacy string[] format to the new ProcessedAttachment[] format.
+     *
+     * @param attachments - The attachments list to migrate (either string[] or ProcessedAttachment[])
+     * @returns Migrated array of ProcessedAttachment objects, or empty array if input is invalid
+     */
+    private migrateProcessedAttachments;
     streamAll(): Promise<ProcessAttachmentReturnType>;
     startPoolStreaming(): Promise<void>;
 }
package/dist/attachments-streaming/attachments-streaming-pool.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"attachments-streaming-pool.d.ts","sourceRoot":"","sources":["../../src/attachments-streaming/attachments-streaming-pool.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,2BAA2B,EAC5B,MAAM,UAAU,CAAC;AAElB,OAAO,EAAE,8BAA8B,EAAE,MAAM,yCAAyC,CAAC;
+{"version":3,"file":"attachments-streaming-pool.d.ts","sourceRoot":"","sources":["../../src/attachments-streaming/attachments-streaming-pool.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,2BAA2B,EAC5B,MAAM,UAAU,CAAC;AAElB,OAAO,EAAE,8BAA8B,EAAE,MAAM,yCAAyC,CAAC;AAGzF,qBAAa,wBAAwB,CAAC,cAAc;IAClD,OAAO,CAAC,OAAO,CAAgC;IAC/C,OAAO,CAAC,WAAW,CAAyB;IAC5C,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAA4C;IAE1D,OAAO,CAAC,mBAAmB,CAAa;IACxC,OAAO,CAAC,QAAQ,CAAC,wBAAwB,CAAM;gBAEnC,EACV,OAAO,EACP,WAAW,EACX,SAAc,EACd,MAAM,GACP,EAAE,8BAA8B,CAAC,cAAc,CAAC;YAQnC,cAAc;IAS5B;;;;;OAKG;IAEH,OAAO,CAAC,2BAA2B;IAsB7B,SAAS,IAAI,OAAO,CAAC,2BAA2B,CAAC;IA8CjD,kBAAkB;CA0EzB"}
package/dist/attachments-streaming/attachments-streaming-pool.js
@@ -20,6 +20,31 @@ class AttachmentsStreamingPool {
             await (0, helpers_1.sleep)(100);
         }
     }
+    /**
+     * Migrates processed attachments from the legacy string[] format to the new ProcessedAttachment[] format.
+     *
+     * @param attachments - The attachments list to migrate (either string[] or ProcessedAttachment[])
+     * @returns Migrated array of ProcessedAttachment objects, or empty array if input is invalid
+     */
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    migrateProcessedAttachments(attachments) {
+        // Handle null/undefined
+        if (!attachments || !Array.isArray(attachments)) {
+            return [];
+        }
+        // If already migrated (first element is an object), return as-is
+        if (attachments.length > 0 && typeof attachments[0] === 'object') {
+            return attachments;
+        }
+        // Migrate old string[] format
+        if (attachments.length > 0 && typeof attachments[0] === 'string') {
+            return attachments.map((it) => ({
+                id: it,
+                parent_id: '',
+            }));
+        }
+        return [];
+    }
     async streamAll() {
         console.log(`Starting download of ${this.attachments.length} attachments, streaming ${this.batchSize} at once.`);
         if (!this.adapter.state.toDevRev) {
@@ -34,6 +59,10 @@ class AttachmentsStreamingPool {
             this.adapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList =
                 [];
         }
+        // Migrate old processed attachments to the new format.
+        this.adapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList =
+            this.migrateProcessedAttachments(this.adapter.state.toDevRev.attachmentsMetadata
+                .lastProcessedAttachmentsIdsList);
         // Start initial batch of promises up to batchSize limit
         const initialBatchSize = Math.min(this.batchSize, this.attachments.length);
         const initialPromises = [];
@@ -66,7 +95,7 @@ class AttachmentsStreamingPool {
                 break; // Exit if no more attachments
             }
             if (this.adapter.state.toDevRev &&
-                ((_a = this.adapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList) === null || _a === void 0 ? void 0 : _a.
+                ((_a = this.adapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList) === null || _a === void 0 ? void 0 : _a.some((it) => it.id == attachment.id && it.parent_id == attachment.parent_id))) {
                 continue; // Skip if the attachment ID is already processed
             }
             try {
@@ -83,7 +112,7 @@ class AttachmentsStreamingPool {
             }
             // No rate limiting, process normally
             if ((_c = (_b = this.adapter.state.toDevRev) === null || _b === void 0 ? void 0 : _b.attachmentsMetadata) === null || _c === void 0 ? void 0 : _c.lastProcessedAttachmentsIdsList) {
-                (_d = this.adapter.state.toDevRev) === null || _d === void 0 ? void 0 : _d.attachmentsMetadata.lastProcessedAttachmentsIdsList.push(attachment.id);
+                (_d = this.adapter.state.toDevRev) === null || _d === void 0 ? void 0 : _d.attachmentsMetadata.lastProcessedAttachmentsIdsList.push({ id: attachment.id, parent_id: attachment.parent_id });
             }
             await this.updateProgress();
         }
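Note: for readers skimming the compiled output above, here is a minimal TypeScript sketch of the migration behavior the new method implements. The standalone function and its name are illustrative only, not part of the package API; the shapes and checks are taken from the hunks above.

// Illustrative sketch (hypothetical helper, not exported by @devrev/ts-adaas).
interface ProcessedAttachment {
  id: string;
  parent_id: string;
}

function migrateProcessedAttachmentsSketch(list: unknown): ProcessedAttachment[] {
  // Missing or invalid state resolves to an empty list.
  if (!list || !Array.isArray(list)) {
    return [];
  }
  // Already-migrated entries (objects) pass through unchanged.
  if (list.length > 0 && typeof list[0] === 'object') {
    return list as ProcessedAttachment[];
  }
  // Legacy string[] entries gain an empty parent_id.
  if (list.length > 0 && typeof list[0] === 'string') {
    return (list as string[]).map((id) => ({ id, parent_id: '' }));
  }
  return [];
}

// Example: legacy IDs are wrapped, matching the new test expectations.
// migrateProcessedAttachmentsSketch(['attachment-1']) -> [{ id: 'attachment-1', parent_id: '' }]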
package/dist/attachments-streaming/attachments-streaming-pool.test.js
@@ -135,11 +135,32 @@ describe(attachments_streaming_pool_1.AttachmentsStreamingPool.name, () => {
         const result = await pool.streamAll();
         expect(result).toEqual({ delay: 5000 });
     });
+    it('should resume attachment extraction if it encounters old ids', async () => {
+        var _a;
+        // Test migration from old string[] format to new ProcessedAttachment[] format
+        // Using 'as any' because we're intentionally testing legacy data format
+        mockAdapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList =
+            ['attachment-1', 'attachment-2'];
+        const pool = new attachments_streaming_pool_1.AttachmentsStreamingPool({
+            adapter: mockAdapter,
+            attachments: mockAttachments,
+            stream: mockStream,
+        });
+        const result = await pool.streamAll();
+        expect((_a = mockAdapter.state.toDevRev) === null || _a === void 0 ? void 0 : _a.attachmentsMetadata.lastProcessedAttachmentsIdsList).toEqual([
+            { id: 'attachment-1', parent_id: '' },
+            { id: 'attachment-2', parent_id: '' },
+            { id: 'attachment-1', parent_id: 'parent-1' },
+            { id: 'attachment-2', parent_id: 'parent-2' },
+            { id: 'attachment-3', parent_id: 'parent-3' },
+        ]);
+        expect(result).toEqual({});
+    });
 });
 describe(attachments_streaming_pool_1.AttachmentsStreamingPool.prototype.startPoolStreaming.name, () => {
     it('should skip already processed attachments', async () => {
         mockAdapter.state.toDevRev.attachmentsMetadata.lastProcessedAttachmentsIdsList =
-            ['attachment-1'];
+            [{ id: 'attachment-1', parent_id: 'parent-1' }];
         mockAdapter.processAttachment.mockResolvedValue({});
         const pool = new attachments_streaming_pool_1.AttachmentsStreamingPool({
             adapter: mockAdapter,
@@ -158,7 +179,11 @@ describe(attachments_streaming_pool_1.AttachmentsStreamingPool.name, () => {
         });
         await pool.streamAll();
         expect(mockAdapter.state.toDevRev.attachmentsMetadata
-            .lastProcessedAttachmentsIdsList).toEqual([
+            .lastProcessedAttachmentsIdsList).toEqual([
+            { id: 'attachment-1', parent_id: 'parent-1' },
+            { id: 'attachment-2', parent_id: 'parent-2' },
+            { id: 'attachment-3', parent_id: 'parent-3' },
+        ]);
     });
     it('should handle processing errors gracefully', async () => {
         const error = new Error('Processing failed');
@@ -174,7 +199,16 @@ describe(attachments_streaming_pool_1.AttachmentsStreamingPool.name, () => {
         await pool.streamAll();
         expect(console.warn).toHaveBeenCalledWith('Skipping attachment with ID attachment-2 due to error in processAttachment function', error);
         expect(mockAdapter.state.toDevRev.attachmentsMetadata
-            .lastProcessedAttachmentsIdsList).toEqual([
+            .lastProcessedAttachmentsIdsList).toEqual([
+            {
+                id: 'attachment-1',
+                parent_id: 'parent-1',
+            },
+            {
+                id: 'attachment-3',
+                parent_id: 'parent-3',
+            },
+        ]);
     });
     it('should stop processing when rate limit delay is encountered', async () => {
         mockAdapter.processAttachment
@@ -189,7 +223,10 @@ describe(attachments_streaming_pool_1.AttachmentsStreamingPool.name, () => {
         await pool.streamAll();
         expect(mockAdapter.processAttachment).toHaveBeenCalledTimes(3);
         expect(mockAdapter.state.toDevRev.attachmentsMetadata
-            .lastProcessedAttachmentsIdsList).toEqual([
+            .lastProcessedAttachmentsIdsList).toEqual([
+            { id: 'attachment-1', parent_id: 'parent-1' },
+            { id: 'attachment-3', parent_id: 'parent-3' },
+        ]);
     });
     it('should pass correct parameters to processAttachment', async () => {
         mockAdapter.processAttachment.mockResolvedValue({});
@@ -224,6 +261,23 @@ describe(attachments_streaming_pool_1.AttachmentsStreamingPool.name, () => {
         await pool.streamAll();
        expect(mockAdapter.processAttachment).toHaveBeenCalledTimes(3);
     });
+    it('[edge] should upload attachments with same id, but different parent_id', async () => {
+        mockAdapter.processAttachment.mockResolvedValue({});
+        mockAttachments.push({
+            id: 'attachment-1',
+            url: 'http://example.com/file5.jpg',
+            file_name: 'file5.jpg',
+            parent_id: 'parent-4',
+        });
+        const pool = new attachments_streaming_pool_1.AttachmentsStreamingPool({
+            adapter: mockAdapter,
+            attachments: mockAttachments,
+            batchSize: 1,
+            stream: mockStream,
+        });
+        await pool.streamAll();
+        expect(mockAdapter.processAttachment).toHaveBeenCalledTimes(4);
+    });
     it('[edge] should handle batch size of 1', async () => {
         mockAdapter.processAttachment.mockResolvedValue({});
         const pool = new attachments_streaming_pool_1.AttachmentsStreamingPool({
package/dist/state/state.interfaces.d.ts
@@ -18,9 +18,16 @@ export interface ToDevRev {
     attachmentsMetadata: {
         artifactIds: string[];
         lastProcessed: number;
-        lastProcessedAttachmentsIdsList?:
+        lastProcessedAttachmentsIdsList?: ProcessedAttachment[];
     };
 }
+/**
+ * Attachment structure, that stores both attachment id and its parent_id for deduplication on the SDK side.
+ */
+export interface ProcessedAttachment {
+    id: string;
+    parent_id: string;
+}
 export interface FromDevRev {
     filesToLoad: FileToLoad[];
 }
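Note: the dedup check in the compiled streamAll above now compares both fields of this interface, so the same attachment id under a different parent_id is streamed again. A rough TypeScript sketch of that check (the helper name is illustrative and not part of the package; the shape mirrors the interface in the hunk above):

// Illustrative helper, not part of the package API.
type ProcessedAttachment = { id: string; parent_id: string };

function isAlreadyProcessed(
  processed: ProcessedAttachment[] | undefined,
  attachment: { id: string; parent_id: string },
): boolean {
  // Both id and parent_id must match a stored entry for the attachment to be skipped.
  return !!processed?.some(
    (it) => it.id === attachment.id && it.parent_id === attachment.parent_id,
  );
}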
package/dist/state/state.interfaces.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"state.interfaces.d.ts","sourceRoot":"","sources":["../../src/state/state.interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,iBAAiB,CAAC;AACvD,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,kBAAkB,CAAC;AAExD,MAAM,WAAW,QAAQ;IACvB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED;;;GAGG;AACH,MAAM,MAAM,YAAY,CAAC,cAAc,IAAI,cAAc,GAAG,QAAQ,CAAC;AAErE,MAAM,WAAW,QAAQ;IACvB,mBAAmB,EAAE;QACnB,WAAW,EAAE,MAAM,EAAE,CAAC;QACtB,aAAa,EAAE,MAAM,CAAC;QACtB,+BAA+B,CAAC,EAAE,
+{"version":3,"file":"state.interfaces.d.ts","sourceRoot":"","sources":["../../src/state/state.interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,iBAAiB,CAAC;AACvD,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,kBAAkB,CAAC;AAExD,MAAM,WAAW,QAAQ;IACvB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC,QAAQ,CAAC,EAAE,QAAQ,CAAC;IACpB,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED;;;GAGG;AACH,MAAM,MAAM,YAAY,CAAC,cAAc,IAAI,cAAc,GAAG,QAAQ,CAAC;AAErE,MAAM,WAAW,QAAQ;IACvB,mBAAmB,EAAE;QACnB,WAAW,EAAE,MAAM,EAAE,CAAC;QACtB,aAAa,EAAE,MAAM,CAAC;QACtB,+BAA+B,CAAC,EAAE,mBAAmB,EAAE,CAAC;KACzD,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,EAAE,EAAE,MAAM,CAAC;IACX,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,UAAU;IACzB,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B;AAED,MAAM,WAAW,cAAc,CAAC,cAAc;IAC5C,KAAK,EAAE,YAAY,CAAC;IACpB,YAAY,EAAE,cAAc,CAAC;IAC7B,oBAAoB,CAAC,EAAE,oBAAoB,CAAC;IAC5C,OAAO,CAAC,EAAE,oBAAoB,CAAC;CAChC;AAED,eAAO,MAAM,kBAAkB;;;;;;;;;;;CAW9B,CAAC;AAEF,eAAO,MAAM,eAAe;;;;;CAK3B,CAAC"}