@devrev/ts-adaas 1.1.6 → 1.2.0-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +39 -2
- package/dist/common/constants.js +2 -2
- package/dist/common/helpers.d.ts +3 -3
- package/dist/common/helpers.js +9 -5
- package/dist/deprecated/uploader/index.js +18 -8
- package/dist/http/client.js +17 -7
- package/dist/repo/repo.d.ts +1 -1
- package/dist/repo/repo.js +25 -28
- package/dist/tests/from_devrev/loading.test.js +17 -5
- package/dist/tests/test-worker.js +0 -2
- package/dist/types/extraction.d.ts +3 -1
- package/dist/types/extraction.js +3 -1
- package/dist/types/index.d.ts +1 -1
- package/dist/types/loading.d.ts +34 -5
- package/dist/types/loading.js +6 -0
- package/dist/types/workers.d.ts +2 -2
- package/dist/uploader/uploader.js +27 -16
- package/dist/workers/default-workers/delete-loader-attachment-state.d.ts +1 -0
- package/dist/workers/default-workers/delete-loader-attachment-state.js +15 -0
- package/dist/workers/default-workers/delete-loader-state.d.ts +1 -0
- package/dist/workers/default-workers/delete-loader-state.js +15 -0
- package/dist/workers/default-workers/load-attachments.d.ts +1 -0
- package/dist/workers/default-workers/load-attachments.js +19 -0
- package/dist/workers/default-workers/load-data.d.ts +1 -0
- package/dist/workers/default-workers/load-data.js +19 -0
- package/dist/workers/process-task.js +1 -1
- package/dist/workers/spawn.js +22 -18
- package/dist/workers/worker-adapter.d.ts +11 -1
- package/dist/workers/worker-adapter.js +112 -58
- package/package.json +1 -1
package/README.md
CHANGED
@@ -2,6 +2,10 @@
 
 ## Release Notes
 
+### v1.2.0
+
+- Add support for loading attachments from DevRev to the external system.
+
 ### v1.1.6
 
 - Add exponential retry and handle rate-limiting towards DevRev.
@@ -307,9 +311,9 @@ export interface NormalizedAttachment {
 
 ## Loading phases
 
-### 1. Data
+### 1. Loading Data
 
-This phase is defined in `data
+This phase is defined in `load-data.ts` and is responsible for loading the data to the external system.
 
 Loading is done by providing an ordered list of itemTypes to load and their respective create and update functions.
 
@@ -347,3 +351,36 @@ Loading is done by providing an ordered list of itemTypes to load and their resp
 The loading functions `create` and `update` provide loading to the external system. They provide denormalization of the records to the schema of the external system and provide HTTP calls to the external system. Both loading functions must handle rate limiting for the external system and handle errors.
 
 Functions return an ID and modified date of the record in the external system, or specify a rate-limiting offset or errors, if the record could not be created or updated.
+
+### 2. Loading Attachments
+
+This phase is defined in `load-attachments.ts` and is responsible for loading the attachments to the external system.
+
+Loading is done by providing the create function to create attachments in the external system.
+
+```typescript
+processTask({
+  task: async ({ adapter }) => {
+    const { reports, processed_files } = await adapter.loadAttachments({
+      create,
+    });
+
+    await adapter.emit(LoaderEventType.AttachmentLoadingDone, {
+      reports,
+      processed_files,
+    });
+  },
+  onTimeout: async ({ adapter }) => {
+    await adapter.postState();
+    await adapter.emit(LoaderEventType.AttachmentLoadingProgress, {
+      reports: adapter.reports,
+      processed_files: adapter.processedFiles,
+    });
+  },
+});
+
+```
+
+The loading function `create` provides loading to the external system: it makes the API calls to the external system that create the attachments, and handles errors and the external system's rate limiting.
+
+Functions return an ID and modified date of the record in the external system, specify a rate-limiting back-off, or log errors, if the attachment could not be created.
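For orientation, a minimal sketch of such a `create` function follows. It only illustrates the response shape the adapter consumes (`id`/`modifiedDate` on success, `delay` to request a rate-limiting back-off, or `error` on failure); the `externalClient` helper and its `uploadAttachment` call are hypothetical placeholders for whatever HTTP client a snap-in actually uses and are not part of this package.

```typescript
import { ExternalSystemAttachment } from '@devrev/ts-adaas';

// Hypothetical client for the external system's API (not part of the SDK).
declare const externalClient: {
  uploadAttachment(args: {
    parentId: string;
    fileName: string;
    fileType: string;
    sourceUrl: string;
  }): Promise<{ id: string; modified_date: string }>;
};

// Passed to adapter.loadAttachments({ create }).
const create = async ({ item }: { item: ExternalSystemAttachment }) => {
  try {
    // Re-upload the DevRev attachment (downloadable at item.url) to the external system.
    const response = await externalClient.uploadAttachment({
      parentId: item.parent_reference_id,
      fileName: item.file_name,
      fileType: item.file_type,
      sourceUrl: item.url,
    });
    return { id: response.id, modifiedDate: response.modified_date };
  } catch (error: any) {
    if (error?.response?.status === 429) {
      // Ask the adapter to back off instead of failing the attachment.
      return { delay: Number(error.response.headers?.['retry-after']) || 60 };
    }
    return { error: `Could not create attachment ${item.file_name}: ${error}` };
  }
};
```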
package/dist/common/constants.js
CHANGED
@@ -8,7 +8,7 @@ exports.STATELESS_EVENT_TYPES = [
     extraction_1.EventType.ExtractionDataDelete,
     extraction_1.EventType.ExtractionAttachmentsDelete,
     extraction_1.EventType.StartDeletingLoaderState,
-    extraction_1.EventType.
+    extraction_1.EventType.StartDeletingLoaderAttachmentState,
 ];
 exports.ALLOWED_EXTRACTION_EVENT_TYPES = [
     extraction_1.EventType.ExtractionExternalSyncUnitsStart,
@@ -24,7 +24,7 @@ exports.ALLOWED_LOADING_EVENT_TYPES = [
     extraction_1.EventType.StartLoadingData,
     extraction_1.EventType.ContinueLoadingData,
     extraction_1.EventType.StartDeletingLoaderState,
-    extraction_1.EventType.
+    extraction_1.EventType.StartDeletingLoaderAttachmentState,
 ];
 exports.ALLOWED_EVENT_TYPES = [
     ...exports.ALLOWED_EXTRACTION_EVENT_TYPES,
package/dist/common/helpers.d.ts
CHANGED
@@ -1,13 +1,13 @@
 import { AirdropEvent, EventType, ExtractorEventType } from '../types/extraction';
-import { FileToLoad,
+import { FileToLoad, LoaderEventType, LoaderReport, StatsFileObject } from '../types/loading';
 export declare function getTimeoutErrorEventType(eventType: EventType): {
     eventType: ExtractorEventType | LoaderEventType;
 } | null;
 export declare function getSyncDirection({ event }: {
     event: AirdropEvent;
 }): string;
-export declare function getFilesToLoad({
-
+export declare function getFilesToLoad({ supportedItemTypes, statsFile, }: {
+    supportedItemTypes: string[];
     statsFile: StatsFileObject[];
 }): FileToLoad[];
 export declare function addReportToLoaderReport({ loaderReports, report, }: {
package/dist/common/helpers.js
CHANGED
@@ -41,25 +41,29 @@ function getTimeoutErrorEventType(eventType) {
                 eventType: loading_1.LoaderEventType.DataLoadingError,
             };
         case extraction_1.EventType.StartDeletingLoaderState:
-        case extraction_1.EventType.StartDeletingLoaderAttachmentsState:
             return {
                 eventType: loading_1.LoaderEventType.LoaderStateDeletionError,
             };
+        case extraction_1.EventType.StartDeletingLoaderAttachmentState:
+            return {
+                eventType: loading_1.LoaderEventType.LoaderAttachmentStateDeletionError,
+            };
         default:
             console.error('Event type not recognized in getTimeoutErrorEventType function: ' +
                 eventType);
-            return
+            return {
+                eventType: loading_1.LoaderEventType.UnknownEventType,
+            };
     }
 }
 function getSyncDirection({ event }) {
     return event.payload.event_context.mode;
 }
-function getFilesToLoad({
+function getFilesToLoad({ supportedItemTypes, statsFile, }) {
     const filesToLoad = [];
-    if (
+    if (supportedItemTypes.length === 0 || statsFile.length === 0) {
         return [];
     }
-    const supportedItemTypes = itemTypesToLoad.map((itemTypeToLoad) => itemTypeToLoad.itemType);
     const filteredStatsFile = statsFile.filter((file) => supportedItemTypes.includes(file.item_type));
     const orderedFiles = filteredStatsFile.sort((a, b) => {
         const aIndex = supportedItemTypes.indexOf(a.item_type);
package/dist/deprecated/uploader/index.js
CHANGED
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Uploader = void 0;
 const axios_devrev_client_1 = require("../../http/axios-devrev-client");
@@ -62,7 +72,7 @@ class Uploader {
      */
    async upload(filename, entity, fetchedObjects, filetype = 'application/jsonl+json') {
         if (this.local) {
-            this.downloadToLocal(filename, fetchedObjects);
+            await this.downloadToLocal(filename, fetchedObjects);
         }
         const preparedArtifact = await this.prepareArtifact(filename, filetype);
         if (!preparedArtifact) {
package/dist/http/client.js
CHANGED
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.HTTPClient = exports.defaultResponse = void 0;
 const axios_1 = __importStar(require("axios"));
package/dist/repo/repo.d.ts
CHANGED
@@ -10,5 +10,5 @@ export declare class Repo {
     constructor({ event, itemType, normalize, onUpload, options, }: RepoFactoryInterface);
     getItems(): (NormalizedItem | NormalizedAttachment | Item)[];
     upload(batch?: (NormalizedItem | NormalizedAttachment | Item)[]): Promise<void | ErrorRecord>;
-    push(items: Item[]): Promise<
+    push(items: Item[]): Promise<boolean | ErrorRecord>;
 }
package/dist/repo/repo.js
CHANGED
@@ -35,36 +35,33 @@ class Repo {
         }
     }
     async push(items) {
-
-
-
-
-
-
-
-
-
-
+        let recordsToPush;
+        // Normalize items if needed
+        if (this.normalize &&
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT) {
+            recordsToPush = items.map((item) => this.normalize(item));
+        }
+        else {
+            recordsToPush = items;
+        }
+        // Add the new records to the items array
+        this.items.push(...recordsToPush);
+        console.info(`Extracted ${recordsToPush.length} new items of type ${this.itemType}. Total number of items in repo: ${this.items.length}.`);
+        // Upload in batches while the number of items exceeds the batch size
+        while (this.items.length >= constants_1.ARTIFACT_BATCH_SIZE) {
+            // Slice out a batch of ARTIFACT_BATCH_SIZE items to upload
+            const batch = this.items.splice(0, constants_1.ARTIFACT_BATCH_SIZE);
+            try {
+                // Upload the batch
+                await this.upload(batch);
             }
-
-
-
-        // Upload in batches while the number of items exceeds the batch size
-        while (this.items.length >= constants_1.ARTIFACT_BATCH_SIZE) {
-            // Slice out a batch of ARTIFACT_BATCH_SIZE items to upload
-            const batch = this.items.splice(0, constants_1.ARTIFACT_BATCH_SIZE);
-            try {
-                // Upload the batch
-                await this.upload(batch);
-            }
-            catch (error) {
-                console.error('Error while uploading batch', error);
-                reject(error);
-                return;
-            }
+            catch (error) {
+                console.error('Error while uploading batch', error);
+                return false;
+            }
         }
-
-
+        }
+        return true;
     }
 }
 exports.Repo = Repo;
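Note the behavioral change above: `push` no longer wraps its work in a promise executor that rejects on upload failure; it now logs the error and resolves to `false`, resolving to `true` otherwise. A minimal sketch of checking the result from an extraction worker follows; `adapter.getRepo` and the `extractedIssues` array are assumed for illustration and are not part of this diff.

```typescript
// Inside an extraction worker's task function (sketch).
const issuesRepo = adapter.getRepo('issues');
const pushed = await issuesRepo?.push(extractedIssues);
if (pushed !== true) {
  // With 1.2.0 the failure is reported through the return value, not a rejection.
  console.error('Pushing the issues batch failed; stopping this extraction run.');
}
```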
package/dist/tests/from_devrev/loading.test.js
CHANGED
@@ -52,19 +52,28 @@ describe('getFilesToLoad', () => {
     it('should return an empty array if statsFile is empty', () => {
         statsFile = [];
         const itemTypesToLoad = [];
-        const result = (0, helpers_1.getFilesToLoad)({
+        const result = (0, helpers_1.getFilesToLoad)({
+            supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
+            statsFile,
+        });
         expect(result).toEqual([]);
     });
     it('should return an empty array if itemTypesToLoad is empty', () => {
         const itemTypesToLoad = [];
-        const result = (0, helpers_1.getFilesToLoad)({
+        const result = (0, helpers_1.getFilesToLoad)({
+            supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
+            statsFile,
+        });
         expect(result).toEqual([]);
     });
     it('should return an empty array if statsFile has no matching items', () => {
         const itemTypesToLoad = [
             { itemType: 'users', create: jest.fn(), update: jest.fn() },
         ];
-        const result = (0, helpers_1.getFilesToLoad)({
+        const result = (0, helpers_1.getFilesToLoad)({
+            supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
+            statsFile,
+        });
         expect(result).toEqual([]);
     });
     it('should filter out files not in itemTypesToLoad and order them by itemTypesToLoad', () => {
@@ -72,7 +81,10 @@ describe('getFilesToLoad', () => {
             { itemType: 'attachments', create: jest.fn(), update: jest.fn() },
             { itemType: 'issues', create: jest.fn(), update: jest.fn() },
         ];
-        const result = (0, helpers_1.getFilesToLoad)({
+        const result = (0, helpers_1.getFilesToLoad)({
+            supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
+            statsFile,
+        });
         expect(result).toEqual([
             {
                 id: 'don:core:dvrv-us-1:devo/1:artifact/99',
@@ -113,7 +125,7 @@ describe('getFilesToLoad', () => {
             { itemType: 'issues', create: jest.fn(), update: jest.fn() },
         ];
         const result = (0, helpers_1.getFilesToLoad)({
-            itemTypesToLoad,
+            supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
             statsFile,
         });
         expect(result).toEqual([
package/dist/tests/test-worker.js
CHANGED
@@ -5,12 +5,10 @@ const extraction_1 = require("../types/extraction");
 (0, process_task_1.processTask)({
     task: async ({ adapter }) => {
         await adapter.emit(extraction_1.ExtractorEventType.ExtractionExternalSyncUnitsDone, {});
-        return 0;
     },
     onTimeout: async ({ adapter }) => {
         await adapter.emit(extraction_1.ExtractorEventType.ExtractionExternalSyncUnitsError, {
             error: { message: 'External sync unit failed.' },
         });
-        return 0;
     },
 });
package/dist/types/extraction.d.ts
CHANGED
@@ -19,8 +19,10 @@ export declare enum EventType {
     ExtractionAttachmentsDelete = "EXTRACTION_ATTACHMENTS_DELETE",
     StartLoadingData = "START_LOADING_DATA",
     ContinueLoadingData = "CONTINUE_LOADING_DATA",
+    StartLoadingAttachments = "START_LOADING_ATTACHMENTS",
+    ContinueLoadingAttachments = "CONTINUE_LOADING_ATTACHMENTS",
     StartDeletingLoaderState = "START_DELETING_LOADER_STATE",
-
+    StartDeletingLoaderAttachmentState = "START_DELETING_LOADER_ATTACHMENT_STATE"
 }
 /**
  * ExtractorEventType is an enum that defines the different types of events that can be sent from the external extractor to ADaaS.
package/dist/types/extraction.js
CHANGED
@@ -19,8 +19,10 @@ var EventType;
     // Loading
     EventType["StartLoadingData"] = "START_LOADING_DATA";
     EventType["ContinueLoadingData"] = "CONTINUE_LOADING_DATA";
+    EventType["StartLoadingAttachments"] = "START_LOADING_ATTACHMENTS";
+    EventType["ContinueLoadingAttachments"] = "CONTINUE_LOADING_ATTACHMENTS";
     EventType["StartDeletingLoaderState"] = "START_DELETING_LOADER_STATE";
-    EventType["
+    EventType["StartDeletingLoaderAttachmentState"] = "START_DELETING_LOADER_ATTACHMENT_STATE";
 })(EventType || (exports.EventType = EventType = {}));
 /**
  * ExtractorEventType is an enum that defines the different types of events that can be sent from the external extractor to ADaaS.
package/dist/types/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 export { ErrorLevel, ErrorRecord, LogRecord, AdapterUpdateParams, InitialDomainMapping, } from './common';
 export { EventType, ExtractorEventType, ExtractionMode, ExternalSyncUnit, EventContextIn, EventContextOut, ConnectionData, EventData, DomainObjectState, AirdropEvent, AirdropMessage, ExtractorEvent, SyncMode, ExternalSystemAttachmentStreamingParams, ExternalSystemAttachmentStreamingResponse, ExternalSystemAttachmentStreamingFunction, } from './extraction';
-export { LoaderEventType, ExternalSystemItem, ExternalSystemItemLoadingResponse, ExternalSystemItemLoadingParams, } from './loading';
+export { LoaderEventType, ExternalSystemItem, ExternalSystemItemLoadingResponse, ExternalSystemItemLoadingParams, ExternalSystemAttachment, } from './loading';
 export { NormalizedItem, NormalizedAttachment, RepoInterface, } from '../repo/repo.interfaces';
 export { AdapterState } from '../state/state.interfaces';
 export { Artifact, ArtifactsPrepareResponse, UploadResponse, StreamResponse, StreamAttachmentsResponse, SsorAttachment, } from '../uploader/uploader.interfaces';
package/dist/types/loading.d.ts
CHANGED
@@ -15,6 +15,29 @@ export interface FileToLoad {
     lineToProcess: number;
     completed: boolean;
 }
+export interface ExternalSystemAttachment {
+    reference_id: DonV2;
+    parent_type: string;
+    parent_reference_id: DonV2;
+    file_name: string;
+    file_type: string;
+    file_size: number;
+    url: string;
+    valid_until: string;
+    created_by_id: string;
+    created_date: string;
+    modified_by_id: string;
+    modified_date: string;
+}
+export interface ExternalSystemItem {
+    id: {
+        devrev: DonV2;
+        external?: string;
+    };
+    created_date: string;
+    modified_date: string;
+    data: any;
+}
 export interface ExternalSystemItem {
     id: {
         devrev: DonV2;
@@ -24,8 +47,8 @@ export interface ExternalSystemItem {
     modified_date: string;
     data: any;
 }
-export interface ExternalSystemItemLoadingParams {
-    item:
+export interface ExternalSystemItemLoadingParams<Type> {
+    item: Type;
     mappers: Mappers;
     event: AirdropEvent;
 }
@@ -40,11 +63,11 @@ export interface ExternalSystemItemLoadedItem {
     error?: string;
     modifiedDate?: string;
 }
-export type ExternalSystemLoadingFunction = ({ item, mappers, event, }: ExternalSystemItemLoadingParams) => Promise<ExternalSystemItemLoadingResponse>;
+export type ExternalSystemLoadingFunction<Item> = ({ item, mappers, event, }: ExternalSystemItemLoadingParams<Item>) => Promise<ExternalSystemItemLoadingResponse>;
 export interface ItemTypeToLoad {
     itemType: string;
-    create: ExternalSystemLoadingFunction
-    update: ExternalSystemLoadingFunction
+    create: ExternalSystemLoadingFunction<ExternalSystemItem>;
+    update: ExternalSystemLoadingFunction<ExternalSystemItem>;
 }
 export interface ItemTypesToLoadParams {
     itemTypesToLoad: ItemTypeToLoad[];
@@ -89,7 +112,13 @@ export declare enum LoaderEventType {
     DataLoadingDelay = "DATA_LOADING_DELAYED",
     DataLoadingDone = "DATA_LOADING_DONE",
     DataLoadingError = "DATA_LOADING_ERROR",
+    AttachmentLoadingProgress = "ATTACHMENT_LOADING_PROGRESS",
+    AttachmentLoadingDelayed = "ATTACHMENT_LOADING_DELAYED",
+    AttachmentLoadingDone = "ATTACHMENT_LOADING_DONE",
+    AttachmentLoadingError = "ATTACHMENT_LOADING_ERROR",
     LoaderStateDeletionDone = "LOADER_STATE_DELETION_DONE",
     LoaderStateDeletionError = "LOADER_STATE_DELETION_ERROR",
+    LoaderAttachmentStateDeletionDone = "LOADER_ATTACHMENT_STATE_DELETION_DONE",
+    LoaderAttachmentStateDeletionError = "LOADER_ATTACHMENT_STATE_DELETION_ERROR",
     UnknownEventType = "UNKNOWN_EVENT_TYPE"
 }
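In practice, the new generic parameter lets data loaders and attachment loaders share one function shape while being typed over different payloads. A rough sketch using the declarations above (the returned values are placeholders, and the real implementation would call the external system):

```typescript
const createIssue: ExternalSystemLoadingFunction<ExternalSystemItem> = async ({ item }) => {
  // item.data carries the record exported from DevRev; denormalize it and send it
  // to the external system here (omitted), then report the external identifiers.
  return { id: 'external-issue-id', modifiedDate: new Date().toISOString() };
};

const itemTypesToLoad: ItemTypeToLoad[] = [
  // create and update are typed over ExternalSystemItem; attachment loaders use
  // ExternalSystemLoadingFunction<ExternalSystemAttachment> instead.
  { itemType: 'issues', create: createIssue, update: createIssue },
];
```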
package/dist/types/loading.js
CHANGED
@@ -15,7 +15,13 @@ var LoaderEventType;
     LoaderEventType["DataLoadingDelay"] = "DATA_LOADING_DELAYED";
     LoaderEventType["DataLoadingDone"] = "DATA_LOADING_DONE";
     LoaderEventType["DataLoadingError"] = "DATA_LOADING_ERROR";
+    LoaderEventType["AttachmentLoadingProgress"] = "ATTACHMENT_LOADING_PROGRESS";
+    LoaderEventType["AttachmentLoadingDelayed"] = "ATTACHMENT_LOADING_DELAYED";
+    LoaderEventType["AttachmentLoadingDone"] = "ATTACHMENT_LOADING_DONE";
+    LoaderEventType["AttachmentLoadingError"] = "ATTACHMENT_LOADING_ERROR";
     LoaderEventType["LoaderStateDeletionDone"] = "LOADER_STATE_DELETION_DONE";
     LoaderEventType["LoaderStateDeletionError"] = "LOADER_STATE_DELETION_ERROR";
+    LoaderEventType["LoaderAttachmentStateDeletionDone"] = "LOADER_ATTACHMENT_STATE_DELETION_DONE";
+    LoaderEventType["LoaderAttachmentStateDeletionError"] = "LOADER_ATTACHMENT_STATE_DELETION_ERROR";
     LoaderEventType["UnknownEventType"] = "UNKNOWN_EVENT_TYPE";
 })(LoaderEventType || (exports.LoaderEventType = LoaderEventType = {}));
package/dist/types/workers.d.ts
CHANGED
@@ -79,8 +79,8 @@ export interface TaskAdapterInterface<ConnectorState> {
  * @param {function} onTimeout - The task to be executed on timeout, returns exit code
  */
 export interface ProcessTaskInterface<ConnectorState> {
-    task: (params: TaskAdapterInterface<ConnectorState>) => void
-    onTimeout: (params: TaskAdapterInterface<ConnectorState>) => void
+    task: (params: TaskAdapterInterface<ConnectorState>) => Promise<void>;
+    onTimeout: (params: TaskAdapterInterface<ConnectorState>) => Promise<void>;
 }
 /**
  * WorkerEvent represents the standard worker events.
package/dist/uploader/uploader.js
CHANGED
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
@@ -55,10 +65,10 @@ class Uploader {
      */
     async upload(itemType, fetchedObjects) {
         if (this.isLocalDevelopment) {
-            this.downloadToLocal(itemType, fetchedObjects);
+            await this.downloadToLocal(itemType, fetchedObjects);
         }
         // 1. Compress the fetched objects to a gzipped jsonl object
-        const file =
+        const file = this.compressGzip(js_jsonl_1.jsonl.stringify(fetchedObjects));
         if (!file) {
             return {
                 error: { message: 'Error while compressing jsonl object.' },
@@ -174,14 +184,14 @@
             };
         }
         // 3. Decompress the gzipped jsonl object
-        const jsonlObject =
+        const jsonlObject = this.decompressGzip(gzippedJsonlObject);
         if (!jsonlObject) {
             return {
                 error: { message: 'Error while decompressing gzipped jsonl object.' },
             };
         }
         // 4. Parse the jsonl object to get the attachment metadata
-        const jsonObject =
+        const jsonObject = this.parseJsonl(jsonlObject);
         if (!jsonObject) {
             return {
                 error: { message: 'Error while parsing jsonl object.' },
@@ -216,7 +226,7 @@
             }
         }
     }
-
+    compressGzip(jsonlObject) {
         try {
             return zlib_1.default.gzipSync(jsonlObject);
         }
@@ -224,22 +234,23 @@
            console.error('Error while compressing jsonl object.', error);
         }
     }
-
+    decompressGzip(gzippedJsonlObject) {
         try {
-            const jsonlObject =
+            const jsonlObject = zlib_1.default.gunzipSync(gzippedJsonlObject);
             return jsonlObject.toString();
         }
         catch (error) {
             console.error('Error while decompressing gzipped jsonl object.', error);
         }
     }
-
+    parseJsonl(jsonlObject) {
         try {
             return js_jsonl_1.jsonl.parse(jsonlObject);
         }
         catch (error) {
             console.error('Error while parsing jsonl object.', error);
         }
+        return null;
     }
     async getJsonObjectByArtifactId({ artifactId, isGzipped = false, }) {
         const artifactUrl = await this.getArtifactDownloadUrl(artifactId);
@@ -251,7 +262,7 @@
             return;
         }
         if (isGzipped) {
-            const decompressedArtifact =
+            const decompressedArtifact = this.decompressGzip(artifact);
             if (!decompressedArtifact) {
                 return;
             }
package/dist/workers/default-workers/delete-loader-attachment-state.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/workers/default-workers/delete-loader-attachment-state.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const index_1 = require("../../index");
+(0, index_1.processTask)({
+    task: async ({ adapter }) => {
+        await adapter.emit(index_1.LoaderEventType.LoaderAttachmentStateDeletionDone);
+    },
+    onTimeout: async ({ adapter }) => {
+        await adapter.emit(index_1.LoaderEventType.LoaderAttachmentStateDeletionError, {
+            error: {
+                message: 'Failed to delete attachment state. Timeout.',
+            },
+        });
+    },
+});
package/dist/workers/default-workers/delete-loader-state.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/workers/default-workers/delete-loader-state.js
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const index_1 = require("../../index");
+(0, index_1.processTask)({
+    task: async ({ adapter }) => {
+        await adapter.emit(index_1.LoaderEventType.LoaderStateDeletionDone);
+    },
+    onTimeout: async ({ adapter }) => {
+        await adapter.emit(index_1.LoaderEventType.LoaderStateDeletionError, {
+            error: {
+                message: 'Failed to delete data. Lambda timeout.',
+            },
+        });
+    },
+});
package/dist/workers/default-workers/load-attachments.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/workers/default-workers/load-attachments.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const process_task_1 = require("workers/process-task");
+const types_1 = require("types");
+(0, process_task_1.processTask)({
+    task: async ({ adapter }) => {
+        await adapter.emit(types_1.LoaderEventType.AttachmentLoadingDone, {
+            reports: adapter.reports,
+            processed_files: adapter.processedFiles,
+        });
+    },
+    onTimeout: async ({ adapter }) => {
+        await adapter.postState();
+        await adapter.emit(types_1.LoaderEventType.AttachmentLoadingError, {
+            reports: adapter.reports,
+            processed_files: adapter.processedFiles,
+        });
+    },
+});
package/dist/workers/default-workers/load-data.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/workers/default-workers/load-data.js
ADDED
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const process_task_1 = require("workers/process-task");
+const loading_1 = require("../../types/loading");
+(0, process_task_1.processTask)({
+    task: async ({ adapter }) => {
+        await adapter.emit(loading_1.LoaderEventType.DataLoadingDone, {
+            reports: adapter.reports,
+            processed_files: adapter.processedFiles,
+        });
+    },
+    onTimeout: async ({ adapter }) => {
+        await adapter.postState();
+        await adapter.emit(loading_1.LoaderEventType.DataLoadingError, {
+            reports: adapter.reports,
+            processed_files: adapter.processedFiles,
+        });
+    },
+});
package/dist/workers/process-task.js
CHANGED
@@ -8,7 +8,7 @@ const workers_1 = require("../types/workers");
 const logger_1 = require("../logger/logger");
 function processTask({ task, onTimeout, }) {
     if (!node_worker_threads_1.isMainThread) {
-        (async () => {
+        void (async () => {
             const event = node_worker_threads_1.workerData.event;
             const initialState = node_worker_threads_1.workerData.initialState;
             const options = node_worker_threads_1.workerData.options;
package/dist/workers/spawn.js
CHANGED
@@ -6,13 +6,9 @@ const extraction_1 = require("../types/extraction");
 const control_protocol_1 = require("../common/control-protocol");
 const helpers_1 = require("../common/helpers");
 const logger_1 = require("../logger/logger");
-const constants_1 = require("../common/constants");
 const workers_1 = require("../types/workers");
 const create_worker_1 = require("./create-worker");
 function getWorkerPath({ event, connectorWorkerPath, }) {
-    if (!constants_1.ALLOWED_EVENT_TYPES.includes(event.payload.event_type)) {
-        return null;
-    }
     if (connectorWorkerPath)
         return connectorWorkerPath;
     let path = null;
@@ -41,23 +37,20 @@ function getWorkerPath({ event, connectorWorkerPath, }) {
         // Loading
         case extraction_1.EventType.StartLoadingData:
         case extraction_1.EventType.ContinueLoadingData:
-            path = __dirname + '/default-workers/data
+            path = __dirname + '/default-workers/load-data';
+            break;
+        case extraction_1.EventType.StartLoadingAttachments:
+        case extraction_1.EventType.ContinueLoadingAttachments:
+            path = __dirname + '/default-workers/load-attachments';
             break;
         case extraction_1.EventType.StartDeletingLoaderState:
-            path = __dirname + '/default-workers/loader-state
+            path = __dirname + '/default-workers/delete-loader-state';
+            break;
+        case extraction_1.EventType.StartDeletingLoaderAttachmentState:
+            path = __dirname + '/default-workers/delete-loader-attachment-state';
             break;
         default:
-
-            event,
-            eventType: extraction_1.ExtractorEventType.UnknownEventType,
-            data: {
-                error: {
-                    message: 'Unrecognized event type in spawn ' +
-                        event.payload.event_type +
-                        '.',
-                },
-            },
-            });
+            path = null;
     }
     return path;
 }
@@ -102,7 +95,18 @@ async function spawn({ event, initialState, workerPath, options, }) {
         }
     }
     else {
-
+        await (0, control_protocol_1.emit)({
+            event,
+            eventType: extraction_1.ExtractorEventType.UnknownEventType,
+            data: {
+                error: {
+                    message: 'Unrecognized event type in spawn ' +
+                        event.payload.event_type +
+                        '.',
+                },
+            },
+        });
+        return false;
     }
 }
 class Spawn {
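With this change, `getWorkerPath` simply resolves to `null` for unrecognized event types, and `spawn` emits `UnknownEventType` and resolves to `false` when no worker path can be determined. When a connector ships its own worker, the path is passed straight through; a rough sketch (the worker file location and empty initial state are illustrative, not prescribed by the SDK):

```typescript
// Sketch: dispatching an Airdrop event to a connector-provided worker file.
await spawn({
  event,
  initialState: {},
  workerPath: __dirname + '/workers/load-attachments',
});
```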
package/dist/workers/worker-adapter.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { AirdropEvent, ExtractorEventType, EventData, ExternalSystemAttachmentStreamingFunction } from '../types/extraction';
-import { LoaderEventType } from '../types/loading';
+import { ExternalSystemAttachment, ExternalSystemLoadingFunction, FileToLoad, LoaderEventType } from '../types/loading';
 import { AdapterState } from '../state/state.interfaces';
 import { Artifact } from '../uploader/uploader.interfaces';
 import { WorkerAdapterInterface, WorkerAdapterOptions } from '../types/workers';
@@ -54,10 +54,20 @@ export declare class WorkerAdapter<ConnectorState> {
     uploadAllRepos(): Promise<void>;
     handleTimeout(): void;
     loadItemTypes({ itemTypesToLoad, }: ItemTypesToLoadParams): Promise<LoadItemTypesResponse>;
+    getLoaderBatches({ supportedItemTypes, }: {
+        supportedItemTypes: string[];
+    }): Promise<FileToLoad[]>;
+    loadAttachments({ create, }: {
+        create: ExternalSystemLoadingFunction<ExternalSystemAttachment>;
+    }): Promise<LoadItemTypesResponse>;
     loadItem({ item, itemTypeToLoad, }: {
         item: ExternalSystemItem;
         itemTypeToLoad: ItemTypeToLoad;
     }): Promise<LoadItemResponse>;
+    loadAttachment({ item, create, }: {
+        item: ExternalSystemAttachment;
+        create: ExternalSystemLoadingFunction<ExternalSystemAttachment>;
+    }): Promise<LoadItemResponse>;
     /**
      * Streams the attachments to the DevRev platform.
      * The attachments are streamed to the platform and the artifact information is returned.
package/dist/workers/worker-adapter.js
CHANGED
@@ -163,68 +163,22 @@ class WorkerAdapter {
     }
     async loadItemTypes({ itemTypesToLoad, }) {
         var _a, _b, _c;
-
-
-
-
-
-                eventType: loading_1.LoaderEventType.DataLoadingError,
-                data: {
-                    error: {
-                        message: 'Stats file artifact id not found in event data.',
-                    },
-                },
-            });
-            return {
-                reports: this.reports,
-                processed_files: this.processedFiles,
+        if (this.event.payload.event_type === extraction_1.EventType.StartLoadingData) {
+            this.adapterState.state.fromDevRev = {
+                filesToLoad: await this.getLoaderBatches({
+                    supportedItemTypes: itemTypesToLoad.map((it) => it.itemType),
+                }),
             };
         }
-
-
-        }));
-        console.log('Stats file', statsFile);
-        if (!statsFile || statsFile.length === 0) {
-            console.warn('Stats file not found or empty.');
+        if (((_a = this.adapterState.state.fromDevRev) === null || _a === void 0 ? void 0 : _a.filesToLoad.length) === 0) {
+            console.warn('No files to load, returning.');
             return {
                 reports: this.reports,
                 processed_files: this.processedFiles,
             };
         }
-        if (this.event.payload.event_type === extraction_1.EventType.StartLoadingData) {
-            console.log('Recieved event type ' +
-                extraction_1.EventType.StartLoadingData +
-                '. Preparing files to load.');
-            const filesToLoad = (0, helpers_1.getFilesToLoad)({
-                itemTypesToLoad,
-                statsFile,
-            });
-            if (filesToLoad.length === 0) {
-                console.warn('No files to load, returning.');
-                return {
-                    reports: this.reports,
-                    processed_files: this.processedFiles,
-                };
-            }
-            this.adapterState.state.fromDevRev = { filesToLoad };
-        }
         console.log('Files to load in state', (_b = this.adapterState.state.fromDevRev) === null || _b === void 0 ? void 0 : _b.filesToLoad);
-
-        await (0, control_protocol_1.emit)({
-            event: this.event,
-            eventType: loading_1.LoaderEventType.DataLoadingError,
-            data: {
-                error: {
-                    message: 'Unexpected state set in LOAD_DATA.',
-                },
-            },
-        });
-        return {
-            reports: this.reports,
-            processed_files: this.processedFiles,
-        };
-        }
-        outerloop: for (const fileToLoad of (_c = this.adapterState.state.fromDevRev) === null || _c === void 0 ? void 0 : _c.filesToLoad) {
+        outerloop: for (const fileToLoad of ((_c = this.adapterState.state.fromDevRev) === null || _c === void 0 ? void 0 : _c.filesToLoad) || []) {
             const itemTypeToLoad = itemTypesToLoad.find((itemTypeToLoad) => itemTypeToLoad.itemType === fileToLoad.itemType);
             if (!itemTypeToLoad) {
                 console.error(`Item type to load not found for item type: ${fileToLoad.itemType}.`);
@@ -249,20 +203,88 @@ class WorkerAdapter {
                     break outerloop;
                 }
                 for (let i = fileToLoad.lineToProcess; i < fileToLoad.count; i++) {
-                    const { report, rateLimit
+                    const { report, rateLimit } = await this.loadItem({
                         item: transformerFile[i],
                         itemTypeToLoad,
                     });
-                    if (
+                    if (rateLimit === null || rateLimit === void 0 ? void 0 : rateLimit.delay) {
                         await (0, control_protocol_1.emit)({
                             event: this.event,
-                            eventType: loading_1.LoaderEventType.
+                            eventType: loading_1.LoaderEventType.DataLoadingDelay,
                             data: {
-
+                                delay: rateLimit.delay,
+                                reports: this.reports,
+                                processed_files: this.processedFiles,
                             },
                         });
                         break outerloop;
                     }
+                    if (report) {
+                        (0, helpers_1.addReportToLoaderReport)({
+                            loaderReports: this.loaderReports,
+                            report,
+                        });
+                        fileToLoad.lineToProcess = fileToLoad.lineToProcess + 1;
+                    }
+                }
+                fileToLoad.completed = true;
+                this._processedFiles.push(fileToLoad.id);
+            }
+        }
+        return {
+            reports: this.reports,
+            processed_files: this.processedFiles,
+        };
+    }
+    async getLoaderBatches({ supportedItemTypes, }) {
+        var _a;
+        const statsFileArtifactId = (_a = this.event.payload.event_data) === null || _a === void 0 ? void 0 : _a.stats_file;
+        if (statsFileArtifactId) {
+            const statsFile = (await this.uploader.getJsonObjectByArtifactId({
+                artifactId: statsFileArtifactId,
+            }));
+            if (!statsFile || statsFile.length === 0) {
+                return [];
+            }
+            const filesToLoad = (0, helpers_1.getFilesToLoad)({
+                supportedItemTypes,
+                statsFile,
+            });
+            return filesToLoad;
+        }
+        return [];
+    }
+    async loadAttachments({ create, }) {
+        var _a, _b;
+        if (this.event.payload.event_type === extraction_1.EventType.StartLoadingAttachments) {
+            this.adapterState.state.fromDevRev = {
+                filesToLoad: await this.getLoaderBatches({
+                    supportedItemTypes: ['attachment'],
+                }),
+            };
+        }
+        if (!this.adapterState.state.fromDevRev ||
+            ((_a = this.adapterState.state.fromDevRev) === null || _a === void 0 ? void 0 : _a.filesToLoad.length) === 0) {
+            return {
+                reports: this.reports,
+                processed_files: this.processedFiles,
+            };
+        }
+        outerloop: for (const fileToLoad of (_b = this.adapterState.state.fromDevRev) === null || _b === void 0 ? void 0 : _b.filesToLoad) {
+            if (!fileToLoad.completed) {
+                const transformerFile = (await this.uploader.getJsonObjectByArtifactId({
+                    artifactId: fileToLoad.id,
+                    isGzipped: true,
+                }));
+                if (!transformerFile) {
+                    console.error('Transformer file not found.');
+                    break outerloop;
+                }
+                for (let i = fileToLoad.lineToProcess; i < fileToLoad.count; i++) {
+                    const { report, rateLimit } = await this.loadAttachment({
+                        item: transformerFile[i],
+                        create,
+                    });
                     if (rateLimit === null || rateLimit === void 0 ? void 0 : rateLimit.delay) {
                         await (0, control_protocol_1.emit)({
                             event: this.event,
@@ -461,6 +483,38 @@ class WorkerAdapter {
         };
     }
     }
+    async loadAttachment({ item, create, }) {
+        // Create item
+        const { id, delay, error } = await create({
+            item,
+            mappers: this.mappers,
+            event: this.event,
+        });
+        if (delay) {
+            return {
+                rateLimit: {
+                    delay,
+                },
+            };
+        }
+        else if (id) {
+            return {
+                report: {
+                    item_type: 'attachment',
+                    [loading_1.ActionType.CREATED]: 1,
+                },
+            };
+        }
+        else {
+            console.error('Failed to create item', error);
+            return {
+                report: {
+                    item_type: 'attachment',
+                    [loading_1.ActionType.FAILED]: 1,
+                },
+            };
+        }
+    }
     /**
      * Streams the attachments to the DevRev platform.
      * The attachments are streamed to the platform and the artifact information is returned.