@devrev/ts-adaas 1.4.2 → 1.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/dist/common/constants.d.ts +1 -0
- package/dist/common/constants.js +2 -1
- package/dist/common/helpers.d.ts +1 -0
- package/dist/common/helpers.js +5 -0
- package/dist/mappers/mappers.d.ts +2 -2
- package/dist/mappers/mappers.js +8 -8
- package/dist/types/extraction.d.ts +11 -1
- package/dist/types/extraction.js +9 -1
- package/dist/types/extraction.test.d.ts +1 -0
- package/dist/types/extraction.test.js +23 -0
- package/dist/uploader/uploader.d.ts +7 -4
- package/dist/uploader/uploader.interfaces.d.ts +11 -0
- package/dist/uploader/uploader.js +74 -37
- package/dist/uploader/uploader.test.js +76 -16
- package/dist/workers/worker-adapter.js +16 -3
- package/package.json +1 -1
package/README.md
CHANGED
@@ -4,8 +4,7 @@
 
 ## Overview
 
-The Airdrop SDK for TypeScript helps developers build snap-ins that integrate with DevRev’s Airdrop platform.
-This SDK simplifies the workflow for handling data extraction and loading, event-driven actions, state management, and artifact handling.
+The Airdrop SDK for TypeScript helps developers build snap-ins that integrate with DevRev’s Airdrop platform. This SDK simplifies the workflow for handling data extraction and loading, event-driven actions, state management, and artifact handling.
 
 It provides features such as:
 
package/dist/common/constants.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.LIBRARY_VERSION = exports.AIRDROP_DEFAULT_ITEM_TYPES = exports.MAX_DEVREV_ARTIFACT_SIZE = exports.ARTIFACT_BATCH_SIZE = exports.ALLOWED_EVENT_TYPES = exports.ALLOWED_LOADING_EVENT_TYPES = exports.ALLOWED_EXTRACTION_EVENT_TYPES = exports.STATELESS_EVENT_TYPES = void 0;
+exports.DEFAULT_SLEEP_DELAY_MS = exports.LIBRARY_VERSION = exports.AIRDROP_DEFAULT_ITEM_TYPES = exports.MAX_DEVREV_ARTIFACT_SIZE = exports.ARTIFACT_BATCH_SIZE = exports.ALLOWED_EVENT_TYPES = exports.ALLOWED_LOADING_EVENT_TYPES = exports.ALLOWED_EXTRACTION_EVENT_TYPES = exports.STATELESS_EVENT_TYPES = void 0;
 const extraction_1 = require("../types/extraction");
 const helpers_1 = require("./helpers");
 exports.STATELESS_EVENT_TYPES = [
@@ -39,3 +39,4 @@ exports.AIRDROP_DEFAULT_ITEM_TYPES = {
     SSOR_ATTACHMENT: 'ssor_attachment',
 };
 exports.LIBRARY_VERSION = (0, helpers_1.getLibraryVersion)();
+exports.DEFAULT_SLEEP_DELAY_MS = 180000; // 3 minutes
package/dist/common/helpers.d.ts
CHANGED
@@ -16,3 +16,4 @@ export declare function addReportToLoaderReport({ loaderReports, report, }: {
 }): LoaderReport[];
 export declare function getCircularReplacer(): (key: any, value: any) => any;
 export declare function getLibraryVersion(): any;
+export declare function sleep(ms: number): Promise<unknown>;
package/dist/common/helpers.js
CHANGED
@@ -39,6 +39,7 @@ exports.getFilesToLoad = getFilesToLoad;
 exports.addReportToLoaderReport = addReportToLoaderReport;
 exports.getCircularReplacer = getCircularReplacer;
 exports.getLibraryVersion = getLibraryVersion;
+exports.sleep = sleep;
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 const fs_1 = require("fs");
@@ -176,3 +177,7 @@ function getLibraryVersion() {
         return '';
     }
 }
+function sleep(ms) {
+    console.log(`Sleeping for ${ms}ms.`);
+    return new Promise((resolve) => setTimeout(resolve, ms));
+}
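
For orientation, here is a minimal TypeScript sketch of how the new `sleep` helper and `DEFAULT_SLEEP_DELAY_MS` constant could be combined to back off before retrying a rate-limited call. The `fetchWithBackoff` wrapper and its single-retry policy are hypothetical illustrations, not part of the SDK.

```typescript
// Local mirrors of the helpers added in this release (common/helpers.js and
// common/constants.js), included so the sketch is self-contained.
function sleep(ms: number): Promise<unknown> {
  console.log(`Sleeping for ${ms}ms.`);
  return new Promise((resolve) => setTimeout(resolve, ms));
}

const DEFAULT_SLEEP_DELAY_MS = 180000; // 3 minutes

// Hypothetical wrapper: retry a flaky call once after waiting out the default delay.
async function fetchWithBackoff<T>(fetchFn: () => Promise<T>): Promise<T> {
  try {
    return await fetchFn();
  } catch {
    // Assumption: the failure is transient (e.g. a rate limit) and worth one retry.
    await sleep(DEFAULT_SLEEP_DELAY_MS);
    return fetchFn();
  }
}
```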
package/dist/mappers/mappers.d.ts
CHANGED

@@ -1,8 +1,8 @@
 import { AxiosResponse } from 'axios';
 import { MappersFactoryInterface, MappersCreateParams, MappersCreateResponse, MappersGetByTargetIdParams, MappersGetByTargetIdResponse, MappersUpdateParams, MappersUpdateResponse } from './mappers.interface';
 export declare class Mappers {
-    private
-    private
+    private devrevApiEndpoint;
+    private devrevApiToken;
     constructor({ event }: MappersFactoryInterface);
     getByTargetId(params: MappersGetByTargetIdParams): Promise<AxiosResponse<MappersGetByTargetIdResponse>>;
     create(params: MappersCreateParams): Promise<AxiosResponse<MappersCreateResponse>>;
package/dist/mappers/mappers.js
CHANGED
@@ -4,29 +4,29 @@ exports.Mappers = void 0;
 const axios_client_1 = require("../http/axios-client");
 class Mappers {
     constructor({ event }) {
-        this.
-        this.
+        this.devrevApiEndpoint = event.execution_metadata.devrev_endpoint;
+        this.devrevApiToken = event.context.secrets.service_account_token;
     }
     async getByTargetId(params) {
         const { sync_unit, target } = params;
-        return axios_client_1.axiosClient.get(`${this.
+        return axios_client_1.axiosClient.get(`${this.devrevApiEndpoint}/internal/airdrop.sync-mapper-record.get-by-target`, {
             headers: {
-                Authorization: this.
+                Authorization: this.devrevApiToken,
             },
             params: { sync_unit, target },
         });
     }
     async create(params) {
-        return axios_client_1.axiosClient.post(`${this.
+        return axios_client_1.axiosClient.post(`${this.devrevApiEndpoint}/internal/airdrop.sync-mapper-record.create`, params, {
             headers: {
-                Authorization: this.
+                Authorization: this.devrevApiToken,
             },
         });
     }
     async update(params) {
-        return axios_client_1.axiosClient.post(`${this.
+        return axios_client_1.axiosClient.post(`${this.devrevApiEndpoint}/internal/airdrop.sync-mapper-record.update`, params, {
             headers: {
-                Authorization: this.
+                Authorization: this.devrevApiToken,
             },
         });
     }
package/dist/types/extraction.d.ts
CHANGED

@@ -77,6 +77,13 @@ export interface ExternalSyncUnit {
     item_count?: number;
     item_type?: string;
 }
+/**
+ * InitialSyncScope is an enum that defines the different scopes of initial sync that can be used by the external extractor.
+ */
+export declare enum InitialSyncScope {
+    FULL_HISTORY = "full-history",
+    TIME_SCOPED = "time-scoped"
+}
 /**
  * EventContextIn is an interface that defines the structure of the input event context that is sent to the external extractor from ADaaS.
  * @deprecated
@@ -114,7 +121,7 @@ export interface EventContextOut {
     sync_unit?: string;
 }
 /**
- * EventContext is an interface that defines the structure of the event context that is sent to
+ * EventContext is an interface that defines the structure of the event context that is sent to the external connector from Airdrop.
  */
 export interface EventContext {
     callback_url: string;
@@ -127,9 +134,12 @@ export interface EventContext {
     external_sync_unit_name: string;
     external_system: string;
     external_system_type: string;
+    extract_from?: string;
     import_slug: string;
+    initial_sync_scope?: InitialSyncScope;
     mode: string;
     request_id: string;
+    reset_extraction?: boolean;
     snap_in_slug: string;
     snap_in_version_id: string;
     sync_run: string;
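
The new optional `EventContext` fields let the platform signal a time-scoped or reset initial sync. A minimal sketch of how an extractor might branch on them follows; `startFullHistoryExtraction` and `startTimeScopedExtraction` are hypothetical placeholders, and the import assumes these types are re-exported from the package root as the other extraction types are.

```typescript
// Assumption: EventContext and InitialSyncScope are re-exported from the package root.
import { EventContext, InitialSyncScope } from '@devrev/ts-adaas';

// Hypothetical placeholders for an extractor's own sync logic.
declare function startFullHistoryExtraction(): Promise<void>;
declare function startTimeScopedExtraction(extractFrom: string): Promise<void>;

async function planInitialSync(context: EventContext): Promise<void> {
  if (context.reset_extraction) {
    console.log('Platform requested a reset; previous extraction state should be discarded.');
  }
  // Assumption: a missing initial_sync_scope is treated as a full-history sync.
  if (context.initial_sync_scope === InitialSyncScope.TIME_SCOPED && context.extract_from) {
    await startTimeScopedExtraction(context.extract_from); // e.g. '2024-01-01T00:00:00Z'
  } else {
    await startFullHistoryExtraction();
  }
}
```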
package/dist/types/extraction.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.SyncMode = exports.ExtractionMode = exports.ExtractorEventType = exports.EventType = void 0;
+exports.InitialSyncScope = exports.SyncMode = exports.ExtractionMode = exports.ExtractorEventType = exports.EventType = void 0;
 /**
  * EventType is an enum that defines the different types of events that can be sent to the external extractor from ADaaS.
  * The external extractor can use these events to know what to do next in the extraction process.
@@ -70,3 +70,11 @@ var SyncMode;
     SyncMode["INCREMENTAL"] = "INCREMENTAL";
     SyncMode["LOADING"] = "LOADING";
 })(SyncMode || (exports.SyncMode = SyncMode = {}));
+/**
+ * InitialSyncScope is an enum that defines the different scopes of initial sync that can be used by the external extractor.
+ */
+var InitialSyncScope;
+(function (InitialSyncScope) {
+    InitialSyncScope["FULL_HISTORY"] = "full-history";
+    InitialSyncScope["TIME_SCOPED"] = "time-scoped";
+})(InitialSyncScope || (exports.InitialSyncScope = InitialSyncScope = {}));
package/dist/types/extraction.test.d.ts
ADDED

@@ -0,0 +1 @@
+export {};
package/dist/types/extraction.test.js
ADDED

@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const extraction_1 = require("./extraction");
+const test_helpers_1 = require("../tests/test-helpers");
+describe('EventContext type tests', () => {
+    const baseEvent = (0, test_helpers_1.createEvent)({ eventType: extraction_1.EventType.ExtractionDataStart });
+    it('should handle context without optional fields', () => {
+        const event = Object.assign({}, baseEvent);
+        // If this compiles, the test passes
+        expect(event).toBeDefined();
+    });
+    it('should handle context with all optional fields', () => {
+        const event = Object.assign({}, baseEvent);
+        event.payload.event_context = Object.assign(Object.assign({}, baseEvent.payload.event_context), { extract_from: '2024-01-01T00:00:00Z', initial_sync_scope: extraction_1.InitialSyncScope.TIME_SCOPED, reset_extraction: true });
+        // Test with all optionals present
+        expect(event).toBeDefined();
+    });
+    it('should handle partial optional fields', () => {
+        const event = Object.assign({}, baseEvent);
+        event.payload.event_context = Object.assign(Object.assign({}, baseEvent.payload.event_context), { extract_from: '2024-01-01T00:00:00Z' });
+        expect(event).toBeDefined();
+    });
+});
package/dist/uploader/uploader.d.ts
CHANGED

@@ -1,11 +1,13 @@
 import { NormalizedAttachment } from '../repo/repo.interfaces';
-import {
+import { UploadResponse, UploaderFactoryInterface, ArtifactToUpload } from './uploader.interfaces';
 import { AxiosResponse } from 'axios';
 export declare class Uploader {
     private event;
     private isLocalDevelopment?;
     private devrevApiEndpoint;
     private devrevApiToken;
+    private requestId;
+    private defaultHeaders;
     constructor({ event, options }: UploaderFactoryInterface);
     /**
      * Uploads the fetched objects to the DevRev platform.
@@ -17,9 +19,10 @@ export declare class Uploader {
      * or error information if there was an error
      */
     upload(itemType: string, fetchedObjects: object[] | object): Promise<UploadResponse>;
-
-
-
+    getArtifactUploadUrl(filename: string, fileType: string): Promise<ArtifactToUpload | void>;
+    uploadArtifact(artifact: ArtifactToUpload, file: Buffer): Promise<AxiosResponse | void>;
+    streamArtifact(artifact: ArtifactToUpload, fileStream: any): Promise<AxiosResponse | void>;
+    confirmArtifactUpload(artifactId: string): Promise<AxiosResponse | void>;
     getAttachmentsFromArtifactId({ artifact, }: {
         artifact: string;
     }): Promise<{
package/dist/uploader/uploader.interfaces.d.ts
CHANGED

@@ -25,6 +25,17 @@ export interface ArtifactsPrepareResponse {
         value: string;
     }[];
 }
+/**
+ * ArtifactToUpload is an interface that defines the structure of the response from the get upload url endpoint.
+ */
+export interface ArtifactToUpload {
+    upload_url: string;
+    artifact_id: string;
+    form_data: {
+        key: string;
+        value: string;
+    }[];
+}
 /**
  * UploadResponse is an interface that defines the structure of the response from upload through Uploader.
  */
package/dist/uploader/uploader.js
CHANGED

@@ -49,7 +49,11 @@ class Uploader {
         this.event = event;
         this.devrevApiEndpoint = event.execution_metadata.devrev_endpoint;
         this.devrevApiToken = event.context.secrets.service_account_token;
+        this.requestId = event.payload.event_context.request_id;
         this.isLocalDevelopment = options === null || options === void 0 ? void 0 : options.isLocalDevelopment;
+        this.defaultHeaders = {
+            Authorization: `Bearer ${this.devrevApiToken}`,
+        };
     }
     /**
      * Uploads the fetched objects to the DevRev platform.
@@ -64,7 +68,7 @@ class Uploader {
         if (this.isLocalDevelopment) {
             await this.downloadToLocal(itemType, fetchedObjects);
         }
-        //
+        // Compress the fetched objects to a gzipped jsonl object
         const file = this.compressGzip(js_jsonl_1.jsonl.stringify(fetchedObjects));
         if (!file) {
             return {
@@ -73,58 +77,65 @@ class Uploader {
         }
         const filename = itemType + '.jsonl.gz';
         const fileType = 'application/x-gzip';
-        //
-        const preparedArtifact = await this.
+        // Get upload url
+        const preparedArtifact = await this.getArtifactUploadUrl(filename, fileType);
         if (!preparedArtifact) {
             return {
-                error: { message: 'Error while
+                error: { message: 'Error while getting artifact upload URL.' },
             };
         }
-        //
-        const
-        if (!
+        // Upload prepared artifact to the given url
+        const uploadItemResponse = await this.uploadArtifact(preparedArtifact, file);
+        if (!uploadItemResponse) {
            return {
                error: { message: 'Error while uploading artifact.' },
            };
        }
-        //
+        // Confirm upload
+        const confirmArtifactUploadResponse = await this.confirmArtifactUpload(preparedArtifact.artifact_id);
+        if (!confirmArtifactUploadResponse) {
+            return {
+                error: { message: 'Error while confirming artifact upload.' },
+            };
+        }
+        // Return the artifact information to the platform
        const artifact = {
-            id: preparedArtifact.
+            id: preparedArtifact.artifact_id,
            item_type: itemType,
            item_count: Array.isArray(fetchedObjects) ? fetchedObjects.length : 1,
        };
-        console.log('Successful upload of artifact', artifact);
        return { artifact };
    }
-    async
+    async getArtifactUploadUrl(filename, fileType) {
+        const url = `${this.devrevApiEndpoint}/internal/airdrop.artifacts.upload-url`;
        try {
-            const response = await axios_client_1.axiosClient.
-
-
-
-
-
+            const response = await axios_client_1.axiosClient.get(url, {
+                headers: Object.assign({}, this.defaultHeaders),
+                params: {
+                    request_id: this.requestId,
+                    file_type: fileType,
+                    file_name: filename,
                },
            });
            return response.data;
        }
        catch (error) {
            if (axios_client_1.axios.isAxiosError(error)) {
-                console.error('Error while
+                console.error('Error while getting artifact upload URL.', (0, logger_1.serializeAxiosError)(error));
            }
            else {
-                console.error('Error while
+                console.error('Error while getting artifact upload URL.', error);
            }
        }
    }
-    async
+    async uploadArtifact(artifact, file) {
        const formData = new form_data_1.default();
-        for (const field
-            formData.append(field
+        for (const field in artifact.form_data) {
+            formData.append(field, artifact.form_data[field]);
        }
        formData.append('file', file);
        try {
-            const response = await axios_client_1.axiosClient.post(
+            const response = await axios_client_1.axiosClient.post(artifact.upload_url, formData, {
                headers: Object.assign({}, formData.getHeaders()),
            });
            return response;
@@ -138,19 +149,19 @@ class Uploader {
            }
        }
    }
-    async
+    async streamArtifact(artifact, fileStream) {
        const formData = new form_data_1.default();
-        for (const field
-            formData.append(field
+        for (const field in artifact.form_data) {
+            formData.append(field, artifact.form_data[field]);
        }
-        formData.append('file',
-        if (
+        formData.append('file', fileStream.data);
+        if (fileStream.headers['content-length'] > constants_1.MAX_DEVREV_ARTIFACT_SIZE) {
            console.warn(`File size exceeds the maximum limit of ${constants_1.MAX_DEVREV_ARTIFACT_SIZE} bytes.`);
            return;
        }
        try {
-            const response = await axios_client_1.axiosClient.post(
-            headers: Object.assign(Object.assign({}, formData.getHeaders()), (!
+            const response = await axios_client_1.axiosClient.post(artifact.upload_url, formData, {
+                headers: Object.assign(Object.assign({}, formData.getHeaders()), (!fileStream.headers['content-length']
                ? {
                    'Content-Length': constants_1.MAX_DEVREV_ARTIFACT_SIZE,
                }
@@ -168,6 +179,26 @@ class Uploader {
            return;
        }
    }
+    async confirmArtifactUpload(artifactId) {
+        const url = `${this.devrevApiEndpoint}/internal/airdrop.artifacts.confirm-upload`;
+        try {
+            const response = await axios_client_1.axiosClient.post(url, {
+                request_id: this.requestId,
+                artifact_id: artifactId,
+            }, {
+                headers: Object.assign({}, this.defaultHeaders),
+            });
+            return response;
+        }
+        catch (error) {
+            if (axios_client_1.axios.isAxiosError(error)) {
+                console.error('Error while confirming artifact upload.', (0, logger_1.serializeAxiosError)(error));
+            }
+            else {
+                console.error('Error while confirming artifact upload.', error);
+            }
+        }
+    }
    async getAttachmentsFromArtifactId({ artifact, }) {
        // Get the URL of the attachments metadata artifact
        const artifactUrl = await this.getArtifactDownloadUrl(artifact);
@@ -200,18 +231,24 @@ class Uploader {
        return { attachments: jsonObject };
    }
    async getArtifactDownloadUrl(artifactId) {
+        const url = `${this.devrevApiEndpoint}/internal/airdrop.artifacts.download-url`;
        try {
-            const response = await axios_client_1.axiosClient.
-
-
-
-
+            const response = await axios_client_1.axiosClient.get(url, {
+                headers: Object.assign({}, this.defaultHeaders),
+                params: {
+                    request_id: this.requestId,
+                    artifact_id: artifactId,
                },
            });
-            return response.data.
+            return response.data.download_url;
        }
        catch (error) {
-
+            if (axios_client_1.axios.isAxiosError(error)) {
+                console.error('Error while getting artifact download URL.', (0, logger_1.serializeAxiosError)(error));
+            }
+            else {
+                console.error('Error while getting artifact download URL.', error);
+            }
        }
    }
    async downloadArtifact(artifactUrl) {
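
The reworked `upload` now performs a three-step flow: fetch an upload URL, upload the gzipped JSONL payload, then confirm the upload. A minimal sketch of driving those same public methods directly is shown below; the `uploader` instance, the error handling, and the manual gzip step are illustrative assumptions rather than the SDK's prescribed usage.

```typescript
import { gzipSync } from 'zlib';

// `uploader` stands in for an already-constructed Uploader instance; typed as `any`
// here because the class is normally driven through its `upload` method.
async function uploadItems(uploader: any, itemType: string, objects: object[]): Promise<string> {
  // Serialize to JSONL and gzip, mirroring what `upload` does internally.
  const file = gzipSync(Buffer.from(objects.map((o) => JSON.stringify(o)).join('\n')));

  // 1. Ask the platform for an upload URL and form-data fields.
  const prepared = await uploader.getArtifactUploadUrl(`${itemType}.jsonl.gz`, 'application/x-gzip');
  if (!prepared) throw new Error('Could not get artifact upload URL.');

  // 2. Upload the payload to the returned URL.
  const uploaded = await uploader.uploadArtifact(prepared, file);
  if (!uploaded) throw new Error('Could not upload artifact.');

  // 3. Confirm the upload so the platform registers the artifact.
  const confirmed = await uploader.confirmArtifactUpload(prepared.artifact_id);
  if (!confirmed) throw new Error('Could not confirm artifact upload.');

  return prepared.artifact_id;
}
```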
package/dist/uploader/uploader.test.js
CHANGED

@@ -3,29 +3,89 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const test_helpers_1 = require("../tests/test-helpers");
 const types_1 = require("../types");
 const uploader_1 = require("./uploader");
-
-jest.mock('
-
-
-
-
-
-            error: undefined,
-        }),
-    };
-}),
-};
+const axios_client_1 = require("../http/axios-client");
+jest.mock('../http/axios-client', () => {
+    const originalModule = jest.requireActual('../http/axios-client');
+    return Object.assign(Object.assign({}, originalModule), { axiosClient: {
+            get: jest.fn(),
+            post: jest.fn(),
+        } });
 });
-
+const getSuccessResponse = () => ({
+    data: {
+        message: 'Success',
+    },
+    status: 200,
+    statusText: 'OK',
+    headers: {},
+    config: {},
+});
+const getArtifactUploadUrlMockResponse = {
+    data: {
+        artifact_id: 'mockArtifactId',
+        upload_url: 'mockUploadUrl',
+        form_data: [],
+    },
+};
+describe('Uploader Class Tests', () => {
     const mockEvent = (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.ExtractionDataStart });
-
+    let uploader;
+    beforeEach(() => {
+        uploader = new uploader_1.Uploader({ event: mockEvent });
+    });
+    afterEach(() => {
+        jest.clearAllMocks();
+    });
     it('should upload the file to the DevRev platform and return the artifact information', async () => {
+        // Mock successful response from getArtifactUploadUrl
+        axios_client_1.axiosClient.get.mockResolvedValueOnce(getArtifactUploadUrlMockResponse);
+        // Mock successful response from confirmArtifactUpload and uploadArtifact
+        axios_client_1.axiosClient.post.mockResolvedValue(getSuccessResponse());
+        const entity = 'entity';
+        const fetchedObjects = [{ key: 'value' }];
+        const uploadResponse = await uploader.upload(entity, fetchedObjects);
+        expect(uploadResponse).toEqual({
+            artifact: {
+                id: 'mockArtifactId',
+                item_type: entity,
+                item_count: fetchedObjects.length,
+            },
+        });
+    });
+    it('should handle failure in getArtifactUploadUrl', async () => {
+        // Mock unsuccessful response for getArtifactUploadUrl
+        axios_client_1.axiosClient.get.mockResolvedValueOnce(undefined);
+        const entity = 'entity';
+        const fetchedObjects = [{ key: 'value' }];
+        const uploadResponse = await uploader.upload(entity, fetchedObjects);
+        expect(uploadResponse).toEqual({
+            error: { message: 'Error while getting artifact upload URL.' },
+        });
+    });
+    it('should handle failure in uploadArtifact', async () => {
+        // Mock successful response for getArtifactUploadUrl
+        axios_client_1.axiosClient.get.mockResolvedValueOnce(getArtifactUploadUrlMockResponse);
+        // Mock unsuccessful response for uploadArtifact
+        axios_client_1.axiosClient.post.mockResolvedValueOnce(undefined);
+        const entity = 'entity';
+        const fetchedObjects = [{ key: 'value' }];
+        const uploadResponse = await uploader.upload(entity, fetchedObjects);
+        expect(uploadResponse).toEqual({
+            error: { message: 'Error while uploading artifact.' },
+        });
+    });
+    it('should handle failure in confirmArtifactUpload', async () => {
+        // Mock successful response for getArtifactUploadUrl
+        axios_client_1.axiosClient.get.mockResolvedValueOnce(getArtifactUploadUrlMockResponse);
+        // Mock successful response from uploadArtifact
+        axios_client_1.axiosClient.post.mockResolvedValueOnce(getSuccessResponse());
+        // Mock unsuccessful response from confirmArtifactUpload
+        axios_client_1.axiosClient.post.mockResolvedValueOnce(undefined);
         const entity = 'entity';
         const fetchedObjects = [{ key: 'value' }];
         const uploadResponse = await uploader.upload(entity, fetchedObjects);
         expect(uploadResponse).toEqual({
-
-            error: undefined,
+            error: { message: 'Error while confirming artifact upload.' },
         });
     });
 });
package/dist/workers/worker-adapter.js
CHANGED

@@ -15,6 +15,7 @@ const mappers_1 = require("../mappers/mappers");
 const uploader_1 = require("../uploader/uploader");
 const logger_1 = require("../logger/logger");
 const mappers_interface_1 = require("../mappers/mappers.interface");
+const helpers_2 = require("../common/helpers");
 function createWorkerAdapter({ event, adapterState, options, }) {
     return new WorkerAdapter({
         event,
@@ -91,6 +92,10 @@ class WorkerAdapter {
         }
         // Loop through the batches of attachments
         for (let i = lastProcessedBatchIndex; i < reducedAttachments.length; i++) {
+            // Check if we hit timeout
+            if (adapter.isTimeout) {
+                await (0, helpers_2.sleep)(constants_1.DEFAULT_SLEEP_DELAY_MS);
+            }
             const attachmentsBatch = reducedAttachments[i];
             // Create a list of promises for parallel processing
             const promises = [];
@@ -617,19 +622,27 @@ class WorkerAdapter {
         }
         if (httpStream) {
             const fileType = ((_a = httpStream.headers) === null || _a === void 0 ? void 0 : _a['content-type']) || 'application/octet-stream';
-
+            // Get upload URL
+            const preparedArtifact = await this.uploader.getArtifactUploadUrl(attachment.file_name, fileType);
             if (!preparedArtifact) {
                 console.warn(`Error while preparing artifact for attachment ID ${attachment.id}. Skipping attachment.`);
                 return;
             }
-
+            // Stream attachment
+            const uploadedArtifact = await this.uploader.streamArtifact(preparedArtifact, httpStream);
             if (!uploadedArtifact) {
                 console.warn(`Error while streaming to artifact for attachment ID ${attachment.id}. Skipping attachment.`);
                 return;
             }
+            // Confirm attachment upload
+            const confirmArtifactUploadResponse = await this.uploader.confirmArtifactUpload(preparedArtifact.artifact_id);
+            if (!confirmArtifactUploadResponse) {
+                console.warn('Error while confirming upload for attachment ID ' + attachment.id);
+                return;
+            }
             const ssorAttachment = {
                 id: {
-                    devrev: preparedArtifact.
+                    devrev: preparedArtifact.artifact_id,
                     external: attachment.id,
                 },
                 parent_id: {