@devrev/ts-adaas 1.1.4 → 1.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -36
- package/dist/common/constants.d.ts +1 -1
- package/dist/common/constants.js +1 -1
- package/dist/deprecated/uploader/index.js +2 -5
- package/dist/http/axios-client.js +4 -4
- package/dist/http/axios-devrev-client.d.ts +3 -0
- package/dist/http/axios-devrev-client.js +37 -0
- package/dist/repo/repo.interfaces.d.ts +1 -0
- package/dist/repo/repo.js +1 -1
- package/dist/state/state.interfaces.js +0 -1
- package/dist/types/extraction.d.ts +18 -1
- package/dist/types/index.d.ts +1 -1
- package/dist/uploader/uploader.d.ts +14 -15
- package/dist/uploader/uploader.js +14 -92
- package/dist/workers/default-workers/attachments-extraction.js +33 -32
- package/dist/workers/worker-adapter.d.ts +25 -1
- package/dist/workers/worker-adapter.js +92 -4
- package/package.json +2 -2
package/README.md
CHANGED
@@ -2,71 +2,82 @@
 
 ## Release Notes
 
+### v1.1.6
+
+- Add exponential retry and handle rate-limiting towards DevRev.
+- Gracefully handle failure to upload extracted attachments.
+
+### v1.1.5
+
+- Increase `delayFactor` and number of retries for the exponential backoff retry mechanism for HTTP requests.
+- Provide an inject function for streaming attachments.
+- Fix the attachments streaming bug.
+
 ### v1.1.4
 
-- Provide log
+- Provide log lines and stack traces for runtime worker errors.
 
 ### v1.1.3
 
--
--
--
--
+- Export `axios` and `axiosClient` with the exponential backoff retry mechanism for HTTP requests and omit Authorization headers from Axios errors.
+- Resolve circular structure logging issues.
+- Fix the attachments metadata normalization bug.
+- Improve repository logging.
 
-
+### v1.1.2
 
--
--
+- Unify incoming and outgoing event context.
+- Add `dev_oid` to logger tags.
 
-
+### v1.1.1
 
--
+- Add default workers for loading deletion events.
 
-
+### v1.1.0
 
-- Support
+- Support sync from DevRev to the external system. (Known limitations: no support for loading attachments.)
 
-
+### v1.0.4
 
 - Fix logging from worker threads.
 
-
+### v1.0.3
 
--
+- Add release notes.
 
-
+### v1.0.2
 
--
+- Fix bugs and improve local development.
 - Expose `formatAxiosError` function for error handling.
 
-
+### v1.0.1
 
--
+- Fix bugs and improve logging.
 
-
+### v1.0.0
 
--
--
--
--
--
+- Enable extractions to use the full lambda runtime and gracefully handle execution context timeout.
+- Simplify metadata and data normalization and uploading with the repo implementation.
+- Provide default handling of the attachment extraction phase in the ADaaS SDK library.
+- Reduce file size and streamline processes with gzip compression.
+- Fix bugs and improve error handling.
 
-
+### v0.0.3
 
-- Support
+- Support new recipe management.
 
-
+### v0.0.2
 
-- Support
-- HTTP client for API requests
--
--
+- Support the State API.
+- Provide an HTTP client for API requests.
+- Create local artifact files in the local development environment.
+- Improve logging.
 
-
+### v0.0.1
 
--
--
--
+- Implement a demo of the ADaaS snap-in.
+- Add an adapter for the ADaaS control protocol with helper functions.
+- Provide an uploader for uploading artifacts.
 
 # Overview
 
package/dist/common/constants.d.ts
CHANGED

@@ -4,7 +4,7 @@ export declare const ALLOWED_EXTRACTION_EVENT_TYPES: EventType[];
 export declare const ALLOWED_LOADING_EVENT_TYPES: EventType[];
 export declare const ALLOWED_EVENT_TYPES: EventType[];
 export declare const ARTIFACT_BATCH_SIZE = 2000;
-export declare const MAX_DEVREV_ARTIFACT_SIZE =
+export declare const MAX_DEVREV_ARTIFACT_SIZE = 262144000;
 export declare const AIRDROP_DEFAULT_ITEM_TYPES: {
     EXTERNAL_DOMAIN_METADATA: string;
     ATTACHMENTS: string;

package/dist/common/constants.js
CHANGED
@@ -31,7 +31,7 @@ exports.ALLOWED_EVENT_TYPES = [
     ...exports.ALLOWED_LOADING_EVENT_TYPES,
 ];
 exports.ARTIFACT_BATCH_SIZE = 2000;
-exports.MAX_DEVREV_ARTIFACT_SIZE =
+exports.MAX_DEVREV_ARTIFACT_SIZE = 262144000; // 250MB
 exports.AIRDROP_DEFAULT_ITEM_TYPES = {
     EXTERNAL_DOMAIN_METADATA: 'external_domain_metadata',
     ATTACHMENTS: 'attachments',
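The new concrete value pins the maximum artifact size at 250 MB (250 * 1024 * 1024 = 262144000 bytes). As the uploader.js changes further down show, streamToArtifact now skips uploads whose content-length exceeds this limit; below is a minimal TypeScript sketch of that guard, not part of the package, and the helper name is hypothetical.

import { AxiosResponse } from 'axios';

// 250 MB, matching MAX_DEVREV_ARTIFACT_SIZE in dist/common/constants.js.
const MAX_DEVREV_ARTIFACT_SIZE = 262144000;

// Hypothetical helper: true when the file reported by the external system is
// too large to be uploaded as a DevRev artifact.
function exceedsArtifactSizeLimit(fileStreamResponse: AxiosResponse): boolean {
  const contentLength = Number(fileStreamResponse.headers['content-length']);
  return contentLength > MAX_DEVREV_ARTIFACT_SIZE;
}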
package/dist/deprecated/uploader/index.js
CHANGED

@@ -22,12 +22,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Uploader = void 0;
-const
+const axios_devrev_client_1 = require("../../http/axios-devrev-client");
 const typescript_sdk_1 = require("@devrev/typescript-sdk");
 const fs_1 = __importStar(require("fs"));
 const helpers_1 = require("../common/helpers");
@@ -111,7 +108,7 @@ class Uploader {
     ) {
         const formData = (0, helpers_1.createFormData)(preparedArtifact, fetchedObjects);
         try {
-            const response = await
+            const response = await axios_devrev_client_1.axiosDevRevClient.post(preparedArtifact.url, formData, {
                 headers: {
                     'Content-Type': 'multipart/form',
                 },
package/dist/http/axios-client.js
CHANGED

@@ -9,13 +9,13 @@ exports.axios = axios_1.default;
 const axios_retry_1 = __importDefault(require("axios-retry"));
 const axiosClient = axios_1.default.create();
 exports.axiosClient = axiosClient;
-// Exponential backoff algorithm: Retry 3 times and there will be a delay of more than 1 * no. of retries second + random number of milliseconds between each retry.
 (0, axios_retry_1.default)(axiosClient, {
-    retries:
+    retries: 5,
     retryDelay: (retryCount, error) => {
         var _a;
-        console.
-
+        console.warn('Retry attempt: ' + retryCount + 'to url: ' + ((_a = error.config) === null || _a === void 0 ? void 0 : _a.url) + '.');
+        // Exponential backoff algorithm: 1 * 2 ^ retryCount * 5000ms
+        return axios_retry_1.default.exponentialDelay(retryCount, error, 5000);
     },
     retryCondition: (error) => {
         var _a, _b, _c, _d;
package/dist/http/axios-devrev-client.js
ADDED

@@ -0,0 +1,37 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.axiosDevRevClient = exports.axios = void 0;
+const axios_1 = __importDefault(require("axios"));
+exports.axios = axios_1.default;
+const axios_retry_1 = __importDefault(require("axios-retry"));
+const axiosDevRevClient = axios_1.default.create();
+exports.axiosDevRevClient = axiosDevRevClient;
+(0, axios_retry_1.default)(axiosDevRevClient, {
+    retries: 5,
+    retryDelay: (retryCount, error) => {
+        var _a, _b;
+        console.warn('Retry attempt: ' + retryCount + 'to url: ' + ((_a = error.config) === null || _a === void 0 ? void 0 : _a.url) + '.');
+        if (error.response) {
+            const retry_after = (_b = error.response) === null || _b === void 0 ? void 0 : _b.headers['retry-after'];
+            if (retry_after) {
+                return retry_after;
+            }
+        }
+        // Exponential backoff algorithm: 1 * 2 ^ retryCount * 1000ms
+        return axios_retry_1.default.exponentialDelay(retryCount, error, 1000);
+    },
+    retryCondition: (error) => {
+        var _a;
+        return (axios_retry_1.default.isNetworkOrIdempotentRequestError(error) ||
+            ((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 429);
+    },
+    onMaxRetryTimesExceeded(error, retryCount) {
+        var _a;
+        console.log(`Max retries attempted: ${retryCount}`);
+        (_a = error.config) === null || _a === void 0 ? true : delete _a.headers.Authorization;
+        delete error.request._header;
+    },
+});
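A rough TypeScript rendering of the compiled client above, for readers who prefer source form; this is a sketch, not the package's actual source file. It assumes axios and axios-retry as direct dependencies and keeps the same behaviour: up to 5 retries, honouring the Retry-After header when the response carries one (the compiled client returns the header value as-is), exponential backoff otherwise, retrying network errors, idempotent-request failures and HTTP 429, and dropping Authorization data once the retry budget is exhausted.

import axios from 'axios';
import axiosRetry from 'axios-retry';

const axiosDevRevClient = axios.create();

axiosRetry(axiosDevRevClient, {
  retries: 5,
  retryDelay: (retryCount, error) => {
    console.warn(`Retry attempt: ${retryCount} to url: ${error.config?.url}.`);
    // Rate-limited responses tell us how long to wait.
    const retryAfter = error.response?.headers['retry-after'];
    if (retryAfter) {
      return Number(retryAfter);
    }
    // Exponential backoff algorithm: 1 * 2 ^ retryCount * 1000ms
    return axiosRetry.exponentialDelay(retryCount, error, 1000);
  },
  retryCondition: (error) =>
    axiosRetry.isNetworkOrIdempotentRequestError(error) ||
    error.response?.status === 429,
  onMaxRetryTimesExceeded: (error, retryCount) => {
    console.log(`Max retries attempted: ${retryCount}`);
    // Keep credentials out of the error that eventually gets logged.
    if (error.config?.headers) {
      delete error.config.headers.Authorization;
    }
  },
});

export { axiosDevRevClient };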
package/dist/repo/repo.js
CHANGED
@@ -48,7 +48,7 @@ class Repo {
         }
         // Add the new records to the items array
         this.items.push(...recordsToPush);
-        console.
+        console.info(`Extracted ${recordsToPush.length} new items of type ${this.itemType}. Total number of items in repo: ${this.items.length}.`);
         // Upload in batches while the number of items exceeds the batch size
         while (this.items.length >= constants_1.ARTIFACT_BATCH_SIZE) {
             // Slice out a batch of ARTIFACT_BATCH_SIZE items to upload
package/dist/types/extraction.d.ts
CHANGED

@@ -1,7 +1,9 @@
 import { InputData } from '@devrev/typescript-sdk/dist/snap-ins';
 import { Artifact } from '../uploader/uploader.interfaces';
 import { ErrorRecord } from './common';
-import { DonV2, LoaderReport } from './loading';
+import { DonV2, LoaderReport, RateLimited } from './loading';
+import { NormalizedAttachment } from 'repo/repo.interfaces';
+import { AxiosResponse } from 'axios';
 /**
  * EventType is an enum that defines the different types of events that can be sent to the external extractor from ADaaS.
  * The external extractor can use these events to know what to do next in the extraction process.
@@ -218,3 +220,18 @@ export interface LoaderEvent {
     event_context: EventContext;
     event_data?: EventData;
 }
+export type ExternalSystemAttachmentStreamingFunction = ({ item, event, }: ExternalSystemAttachmentStreamingParams) => Promise<ExternalSystemAttachmentStreamingResponse>;
+export interface ExternalSystemAttachmentStreamingParams {
+    item: NormalizedAttachment;
+    event: AirdropEvent;
+}
+export interface ExternalSystemAttachmentStreamingResponse {
+    httpStream?: AxiosResponse;
+    error?: ErrorRecord;
+    delay?: number;
+}
+export interface StreamAttachmentsResponse {
+    error?: ErrorRecord;
+    report?: LoaderReport;
+    rateLimit?: RateLimited;
+}
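These types back the inject function for streaming attachments mentioned in the v1.1.5 release notes: a connector supplies an ExternalSystemAttachmentStreamingFunction that fetches one NormalizedAttachment and hands back an HTTP stream, an error record, or a rate-limit delay. A minimal sketch of such a function follows, modelled on the default worker shown further down; it assumes axios, axiosClient, serializeAxiosError and the new type are all importable from the package root, as the v1.1.3 notes and the index.d.ts change below indicate.

import {
  axios,
  axiosClient,
  serializeAxiosError,
  ExternalSystemAttachmentStreamingFunction,
} from '@devrev/ts-adaas';

const getAttachmentStream: ExternalSystemAttachmentStreamingFunction = async ({
  item,
}) => {
  const { id, url } = item;
  try {
    // Fetch the attachment as a stream so the adapter can pipe it to DevRev.
    const httpStream = await axiosClient.get(url, { responseType: 'stream' });
    return { httpStream };
  } catch (error) {
    if (axios.isAxiosError(error)) {
      console.error('Error while fetching attachment from URL.', serializeAxiosError(error));
    } else {
      console.error('Error while fetching attachment from URL.', error);
    }
    return {
      error: { message: `Error while fetching attachment ${id} from URL.` },
    };
  }
};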
package/dist/types/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
 export { ErrorLevel, ErrorRecord, LogRecord, AdapterUpdateParams, InitialDomainMapping, } from './common';
-export { EventType, ExtractorEventType, ExtractionMode, ExternalSyncUnit, EventContextIn, EventContextOut, ConnectionData, EventData, DomainObjectState, AirdropEvent, AirdropMessage, ExtractorEvent, SyncMode, } from './extraction';
+export { EventType, ExtractorEventType, ExtractionMode, ExternalSyncUnit, EventContextIn, EventContextOut, ConnectionData, EventData, DomainObjectState, AirdropEvent, AirdropMessage, ExtractorEvent, SyncMode, ExternalSystemAttachmentStreamingParams, ExternalSystemAttachmentStreamingResponse, ExternalSystemAttachmentStreamingFunction, } from './extraction';
 export { LoaderEventType, ExternalSystemItem, ExternalSystemItemLoadingResponse, ExternalSystemItemLoadingParams, } from './loading';
 export { NormalizedItem, NormalizedAttachment, RepoInterface, } from '../repo/repo.interfaces';
 export { AdapterState } from '../state/state.interfaces';
package/dist/uploader/uploader.d.ts
CHANGED

@@ -1,4 +1,7 @@
-import {
+import { betaSDK } from '@devrev/typescript-sdk';
+import { NormalizedAttachment } from '../repo/repo.interfaces';
+import { UploadResponse, UploaderFactoryInterface } from './uploader.interfaces';
+import { AxiosResponse } from 'axios';
 export declare class Uploader {
     private event;
     private betaDevrevSdk;
@@ -14,26 +17,22 @@ export declare class Uploader {
      * or error information if there was an error
      */
     upload(itemType: string, fetchedObjects: object[] | object): Promise<UploadResponse>;
-
+    prepareArtifact(filename: string, fileType: string): Promise<betaSDK.ArtifactsPrepareResponse | void>;
     private uploadToArtifact;
-
-
-
-
-
-
-
-
-
-    attachmentsMetadataArtifactId: string;
-    }): Promise<StreamAttachmentsResponse>;
+    streamToArtifact(preparedArtifact: betaSDK.ArtifactsPrepareResponse, fileStreamResponse: any): Promise<AxiosResponse | void>;
+    getAttachmentsFromArtifactId({ artifact, }: {
+        artifact: string;
+    }): Promise<{
+        attachments?: NormalizedAttachment[];
+        error?: {
+            message: string;
+        };
+    }>;
     private getArtifactDownloadUrl;
     private downloadArtifact;
     private compressGzip;
     private decompressGzip;
     private parseJsonl;
-    private stream;
-    private getFileStreamResponse;
     getJsonObjectByArtifactId({ artifactId, isGzipped, }: {
         artifactId: string;
         isGzipped?: boolean;
package/dist/uploader/uploader.js
CHANGED

@@ -27,8 +27,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Uploader = void 0;
-const axios_client_1 = require("../http/axios-client");
 const fs_1 = __importStar(require("fs"));
+const axios_devrev_client_1 = require("../http/axios-devrev-client");
 const zlib_1 = __importDefault(require("zlib"));
 const js_jsonl_1 = require("js-jsonl");
 const form_data_1 = __importDefault(require("form-data"));
@@ -98,7 +98,7 @@ class Uploader {
             return response.data;
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Error while preparing artifact.', (0, logger_1.serializeAxiosError)(error));
             }
             else {
@@ -115,13 +115,13 @@ class Uploader {
         }
         formData.append('file', file);
         try {
-            const response = await
+            const response = await axios_devrev_client_1.axiosDevRevClient.post(preparedArtifact.url, formData, {
                 headers: Object.assign({}, formData.getHeaders()),
             });
             return response;
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Error while uploading artifact.', (0, logger_1.serializeAxiosError)(error));
             }
             else {
@@ -137,8 +137,11 @@ class Uploader {
             formData.append(field.key, field.value);
         }
         formData.append('file', fileStreamResponse.data);
+        if (fileStreamResponse.headers['content-length'] > constants_1.MAX_DEVREV_ARTIFACT_SIZE) {
+            return;
+        }
         try {
-            const response = await
+            const response = await axios_devrev_client_1.axiosDevRevClient.post(preparedArtifact.url, formData, {
                 headers: Object.assign(Object.assign({}, formData.getHeaders()), (!fileStreamResponse.headers['content-length'] && {
                     'Content-Length': constants_1.MAX_DEVREV_ARTIFACT_SIZE,
                 })),
@@ -146,7 +149,7 @@ class Uploader {
             return response;
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Error while streaming artifact.', (0, logger_1.serializeAxiosError)(error));
             }
             else {
@@ -155,17 +158,9 @@ class Uploader {
             return;
         }
     }
-
-     * Streams the attachments to the DevRev platform.
-     * The attachments are streamed to the platform and the artifact information is returned.
-     * @param {string} attachmentsMetadataArtifactId - The artifact ID of the attachments metadata
-     * @returns {Promise<UploadResponse>} - The response object containing the ssoAttachment artifact information
-     * or error information if there was an error
-     */
-    async streamAttachments({ attachmentsMetadataArtifactId, }) {
-        console.log('Started streaming attachments to the platform.');
+    async getAttachmentsFromArtifactId({ artifact, }) {
         // 1. Get the URL of the attachments metadata artifact
-        const artifactUrl = await this.getArtifactDownloadUrl(
+        const artifactUrl = await this.getArtifactDownloadUrl(artifact);
         if (!artifactUrl) {
             return {
                 error: { message: 'Error while getting artifact download URL.' },
@@ -192,23 +187,7 @@ class Uploader {
                 error: { message: 'Error while parsing jsonl object.' },
             };
         }
-
-        const ssorAttachments = [];
-        for (const attachmentMetadata of jsonObject) {
-            const { ssorAttachment, error } = await this.stream(attachmentMetadata);
-            if (error || !ssorAttachment) {
-                console.warn('Error while streaming attachment', error);
-                continue;
-            }
-            ssorAttachments.push(ssorAttachment);
-        }
-        if (!ssorAttachments.length) {
-            console.warn('No attachments were streamed to the platform.');
-            return {
-                error: { message: 'No attachments were streamed to the platform.' },
-            };
-        }
-        return { ssorAttachments };
+        return { attachments: jsonObject };
     }
     async getArtifactDownloadUrl(artifactId) {
         try {
@@ -223,13 +202,13 @@ class Uploader {
     }
     async downloadArtifact(artifactUrl) {
         try {
-            const response = await
+            const response = await axios_devrev_client_1.axiosDevRevClient.get(artifactUrl, {
                 responseType: 'arraybuffer',
             });
             return response.data;
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Error while downloading artifact from URL.', (0, logger_1.serializeAxiosError)(error));
             }
             else {
@@ -262,63 +241,6 @@ class Uploader {
             console.error('Error while parsing jsonl object.', error);
         }
     }
-    async stream(attachmentMetadata) {
-        var _a;
-        const { id: externalId, file_name: filename, url, parent_id: parentId, author_id: actorId, } = attachmentMetadata;
-        const fileStreamResponse = await this.getFileStreamResponse(url);
-        if (!fileStreamResponse) {
-            return {
-                error: { message: 'Error while fetching attachment from URL' },
-            };
-        }
-        const fileType = ((_a = fileStreamResponse.headers) === null || _a === void 0 ? void 0 : _a['content-type']) ||
-            'application/octet-stream';
-        const preparedArtifact = await this.prepareArtifact(filename, fileType);
-        if (!preparedArtifact) {
-            return {
-                error: { message: 'Error while preparing artifact.' },
-            };
-        }
-        const uploadedArtifact = await this.streamToArtifact(preparedArtifact, fileStreamResponse);
-        if (!uploadedArtifact) {
-            return {
-                error: { message: 'Error while streaming artifact.' },
-            };
-        }
-        const ssorAttachment = {
-            id: {
-                devrev: preparedArtifact.id,
-                external: externalId,
-            },
-            parent_id: {
-                external: parentId,
-            },
-            actor_id: {
-                external: actorId,
-            },
-        };
-        console.log('Successful stream of attachment: ', ssorAttachment);
-        return { ssorAttachment };
-    }
-    async getFileStreamResponse(url) {
-        try {
-            const fileStreamResponse = await axios_client_1.axiosClient.get(url, {
-                responseType: 'stream',
-                headers: {
-                    Authorization: this.event.payload.connection_data.key,
-                },
-            });
-            return fileStreamResponse;
-        }
-        catch (error) {
-            if (axios_client_1.axios.isAxiosError(error)) {
-                console.error('Error while fetching attachment from URL.', (0, logger_1.serializeAxiosError)(error));
-            }
-            else {
-                console.error('Error while fetching attachment from URL.', error);
-            }
-        }
-    }
     async getJsonObjectByArtifactId({ artifactId, isGzipped = false, }) {
         const artifactUrl = await this.getArtifactDownloadUrl(artifactId);
         if (!artifactUrl) {
package/dist/workers/default-workers/attachments-extraction.js
CHANGED

@@ -1,42 +1,43 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const index_1 = require("../../index");
-const
-const
-{
-
-
-
+const axios_client_1 = require("../../http/axios-client");
+const getAttachmentStream = async ({ item, }) => {
+    const { id, url } = item;
+    try {
+        const fileStreamResponse = await axios_client_1.axiosClient.get(url, {
+            responseType: 'stream',
+        });
+        return { httpStream: fileStreamResponse };
+    }
+    catch (error) {
+        if (axios_client_1.axios.isAxiosError(error)) {
+            console.error('Error while fetching attachment from URL.', (0, index_1.serializeAxiosError)(error));
+        }
+        else {
+            console.error('Error while fetching attachment from URL.', error);
+        }
+        return {
+            error: {
+                message: 'Error while fetching attachment ' + id + ' from URL.',
+            },
+        };
+    }
+};
 (0, index_1.processTask)({
     task: async ({ adapter }) => {
-
-
-        adapter.state.toDevRev.attachmentsMetadata.artifactIds.length === 0) {
-            console.log('No attachments to extract, skipping.');
-            await adapter.emit(index_1.ExtractorEventType.ExtractionAttachmentsDone);
-            return;
-        }
-        adapter.initializeRepos(repos);
-        const uploader = new uploader_1.Uploader({
-            event: adapter.event,
-            options: adapter.options,
+        const { error, delay } = await adapter.streamAttachments({
+            stream: getAttachmentStream,
         });
-
-
-
+        if (delay) {
+            await adapter.emit(index_1.ExtractorEventType.ExtractionAttachmentsDelay, {
+                delay,
+            });
+        }
+        else if (error) {
+            await adapter.emit(index_1.ExtractorEventType.ExtractionAttachmentsError, {
+                error,
             });
-        if (error || !ssorAttachments) {
-            await adapter.emit(index_1.ExtractorEventType.ExtractionAttachmentsError, {
-                error,
-            });
-            return;
-        }
-        await ((_c = adapter.getRepo('ssor_attachment')) === null || _c === void 0 ? void 0 : _c.push(ssorAttachments));
-        (_d = adapter.state.toDevRev) === null || _d === void 0 ? void 0 : _d.attachmentsMetadata.artifactIds.shift();
-        adapter.state.toDevRev.attachmentsMetadata.lastProcessed = 0;
-        if (((_e = adapter.state.toDevRev) === null || _e === void 0 ? void 0 : _e.attachmentsMetadata.artifactIds.length) === 0) {
-            break;
-        }
         }
         await adapter.emit(index_1.ExtractorEventType.ExtractionAttachmentsDone);
     },
package/dist/workers/worker-adapter.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import { AirdropEvent, ExtractorEventType, EventData } from '../types/extraction';
+import { AirdropEvent, ExtractorEventType, EventData, ExternalSystemAttachmentStreamingFunction } from '../types/extraction';
 import { LoaderEventType } from '../types/loading';
 import { AdapterState } from '../state/state.interfaces';
 import { Artifact } from '../uploader/uploader.interfaces';
@@ -58,4 +58,28 @@ export declare class WorkerAdapter<ConnectorState> {
         item: ExternalSystemItem;
         itemTypeToLoad: ItemTypeToLoad;
     }): Promise<LoadItemResponse>;
+    /**
+     * Streams the attachments to the DevRev platform.
+     * The attachments are streamed to the platform and the artifact information is returned.
+     * @param {string} attachmentsMetadataArtifactId - The artifact ID of the attachments metadata
+     * @returns {Promise<UploadResponse>} - The response object containing the ssoAttachment artifact information
+     * or error information if there was an error
+     */
+    streamAttachments({ stream, }: {
+        stream: ExternalSystemAttachmentStreamingFunction;
+    }): Promise<{
+        report: {};
+        error?: undefined;
+        delay?: undefined;
+    } | {
+        error: {
+            message: string;
+        };
+        report?: undefined;
+        delay?: undefined;
+    } | {
+        delay: number;
+        report?: undefined;
+        error?: undefined;
+    }>;
 }
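For connectors that want to plug their own streaming function into the new streamAttachments method, here is a source-level sketch of an attachments-extraction worker. It mirrors the compiled default worker shown earlier; getAttachmentStream is the function from the previous sketch, and processTask and ExtractorEventType are existing exports of the package. A complete worker would also handle the lambda-timeout case, which this sketch omits.

import { processTask, ExtractorEventType } from '@devrev/ts-adaas';

processTask({
  task: async ({ adapter }) => {
    // Hand the inject function to the adapter; it iterates over the attachments
    // metadata artifacts tracked in adapter.state and uploads each stream.
    const { error, delay } = await adapter.streamAttachments({
      stream: getAttachmentStream,
    });
    if (delay) {
      // Rate-limited by the external system: ask the platform to retry later.
      await adapter.emit(ExtractorEventType.ExtractionAttachmentsDelay, { delay });
    } else if (error) {
      await adapter.emit(ExtractorEventType.ExtractionAttachmentsError, { error });
    }
    await adapter.emit(ExtractorEventType.ExtractionAttachmentsDone);
  },
});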
package/dist/workers/worker-adapter.js
CHANGED

@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.WorkerAdapter = void 0;
 exports.createWorkerAdapter = createWorkerAdapter;
-const
+const axios_devrev_client_1 = require("../http/axios-devrev-client");
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 const constants_1 = require("../common/constants");
@@ -340,7 +340,7 @@ class WorkerAdapter {
             console.log('Updated sync mapper record', JSON.stringify(updateSyncMapperRecordResponse.data));
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Failed to update sync mapper record', (0, logger_1.serializeAxiosError)(error));
                 return {
                     error: {
@@ -385,7 +385,7 @@ class WorkerAdapter {
             // Update mapper (optional)
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                     // Create item
                     const { id, delay, error } = await itemTypeToLoad.create({
@@ -411,7 +411,7 @@ class WorkerAdapter {
             };
         }
         catch (error) {
-            if (
+            if (axios_devrev_client_1.axios.isAxiosError(error)) {
                 console.error('Failed to create sync mapper record', (0, logger_1.serializeAxiosError)(error));
                 return {
                     error: {
@@ -461,5 +461,93 @@ class WorkerAdapter {
         };
     }
     }
+    /**
+     * Streams the attachments to the DevRev platform.
+     * The attachments are streamed to the platform and the artifact information is returned.
+     * @param {string} attachmentsMetadataArtifactId - The artifact ID of the attachments metadata
+     * @returns {Promise<UploadResponse>} - The response object containing the ssoAttachment artifact information
+     * or error information if there was an error
+     */
+    async streamAttachments({ stream, }) {
+        var _a, _b, _c, _d, _e, _f;
+        const repos = [
+            {
+                itemType: 'ssor_attachment',
+            },
+        ];
+        this.initializeRepos(repos);
+        for (const attachmentsMetadataArtifactId of ((_a = this.state.toDevRev) === null || _a === void 0 ? void 0 : _a.attachmentsMetadata.artifactIds) || []) {
+            if (((_b = this.state.toDevRev) === null || _b === void 0 ? void 0 : _b.attachmentsMetadata.artifactIds.length) === 0) {
+                return { report: {} };
+            }
+            console.log('Started streaming attachments to the platform.');
+            const { attachments, error } = await this.uploader.getAttachmentsFromArtifactId({
+                artifact: attachmentsMetadataArtifactId,
+            });
+            if (error) {
+                return { error };
+            }
+            if (attachments) {
+                const attachmentsToProcess = attachments.slice((_d = (_c = this.state.toDevRev) === null || _c === void 0 ? void 0 : _c.attachmentsMetadata) === null || _d === void 0 ? void 0 : _d.lastProcessed, attachments.length);
+                for (const attachment of attachmentsToProcess) {
+                    const { httpStream, delay, error } = await stream({
+                        item: attachment,
+                        event: this.event,
+                    });
+                    if (error) {
+                        console.warn('Error while streaming attachment', error === null || error === void 0 ? void 0 : error.message);
+                        continue;
+                    }
+                    else if (delay) {
+                        return { delay };
+                    }
+                    if (httpStream) {
+                        const fileType = ((_e = httpStream.headers) === null || _e === void 0 ? void 0 : _e['content-type']) ||
+                            'application/octet-stream';
+                        const preparedArtifact = await this.uploader.prepareArtifact(attachment.file_name, fileType);
+                        if (!preparedArtifact) {
+                            console.warn('Error while preparing artifact for attachment ID ' +
+                                attachment.id +
+                                '. Skipping attachment');
+                            if (this.state.toDevRev) {
+                                this.state.toDevRev.attachmentsMetadata.lastProcessed++;
+                            }
+                            continue;
+                        }
+                        const uploadedArtifact = await this.uploader.streamToArtifact(preparedArtifact, httpStream);
+                        if (!uploadedArtifact) {
+                            console.warn('Error while preparing artifact for attachment ID ' +
+                                attachment.id);
+                            if (this.state.toDevRev) {
+                                this.state.toDevRev.attachmentsMetadata.lastProcessed++;
+                            }
+                            continue;
+                        }
+                        const ssorAttachment = {
+                            id: {
+                                devrev: preparedArtifact.id,
+                                external: attachment.id,
+                            },
+                            parent_id: {
+                                external: attachment.parent_id,
+                            },
+                            actor_id: {
+                                external: attachment.author_id,
+                            },
+                        };
+                        await ((_f = this.getRepo('ssor_attachment')) === null || _f === void 0 ? void 0 : _f.push([ssorAttachment]));
+                        if (this.state.toDevRev) {
+                            this.state.toDevRev.attachmentsMetadata.lastProcessed++;
+                        }
+                    }
+                }
+            }
+            if (this.state.toDevRev) {
+                this.state.toDevRev.attachmentsMetadata.artifactIds.shift();
+                this.state.toDevRev.attachmentsMetadata.lastProcessed = 0;
+            }
+        }
+        return { report: {} };
+    }
 }
 exports.WorkerAdapter = WorkerAdapter;

package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@devrev/ts-adaas",
-  "version": "1.1.
-  "description": "
+  "version": "1.1.6",
+  "description": "DevRev ADaaS (AirDrop-as-a-Service) Typescript SDK.",
   "type": "commonjs",
   "main": "./dist/index.js",
   "typings": "./dist/index.d.ts",