@devrev/ts-adaas 1.1.2 → 1.1.3
This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those released versions.
- package/README.md +7 -0
- package/dist/common/control-protocol.js +3 -6
- package/dist/common/helpers.d.ts +1 -0
- package/dist/common/helpers.js +15 -0
- package/dist/common/install-initial-domain-mapping.js +7 -10
- package/dist/http/axios-client.d.ts +3 -0
- package/dist/http/axios-client.js +41 -0
- package/dist/http/client.d.ts +4 -0
- package/dist/http/client.js +4 -0
- package/dist/http/index.d.ts +1 -0
- package/dist/http/index.js +1 -0
- package/dist/http/types.d.ts +4 -0
- package/dist/logger/logger.js +2 -1
- package/dist/mappers/mappers.js +4 -7
- package/dist/repo/repo.js +5 -3
- package/dist/repo/repo.test.js +15 -3
- package/dist/state/state.js +5 -8
- package/dist/uploader/uploader.js +10 -10
- package/dist/workers/worker-adapter.js +6 -8
- package/package.json +3 -2
package/README.md
CHANGED
@@ -2,6 +2,13 @@

 ## Release Notes

+### v1.1.3
+
+- Exported `axios` and `axiosClient` with exponential backoff retry mechanism for HTTP requests and omitting Authorization headers from Axios errors.
+- Resolved issues with circular structure logging.
+- Fixed attachments metadata normalization bug.
+- Improved repository logging.
+
 #### v1.1.2

 - Unified incoming and outgoing event context.
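For orientation, here is a minimal consumer sketch of the newly exported retrying client. It assumes the package root re-exports `axios` and `axiosClient` (in this release they are exported from `dist/http`); the URL and token handling are illustrative only.

```typescript
// Hypothetical consumer snippet, not part of the package itself.
import { axios, axiosClient } from '@devrev/ts-adaas';

async function fetchWithRetries(url: string, token: string) {
  try {
    // axiosClient retries 5xx responses up to 3 times with exponential backoff.
    const response = await axiosClient.get(url, {
      headers: { Authorization: token },
    });
    return response.data;
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // Once retries are exhausted, the Authorization header has been stripped
      // from the error config, so the error object is safer to log.
      console.error('Request failed with status', error.response?.status);
    }
    throw error;
  }
}
```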
package/dist/common/control-protocol.js
CHANGED

@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.emit = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 const logger_1 = require("../logger/logger");
 const emit = async ({ event, eventType, data, }) => {
     const newEvent = {
@@ -15,7 +12,7 @@ const emit = async ({ event, eventType, data, }) => {
     return new Promise(async (resolve, reject) => {
         console.info('Emitting event', JSON.stringify(newEvent));
         try {
-            await
+            await axios_client_1.axiosClient.post(event.payload.event_context.callback_url, Object.assign({}, newEvent), {
                 headers: {
                     Accept: 'application/json, text/plain, */*',
                     Authorization: event.context.secrets.service_account_token,
@@ -25,7 +22,7 @@ const emit = async ({ event, eventType, data, }) => {
             resolve();
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error(`Failed to emit event with event type ${eventType}.`, (0, logger_1.formatAxiosError)(error));
             }
             else {
package/dist/common/helpers.d.ts
CHANGED
package/dist/common/helpers.js
CHANGED
@@ -4,6 +4,7 @@ exports.getTimeoutErrorEventType = getTimeoutErrorEventType;
 exports.getSyncDirection = getSyncDirection;
 exports.getFilesToLoad = getFilesToLoad;
 exports.addReportToLoaderReport = addReportToLoaderReport;
+exports.getCircularReplacer = getCircularReplacer;
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 function getTimeoutErrorEventType(eventType) {
@@ -101,3 +102,17 @@ function addReportToLoaderReport({ loaderReports, report, }) {
     }
     return loaderReports;
 }
+// https://stackoverflow.com/a/53731154
+function getCircularReplacer() {
+    const seen = new WeakSet();
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    return (key, value) => {
+        if (typeof value === 'object' && value !== null) {
+            if (seen.has(value)) {
+                return;
+            }
+            seen.add(value);
+        }
+        return value;
+    };
+}
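This helper is what the logger change further down relies on: it lets `JSON.stringify` drop values it has already visited instead of throwing on circular structures. A small sketch of its effect (the deep-import path is an assumption; in this release the function lives in `dist/common/helpers.js`):

```typescript
// Illustrative only; the import path is a guess at where the helper is reachable.
import { getCircularReplacer } from '@devrev/ts-adaas/dist/common/helpers';

const payload: Record<string, unknown> = { name: 'loop' };
payload.self = payload; // circular reference

// Plain JSON.stringify(payload) would throw
// "TypeError: Converting circular structure to JSON".
console.log(JSON.stringify(payload, getCircularReplacer())); // {"name":"loop"}
```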
package/dist/common/install-initial-domain-mapping.js
CHANGED

@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.installInitialDomainMapping = installInitialDomainMapping;
-const
+const axios_client_1 = require("../http/axios-client");
 const logger_1 = require("../logger/logger");
 async function installInitialDomainMapping(event, initialDomainMappingJson) {
     var _a, _b, _c, _d, _e, _f, _g;
@@ -16,7 +13,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
         return;
     }
     try {
-        const snapInResponse = await
+        const snapInResponse = await axios_client_1.axiosClient.get(devrevEndpoint + '/internal/snap-ins.get', {
            headers: {
                Authorization: devrevToken,
            },
@@ -35,7 +32,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
    if (startingRecipeBlueprint &&
        Object.keys(startingRecipeBlueprint).length !== 0) {
        try {
-            const recipeBlueprintResponse = await
+            const recipeBlueprintResponse = await axios_client_1.axiosClient.post(`${devrevEndpoint}/internal/airdrop.recipe.blueprints.create`, Object.assign({}, startingRecipeBlueprint), {
                headers: {
                    Authorization: devrevToken,
                },
@@ -44,7 +41,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
            console.log('Successfully created recipe blueprint with id: ' + recipeBlueprintId);
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Error while creating recipe blueprint', (0, logger_1.formatAxiosError)(error));
            }
            else {
@@ -55,7 +52,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
    try {
        // 2. Install the initial domain mappings
        const additionalMappings = initialDomainMappingJson.additional_mappings || {};
-        const initialDomainMappingInstallResponse = await
+        const initialDomainMappingInstallResponse = await axios_client_1.axiosClient.post(`${devrevEndpoint}/internal/airdrop.recipe.initial-domain-mappings.install`, Object.assign(Object.assign({ external_system_type: 'ADaaS', import_slug: importSlug, snap_in_slug: snapInSlug }, (recipeBlueprintId && {
            starting_recipe_blueprint: recipeBlueprintId,
        })), additionalMappings), {
            headers: {
@@ -66,7 +63,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
            JSON.stringify(initialDomainMappingInstallResponse.data));
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while installing initial domain mapping', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -76,7 +73,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
        }
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while fetching snap in', (0, logger_1.formatAxiosError)(error));
        }
        else {
package/dist/http/axios-client.js
ADDED

@@ -0,0 +1,41 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.axiosClient = exports.axios = void 0;
+const axios_1 = __importDefault(require("axios"));
+exports.axios = axios_1.default;
+const axios_retry_1 = __importDefault(require("axios-retry"));
+const axiosClient = axios_1.default.create();
+exports.axiosClient = axiosClient;
+// Exponential backoff algorithm: Retry 3 times and there will be a delay of more than 1 * no. of retries second + random number of milliseconds between each retry.
+(0, axios_retry_1.default)(axiosClient, {
+    retries: 3,
+    retryDelay: (retryCount, error) => {
+        var _a;
+        console.log(`Retry attempt: ${retryCount} of ${(_a = error.config) === null || _a === void 0 ? void 0 : _a.url}.`);
+        return axios_retry_1.default.exponentialDelay(retryCount, error, 1000);
+    },
+    retryCondition: (error) => {
+        var _a, _b, _c, _d;
+        if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) &&
+            ((_b = error.response) === null || _b === void 0 ? void 0 : _b.status) >= 500 &&
+            ((_c = error.response) === null || _c === void 0 ? void 0 : _c.status) <= 599) {
+            return true;
+        }
+        else if (((_d = error.response) === null || _d === void 0 ? void 0 : _d.status) === 429) {
+            console.log('Rate limit exceeded. Delay: ' + error.response.headers['retry-after']);
+            return false;
+        }
+        else {
+            return false;
+        }
+    },
+    onMaxRetryTimesExceeded(error, retryCount) {
+        var _a;
+        console.log(`Max retries attempted: ${retryCount}`);
+        (_a = error.config) === null || _a === void 0 ? true : delete _a.headers.Authorization;
+        delete error.request._header;
+    },
+});
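For readers who prefer source over compiled output, the new module corresponds roughly to the following TypeScript, reconstructed from the emitted JavaScript above rather than copied from the repository:

```typescript
// Approximate source sketch of dist/http/axios-client.js (reconstruction, not authoritative).
import axios from 'axios';
import axiosRetry from 'axios-retry';

const axiosClient = axios.create();

axiosRetry(axiosClient, {
  retries: 3,
  retryDelay: (retryCount, error) => {
    console.log(`Retry attempt: ${retryCount} of ${error.config?.url}.`);
    return axiosRetry.exponentialDelay(retryCount, error, 1000);
  },
  // Only 5xx responses are retried; 429 is logged but not retried.
  retryCondition: (error) => {
    const status = error.response?.status;
    if (status && status >= 500 && status <= 599) {
      return true;
    }
    if (status === 429) {
      console.log('Rate limit exceeded. Delay: ' + error.response?.headers['retry-after']);
    }
    return false;
  },
  // After the last retry, drop credentials so the surfaced error can be logged safely.
  onMaxRetryTimesExceeded: (error, retryCount) => {
    console.log(`Max retries attempted: ${retryCount}`);
    if (error.config) {
      delete error.config.headers.Authorization;
    }
    delete error.request?._header;
  },
});

export { axios, axiosClient };
```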
package/dist/http/client.d.ts
CHANGED
@@ -1,6 +1,10 @@
 import { RawAxiosRequestHeaders } from 'axios';
 import { HTTPResponse } from './types';
 export declare const defaultResponse: HTTPResponse;
+/**
+ * HTTPClient class to make HTTP requests
+ * @deprecated
+ */
 export declare class HTTPClient {
     private retryAfter;
     private retryAt;
package/dist/http/client.js
CHANGED
package/dist/http/index.d.ts
CHANGED
package/dist/http/index.js
CHANGED
@@ -16,3 +16,4 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
 Object.defineProperty(exports, "__esModule", { value: true });
 __exportStar(require("./client"), exports);
 __exportStar(require("./types"), exports);
+__exportStar(require("./axios-client"), exports);
package/dist/http/types.d.ts
CHANGED
package/dist/logger/logger.js
CHANGED
@@ -11,6 +11,7 @@ const node_console_1 = require("node:console");
 const logger_interfaces_1 = require("./logger.interfaces");
 const node_worker_threads_1 = require("node:worker_threads");
 const workers_1 = require("../types/workers");
+const helpers_1 = require("../common/helpers");
 class Logger extends node_console_1.Console {
     constructor({ event, options }) {
         super(process.stdout, process.stderr);
@@ -34,7 +35,7 @@ class Logger extends node_console_1.Console {
         node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage({
             subject: workers_1.WorkerMessageSubject.WorkerMessageLog,
             payload: {
-                args: JSON.parse(JSON.stringify(args)),
+                args: JSON.parse(JSON.stringify(args, (0, helpers_1.getCircularReplacer)())),
                 level,
             },
         });
package/dist/mappers/mappers.js
CHANGED
@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Mappers = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 class Mappers {
     constructor({ event }) {
         this.endpoint = event.execution_metadata.devrev_endpoint;
@@ -12,7 +9,7 @@ class Mappers {
     }
     async getByTargetId(params) {
         const { sync_unit, target } = params;
-        return
+        return axios_client_1.axiosClient.get(`${this.endpoint}/internal/airdrop.sync-mapper-record.get-by-target`, {
             headers: {
                 Authorization: this.token,
             },
@@ -20,14 +17,14 @@ class Mappers {
         });
     }
     async create(params) {
-        return
+        return axios_client_1.axiosClient.post(`${this.endpoint}/internal/airdrop.sync-mapper-record.create`, params, {
             headers: {
                 Authorization: this.token,
             },
         });
     }
     async update(params) {
-        return
+        return axios_client_1.axiosClient.post(`${this.endpoint}/internal/airdrop.sync-mapper-record.update`, params, {
             headers: {
                 Authorization: this.token,
             },
package/dist/repo/repo.js
CHANGED
@@ -17,7 +17,7 @@ class Repo {
     async upload(batch) {
         const itemsToUpload = batch || this.items;
         if (itemsToUpload.length > 0) {
-            console.log(`Uploading ${itemsToUpload.length} items of type ${this.itemType}
+            console.log(`Uploading ${itemsToUpload.length} items of type ${this.itemType}. `);
             const { artifact, error } = await this.uploader.upload(this.itemType, itemsToUpload);
             if (error || !artifact) {
                 console.error('Error while uploading batch', error);
@@ -28,6 +28,7 @@ class Repo {
             if (!batch) {
                 this.items = [];
             }
+            console.log(`Uploaded ${itemsToUpload.length} items of type ${this.itemType}. Number of items left in repo: ${this.items.length}.`);
         }
         else {
             console.log(`No items to upload for type ${this.itemType}. Skipping upload.`);
@@ -38,7 +39,8 @@ class Repo {
         let recordsToPush;
         // Normalize items if needed
         if (this.normalize &&
-
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT) {
             recordsToPush = items.map((item) => this.normalize(item));
         }
         else {
@@ -46,7 +48,7 @@ class Repo {
         }
         // Add the new records to the items array
         this.items.push(...recordsToPush);
-        console.log(`Extracted ${
+        console.log(`Extracted ${recordsToPush.length} new items of type ${this.itemType}. Total number of items in repo: ${this.items.length}.`);
         // Upload in batches while the number of items exceeds the batch size
         while (this.items.length >= constants_1.ARTIFACT_BATCH_SIZE) {
             // Slice out a batch of ARTIFACT_BATCH_SIZE items to upload
package/dist/repo/repo.test.js
CHANGED
@@ -43,11 +43,23 @@ describe('Repo class push method', () => {
         await repo.push(items);
         expect(normalize).not.toHaveBeenCalled();
     });
-    describe('should not normalize items if
-    it
+    describe('should not normalize items if type is "external_domain_metadata" or "ssor_attachment"', () => {
+        it('item type: external_domain_metadata', async () => {
             repo = new repo_1.Repo({
                 event: (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.ExtractionDataStart }),
-                itemType,
+                itemType: constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA,
+                normalize,
+                onUpload: jest.fn(),
+                options: {},
+            });
+            const items = (0, test_helpers_1.createItems)(10);
+            await repo.push(items);
+            expect(normalize).not.toHaveBeenCalled();
+        });
+        it('item type: ssor_attachment', async () => {
+            repo = new repo_1.Repo({
+                event: (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.ExtractionDataStart }),
+                itemType: constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT,
                 normalize,
                 onUpload: jest.fn(),
                 options: {},
package/dist/state/state.js
CHANGED
@@ -1,11 +1,8 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.State = void 0;
 exports.createAdapterState = createAdapterState;
-const
+const axios_client_1 = require("../http/axios-client");
 const extraction_1 = require("../types/extraction");
 const constants_1 = require("../common/constants");
 const logger_1 = require("../logger/logger");
@@ -59,7 +56,7 @@ class State {
      */
     async postState(state) {
         try {
-            await
+            await axios_client_1.axiosClient.post(this.workerUrl + '.update', {
                 state: JSON.stringify(state || this.state),
             }, {
                 headers: {
@@ -74,7 +71,7 @@ class State {
             console.log('State updated successfully to:', (0, logger_1.getPrintableState)(this.state));
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Failed to update state.', (0, logger_1.formatAxiosError)(error));
             }
             else {
@@ -93,7 +90,7 @@ class State {
             this.event.payload.event_context.sync_unit_id +
             '.');
         try {
-            const response = await
+            const response = await axios_client_1.axiosClient.post(this.workerUrl + '.get', {}, {
                 headers: {
                     Authorization: this.devrevToken,
                 },
@@ -107,7 +104,7 @@ class State {
             return this.state;
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error) && ((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                 const state = Object.assign(Object.assign({}, initialState), this.initialSdkState);
                 this.state = state;
                 console.log('State not found, returning initial state. Current state:', (0, logger_1.getPrintableState)(this.state));
package/dist/uploader/uploader.js
CHANGED

@@ -27,7 +27,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Uploader = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 const fs_1 = __importStar(require("fs"));
 const zlib_1 = __importDefault(require("zlib"));
 const js_jsonl_1 = require("js-jsonl");
@@ -98,7 +98,7 @@ class Uploader {
         return response.data;
     }
     catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while preparing artifact.', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -115,13 +115,13 @@ class Uploader {
         }
         formData.append('file', file);
         try {
-            const response = await
+            const response = await axios_client_1.axiosClient.post(preparedArtifact.url, formData, {
                 headers: Object.assign({}, formData.getHeaders()),
             });
             return response;
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Error while uploading artifact.', (0, logger_1.formatAxiosError)(error));
             }
             else {
@@ -138,7 +138,7 @@ class Uploader {
         }
         formData.append('file', fileStreamResponse.data);
         try {
-            const response = await
+            const response = await axios_client_1.axiosClient.post(preparedArtifact.url, formData, {
                 headers: Object.assign(Object.assign({}, formData.getHeaders()), (!fileStreamResponse.headers['content-length'] && {
                     'Content-Length': constants_1.MAX_DEVREV_ARTIFACT_SIZE,
                 })),
@@ -146,7 +146,7 @@ class Uploader {
             return response;
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Error while streaming artifact.', (0, logger_1.formatAxiosError)(error));
             }
             else {
@@ -223,13 +223,13 @@ class Uploader {
     }
     async downloadArtifact(artifactUrl) {
         try {
-            const response = await
+            const response = await axios_client_1.axiosClient.get(artifactUrl, {
                 responseType: 'arraybuffer',
             });
             return response.data;
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Error while downloading artifact from URL.', (0, logger_1.formatAxiosError)(error));
             }
             else {
@@ -302,7 +302,7 @@ class Uploader {
     }
     async getFileStreamResponse(url) {
         try {
-            const fileStreamResponse = await
+            const fileStreamResponse = await axios_client_1.axiosClient.get(url, {
                 responseType: 'stream',
                 headers: {
                     Authorization: this.event.payload.connection_data.key,
@@ -311,7 +311,7 @@ class Uploader {
             return fileStreamResponse;
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Error while fetching attachment from URL.', (0, logger_1.formatAxiosError)(error));
             }
             else {
package/dist/workers/worker-adapter.js
CHANGED

@@ -1,11 +1,8 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.WorkerAdapter = void 0;
 exports.createWorkerAdapter = createWorkerAdapter;
-const
+const axios_client_1 = require("../http/axios-client");
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 const constants_1 = require("../common/constants");
@@ -77,7 +74,8 @@ class WorkerAdapter {
     }
     initializeRepos(repos) {
         this.repos = repos.map((repo) => {
-            const shouldNormalize =
+            const shouldNormalize = repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
+                repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT;
             return new repo_1.Repo(Object.assign(Object.assign({ event: this.event, itemType: repo.itemType }, (shouldNormalize && { normalize: repo.normalize })), { onUpload: (artifact) => {
                 var _a;
                 this.artifacts.push(artifact);
@@ -342,7 +340,7 @@ class WorkerAdapter {
         console.log('Updated sync mapper record', JSON.stringify(updateSyncMapperRecordResponse.data));
     }
     catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Failed to update sync mapper record', (0, logger_1.formatAxiosError)(error));
            return {
                error: {
@@ -387,7 +385,7 @@ class WorkerAdapter {
         // Update mapper (optional)
     }
     catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                // Create item
                const { id, delay, error } = await itemTypeToLoad.create({
@@ -413,7 +411,7 @@ class WorkerAdapter {
         };
     }
     catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Failed to create sync mapper record', (0, logger_1.formatAxiosError)(error));
            return {
                error: {
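In practice this means connector code no longer needs to special-case the `external_domain_metadata` and `ssor_attachment` repos itself. A hypothetical set-up (the adapter type is declared structurally so the fragment stands alone, and the normalized item shape is illustrative, not the library's contract):

```typescript
// Sketch of repo initialization under the 1.1.3 behavior; `adapter` would
// normally be the WorkerAdapter handed to the task by the SDK's worker runner.
declare const adapter: {
  initializeRepos(repos: Array<{ itemType: string; normalize?: (item: any) => any }>): void;
};

adapter.initializeRepos([
  {
    itemType: 'users',
    // Normalized regularly, since "users" is not one of the excluded item types.
    normalize: (user: any) => ({ id: user.id, created_date: user.created_at, data: user }),
  },
  // These two item types are now pushed as-is; any normalize callback passed
  // for them is ignored by initializeRepos.
  { itemType: 'external_domain_metadata' },
  { itemType: 'ssor_attachment' },
]);
```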
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@devrev/ts-adaas",
-  "version": "1.1.2",
+  "version": "1.1.3",
   "description": "Typescript library containing the ADaaS(AirDrop as a Service) control protocol.",
   "type": "commonjs",
   "main": "./dist/index.js",
@@ -37,7 +37,8 @@
   },
   "dependencies": {
     "@devrev/typescript-sdk": "^1.1.27",
-    "axios": "^1.
+    "axios": "^1.7.9",
+    "axios-retry": "^4.5.0",
     "form-data": "^4.0.1",
     "js-jsonl": "^1.1.1",
     "lambda-log": "^3.1.0",