@devrev/ts-adaas 1.1.1 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +76 -12
- package/dist/common/control-protocol.js +4 -9
- package/dist/common/helpers.d.ts +1 -0
- package/dist/common/helpers.js +15 -0
- package/dist/common/install-initial-domain-mapping.js +7 -10
- package/dist/deprecated/adapter/index.js +1 -3
- package/dist/http/axios-client.d.ts +3 -0
- package/dist/http/axios-client.js +41 -0
- package/dist/http/client.d.ts +4 -0
- package/dist/http/client.js +4 -0
- package/dist/http/index.d.ts +1 -0
- package/dist/http/index.js +1 -0
- package/dist/http/types.d.ts +4 -0
- package/dist/logger/logger.d.ts +0 -1
- package/dist/logger/logger.js +3 -3
- package/dist/mappers/mappers.js +4 -7
- package/dist/repo/repo.js +5 -3
- package/dist/repo/repo.test.js +15 -3
- package/dist/state/state.js +5 -8
- package/dist/types/extraction.d.ts +31 -3
- package/dist/uploader/uploader.js +10 -10
- package/dist/workers/worker-adapter.js +6 -8
- package/package.json +3 -2
package/README.md
CHANGED
@@ -2,6 +2,18 @@
 
 ## Release Notes
 
+### v1.1.3
+
+- Exported `axios` and `axiosClient` with an exponential backoff retry mechanism for HTTP requests, omitting Authorization headers from Axios errors.
+- Resolved issues with circular structure logging.
+- Fixed attachments metadata normalization bug.
+- Improved repository logging.
+
+#### v1.1.2
+
+- Unified incoming and outgoing event context.
+- Added `dev_oid` to logger tags.
+
 #### v1.1.1
 
 - Added default workers for loading deletion events.
@@ -54,14 +66,14 @@
 
 # Overview
 
-The ADaaS (Airdrop-as-a-Service) Library for TypeScript helps developers build Snap-ins that integrate with DevRev’s ADaaS platform. This library simplifies the workflow for handling data extraction, event-driven actions, state management, and artifact handling.
+The ADaaS (Airdrop-as-a-Service) Library for TypeScript helps developers build Snap-ins that integrate with DevRev’s ADaaS platform. This library simplifies the workflow for handling data extraction and loading, event-driven actions, state management, and artifact handling.
 
-
+It provides features such as:
 
 - Type Definitions: Structured types for ADaaS control protocol
-- Event Management: Easily emit events for different extraction phases
+- Event Management: Easily emit events for different extraction or loading phases
 - State Handling: Update and access state in real-time within tasks
-- Artifact Management: Supports batched storage of artifacts
+- Artifact Management: Supports batched storage of artifacts
 - Error & Timeout Support: Error handling and timeout management for long-running tasks
 
 # Installation
@@ -72,7 +84,22 @@ npm install @devrev/ts-adaas
 
 # Usage
 
-ADaaS Snap-ins
+ADaaS Snap-ins can import data in both directions: from external sources to DevRev and from DevRev to external sources. Both directions are composed of several phases.
+
+From external source to DevRev:
+
+- External Sync Units Extraction
+- Metadata Extraction
+- Data Extraction
+- Attachments Extraction
+
+From DevRev to external source:
+
+- Data Loading
+
+Each phase comes with unique requirements for processing tasks, as well as timeout and error handling.
+
+The ADaaS library exports `processTask` to structure the work within each phase and an `onTimeout` function to handle timeouts.
 
 ### ADaaS Snap-in Invocation
 
@@ -127,10 +154,14 @@ const run = async (events: AirdropEvent[]) => {
 export default run;
 ```
 
-## Extraction
+## Extraction
 
 The ADaaS snap-in extraction lifecycle consists of three main phases: External Sync Units Extraction, Metadata Extraction, and Data Extraction. Each phase is defined in a separate file and is responsible for fetching the respective data.
 
+The ADaaS library provides a repository management system to handle artifacts in batches: the `initializeRepos` function initializes the repositories, and the `push` function uploads the artifacts to them.
+
+State management is crucial for ADaaS Snap-ins to maintain the state of the extraction task. The `postState` function posts the state of the extraction task; the state is stored in the adapter and can be retrieved through the `adapter.state` property.
+
 ### 1. External Sync Units Extraction
 
 This phase is defined in `external-sync-units-extraction.ts` and is responsible for fetching the external sync units.
@@ -243,7 +274,7 @@ processTask({
 });
 ```
 
-
+### 4. Attachments Streaming
 
 The ADaaS library handles attachments streaming to improve efficiency and reduce complexity for developers. During the extraction phase, developers need only to provide metadata in a specific format for each attachment, and the library manages the streaming process.
 
@@ -259,12 +290,45 @@ export interface NormalizedAttachment {
 }
 ```
 
-##
+## Loading phases
 
-
+### 1. Data Loading
 
-
+This phase is defined in `data-loading.ts` and is responsible for loading the data to the external system.
+
+Loading is done by providing an ordered list of itemTypes to load and their respective create and update functions.
+
+```typescript
+processTask({
+  task: async ({ adapter }) => {
+    const { reports, processed_files } = await adapter.loadItemTypes({
+      itemTypesToLoad: [
+        {
+          itemType: 'tickets',
+          create: createTicket,
+          update: updateTicket,
+        },
+        {
+          itemType: 'conversations',
+          create: createConversation,
+          update: updateConversation,
+        },
+      ],
+    });
+
+    await adapter.emit(LoaderEventType.DataLoadingDone, {
+      reports,
+      processed_files,
+    });
+  },
+  onTimeout: async ({ adapter }) => {
+    await adapter.emit(LoaderEventType.DataLoadingProgress, {
+      reports: adapter.reports,
+      processed_files: adapter.processedFiles,
+    });
+  },
+});
+```
 
-
+The loading functions `create` and `update` load records into the external system: they denormalize the records to the external system's schema and make the HTTP calls to that system. Both loading functions must handle rate limiting for the external system and handle errors.
 
-
+Functions return the ID and modified date of the record in the external system, or a rate-limiting offset or error if the record could not be created or updated.
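For concreteness, here is a hedged sketch of what such a `create` loading function might look like. Only the `{ id, delay, error }` result fields are taken from this diff (the worker adapter below destructures exactly those); the external API URL, request payload, `modifiedDate` field name, and error mapping are illustrative assumptions.

```typescript
import axios from 'axios';

// Result shape handed back to the adapter. `id`, `delay`, and `error` mirror what
// dist/workers/worker-adapter.js destructures in this diff; `modifiedDate` is an
// assumed name for the "modified date" the README mentions.
interface LoadResult {
  id?: string;
  modifiedDate?: string;
  delay?: number; // seconds to back off when the external system rate-limits
  error?: { message: string };
}

// Hypothetical create function for the 'tickets' item type.
async function createTicket({ item }: { item: Record<string, unknown> }): Promise<LoadResult> {
  try {
    // Denormalize the DevRev record into the external system's (assumed) schema.
    const body = { subject: item['title'], description: item['body'] };
    const response = await axios.post('https://external.example.test/api/tickets', body);
    return { id: String(response.data.id), modifiedDate: response.data.updated_at };
  } catch (err) {
    if (axios.isAxiosError(err) && err.response?.status === 429) {
      // Report the rate-limit back-off instead of failing the whole run.
      return { delay: Number(err.response.headers['retry-after'] ?? 30) };
    }
    return { error: { message: err instanceof Error ? err.message : 'Unknown error' } };
  }
}
```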
package/dist/common/control-protocol.js
CHANGED

@@ -1,23 +1,18 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.emit = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 const logger_1 = require("../logger/logger");
 const emit = async ({ event, eventType, data, }) => {
     const newEvent = {
         event_type: eventType,
-        event_context:
-        sync_unit: event.payload.event_context.sync_unit_id,
-        })),
+        event_context: event.payload.event_context,
         event_data: Object.assign({}, data),
     };
     return new Promise(async (resolve, reject) => {
         console.info('Emitting event', JSON.stringify(newEvent));
         try {
-            await
+            await axios_client_1.axiosClient.post(event.payload.event_context.callback_url, Object.assign({}, newEvent), {
                 headers: {
                     Accept: 'application/json, text/plain, */*',
                     Authorization: event.context.secrets.service_account_token,
@@ -27,7 +22,7 @@ const emit = async ({ event, eventType, data, }) => {
             resolve();
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error(`Failed to emit event with event type ${eventType}.`, (0, logger_1.formatAxiosError)(error));
             }
             else {
package/dist/common/helpers.d.ts
CHANGED
package/dist/common/helpers.js
CHANGED
@@ -4,6 +4,7 @@ exports.getTimeoutErrorEventType = getTimeoutErrorEventType;
 exports.getSyncDirection = getSyncDirection;
 exports.getFilesToLoad = getFilesToLoad;
 exports.addReportToLoaderReport = addReportToLoaderReport;
+exports.getCircularReplacer = getCircularReplacer;
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 function getTimeoutErrorEventType(eventType) {
@@ -101,3 +102,17 @@ function addReportToLoaderReport({ loaderReports, report, }) {
     }
     return loaderReports;
 }
+// https://stackoverflow.com/a/53731154
+function getCircularReplacer() {
+    const seen = new WeakSet();
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    return (key, value) => {
+        if (typeof value === 'object' && value !== null) {
+            if (seen.has(value)) {
+                return;
+            }
+            seen.add(value);
+        }
+        return value;
+    };
+}
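The replacer above is what the logger now passes to `JSON.stringify` (see `dist/logger/logger.js` below), so circular structures are dropped instead of throwing. A minimal TypeScript sketch of the same idea; the `payload` object is made up:

```typescript
// Repeated object references are omitted instead of raising
// "Converting circular structure to JSON".
function getCircularReplacer() {
  const seen = new WeakSet<object>();
  return (_key: string, value: unknown) => {
    if (typeof value === 'object' && value !== null) {
      if (seen.has(value)) {
        return undefined; // omit the circular reference
      }
      seen.add(value);
    }
    return value;
  };
}

const payload: { name: string; self?: unknown } = { name: 'task' };
payload.self = payload; // circular reference

// Without the replacer this call would throw; with it, `self` is simply dropped.
console.log(JSON.stringify(payload, getCircularReplacer())); // {"name":"task"}
```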
package/dist/common/install-initial-domain-mapping.js
CHANGED

@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.installInitialDomainMapping = installInitialDomainMapping;
-const
+const axios_client_1 = require("../http/axios-client");
 const logger_1 = require("../logger/logger");
 async function installInitialDomainMapping(event, initialDomainMappingJson) {
     var _a, _b, _c, _d, _e, _f, _g;
@@ -16,7 +13,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
         return;
     }
     try {
-        const snapInResponse = await
+        const snapInResponse = await axios_client_1.axiosClient.get(devrevEndpoint + '/internal/snap-ins.get', {
             headers: {
                 Authorization: devrevToken,
             },
@@ -35,7 +32,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
     if (startingRecipeBlueprint &&
         Object.keys(startingRecipeBlueprint).length !== 0) {
         try {
-            const recipeBlueprintResponse = await
+            const recipeBlueprintResponse = await axios_client_1.axiosClient.post(`${devrevEndpoint}/internal/airdrop.recipe.blueprints.create`, Object.assign({}, startingRecipeBlueprint), {
                 headers: {
                     Authorization: devrevToken,
                 },
@@ -44,7 +41,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
             console.log('Successfully created recipe blueprint with id: ' + recipeBlueprintId);
         }
         catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                 console.error('Error while creating recipe blueprint', (0, logger_1.formatAxiosError)(error));
             }
             else {
@@ -55,7 +52,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
     try {
         // 2. Install the initial domain mappings
        const additionalMappings = initialDomainMappingJson.additional_mappings || {};
-        const initialDomainMappingInstallResponse = await
+        const initialDomainMappingInstallResponse = await axios_client_1.axiosClient.post(`${devrevEndpoint}/internal/airdrop.recipe.initial-domain-mappings.install`, Object.assign(Object.assign({ external_system_type: 'ADaaS', import_slug: importSlug, snap_in_slug: snapInSlug }, (recipeBlueprintId && {
            starting_recipe_blueprint: recipeBlueprintId,
        })), additionalMappings), {
            headers: {
@@ -66,7 +63,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
            JSON.stringify(initialDomainMappingInstallResponse.data));
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while installing initial domain mapping', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -76,7 +73,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
        }
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while fetching snap in', (0, logger_1.formatAxiosError)(error));
        }
        else {
package/dist/deprecated/adapter/index.js
CHANGED

@@ -93,9 +93,7 @@ class Adapter {
     }
     const newEvent = {
         event_type: newEventType,
-        event_context:
-        sync_unit: this.event.payload.event_context.sync_unit_id,
-        })),
+        event_context: this.event.payload.event_context,
         event_data: Object.assign({}, data),
     };
     try {
package/dist/http/axios-client.js
ADDED

@@ -0,0 +1,41 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.axiosClient = exports.axios = void 0;
+const axios_1 = __importDefault(require("axios"));
+exports.axios = axios_1.default;
+const axios_retry_1 = __importDefault(require("axios-retry"));
+const axiosClient = axios_1.default.create();
+exports.axiosClient = axiosClient;
+// Exponential backoff algorithm: Retry 3 times and there will be a delay of more than 1 * no. of retries second + random number of milliseconds between each retry.
+(0, axios_retry_1.default)(axiosClient, {
+    retries: 3,
+    retryDelay: (retryCount, error) => {
+        var _a;
+        console.log(`Retry attempt: ${retryCount} of ${(_a = error.config) === null || _a === void 0 ? void 0 : _a.url}.`);
+        return axios_retry_1.default.exponentialDelay(retryCount, error, 1000);
+    },
+    retryCondition: (error) => {
+        var _a, _b, _c, _d;
+        if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) &&
+            ((_b = error.response) === null || _b === void 0 ? void 0 : _b.status) >= 500 &&
+            ((_c = error.response) === null || _c === void 0 ? void 0 : _c.status) <= 599) {
+            return true;
+        }
+        else if (((_d = error.response) === null || _d === void 0 ? void 0 : _d.status) === 429) {
+            console.log('Rate limit exceeded. Delay: ' + error.response.headers['retry-after']);
+            return false;
+        }
+        else {
+            return false;
+        }
+    },
+    onMaxRetryTimesExceeded(error, retryCount) {
+        var _a;
+        console.log(`Max retries attempted: ${retryCount}`);
+        (_a = error.config) === null || _a === void 0 ? true : delete _a.headers.Authorization;
+        delete error.request._header;
+    },
+});
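This new compiled module is what the changelog entry "Exported `axios` and `axiosClient`" refers to. A hedged usage sketch, assuming the client is importable from the package root as the release notes suggest; the URL and token handling are illustrative:

```typescript
import { axios, axiosClient } from '@devrev/ts-adaas';

async function fetchExternalPage(url: string, token: string) {
  try {
    // 5xx responses are retried up to 3 times with exponential backoff by axiosClient.
    const response = await axiosClient.get(url, {
      headers: { Authorization: `Bearer ${token}` },
    });
    return response.data;
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // By the time the error surfaces, retries are exhausted and the
      // Authorization header has been stripped from the error config.
      console.error('Request failed', error.response?.status);
    }
    throw error;
  }
}
```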
package/dist/http/client.d.ts
CHANGED
@@ -1,6 +1,10 @@
 import { RawAxiosRequestHeaders } from 'axios';
 import { HTTPResponse } from './types';
 export declare const defaultResponse: HTTPResponse;
+/**
+ * HTTPClient class to make HTTP requests
+ * @deprecated
+ */
 export declare class HTTPClient {
     private retryAfter;
     private retryAt;
package/dist/http/client.js
CHANGED
package/dist/http/index.d.ts
CHANGED
package/dist/http/index.js
CHANGED
@@ -16,3 +16,4 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
 Object.defineProperty(exports, "__esModule", { value: true });
 __exportStar(require("./client"), exports);
 __exportStar(require("./types"), exports);
+__exportStar(require("./axios-client"), exports);
package/dist/http/types.d.ts
CHANGED
package/dist/logger/logger.d.ts
CHANGED
@@ -2,7 +2,6 @@ import { Console } from 'node:console';
 import { LoggerFactoryInterface, LogLevel, PrintableState } from './logger.interfaces';
 import { AxiosError } from 'axios';
 export declare class Logger extends Console {
-    private event;
     private options?;
     constructor({ event, options }: LoggerFactoryInterface);
     logFn(args: unknown[], level: LogLevel): void;
package/dist/logger/logger.js
CHANGED
@@ -11,15 +11,15 @@ const node_console_1 = require("node:console");
 const logger_interfaces_1 = require("./logger.interfaces");
 const node_worker_threads_1 = require("node:worker_threads");
 const workers_1 = require("../types/workers");
+const helpers_1 = require("../common/helpers");
 class Logger extends node_console_1.Console {
     constructor({ event, options }) {
         super(process.stdout, process.stderr);
-        this.event = event;
         this.options = options;
         lambda_log_1.default.options.levelKey = null;
         lambda_log_1.default.options.tagsKey = null;
         lambda_log_1.default.options.messageKey = 'message';
-        lambda_log_1.default.options.meta = Object.assign({}, event.payload.event_context);
+        lambda_log_1.default.options.meta = Object.assign(Object.assign({}, event.payload.event_context), { dev_oid: event.payload.event_context.dev_org });
     }
     logFn(args, level) {
         var _a;
@@ -35,7 +35,7 @@ class Logger extends node_console_1.Console {
         node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage({
             subject: workers_1.WorkerMessageSubject.WorkerMessageLog,
             payload: {
-                args: JSON.parse(JSON.stringify(args)),
+                args: JSON.parse(JSON.stringify(args, (0, helpers_1.getCircularReplacer)())),
                 level,
             },
         });
package/dist/mappers/mappers.js
CHANGED
@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Mappers = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 class Mappers {
     constructor({ event }) {
         this.endpoint = event.execution_metadata.devrev_endpoint;
@@ -12,7 +9,7 @@ class Mappers {
     }
     async getByTargetId(params) {
         const { sync_unit, target } = params;
-        return
+        return axios_client_1.axiosClient.get(`${this.endpoint}/internal/airdrop.sync-mapper-record.get-by-target`, {
             headers: {
                 Authorization: this.token,
             },
@@ -20,14 +17,14 @@ class Mappers {
         });
     }
     async create(params) {
-        return
+        return axios_client_1.axiosClient.post(`${this.endpoint}/internal/airdrop.sync-mapper-record.create`, params, {
             headers: {
                 Authorization: this.token,
             },
         });
     }
     async update(params) {
-        return
+        return axios_client_1.axiosClient.post(`${this.endpoint}/internal/airdrop.sync-mapper-record.update`, params, {
             headers: {
                 Authorization: this.token,
             },
package/dist/repo/repo.js
CHANGED
@@ -17,7 +17,7 @@ class Repo {
     async upload(batch) {
         const itemsToUpload = batch || this.items;
         if (itemsToUpload.length > 0) {
-            console.log(`Uploading ${itemsToUpload.length} items of type ${this.itemType}
+            console.log(`Uploading ${itemsToUpload.length} items of type ${this.itemType}.`);
             const { artifact, error } = await this.uploader.upload(this.itemType, itemsToUpload);
             if (error || !artifact) {
                 console.error('Error while uploading batch', error);
@@ -28,6 +28,7 @@ class Repo {
             if (!batch) {
                 this.items = [];
             }
+            console.log(`Uploaded ${itemsToUpload.length} items of type ${this.itemType}. Number of items left in repo: ${this.items.length}.`);
         }
         else {
             console.log(`No items to upload for type ${this.itemType}. Skipping upload.`);
@@ -38,7 +39,8 @@ class Repo {
         let recordsToPush;
         // Normalize items if needed
         if (this.normalize &&
-
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
+            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT) {
             recordsToPush = items.map((item) => this.normalize(item));
         }
         else {
@@ -46,7 +48,7 @@ class Repo {
         }
         // Add the new records to the items array
         this.items.push(...recordsToPush);
-        console.log(`Extracted ${
+        console.log(`Extracted ${recordsToPush.length} new items of type ${this.itemType}. Total number of items in repo: ${this.items.length}.`);
         // Upload in batches while the number of items exceeds the batch size
         while (this.items.length >= constants_1.ARTIFACT_BATCH_SIZE) {
             // Slice out a batch of ARTIFACT_BATCH_SIZE items to upload
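To connect the batching above to the README's `initializeRepos`/`push` description, here is a hedged sketch of how a snap-in typically feeds a repo during data extraction. `processTask`, `initializeRepos`, and `push` appear in this diff or the README; the `'users'` item type, its fields, the `getRepo` call, and the `getUsers()` helper are illustrative assumptions.

```typescript
import { processTask } from '@devrev/ts-adaas';

// Hypothetical fetch from the external system.
async function getUsers(): Promise<any[]> {
  return [{ id: 1, created_at: '2024-01-01', updated_at: '2024-01-02', name: 'Ada' }];
}

// Map an external record into the normalized shape the repo uploads.
function normalizeUser(user: any) {
  return {
    id: String(user.id),
    created_date: user.created_at,
    modified_date: user.updated_at,
    data: { name: user.name },
  };
}

processTask({
  task: async ({ adapter }) => {
    // One repo per item type; normalization is skipped automatically for
    // external_domain_metadata and ssor_attachment, as the diff above shows.
    adapter.initializeRepos([{ itemType: 'users', normalize: normalizeUser }]);

    const users = await getUsers();
    // push() normalizes and buffers items; full ARTIFACT_BATCH_SIZE batches
    // are uploaded automatically, the remainder when upload() is called.
    await adapter.getRepo('users')?.push(users);
  },
  onTimeout: async ({ adapter }) => {
    // Emit a progress event here, as in the extraction examples earlier in the README.
  },
});
```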
package/dist/repo/repo.test.js
CHANGED
@@ -43,11 +43,23 @@ describe('Repo class push method', () => {
         await repo.push(items);
         expect(normalize).not.toHaveBeenCalled();
     });
-    describe('should not normalize items if
-        it
+    describe('should not normalize items if type is "external_domain_metadata" or "ssor_attachment"', () => {
+        it('item type: external_domain_metadata', async () => {
             repo = new repo_1.Repo({
                 event: (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.ExtractionDataStart }),
-                itemType,
+                itemType: constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA,
+                normalize,
+                onUpload: jest.fn(),
+                options: {},
+            });
+            const items = (0, test_helpers_1.createItems)(10);
+            await repo.push(items);
+            expect(normalize).not.toHaveBeenCalled();
+        });
+        it('item type: ssor_attachment', async () => {
+            repo = new repo_1.Repo({
+                event: (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.ExtractionDataStart }),
+                itemType: constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT,
                 normalize,
                 onUpload: jest.fn(),
                 options: {},
package/dist/state/state.js
CHANGED
@@ -1,11 +1,8 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.State = void 0;
 exports.createAdapterState = createAdapterState;
-const
+const axios_client_1 = require("../http/axios-client");
 const extraction_1 = require("../types/extraction");
 const constants_1 = require("../common/constants");
 const logger_1 = require("../logger/logger");
@@ -59,7 +56,7 @@ class State {
     */
    async postState(state) {
        try {
-            await
+            await axios_client_1.axiosClient.post(this.workerUrl + '.update', {
                state: JSON.stringify(state || this.state),
            }, {
                headers: {
@@ -74,7 +71,7 @@ class State {
            console.log('State updated successfully to:', (0, logger_1.getPrintableState)(this.state));
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Failed to update state.', (0, logger_1.formatAxiosError)(error));
            }
            else {
@@ -93,7 +90,7 @@ class State {
            this.event.payload.event_context.sync_unit_id +
            '.');
        try {
-            const response = await
+            const response = await axios_client_1.axiosClient.post(this.workerUrl + '.get', {}, {
                headers: {
                    Authorization: this.devrevToken,
                },
@@ -107,7 +104,7 @@ class State {
            return this.state;
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error) && ((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                const state = Object.assign(Object.assign({}, initialState), this.initialSdkState);
                this.state = state;
                console.log('State not found, returning initial state. Current state:', (0, logger_1.getPrintableState)(this.state));
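The README additions describe state handling through `postState` and `adapter.state`, which sit on top of this class. A hedged sketch of the usual pattern, assuming an initial state of `{ users: {} }` is supplied when the adapter is created and that the adapter exposes `postState` directly; the `lastSyncedAt` field is illustrative:

```typescript
import { processTask } from '@devrev/ts-adaas';

// Snap-in-defined state shape; the library only persists and returns it.
interface ExtractorState {
  users: { lastSyncedAt?: string };
}

processTask({
  task: async ({ adapter }) => {
    const state = adapter.state as ExtractorState;
    const since = state.users.lastSyncedAt; // undefined on the first run (initial state)
    console.log('Extracting records modified since', since);

    // ...extract only records modified after `since`...

    state.users.lastSyncedAt = new Date().toISOString();
    await adapter.postState(); // persisted via the worker data URL, as in postState() above
  },
  onTimeout: async ({ adapter }) => {
    await adapter.postState(); // save partial progress before the worker is stopped
  },
});
```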
package/dist/types/extraction.d.ts
CHANGED

@@ -74,6 +74,7 @@ export interface ExternalSyncUnit {
 }
 /**
  * EventContextIn is an interface that defines the structure of the input event context that is sent to the external extractor from ADaaS.
+ * @deprecated
  */
 export interface EventContextIn {
     callback_url: string;
@@ -100,12 +101,39 @@ export interface EventContextIn {
 }
 /**
  * EventContextOut is an interface that defines the structure of the output event context that is sent from the external extractor to ADaaS.
+ * @deprecated
  */
 export interface EventContextOut {
     uuid: string;
     sync_run: string;
     sync_unit?: string;
 }
+/**
+ * EventContext is an interface that defines the structure of the event context that is sent to and from the external connector.
+ */
+export interface EventContext {
+    callback_url: string;
+    dev_org: string;
+    dev_org_id: string;
+    dev_user: string;
+    dev_user_id: string;
+    external_sync_unit: string;
+    external_sync_unit_id: string;
+    external_sync_unit_name: string;
+    external_system: string;
+    external_system_type: string;
+    import_slug: string;
+    mode: string;
+    request_id: string;
+    snap_in_slug: string;
+    sync_run: string;
+    sync_run_id: string;
+    sync_tier: string;
+    sync_unit: DonV2;
+    sync_unit_id: string;
+    uuid: string;
+    worker_data_url: string;
+}
 /**
  * ConnectionData is an interface that defines the structure of the connection data that is sent to the external extractor from ADaaS.
  * It contains the organization ID, organization name, key, and key type.
@@ -169,7 +197,7 @@ export interface AirdropEvent {
  */
 export interface AirdropMessage {
     connection_data: ConnectionData;
-    event_context:
+    event_context: EventContext;
     event_type: EventType;
     event_data?: EventData;
 }
@@ -179,7 +207,7 @@ export interface AirdropMessage {
  */
 export interface ExtractorEvent {
     event_type: string;
-    event_context:
+    event_context: EventContext;
     event_data?: EventData;
 }
 /**
@@ -187,6 +215,6 @@ export interface ExtractorEvent {
  */
 export interface LoaderEvent {
     event_type: string;
-    event_context:
+    event_context: EventContext;
     event_data?: EventData;
 }
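Since the same `EventContext` now travels in both directions, a handler can read sync metadata straight from the incoming event. A small sketch that uses only fields declared in the interface above:

```typescript
import { AirdropEvent } from '@devrev/ts-adaas';

// Summarize the current sync run from the unified event context.
function describeRun(event: AirdropEvent): string {
  const ctx = event.payload.event_context;
  return [
    `sync_run=${ctx.sync_run_id}`,
    `sync_unit=${ctx.sync_unit_id}`,
    `external_system=${ctx.external_system_type}`,
    `mode=${ctx.mode}`,
  ].join(' ');
}
```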
package/dist/uploader/uploader.js
CHANGED

@@ -27,7 +27,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Uploader = void 0;
-const
+const axios_client_1 = require("../http/axios-client");
 const fs_1 = __importStar(require("fs"));
 const zlib_1 = __importDefault(require("zlib"));
 const js_jsonl_1 = require("js-jsonl");
@@ -98,7 +98,7 @@ class Uploader {
         return response.data;
     }
     catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while preparing artifact.', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -115,13 +115,13 @@ class Uploader {
    }
    formData.append('file', file);
    try {
-        const response = await
+        const response = await axios_client_1.axiosClient.post(preparedArtifact.url, formData, {
            headers: Object.assign({}, formData.getHeaders()),
        });
        return response;
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while uploading artifact.', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -138,7 +138,7 @@ class Uploader {
    }
    formData.append('file', fileStreamResponse.data);
    try {
-        const response = await
+        const response = await axios_client_1.axiosClient.post(preparedArtifact.url, formData, {
            headers: Object.assign(Object.assign({}, formData.getHeaders()), (!fileStreamResponse.headers['content-length'] && {
                'Content-Length': constants_1.MAX_DEVREV_ARTIFACT_SIZE,
            })),
@@ -146,7 +146,7 @@ class Uploader {
        return response;
    }
    catch (error) {
-        if (
+        if (axios_client_1.axios.isAxiosError(error)) {
            console.error('Error while streaming artifact.', (0, logger_1.formatAxiosError)(error));
        }
        else {
@@ -223,13 +223,13 @@ class Uploader {
    }
    async downloadArtifact(artifactUrl) {
        try {
-            const response = await
+            const response = await axios_client_1.axiosClient.get(artifactUrl, {
                responseType: 'arraybuffer',
            });
            return response.data;
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Error while downloading artifact from URL.', (0, logger_1.formatAxiosError)(error));
            }
            else {
@@ -302,7 +302,7 @@ class Uploader {
    }
    async getFileStreamResponse(url) {
        try {
-            const fileStreamResponse = await
+            const fileStreamResponse = await axios_client_1.axiosClient.get(url, {
                responseType: 'stream',
                headers: {
                    Authorization: this.event.payload.connection_data.key,
@@ -311,7 +311,7 @@ class Uploader {
            return fileStreamResponse;
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Error while fetching attachment from URL.', (0, logger_1.formatAxiosError)(error));
            }
            else {
package/dist/workers/worker-adapter.js
CHANGED

@@ -1,11 +1,8 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.WorkerAdapter = void 0;
 exports.createWorkerAdapter = createWorkerAdapter;
-const
+const axios_client_1 = require("../http/axios-client");
 const extraction_1 = require("../types/extraction");
 const loading_1 = require("../types/loading");
 const constants_1 = require("../common/constants");
@@ -77,7 +74,8 @@ class WorkerAdapter {
     }
     initializeRepos(repos) {
         this.repos = repos.map((repo) => {
-            const shouldNormalize =
+            const shouldNormalize = repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
+                repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT;
             return new repo_1.Repo(Object.assign(Object.assign({ event: this.event, itemType: repo.itemType }, (shouldNormalize && { normalize: repo.normalize })), { onUpload: (artifact) => {
                 var _a;
                 this.artifacts.push(artifact);
@@ -342,7 +340,7 @@ class WorkerAdapter {
            console.log('Updated sync mapper record', JSON.stringify(updateSyncMapperRecordResponse.data));
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Failed to update sync mapper record', (0, logger_1.formatAxiosError)(error));
                return {
                    error: {
@@ -387,7 +385,7 @@ class WorkerAdapter {
            // Update mapper (optional)
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                    // Create item
                    const { id, delay, error } = await itemTypeToLoad.create({
@@ -413,7 +411,7 @@ class WorkerAdapter {
            };
        }
        catch (error) {
-            if (
+            if (axios_client_1.axios.isAxiosError(error)) {
                console.error('Failed to create sync mapper record', (0, logger_1.formatAxiosError)(error));
                return {
                    error: {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@devrev/ts-adaas",
-  "version": "1.1.
+  "version": "1.1.3",
   "description": "Typescript library containing the ADaaS(AirDrop as a Service) control protocol.",
   "type": "commonjs",
   "main": "./dist/index.js",
@@ -37,7 +37,8 @@
   },
   "dependencies": {
     "@devrev/typescript-sdk": "^1.1.27",
-    "axios": "^1.
+    "axios": "^1.7.9",
+    "axios-retry": "^4.5.0",
     "form-data": "^4.0.1",
     "js-jsonl": "^1.1.1",
     "lambda-log": "^3.1.0",