@devrev/ts-adaas 1.7.0-beta.0 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/common/install-initial-domain-mapping.js +1 -7
- package/dist/common/install-initial-domain-mapping.test.js +0 -2
- package/dist/logger/logger.d.ts +1 -1
- package/dist/logger/logger.js +5 -8
- package/dist/logger/logger.test.js +4 -4
- package/dist/state/state.js +2 -12
- package/dist/types/extraction.d.ts +4 -0
- package/dist/types/extraction.test.js +1 -1
- package/dist/uploader/uploader.js +14 -44
- package/dist/uploader/uploader.test.js +9 -9
- package/dist/workers/spawn.js +2 -13
- package/dist/workers/worker-adapter.js +15 -37
- package/package.json +1 -1
package/dist/common/install-initial-domain-mapping.js
CHANGED

@@ -42,13 +42,7 @@ async function installInitialDomainMapping(event, initialDomainMappingJson) {
          console.log('Successfully created recipe blueprint with id: ' + recipeBlueprintId);
      }
      catch (error) {
-         const errorMessage = 'Error while creating recipe blueprint.';
-         if (axios_client_1.axios.isAxiosError(error)) {
-             console.error(errorMessage, (0, logger_1.serializeAxiosError)(error));
-         }
-         else {
-             console.error(errorMessage, error);
-         }
+         console.warn('Error while creating recipe blueprint. Continuing without it.', (0, logger_1.serializeError)(error));
      }
  }
  // Install the initial domain mappings
package/dist/common/install-initial-domain-mapping.test.js
CHANGED

@@ -6,7 +6,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  const axios_1 = __importDefault(require("axios"));
  const install_initial_domain_mapping_1 = require("./install-initial-domain-mapping");
  const axios_client_1 = require("../http/axios-client");
- const logger_1 = require("../logger/logger");
  const test_helpers_1 = require("../tests/test-helpers");
  const extraction_1 = require("../types/extraction");
  // Mock dependencies
@@ -15,7 +14,6 @@ jest.mock('../http/axios-client');
  jest.mock('../logger/logger');
  const mockAxiosClient = axios_client_1.axiosClient;
  const mockIsAxiosError = axios_1.default.isAxiosError;
- const mockSerializeAxiosError = logger_1.serializeAxiosError;
  describe('installInitialDomainMapping', () => {
      // Create mock objects
      const mockEvent = (0, test_helpers_1.createEvent)({ eventType: extraction_1.EventType.ExtractionDataStart });
package/dist/logger/logger.d.ts
CHANGED

@@ -17,7 +17,7 @@ export declare function getPrintableState(state: Record<string, any>): PrintableState;
   * @deprecated
   */
  export declare function formatAxiosError(error: AxiosError): object;
- export declare const serializeError: (error: unknown) =>
+ export declare const serializeError: (error: unknown) => unknown;
  export declare function serializeAxiosError(error: AxiosError): {
      config: {
          method: string | undefined;
package/dist/logger/logger.js
CHANGED

@@ -9,6 +9,7 @@ const node_util_1 = require("node:util");
  const logger_interfaces_1 = require("./logger.interfaces");
  const node_worker_threads_1 = require("node:worker_threads");
  const workers_1 = require("../types/workers");
+ const axios_1 = require("axios");
  class Logger extends node_console_1.Console {
      constructor({ event, options }) {
          super(process.stdout, process.stderr);
@@ -22,7 +23,7 @@ class Logger extends node_console_1.Console {
          // Use Node.js built-in inspect for everything including errors
          return (0, node_util_1.inspect)(value, {
              compact: false,
-
+             depth: Infinity,
          });
      }
      logFn(args, level) {
@@ -110,14 +111,10 @@ function formatAxiosError(error) {
      return serializeAxiosError(error);
  }
  const serializeError = (error) => {
-     let serializedError;
-     try {
-         serializedError = JSON.parse(JSON.stringify(error, Object.getOwnPropertyNames(error)));
+     if ((0, axios_1.isAxiosError)(error)) {
+         return serializeAxiosError(error);
      }
-     catch (err) {
-         console.error('Failed to serialize error object for logger', err);
-     }
-     return serializedError;
+     return error;
  };
  exports.serializeError = serializeError;
  function serializeAxiosError(error) {
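Together with the logger.d.ts change above, this rewrite changes what serializeError does: instead of round-tripping every error through JSON.stringify/JSON.parse, it now reduces Axios errors to a loggable shape via serializeAxiosError and returns everything else untouched. A minimal TypeScript sketch of the new behavior (serializeAxiosError stands in for the package's own helper and is only declared here):

    import { isAxiosError, type AxiosError } from 'axios';

    // Assumed stand-in for the package's existing helper, which extracts
    // the config/response fields of an Axios error that are safe to log.
    declare function serializeAxiosError(error: AxiosError): object;

    export const serializeError = (error: unknown): unknown => {
      // Axios errors carry request/response objects with circular references,
      // so they are reduced to a plain serializable summary.
      if (isAxiosError(error)) {
        return serializeAxiosError(error);
      }
      // Anything else passes through; the logger's inspect() call
      // (compact: false, depth: Infinity) takes care of formatting.
      return error;
    };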
package/dist/logger/logger.test.js
CHANGED

@@ -69,7 +69,7 @@ describe('Logger', () => {
      logger.info(data);
      const expectedMessage = (0, node_util_1.inspect)(data, {
          compact: false,
-
+         depth: Infinity,
      });
      expect(mockConsoleInfo).toHaveBeenCalledWith(JSON.stringify(Object.assign(Object.assign({ message: expectedMessage }, mockEvent.payload.event_context), { dev_oid: mockEvent.payload.event_context.dev_org })));
  });
@@ -79,7 +79,7 @@ describe('Logger', () => {
      logger.info(text, data);
      const expectedDataMessage = (0, node_util_1.inspect)(data, {
          compact: false,
-
+         depth: Infinity,
      });
      expect(mockConsoleInfo).toHaveBeenCalledWith(JSON.stringify(Object.assign(Object.assign({ message: `${text} ${expectedDataMessage}` }, mockEvent.payload.event_context), { dev_oid: mockEvent.payload.event_context.dev_org })));
  });
@@ -90,7 +90,7 @@ describe('Logger', () => {
      logger.info(text1, data, text2);
      const expectedDataMessage = (0, node_util_1.inspect)(data, {
          compact: false,
-
+         depth: Infinity,
      });
      expect(mockConsoleInfo).toHaveBeenCalledWith(JSON.stringify(Object.assign(Object.assign({ message: `${text1} ${expectedDataMessage} ${text2}` }, mockEvent.payload.event_context), { dev_oid: mockEvent.payload.event_context.dev_org })));
  });
@@ -171,7 +171,7 @@ describe('Logger', () => {
      // The logger uses inspect() with formatting, not JSON.stringify()
      const expectedMessage = require('util').inspect(complexObject, {
          compact: false,
-
+         depth: Infinity,
      });
      expect(logObject.message).toBe(expectedMessage);
      expect(logObject.dev_oid).toBe(mockEvent.payload.event_context.dev_org);
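All four test updates above track a single logger change: util.inspect is now called with depth: Infinity, so deeply nested payloads are rendered in full instead of being cut off at Node's default recursion depth of 2. A small illustration of the difference:

    import { inspect } from 'node:util';

    const nested = { a: { b: { c: { d: 'value' } } } };

    // Default depth (2): the innermost object is elided as [Object].
    console.log(inspect(nested, { compact: false }));
    // New logger setting: nothing is truncated, however deep the payload.
    console.log(inspect(nested, { compact: false, depth: Infinity }));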
package/dist/state/state.js
CHANGED

@@ -36,12 +36,7 @@ async function createAdapterState({ event, initialState, initialDomainMapping, options, }) {
          }
      }
      catch (error) {
-         if (axios_client_1.axios.isAxiosError(error)) {
-             console.error('Error while installing initial domain mapping', (0, logger_1.serializeAxiosError)(error));
-         }
-         else {
-             console.error('Error while installing initial domain mapping', error);
-         }
+         console.error('Error while installing initial domain mapping.', (0, logger_1.serializeError)(error));
      }
  }
  // Set lastSyncStarted if the event type is ExtractionDataStart
@@ -108,12 +103,7 @@ class State {
          console.log('State updated successfully to', (0, logger_1.getPrintableState)(this.state));
      }
      catch (error) {
-         if (axios_client_1.axios.isAxiosError(error)) {
-             console.error('Failed to update state', (0, logger_1.serializeAxiosError)(error));
-         }
-         else {
-             console.error('Failed to update state', error);
-         }
+         console.error('Failed to update the state.', (0, logger_1.serializeError)(error));
          process.exit(1);
      }
  }
package/dist/types/extraction.d.ts
CHANGED

@@ -139,7 +139,11 @@ export interface EventContext {
      initial_sync_scope?: InitialSyncScope;
      mode: string;
      request_id: string;
+     /**
+      * @deprecated reset_extraction is deprecated and should not be used. Use reset_extract_from instead.
+      */
      reset_extraction?: boolean;
+     reset_extract_from?: boolean;
      snap_in_slug: string;
      snap_in_version_id: string;
      sync_run: string;
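reset_extraction stays on EventContext for backward compatibility but is now flagged @deprecated; new code should read reset_extract_from instead. A hedged sketch of the migration, assuming EventContext is re-exported from the package root and handleContext is a hypothetical consumer:

    import type { EventContext } from '@devrev/ts-adaas';

    function handleContext(ctx: EventContext) {
      // Before (deprecated): if (ctx.reset_extraction) { ... }
      if (ctx.reset_extract_from) {
        // Restart extraction from the configured extract_from point.
      }
    }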
package/dist/types/extraction.test.js
CHANGED

@@ -11,7 +11,7 @@ describe('EventContext type tests', () => {
  });
  it('should handle context with all optional fields', () => {
      const event = Object.assign({}, baseEvent);
-     event.payload.event_context = Object.assign(Object.assign({}, baseEvent.payload.event_context), { extract_from: '2024-01-01T00:00:00Z', initial_sync_scope: extraction_1.InitialSyncScope.TIME_SCOPED,
+     event.payload.event_context = Object.assign(Object.assign({}, baseEvent.payload.event_context), { extract_from: '2024-01-01T00:00:00Z', initial_sync_scope: extraction_1.InitialSyncScope.TIME_SCOPED, reset_extract_from: true });
      // Test with all optionals present
      expect(event).toBeDefined();
  });
package/dist/uploader/uploader.js
CHANGED

@@ -73,7 +73,7 @@ class Uploader {
          const file = this.compressGzip(js_jsonl_1.jsonl.stringify(fetchedObjects));
          if (!file) {
              return {
-                 error:
+                 error: new Error('Error while compressing jsonl object.'),
              };
          }
          const filename = itemType + '.jsonl.gz';
@@ -82,21 +82,21 @@ class Uploader {
          const preparedArtifact = await this.getArtifactUploadUrl(filename, fileType);
          if (!preparedArtifact) {
              return {
-                 error:
+                 error: new Error('Error while getting artifact upload URL.'),
              };
          }
          // Upload prepared artifact to the given url
          const uploadItemResponse = await this.uploadArtifact(preparedArtifact, file);
          if (!uploadItemResponse) {
              return {
-                 error:
+                 error: new Error('Error while uploading artifact.'),
              };
          }
          // Confirm upload
          const confirmArtifactUploadResponse = await this.confirmArtifactUpload(preparedArtifact.artifact_id);
          if (!confirmArtifactUploadResponse) {
              return {
-                 error:
+                 error: new Error('Error while confirming artifact upload.'),
              };
          }
          // Return the artifact information to the platform
@@ -121,12 +121,7 @@ class Uploader {
              return response.data;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while getting artifact upload URL.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while getting artifact upload URL.', error);
-             }
+             console.error('Error while getting artifact upload URL.', (0, logger_1.serializeError)(error));
          }
      }
      async uploadArtifact(artifact, file) {
@@ -142,12 +137,7 @@ class Uploader {
              return response;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while uploading artifact.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while uploading artifact.', error);
-             }
+             console.error('Error while uploading artifact.', (0, logger_1.serializeError)(error));
          }
      }
      async streamArtifact(artifact, fileStream) {
@@ -171,12 +161,7 @@ class Uploader {
              return response;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while streaming artifact.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while streaming artifact.', error);
-             }
+             console.error('Error while streaming artifact.', (0, logger_1.serializeError)(error));
              return;
          }
      }
@@ -192,12 +177,7 @@ class Uploader {
              return response;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while confirming artifact upload.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while confirming artifact upload.', error);
-             }
+             console.error('Error while confirming artifact upload.', (0, logger_1.serializeError)(error));
          }
      }
      async getAttachmentsFromArtifactId({ artifact, }) {
@@ -205,28 +185,28 @@ class Uploader {
          const artifactUrl = await this.getArtifactDownloadUrl(artifact);
          if (!artifactUrl) {
              return {
-                 error:
+                 error: new Error('Error while getting artifact download URL.'),
              };
          }
          // Download artifact from the URL
          const gzippedJsonlObject = await this.downloadArtifact(artifactUrl);
          if (!gzippedJsonlObject) {
              return {
-                 error:
+                 error: new Error('Error while downloading gzipped jsonl object.'),
              };
          }
          // Decompress the gzipped jsonl object
          const jsonlObject = this.decompressGzip(gzippedJsonlObject);
          if (!jsonlObject) {
              return {
-                 error:
+                 error: new Error('Error while decompressing gzipped jsonl object.'),
              };
          }
          // Parse the jsonl object to get the attachment metadata
          const jsonObject = this.parseJsonl(jsonlObject);
          if (!jsonObject) {
              return {
-                 error:
+                 error: new Error('Error while parsing jsonl object.'),
              };
          }
          return { attachments: jsonObject };
@@ -244,12 +224,7 @@ class Uploader {
              return response.data.download_url;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while getting artifact download URL.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while getting artifact download URL.', error);
-             }
+             console.error('Error while getting artifact download URL.', (0, logger_1.serializeError)(error));
          }
      }
      async downloadArtifact(artifactUrl) {
@@ -260,12 +235,7 @@ class Uploader {
              return response.data;
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.error('Error while downloading artifact from URL.', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while downloading artifact from URL.', error);
-             }
+             console.error('Error while downloading artifact from URL.', (0, logger_1.serializeError)(error));
          }
      }
      compressGzip(jsonlObject) {
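The practical effect for callers: every failure path in upload() and getAttachmentsFromArtifactId() now populates error with a genuine Error instance, so failures can be narrowed with instanceof and matched on message, as the updated tests below also do. A minimal consumer-side sketch, assuming an Uploader instance and payload already exist:

    // Sketch only: `uploader` and the fetched objects are assumed to exist.
    const response = await uploader.upload('tasks', [{ id: 1 }]);
    if (response.error instanceof Error) {
      // Messages match the constructors above, e.g. 'Error while uploading artifact.'
      console.error('Upload failed:', response.error.message);
    }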
package/dist/uploader/uploader.test.js
CHANGED

@@ -56,16 +56,17 @@ describe('Uploader Class Tests', () => {
      });
  });
  it('should handle failure in getArtifactUploadUrl', async () => {
+     var _a;
      // Mock unsuccessful response for getArtifactUploadUrl
      axios_client_1.axiosClient.get.mockResolvedValueOnce(undefined);
      const entity = 'entity';
      const fetchedObjects = [{ key: 'value' }];
      const uploadResponse = await uploader.upload(entity, fetchedObjects);
-     expect(uploadResponse).
-
-     });
+     expect(uploadResponse.error).toBeInstanceOf(Error);
+     expect((_a = uploadResponse.error) === null || _a === void 0 ? void 0 : _a.message).toBe('Error while getting artifact upload URL.');
  });
  it('should handle failure in uploadArtifact', async () => {
+     var _a;
      // Mock successful response for getArtifactUploadUrl
      axios_client_1.axiosClient.get.mockResolvedValueOnce(getArtifactUploadUrlMockResponse);
      // Mock unsuccessful response for uploadArtifact
@@ -73,11 +74,11 @@ describe('Uploader Class Tests', () => {
      const entity = 'entity';
      const fetchedObjects = [{ key: 'value' }];
      const uploadResponse = await uploader.upload(entity, fetchedObjects);
-     expect(uploadResponse).
-
-     });
+     expect(uploadResponse.error).toBeInstanceOf(Error);
+     expect((_a = uploadResponse.error) === null || _a === void 0 ? void 0 : _a.message).toBe('Error while uploading artifact.');
  });
  it('should handle failure in confirmArtifactUpload', async () => {
+     var _a;
      // Mock successful response for getArtifactUploadUrl
      axios_client_1.axiosClient.get.mockResolvedValueOnce(getArtifactUploadUrlMockResponse);
      // Mock successful response from uploadArtifact
@@ -87,8 +88,7 @@ describe('Uploader Class Tests', () => {
      const entity = 'entity';
      const fetchedObjects = [{ key: 'value' }];
      const uploadResponse = await uploader.upload(entity, fetchedObjects);
-     expect(uploadResponse).
-
-     });
+     expect(uploadResponse.error).toBeInstanceOf(Error);
+     expect((_a = uploadResponse.error) === null || _a === void 0 ? void 0 : _a.message).toBe('Error while confirming artifact upload.');
  });
  });
package/dist/workers/spawn.js
CHANGED

@@ -5,7 +5,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Spawn = void 0;
  exports.spawn = spawn;
- const axios_1 = __importDefault(require("axios"));
  const helpers_1 = require("yargs/helpers");
  const yargs_1 = __importDefault(require("yargs"));
  const extraction_1 = require("../types/extraction");
@@ -124,12 +123,7 @@ async function spawn({ event, initialState, workerPath, initialDomainMapping, options, }) {
              });
          }
          catch (error) {
-             if (axios_1.default.isAxiosError(error)) {
-                 console.error('Error while emitting event', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while emitting event', error);
-             }
+             console.error('Error while emitting event.', (0, logger_1.serializeError)(error));
          }
      }
  }
@@ -217,12 +211,7 @@ class Spawn {
              this.resolve();
          }
          catch (error) {
-             if (axios_1.default.isAxiosError(error)) {
-                 console.error('Error while emitting event', (0, logger_1.serializeAxiosError)(error));
-             }
-             else {
-                 console.error('Error while emitting event', error);
-             }
+             console.error('Error while emitting event.', (0, logger_1.serializeError)(error));
          }
      }
  }
package/dist/workers/worker-adapter.js
CHANGED

@@ -92,7 +92,7 @@ class WorkerAdapter {
      }
      // Loop through the batches of attachments
      for (let i = lastProcessedBatchIndex; i < reducedAttachments.length; i++) {
-         // Check if we hit timeout
+         // Check if we hit timeout
          if (adapter.isTimeout) {
              await (0, helpers_2.sleep)(constants_1.DEFAULT_SLEEP_DELAY_MS);
          }
@@ -263,12 +263,7 @@ class WorkerAdapter {
          node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(message);
      }
      catch (error) {
-         if (axios_client_1.axios.isAxiosError(error)) {
-             console.error(`Error while emitting event with event type: ${newEventType}`, (0, logger_1.serializeAxiosError)(error));
-         }
-         else {
-             console.error(`Unknown error while emitting event with event type: ${newEventType}`, error);
-         }
+         console.error(`Error while emitting event with event type: ${newEventType}.`, (0, logger_1.serializeError)(error));
          node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(workers_1.WorkerMessageSubject.WorkerMessageExit);
      }
  }
@@ -487,22 +482,12 @@ class WorkerAdapter {
              });
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.warn('Failed to update sync mapper record', (0, logger_1.serializeAxiosError)(error));
-                 return {
-                     error: {
-                         message: error.message,
-                     },
-                 };
-             }
-             else {
-                 console.warn('Failed to update sync mapper record', error);
-                 return {
-                     error: {
-                         message: 'Failed to update sync mapper record' + error,
-                     },
-                 };
-             }
+             console.warn('Failed to update sync mapper record.', (0, logger_1.serializeError)(error));
+             return {
+                 error: {
+                     message: 'Failed to update sync mapper record' + (0, logger_1.serializeError)(error),
+                 },
+             };
          }
      }
      return {
@@ -557,18 +542,11 @@ class WorkerAdapter {
              };
          }
          catch (error) {
-             if (axios_client_1.axios.isAxiosError(error)) {
-                 console.warn('Failed to create sync mapper record', (0, logger_1.serializeAxiosError)(error));
-                 return {
-                     error: {
-                         message: error.message,
-                     },
-                 };
-             }
-             console.warn('Failed to create sync mapper record', error);
+             console.warn('Failed to create sync mapper record.', (0, logger_1.serializeError)(error));
              return {
                  error: {
-                     message: 'Failed to create sync mapper record' + error,
+                     message: 'Failed to create sync mapper record. ' +
+                         (0, logger_1.serializeError)(error),
                  },
              };
          }
@@ -581,7 +559,7 @@ class WorkerAdapter {
              };
          }
          else {
-             console.warn('Failed to create item in external system', error);
+             console.warn('Failed to create item in external system.', (0, logger_1.serializeError)(error));
              return {
                  report: {
                      item_type: itemTypeToLoad.itemType,
@@ -591,7 +569,7 @@ class WorkerAdapter {
              }
          }
          else {
-             console.warn('Failed to get sync mapper record', (0, logger_1.serializeAxiosError)(error));
+             console.warn('Failed to get sync mapper record.', (0, logger_1.serializeError)(error));
              return {
                  error: {
                      message: error.message,
@@ -599,10 +577,10 @@ class WorkerAdapter {
              };
          }
      }
-     console.warn('Failed to get sync mapper record', error);
+     console.warn('Failed to get sync mapper record.', (0, logger_1.serializeError)(error));
      return {
          error: {
-             message: 'Failed to get sync mapper record' + error,
+             message: 'Failed to get sync mapper record. ' + (0, logger_1.serializeError)(error),
          },
      };
  }
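Stepping back, state.js, uploader.js, spawn.js, and worker-adapter.js all receive the same consolidation: the isAxiosError branch that was copied into every catch block now lives once inside serializeError. Schematically, in TypeScript (the import path is assumed for illustration):

    import { isAxiosError } from 'axios';
    // Assumed relative path to the logger module shown above.
    import { serializeError, serializeAxiosError } from './logger/logger';

    // Old pattern, duplicated at every call site:
    function logErrorOld(message: string, error: unknown) {
      if (isAxiosError(error)) {
        console.error(message, serializeAxiosError(error));
      } else {
        console.error(message, error);
      }
    }

    // New pattern: the Axios check happens once, inside serializeError.
    function logErrorNew(message: string, error: unknown) {
      console.error(message, serializeError(error));
    }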