@devrev/airsync-sdk 2.0.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +31 -0
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts +16 -0
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.d.ts +9 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.js +2 -0
- package/dist/attachments-streaming/attachments-streaming-pool.js +97 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.d.ts +2 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.js +267 -0
- package/dist/common/constants.d.ts +25 -0
- package/dist/common/constants.d.ts.map +1 -0
- package/dist/common/constants.js +58 -0
- package/dist/common/control-protocol.d.ts +10 -0
- package/dist/common/control-protocol.d.ts.map +1 -0
- package/dist/common/control-protocol.js +31 -0
- package/dist/common/errors.d.ts +6 -0
- package/dist/common/errors.d.ts.map +1 -0
- package/dist/common/errors.js +4 -0
- package/dist/common/event-type-translation.d.ts +24 -0
- package/dist/common/event-type-translation.d.ts.map +1 -0
- package/dist/common/event-type-translation.js +117 -0
- package/dist/common/helpers.d.ts +41 -0
- package/dist/common/helpers.d.ts.map +1 -0
- package/dist/common/helpers.js +124 -0
- package/dist/common/install-initial-domain-mapping.d.ts +4 -0
- package/dist/common/install-initial-domain-mapping.d.ts.map +1 -0
- package/dist/common/install-initial-domain-mapping.js +58 -0
- package/dist/common/install-initial-domain-mapping.test.d.ts +2 -0
- package/dist/common/install-initial-domain-mapping.test.d.ts.map +1 -0
- package/dist/common/install-initial-domain-mapping.test.js +207 -0
- package/dist/deprecated/adapter/index.d.ts +62 -0
- package/dist/deprecated/adapter/index.d.ts.map +1 -0
- package/dist/deprecated/adapter/index.js +151 -0
- package/dist/deprecated/common/helpers.d.ts +7 -0
- package/dist/deprecated/common/helpers.d.ts.map +1 -0
- package/dist/deprecated/common/helpers.js +47 -0
- package/dist/deprecated/demo-extractor/external_domain_metadata.json +38 -0
- package/dist/deprecated/demo-extractor/index.d.ts +18 -0
- package/dist/deprecated/demo-extractor/index.d.ts.map +1 -0
- package/dist/deprecated/demo-extractor/index.js +161 -0
- package/dist/deprecated/http/client.d.ts +22 -0
- package/dist/deprecated/http/client.d.ts.map +1 -0
- package/dist/deprecated/http/client.js +161 -0
- package/dist/deprecated/uploader/index.d.ts +35 -0
- package/dist/deprecated/uploader/index.d.ts.map +1 -0
- package/dist/deprecated/uploader/index.js +161 -0
- package/dist/http/axios-client-internal.d.ts +3 -0
- package/dist/http/axios-client-internal.d.ts.map +1 -0
- package/dist/http/axios-client-internal.js +66 -0
- package/dist/http/axios-client-internal.test.d.ts +2 -0
- package/dist/http/axios-client-internal.test.d.ts.map +1 -0
- package/dist/http/axios-client-internal.test.js +154 -0
- package/dist/http/axios-client.d.ts +27 -0
- package/dist/http/axios-client.d.ts.map +1 -0
- package/dist/http/axios-client.js +57 -0
- package/dist/http/constants.d.ts +4 -0
- package/dist/http/constants.d.ts.map +1 -0
- package/dist/http/constants.js +6 -0
- package/dist/http/index.d.ts +3 -0
- package/dist/http/index.d.ts.map +1 -0
- package/dist/http/index.js +18 -0
- package/dist/http/types.d.ts +17 -0
- package/dist/http/types.d.ts.map +1 -0
- package/dist/http/types.js +2 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +34 -0
- package/dist/logger/logger.constants.d.ts +6 -0
- package/dist/logger/logger.constants.d.ts.map +1 -0
- package/dist/logger/logger.constants.js +13 -0
- package/dist/logger/logger.context.d.ts +58 -0
- package/dist/logger/logger.context.d.ts.map +1 -0
- package/dist/logger/logger.context.js +86 -0
- package/dist/logger/logger.d.ts +89 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.interfaces.d.ts +43 -0
- package/dist/logger/logger.interfaces.d.ts.map +1 -0
- package/dist/logger/logger.interfaces.js +9 -0
- package/dist/logger/logger.js +196 -0
- package/dist/logger/logger.test.d.ts +2 -0
- package/dist/logger/logger.test.d.ts.map +1 -0
- package/dist/logger/logger.test.js +490 -0
- package/dist/mappers/mappers.d.ts +52 -0
- package/dist/mappers/mappers.d.ts.map +1 -0
- package/dist/mappers/mappers.interface.d.ts +294 -0
- package/dist/mappers/mappers.interface.d.ts.map +1 -0
- package/dist/mappers/mappers.interface.js +48 -0
- package/dist/mappers/mappers.js +83 -0
- package/dist/mappers/mappers.test.d.ts +2 -0
- package/dist/mappers/mappers.test.d.ts.map +1 -0
- package/dist/mappers/mappers.test.js +107 -0
- package/dist/multithreading/create-worker.d.ts +5 -0
- package/dist/multithreading/create-worker.d.ts.map +1 -0
- package/dist/multithreading/create-worker.js +28 -0
- package/dist/multithreading/create-worker.test.d.ts +2 -0
- package/dist/multithreading/create-worker.test.d.ts.map +1 -0
- package/dist/multithreading/create-worker.test.js +89 -0
- package/dist/multithreading/process-task.d.ts +3 -0
- package/dist/multithreading/process-task.d.ts.map +1 -0
- package/dist/multithreading/process-task.js +58 -0
- package/dist/multithreading/spawn/spawn.d.ts +30 -0
- package/dist/multithreading/spawn/spawn.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.d.ts +21 -0
- package/dist/multithreading/spawn/spawn.helpers.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.js +114 -0
- package/dist/multithreading/spawn/spawn.helpers.test.d.ts +2 -0
- package/dist/multithreading/spawn/spawn.helpers.test.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.test.js +293 -0
- package/dist/multithreading/spawn/spawn.js +249 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.js +127 -0
- package/dist/multithreading/worker-adapter/worker-adapter.d.ts +91 -0
- package/dist/multithreading/worker-adapter/worker-adapter.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.d.ts +22 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.js +64 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.js +514 -0
- package/dist/multithreading/worker-adapter/worker-adapter.js +747 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.js +483 -0
- package/dist/multithreading/worker.d.ts +2 -0
- package/dist/multithreading/worker.d.ts.map +1 -0
- package/dist/multithreading/worker.js +9 -0
- package/dist/repo/repo.d.ts +18 -0
- package/dist/repo/repo.d.ts.map +1 -0
- package/dist/repo/repo.interfaces.d.ts +46 -0
- package/dist/repo/repo.interfaces.d.ts.map +1 -0
- package/dist/repo/repo.interfaces.js +2 -0
- package/dist/repo/repo.js +75 -0
- package/dist/repo/repo.test.d.ts +2 -0
- package/dist/repo/repo.test.d.ts.map +1 -0
- package/dist/repo/repo.test.js +131 -0
- package/dist/state/state.d.ts +30 -0
- package/dist/state/state.d.ts.map +1 -0
- package/dist/state/state.interfaces.d.ts +51 -0
- package/dist/state/state.interfaces.d.ts.map +1 -0
- package/dist/state/state.interfaces.js +21 -0
- package/dist/state/state.js +166 -0
- package/dist/state/state.test.d.ts +2 -0
- package/dist/state/state.test.d.ts.map +1 -0
- package/dist/state/state.test.js +224 -0
- package/dist/types/common.d.ts +50 -0
- package/dist/types/common.d.ts.map +1 -0
- package/dist/types/common.js +25 -0
- package/dist/types/extraction.d.ts +417 -0
- package/dist/types/extraction.d.ts.map +1 -0
- package/dist/types/extraction.js +170 -0
- package/dist/types/extraction.test.d.ts +2 -0
- package/dist/types/extraction.test.d.ts.map +1 -0
- package/dist/types/extraction.test.js +70 -0
- package/dist/types/index.d.ts +9 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +18 -0
- package/dist/types/loading.d.ts +147 -0
- package/dist/types/loading.d.ts.map +1 -0
- package/dist/types/loading.js +48 -0
- package/dist/types/workers.d.ts +161 -0
- package/dist/types/workers.d.ts.map +1 -0
- package/dist/types/workers.js +22 -0
- package/dist/uploader/uploader.d.ts +92 -0
- package/dist/uploader/uploader.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.d.ts +33 -0
- package/dist/uploader/uploader.helpers.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.js +139 -0
- package/dist/uploader/uploader.helpers.test.d.ts +2 -0
- package/dist/uploader/uploader.helpers.test.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.test.js +267 -0
- package/dist/uploader/uploader.interfaces.d.ts +95 -0
- package/dist/uploader/uploader.interfaces.d.ts.map +1 -0
- package/dist/uploader/uploader.interfaces.js +2 -0
- package/dist/uploader/uploader.js +305 -0
- package/dist/uploader/uploader.test.d.ts +2 -0
- package/dist/uploader/uploader.test.d.ts.map +1 -0
- package/dist/uploader/uploader.test.js +589 -0
- package/package.json +60 -0
|
@@ -0,0 +1,747 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.WorkerAdapter = void 0;
|
|
7
|
+
exports.createWorkerAdapter = createWorkerAdapter;
|
|
8
|
+
const axios_1 = __importDefault(require("axios"));
|
|
9
|
+
const node_worker_threads_1 = require("node:worker_threads");
|
|
10
|
+
const attachments_streaming_pool_1 = require("../../attachments-streaming/attachments-streaming-pool");
|
|
11
|
+
const constants_1 = require("../../common/constants");
|
|
12
|
+
const control_protocol_1 = require("../../common/control-protocol");
|
|
13
|
+
const worker_adapter_helpers_1 = require("./worker-adapter.helpers");
|
|
14
|
+
const logger_1 = require("../../logger/logger");
|
|
15
|
+
const logger_context_1 = require("../../logger/logger.context");
|
|
16
|
+
const mappers_1 = require("../../mappers/mappers");
|
|
17
|
+
const mappers_interface_1 = require("../../mappers/mappers.interface");
|
|
18
|
+
const repo_1 = require("../../repo/repo");
|
|
19
|
+
const extraction_1 = require("../../types/extraction");
|
|
20
|
+
const loading_1 = require("../../types/loading");
|
|
21
|
+
const workers_1 = require("../../types/workers");
|
|
22
|
+
const uploader_1 = require("../../uploader/uploader");
|
|
23
|
+
const event_type_translation_1 = require("../../common/event-type-translation");
|
|
24
|
+
/**
 * Factory helper: builds a WorkerAdapter from the given platform event,
 * adapter state wrapper and optional configuration.
 *
 * @param event - The event object received from the platform
 * @param adapterState - The state wrapper backing the adapter
 * @param options - Optional adapter configuration
 * @returns A new WorkerAdapter instance
 */
function createWorkerAdapter({ event, adapterState, options, }) {
    const adapterArgs = { event, adapterState, options };
    return new WorkerAdapter(adapterArgs);
}
|
|
31
|
+
/**
|
|
32
|
+
* WorkerAdapter class is used to interact with Airdrop platform. It is passed to the snap-in
|
|
33
|
+
* as parameter in processTask and onTimeout functions. The class provides
|
|
34
|
+
* utilities to emit control events to the platform, update the state of the connector,
|
|
35
|
+
* and upload artifacts to the platform.
|
|
36
|
+
* @class WorkerAdapter
|
|
37
|
+
* @constructor
|
|
38
|
+
* @param options - The options to create a new instance of WorkerAdapter class
|
|
39
|
+
* @param event - The event object received from the platform
|
|
40
|
+
* @param initialState - The initial state of the adapter
|
|
41
|
+
* @param isLocalDevelopment - A flag to indicate if the adapter is being used in local development
|
|
42
|
+
* @param workerPath - The path to the worker file
|
|
43
|
+
*
|
|
44
|
+
* @public
|
|
45
|
+
*/
|
|
46
|
+
class WorkerAdapter {
|
|
47
|
+
constructor({ event, adapterState, options, }) {
|
|
48
|
+
this.repos = [];
|
|
49
|
+
this.event = event;
|
|
50
|
+
this.options = options;
|
|
51
|
+
this.adapterState = adapterState;
|
|
52
|
+
this._artifacts = [];
|
|
53
|
+
this.hasWorkerEmitted = false;
|
|
54
|
+
this.isTimeout = false;
|
|
55
|
+
// Loader
|
|
56
|
+
this.loaderReports = [];
|
|
57
|
+
this._processedFiles = [];
|
|
58
|
+
this._mappers = new mappers_1.Mappers({
|
|
59
|
+
event,
|
|
60
|
+
options,
|
|
61
|
+
});
|
|
62
|
+
this.uploader = new uploader_1.Uploader({
|
|
63
|
+
event,
|
|
64
|
+
options,
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
get state() {
|
|
68
|
+
return this.adapterState.state;
|
|
69
|
+
}
|
|
70
|
+
set state(value) {
|
|
71
|
+
if (!this.isTimeout) {
|
|
72
|
+
this.adapterState.state = value;
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
/** Loader reports accumulated so far during this run. */
get reports() {
    return this.loaderReports;
}
|
|
78
|
+
/** IDs of the loader files fully processed so far. */
get processedFiles() {
    return this._processedFiles;
}
|
|
81
|
+
/** Mappers client constructed from this adapter's event and options. */
get mappers() {
    return this._mappers;
}
|
|
84
|
+
/**
 * Creates a Repo instance for each requested item type and stores them on
 * the adapter for later retrieval via getRepo()/uploadAllRepos().
 *
 * @param repos - Item-type descriptors ({ itemType, normalize? })
 */
initializeRepos(repos) {
    this.repos = repos.map((repo) => {
        // External-domain-metadata and SSOR-attachment repos get no
        // normalize callback; all other item types keep theirs.
        const shouldNormalize = repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
            repo.itemType !== constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT;
        return new repo_1.Repo(Object.assign(Object.assign({ event: this.event, itemType: repo.itemType }, (shouldNormalize && { normalize: repo.normalize })), { onUpload: (artifact) => {
                var _a;
                // We need to store artifacts ids in state for later use when streaming attachments
                if (repo.itemType === constants_1.AIRDROP_DEFAULT_ITEM_TYPES.ATTACHMENTS) {
                    (_a = this.state.toDevRev) === null || _a === void 0 ? void 0 : _a.attachmentsMetadata.artifactIds.push(artifact.id);
                }
            }, options: this.options }));
    });
}
|
|
97
|
+
getRepo(itemType) {
|
|
98
|
+
return (0, logger_context_1.runWithSdkLogContext)(() => {
|
|
99
|
+
const repo = this.repos.find((repo) => repo.itemType === itemType);
|
|
100
|
+
if (!repo) {
|
|
101
|
+
console.error(`Repo for item type ${itemType} not found.`);
|
|
102
|
+
return;
|
|
103
|
+
}
|
|
104
|
+
return repo;
|
|
105
|
+
});
|
|
106
|
+
}
|
|
107
|
+
/** Persists the current adapter state to the platform. */
async postState() {
    return (0, logger_context_1.runWithSdkLogContext)(async () => {
        await this.adapterState.postState();
    });
}
|
|
112
|
+
/** Artifacts collected since the last successful emit. */
get artifacts() {
    return this._artifacts;
}
|
|
115
|
+
set artifacts(artifacts) {
|
|
116
|
+
this._artifacts = this._artifacts
|
|
117
|
+
.concat(artifacts)
|
|
118
|
+
.filter((value, index, self) => self.indexOf(value) === index);
|
|
119
|
+
}
|
|
120
|
+
/**
 * Emits an event to the platform.
 *
 * Before emitting: uploads all repos (except for the external sync units
 * done event) and saves state (except for stateless event types). Only
 * one emit per worker is honored; later calls are warned about and
 * ignored. On any failure the parent thread is told to exit.
 *
 * @param newEventType - The event type to be emitted
 * @param data - The data to be sent with the event
 */
async emit(newEventType, data) {
    return (0, logger_context_1.runWithSdkLogContext)(async () => {
        newEventType = (0, event_type_translation_1.translateOutgoingEventType)(newEventType);
        if (this.hasWorkerEmitted) {
            console.warn(`Trying to emit event with event type: ${newEventType}. Ignoring emit request because it has already been emitted.`);
            return;
        }
        // We want to upload all the repos before emitting the event, except for the external sync units done event
        if (newEventType !== extraction_1.ExtractorEventType.ExternalSyncUnitExtractionDone) {
            console.log(`Uploading all repos before emitting event with event type: ${newEventType}.`);
            try {
                await this.uploadAllRepos();
            }
            catch (error) {
                console.error('Error while uploading repos', error);
                // Ask the parent thread to exit and latch the emitted flag so
                // no further emit is attempted from this worker.
                node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(workers_1.WorkerMessageSubject.WorkerMessageExit);
                this.hasWorkerEmitted = true;
                return;
            }
        }
        // If the extraction is done, we want to save the timestamp of the last successful sync
        if (newEventType === extraction_1.ExtractorEventType.AttachmentExtractionDone) {
            console.log(`Overwriting lastSuccessfulSyncStarted with lastSyncStarted (${this.state.lastSyncStarted}).`);
            this.state.lastSuccessfulSyncStarted = this.state.lastSyncStarted;
            this.state.lastSyncStarted = '';
        }
        // We want to save the state every time we emit an event, except for the start and delete events
        if (!constants_1.STATELESS_EVENT_TYPES.includes(this.event.payload.event_type)) {
            console.log(`Saving state before emitting event with event type: ${newEventType}.`);
            try {
                await this.adapterState.postState(this.state);
            }
            catch (error) {
                console.error('Error while posting state', error);
                node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(workers_1.WorkerMessageSubject.WorkerMessageExit);
                this.hasWorkerEmitted = true;
                return;
            }
        }
        try {
            await (0, control_protocol_1.emit)({
                eventType: newEventType,
                event: this.event,
                // Collected artifacts are attached only for extraction-type
                // incoming events.
                data: Object.assign(Object.assign({}, data), (constants_1.ALLOWED_EXTRACTION_EVENT_TYPES.includes(this.event.payload.event_type)
                    ? { artifacts: this.artifacts }
                    : {})),
            });
            const message = {
                subject: workers_1.WorkerMessageSubject.WorkerMessageEmitted,
                payload: { eventType: newEventType },
            };
            // NOTE(review): the artifacts setter appends + dedupes rather than
            // replaces, so assigning [] here does not actually clear
            // _artifacts — confirm whether a true reset was intended.
            this.artifacts = [];
            node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(message);
            this.hasWorkerEmitted = true;
        }
        catch (error) {
            console.error(`Error while emitting event with event type: ${newEventType}.`, (0, logger_1.serializeError)(error));
            node_worker_threads_1.parentPort === null || node_worker_threads_1.parentPort === void 0 ? void 0 : node_worker_threads_1.parentPort.postMessage(workers_1.WorkerMessageSubject.WorkerMessageExit);
            this.hasWorkerEmitted = true;
        }
    });
}
|
|
188
|
+
/**
 * Uploads every initialized repo in sequence, recording each repo's
 * uploaded artifacts before checking for failure, so work already
 * uploaded is kept even when a later repo fails.
 *
 * @throws The first upload error encountered (remaining repos are skipped).
 */
async uploadAllRepos() {
    for (const repo of this.repos) {
        const error = await repo.upload();
        // NOTE(review): push() mutates _artifacts via the getter, bypassing
        // the de-duplicating setter — confirm duplicates cannot occur here.
        this.artifacts.push(...repo.uploadedArtifacts);
        if (error) {
            throw error;
        }
    }
}
|
|
197
|
+
/** Marks the adapter as timed out; subsequent state writes are ignored. */
handleTimeout() {
    this.isTimeout = true;
}
|
|
200
|
+
async loadItemTypes({ itemTypesToLoad, }) {
|
|
201
|
+
return (0, logger_context_1.runWithSdkLogContext)(async () => {
|
|
202
|
+
var _a;
|
|
203
|
+
if (this.event.payload.event_type === extraction_1.EventType.StartLoadingData) {
|
|
204
|
+
const itemTypes = itemTypesToLoad.map((itemTypeToLoad) => itemTypeToLoad.itemType);
|
|
205
|
+
if (!itemTypes.length) {
|
|
206
|
+
console.warn('No item types to load, returning.');
|
|
207
|
+
return {
|
|
208
|
+
reports: this.reports,
|
|
209
|
+
processed_files: this.processedFiles,
|
|
210
|
+
};
|
|
211
|
+
}
|
|
212
|
+
const filesToLoad = await this.getLoaderBatches({
|
|
213
|
+
supportedItemTypes: itemTypes,
|
|
214
|
+
});
|
|
215
|
+
this.adapterState.state.fromDevRev = {
|
|
216
|
+
filesToLoad,
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
if (!this.adapterState.state.fromDevRev ||
|
|
220
|
+
!this.adapterState.state.fromDevRev.filesToLoad.length) {
|
|
221
|
+
console.warn('No files to load, returning.');
|
|
222
|
+
return {
|
|
223
|
+
reports: this.reports,
|
|
224
|
+
processed_files: this.processedFiles,
|
|
225
|
+
};
|
|
226
|
+
}
|
|
227
|
+
console.log('Files to load in state', (_a = this.adapterState.state.fromDevRev) === null || _a === void 0 ? void 0 : _a.filesToLoad);
|
|
228
|
+
outerloop: for (const fileToLoad of this.adapterState.state.fromDevRev
|
|
229
|
+
.filesToLoad) {
|
|
230
|
+
const itemTypeToLoad = itemTypesToLoad.find((itemTypeToLoad) => itemTypeToLoad.itemType === fileToLoad.itemType);
|
|
231
|
+
if (!itemTypeToLoad) {
|
|
232
|
+
console.error(`Item type to load not found for item type: ${fileToLoad.itemType}.`);
|
|
233
|
+
await this.emit(loading_1.LoaderEventType.DataLoadingError, {
|
|
234
|
+
error: {
|
|
235
|
+
message: `Item type to load not found for item type: ${fileToLoad.itemType}.`,
|
|
236
|
+
},
|
|
237
|
+
});
|
|
238
|
+
break;
|
|
239
|
+
}
|
|
240
|
+
if (!fileToLoad.completed) {
|
|
241
|
+
const { response, error: transformerFileError } = await this.uploader.getJsonObjectByArtifactId({
|
|
242
|
+
artifactId: fileToLoad.id,
|
|
243
|
+
isGzipped: true,
|
|
244
|
+
});
|
|
245
|
+
if (transformerFileError) {
|
|
246
|
+
console.error(`Transformer file not found for artifact ID: ${fileToLoad.id}.`);
|
|
247
|
+
await this.emit(loading_1.LoaderEventType.DataLoadingError, {
|
|
248
|
+
error: {
|
|
249
|
+
message: `Transformer file not found for artifact ID: ${fileToLoad.id}.`,
|
|
250
|
+
},
|
|
251
|
+
});
|
|
252
|
+
}
|
|
253
|
+
const transformerFile = response;
|
|
254
|
+
for (let i = fileToLoad.lineToProcess; i < fileToLoad.count; i++) {
|
|
255
|
+
const { report, rateLimit } = await this.loadItem({
|
|
256
|
+
item: transformerFile[i],
|
|
257
|
+
itemTypeToLoad,
|
|
258
|
+
});
|
|
259
|
+
if (rateLimit === null || rateLimit === void 0 ? void 0 : rateLimit.delay) {
|
|
260
|
+
await this.emit(loading_1.LoaderEventType.DataLoadingDelayed, {
|
|
261
|
+
delay: rateLimit.delay,
|
|
262
|
+
reports: this.reports,
|
|
263
|
+
processed_files: this.processedFiles,
|
|
264
|
+
});
|
|
265
|
+
break outerloop;
|
|
266
|
+
}
|
|
267
|
+
if (report) {
|
|
268
|
+
(0, worker_adapter_helpers_1.addReportToLoaderReport)({
|
|
269
|
+
loaderReports: this.loaderReports,
|
|
270
|
+
report,
|
|
271
|
+
});
|
|
272
|
+
fileToLoad.lineToProcess = fileToLoad.lineToProcess + 1;
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
fileToLoad.completed = true;
|
|
276
|
+
this._processedFiles.push(fileToLoad.id);
|
|
277
|
+
}
|
|
278
|
+
}
|
|
279
|
+
return {
|
|
280
|
+
reports: this.reports,
|
|
281
|
+
processed_files: this.processedFiles,
|
|
282
|
+
};
|
|
283
|
+
});
|
|
284
|
+
}
|
|
285
|
+
/**
 * Fetches the stats file referenced by the incoming event and turns it
 * into the list of loader files for the supported item types. Returns an
 * empty list when there is no stats file, it cannot be fetched, or it is
 * empty.
 *
 * @param supportedItemTypes - Item types the loader can handle
 */
async getLoaderBatches({ supportedItemTypes, }) {
    return (0, logger_context_1.runWithSdkLogContext)(async () => {
        var _a;
        const statsFileArtifactId = (_a = this.event.payload.event_data) === null || _a === void 0 ? void 0 : _a.stats_file;
        if (statsFileArtifactId) {
            const { response, error: statsFileError } = await this.uploader.getJsonObjectByArtifactId({
                artifactId: statsFileArtifactId,
            });
            const statsFile = response;
            // NOTE(review): relies on statsFileError short-circuiting —
            // if no error is set but response is absent, statsFile.length
            // would throw. Confirm the uploader always sets one of the two.
            if (statsFileError || statsFile.length === 0) {
                return [];
            }
            const filesToLoad = (0, worker_adapter_helpers_1.getFilesToLoad)({
                supportedItemTypes,
                statsFile,
            });
            return filesToLoad;
        }
        return [];
    });
}
|
|
306
|
+
/**
 * Loads attachments into the external system using the provided create
 * callback, resuming from the per-file lineToProcess cursor stored in
 * state. On a StartLoadingAttachments event the batch list is recomputed
 * from the stats file (item type 'attachment') first.
 *
 * @param create - Callback that creates a single attachment in the external system
 * @returns The accumulated loader reports and processed file IDs
 */
async loadAttachments({ create, }) {
    return (0, logger_context_1.runWithSdkLogContext)(async () => {
        var _a, _b;
        if (this.event.payload.event_type === extraction_1.EventType.StartLoadingAttachments) {
            this.adapterState.state.fromDevRev = {
                filesToLoad: await this.getLoaderBatches({
                    supportedItemTypes: ['attachment'],
                }),
            };
        }
        if (!this.adapterState.state.fromDevRev ||
            ((_a = this.adapterState.state.fromDevRev) === null || _a === void 0 ? void 0 : _a.filesToLoad.length) === 0) {
            console.log('No files to load, returning.');
            return {
                reports: this.reports,
                processed_files: this.processedFiles,
            };
        }
        const filesToLoad = (_b = this.adapterState.state.fromDevRev) === null || _b === void 0 ? void 0 : _b.filesToLoad;
        outerloop: for (const fileToLoad of filesToLoad) {
            if (!fileToLoad.completed) {
                const { response, error: transformerFileError } = await this.uploader.getJsonObjectByArtifactId({
                    artifactId: fileToLoad.id,
                    isGzipped: true,
                });
                const transformerFile = response;
                // Stop entirely if the transformer file cannot be fetched.
                if (transformerFileError) {
                    console.error(`Transformer file not found for artifact ID: ${fileToLoad.id}.`);
                    break outerloop;
                }
                for (let i = fileToLoad.lineToProcess; i < fileToLoad.count; i++) {
                    const { report, rateLimit } = await this.loadAttachment({
                        item: transformerFile[i],
                        create,
                    });
                    // Rate limited: report the delay and stop all processing.
                    if (rateLimit === null || rateLimit === void 0 ? void 0 : rateLimit.delay) {
                        await this.emit(loading_1.LoaderEventType.DataLoadingDelayed, {
                            delay: rateLimit.delay,
                            reports: this.reports,
                            processed_files: this.processedFiles,
                        });
                        break outerloop;
                    }
                    if (report) {
                        (0, worker_adapter_helpers_1.addReportToLoaderReport)({
                            loaderReports: this.loaderReports,
                            report,
                        });
                        // Advance the resume cursor only on a reported outcome.
                        fileToLoad.lineToProcess = fileToLoad.lineToProcess + 1;
                    }
                }
                fileToLoad.completed = true;
                this._processedFiles.push(fileToLoad.id);
            }
        }
        return {
            reports: this.reports,
            processed_files: this.processedFiles,
        };
    });
}
|
|
367
|
+
/**
 * Loads a single item into the external system.
 *
 * Flow: look up the sync mapper record by the item's DevRev ID; if found,
 * call the item type's update callback and update the mapper record. If
 * the mapper lookup 404s (no record yet), call the create callback and
 * create a mapper record instead. Rate-limit responses from either
 * callback are surfaced as { rateLimit: { delay } }.
 *
 * @param item - The transformer-file entry to load (item.id.devrev must be set)
 * @param itemTypeToLoad - Descriptor providing itemType, create and update callbacks
 * @returns One of { report }, { rateLimit } or { error }
 */
async loadItem({ item, itemTypeToLoad, }) {
    return (0, logger_context_1.runWithSdkLogContext)(async () => {
        var _a;
        const devrevId = item.id.devrev;
        try {
            const syncMapperRecordResponse = await this._mappers.getByTargetId({
                sync_unit: this.event.payload.event_context.sync_unit,
                target: devrevId,
            });
            const syncMapperRecord = syncMapperRecordResponse.data;
            if (!syncMapperRecord) {
                console.warn('Failed to get sync mapper record from response.');
                return {
                    error: {
                        message: 'Failed to get sync mapper record from response.',
                    },
                };
            }
            // Update item in external system
            const { id, modifiedDate, delay, error } = await itemTypeToLoad.update({
                item,
                mappers: this._mappers,
                event: this.event,
            });
            if (id) {
                try {
                    // Record the external ID/target on the mapper record; the
                    // external version entry is added only when the update
                    // callback reported a modified date.
                    const syncMapperRecordUpdateResponse = await this._mappers.update(Object.assign(Object.assign({ id: syncMapperRecord.sync_mapper_record.id, sync_unit: this.event.payload.event_context.sync_unit, status: mappers_interface_1.SyncMapperRecordStatus.OPERATIONAL }, (modifiedDate && {
                        external_versions: {
                            add: [
                                {
                                    modified_date: modifiedDate,
                                    recipe_version: 0,
                                },
                            ],
                        },
                    })), { external_ids: {
                            add: [id],
                        }, targets: {
                            add: [devrevId],
                        } }));
                    console.log('Successfully updated sync mapper record.', syncMapperRecordUpdateResponse.data);
                }
                catch (error) {
                    console.warn('Failed to update sync mapper record.', (0, logger_1.serializeError)(error));
                    return {
                        error: {
                            message: 'Failed to update sync mapper record' + (0, logger_1.serializeError)(error),
                        },
                    };
                }
                return {
                    report: {
                        item_type: itemTypeToLoad.itemType,
                        [loading_1.ActionType.UPDATED]: 1,
                    },
                };
            }
            else if (delay) {
                console.log(`Rate limited while updating item in external system, delaying for ${delay} seconds.`);
                return {
                    rateLimit: {
                        delay,
                    },
                };
            }
            else {
                console.warn('Failed to update item in external system', error);
                return {
                    report: {
                        item_type: itemTypeToLoad.itemType,
                        [loading_1.ActionType.FAILED]: 1,
                    },
                };
            }
            // TODO: Update mapper (optional)
        }
        catch (error) {
            if (axios_1.default.isAxiosError(error)) {
                if (((_a = error.response) === null || _a === void 0 ? void 0 : _a.status) === 404) {
                    // Create item in external system if mapper record not found
                    // NOTE: this destructured `error` shadows the caught axios
                    // error within this branch.
                    const { id, modifiedDate, delay, error } = await itemTypeToLoad.create({
                        item,
                        mappers: this._mappers,
                        event: this.event,
                    });
                    if (id) {
                        // Create mapper
                        try {
                            const syncMapperRecordCreateResponse = await this._mappers.create(Object.assign({ sync_unit: this.event.payload.event_context.sync_unit, status: mappers_interface_1.SyncMapperRecordStatus.OPERATIONAL, external_ids: [id], targets: [devrevId] }, (modifiedDate && {
                                external_versions: [
                                    {
                                        modified_date: modifiedDate,
                                        recipe_version: 0,
                                    },
                                ],
                            })));
                            console.log('Successfully created sync mapper record.', syncMapperRecordCreateResponse.data);
                            return {
                                report: {
                                    item_type: itemTypeToLoad.itemType,
                                    [loading_1.ActionType.CREATED]: 1,
                                },
                            };
                        }
                        catch (error) {
                            console.warn('Failed to create sync mapper record.', (0, logger_1.serializeError)(error));
                            return {
                                error: {
                                    message: 'Failed to create sync mapper record. ' +
                                        (0, logger_1.serializeError)(error),
                                },
                            };
                        }
                    }
                    else if (delay) {
                        return {
                            rateLimit: {
                                delay,
                            },
                        };
                    }
                    else {
                        console.warn('Failed to create item in external system.', (0, logger_1.serializeError)(error));
                        return {
                            report: {
                                item_type: itemTypeToLoad.itemType,
                                [loading_1.ActionType.FAILED]: 1,
                            },
                        };
                    }
                }
                else {
                    // Axios error other than 404: surface the message as-is.
                    console.warn('Failed to get sync mapper record.', (0, logger_1.serializeError)(error));
                    return {
                        error: {
                            message: error.message,
                        },
                    };
                }
            }
            // Non-axios error from the mapper lookup.
            console.warn('Failed to get sync mapper record.', (0, logger_1.serializeError)(error));
            return {
                error: {
                    message: 'Failed to get sync mapper record. ' + (0, logger_1.serializeError)(error),
                },
            };
        }
    });
}
|
|
516
|
+
async processAttachment(attachment, stream) {
|
|
517
|
+
return (0, logger_context_1.runWithSdkLogContext)(async () => {
|
|
518
|
+
var _a;
|
|
519
|
+
const { httpStream, delay, error } = await stream({
|
|
520
|
+
item: attachment,
|
|
521
|
+
event: this.event,
|
|
522
|
+
});
|
|
523
|
+
if (error) {
|
|
524
|
+
return { error };
|
|
525
|
+
}
|
|
526
|
+
else if (delay) {
|
|
527
|
+
return { delay };
|
|
528
|
+
}
|
|
529
|
+
if (httpStream) {
|
|
530
|
+
const fileType = httpStream.headers['content-type'] || 'application/octet-stream';
|
|
531
|
+
const fileSize = httpStream.headers['content-length']
|
|
532
|
+
? parseInt(httpStream.headers['content-length'])
|
|
533
|
+
: undefined;
|
|
534
|
+
// Get upload URL
|
|
535
|
+
const { error: artifactUrlError, response: artifactUrlResponse } = await this.uploader.getArtifactUploadUrl(attachment.file_name, fileType, fileSize);
|
|
536
|
+
if (artifactUrlError) {
|
|
537
|
+
console.warn(`Error while preparing artifact for attachment ID ${attachment.id}. Skipping attachment. ` +
|
|
538
|
+
(0, logger_1.serializeError)(artifactUrlError));
|
|
539
|
+
this.destroyHttpStream(httpStream);
|
|
540
|
+
return;
|
|
541
|
+
}
|
|
542
|
+
if (this.isTimeout) {
|
|
543
|
+
this.destroyHttpStream(httpStream);
|
|
544
|
+
return;
|
|
545
|
+
}
|
|
546
|
+
// Stream attachment
|
|
547
|
+
const { error: uploadedArtifactError } = await this.uploader.streamArtifact(artifactUrlResponse, httpStream);
|
|
548
|
+
if (uploadedArtifactError) {
|
|
549
|
+
console.warn(`Error while streaming to artifact for attachment ID ${attachment.id}. Skipping attachment. ` +
|
|
550
|
+
(0, logger_1.serializeError)(uploadedArtifactError));
|
|
551
|
+
this.destroyHttpStream(httpStream);
|
|
552
|
+
return;
|
|
553
|
+
}
|
|
554
|
+
// Confirm attachment upload
|
|
555
|
+
const { error: confirmArtifactUploadError } = await this.uploader.confirmArtifactUpload(artifactUrlResponse.artifact_id);
|
|
556
|
+
if (confirmArtifactUploadError) {
|
|
557
|
+
console.warn('Error while confirming upload for attachment ID ' +
|
|
558
|
+
attachment.id +
|
|
559
|
+
'.', confirmArtifactUploadError);
|
|
560
|
+
return;
|
|
561
|
+
}
|
|
562
|
+
const ssorAttachment = {
|
|
563
|
+
id: {
|
|
564
|
+
devrev: artifactUrlResponse.artifact_id,
|
|
565
|
+
external: attachment.id,
|
|
566
|
+
},
|
|
567
|
+
parent_id: {
|
|
568
|
+
external: attachment.parent_id,
|
|
569
|
+
},
|
|
570
|
+
};
|
|
571
|
+
if (attachment.author_id) {
|
|
572
|
+
ssorAttachment.actor_id = {
|
|
573
|
+
external: attachment.author_id,
|
|
574
|
+
};
|
|
575
|
+
}
|
|
576
|
+
// This will set inline flag in ssor_attachment only if it is explicity
|
|
577
|
+
// set in the attachment object.
|
|
578
|
+
if (attachment.inline === true) {
|
|
579
|
+
ssorAttachment.inline = true;
|
|
580
|
+
}
|
|
581
|
+
else if (attachment.inline === false) {
|
|
582
|
+
ssorAttachment.inline = false;
|
|
583
|
+
}
|
|
584
|
+
await ((_a = this.getRepo('ssor_attachment')) === null || _a === void 0 ? void 0 : _a.push([ssorAttachment]));
|
|
585
|
+
}
|
|
586
|
+
return;
|
|
587
|
+
});
|
|
588
|
+
}
|
|
589
|
+
/**
|
|
590
|
+
* Destroys a stream to prevent memory leaks.
|
|
591
|
+
* @param httpStream - The axios response stream to destroy
|
|
592
|
+
*/
|
|
593
|
+
destroyHttpStream(httpStream) {
|
|
594
|
+
try {
|
|
595
|
+
if (httpStream && httpStream.data) {
|
|
596
|
+
if (typeof httpStream.data.destroy === 'function') {
|
|
597
|
+
httpStream.data.destroy();
|
|
598
|
+
}
|
|
599
|
+
else if (typeof httpStream.data.close === 'function') {
|
|
600
|
+
httpStream.data.close();
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
}
|
|
604
|
+
catch (error) {
|
|
605
|
+
console.warn('Error while destroying HTTP stream:', error);
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
async loadAttachment({ item, create, }) {
|
|
609
|
+
return (0, logger_context_1.runWithSdkLogContext)(async () => {
|
|
610
|
+
// Create item
|
|
611
|
+
const { id, delay, error } = await create({
|
|
612
|
+
item,
|
|
613
|
+
mappers: this._mappers,
|
|
614
|
+
event: this.event,
|
|
615
|
+
});
|
|
616
|
+
if (delay) {
|
|
617
|
+
return {
|
|
618
|
+
rateLimit: {
|
|
619
|
+
delay,
|
|
620
|
+
},
|
|
621
|
+
};
|
|
622
|
+
}
|
|
623
|
+
else if (id) {
|
|
624
|
+
try {
|
|
625
|
+
const syncMapperRecordCreateResponse = await this._mappers.create({
|
|
626
|
+
sync_unit: this.event.payload.event_context.sync_unit,
|
|
627
|
+
external_ids: [id],
|
|
628
|
+
targets: [item.reference_id],
|
|
629
|
+
status: mappers_interface_1.SyncMapperRecordStatus.OPERATIONAL,
|
|
630
|
+
});
|
|
631
|
+
console.log('Successfully created sync mapper record.', syncMapperRecordCreateResponse.data);
|
|
632
|
+
}
|
|
633
|
+
catch (error) {
|
|
634
|
+
console.warn('Failed to create sync mapper record.', (0, logger_1.serializeError)(error));
|
|
635
|
+
}
|
|
636
|
+
return {
|
|
637
|
+
report: {
|
|
638
|
+
item_type: 'attachment',
|
|
639
|
+
[loading_1.ActionType.CREATED]: 1,
|
|
640
|
+
},
|
|
641
|
+
};
|
|
642
|
+
}
|
|
643
|
+
else {
|
|
644
|
+
console.warn('Failed to create attachment in external system', error);
|
|
645
|
+
return {
|
|
646
|
+
report: {
|
|
647
|
+
item_type: 'attachment',
|
|
648
|
+
[loading_1.ActionType.FAILED]: 1,
|
|
649
|
+
},
|
|
650
|
+
};
|
|
651
|
+
}
|
|
652
|
+
});
|
|
653
|
+
}
|
|
654
|
+
/**
|
|
655
|
+
* Streams the attachments to the DevRev platform.
|
|
656
|
+
* The attachments are streamed to the platform and the artifact information is returned.
|
|
657
|
+
* @param params - The parameters to stream the attachments
|
|
658
|
+
* @returns The response object containing the ssorAttachment artifact information
|
|
659
|
+
* or error information if there was an error
|
|
660
|
+
*/
|
|
661
|
+
async streamAttachments({ stream, processors, batchSize = 1, // By default, we want to stream one attachment at a time
|
|
662
|
+
}) {
|
|
663
|
+
return (0, logger_context_1.runWithSdkLogContext)(async () => {
|
|
664
|
+
var _a, _b;
|
|
665
|
+
if (batchSize <= 0) {
|
|
666
|
+
console.warn(`The specified batch size (${batchSize}) is invalid. Using 1 instead.`);
|
|
667
|
+
batchSize = 1;
|
|
668
|
+
}
|
|
669
|
+
if (batchSize > 50) {
|
|
670
|
+
console.warn(`The specified batch size (${batchSize}) is too large. Using 50 instead.`);
|
|
671
|
+
batchSize = 50;
|
|
672
|
+
}
|
|
673
|
+
const repos = [
|
|
674
|
+
{
|
|
675
|
+
itemType: 'ssor_attachment',
|
|
676
|
+
},
|
|
677
|
+
];
|
|
678
|
+
this.initializeRepos(repos);
|
|
679
|
+
const attachmentsMetadata = (_a = this.state.toDevRev) === null || _a === void 0 ? void 0 : _a.attachmentsMetadata;
|
|
680
|
+
// If there are no attachments metadata artifact IDs in state, finish here
|
|
681
|
+
if (!((_b = attachmentsMetadata === null || attachmentsMetadata === void 0 ? void 0 : attachmentsMetadata.artifactIds) === null || _b === void 0 ? void 0 : _b.length)) {
|
|
682
|
+
console.log(`No attachments metadata artifact IDs found in state.`);
|
|
683
|
+
return;
|
|
684
|
+
}
|
|
685
|
+
else {
|
|
686
|
+
console.log(`Found ${attachmentsMetadata.artifactIds.length} attachments metadata artifact IDs in state.`);
|
|
687
|
+
}
|
|
688
|
+
// Loop through the attachments metadata artifact IDs
|
|
689
|
+
while (attachmentsMetadata.artifactIds.length > 0) {
|
|
690
|
+
const attachmentsMetadataArtifactId = attachmentsMetadata.artifactIds[0];
|
|
691
|
+
console.log(`Started processing attachments for attachments metadata artifact ID: ${attachmentsMetadataArtifactId}.`);
|
|
692
|
+
const { attachments, error } = await this.uploader.getAttachmentsFromArtifactId({
|
|
693
|
+
artifact: attachmentsMetadataArtifactId,
|
|
694
|
+
});
|
|
695
|
+
if (error) {
|
|
696
|
+
console.error(`Failed to get attachments for artifact ID: ${attachmentsMetadataArtifactId}.`);
|
|
697
|
+
return { error };
|
|
698
|
+
}
|
|
699
|
+
if (!attachments || attachments.length === 0) {
|
|
700
|
+
console.warn(`No attachments found for artifact ID: ${attachmentsMetadataArtifactId}.`);
|
|
701
|
+
// Remove empty artifact and reset lastProcessed
|
|
702
|
+
attachmentsMetadata.artifactIds.shift();
|
|
703
|
+
attachmentsMetadata.lastProcessed = 0;
|
|
704
|
+
continue;
|
|
705
|
+
}
|
|
706
|
+
console.log(`Found ${attachments.length} attachments for artifact ID: ${attachmentsMetadataArtifactId}.`);
|
|
707
|
+
let response;
|
|
708
|
+
if (processors) {
|
|
709
|
+
console.log(`Using custom processors for attachments.`);
|
|
710
|
+
const reducer = processors.reducer;
|
|
711
|
+
const iterator = processors.iterator;
|
|
712
|
+
const reducedAttachments = reducer({
|
|
713
|
+
attachments,
|
|
714
|
+
adapter: this,
|
|
715
|
+
batchSize,
|
|
716
|
+
});
|
|
717
|
+
response = await iterator({
|
|
718
|
+
reducedAttachments,
|
|
719
|
+
adapter: this,
|
|
720
|
+
stream,
|
|
721
|
+
});
|
|
722
|
+
}
|
|
723
|
+
else {
|
|
724
|
+
console.log(`Using attachments streaming pool for attachments streaming.`);
|
|
725
|
+
const attachmentsPool = new attachments_streaming_pool_1.AttachmentsStreamingPool({
|
|
726
|
+
adapter: this,
|
|
727
|
+
attachments,
|
|
728
|
+
batchSize,
|
|
729
|
+
stream,
|
|
730
|
+
});
|
|
731
|
+
response = await attachmentsPool.streamAll();
|
|
732
|
+
}
|
|
733
|
+
if ((response === null || response === void 0 ? void 0 : response.delay) || (response === null || response === void 0 ? void 0 : response.error)) {
|
|
734
|
+
return response;
|
|
735
|
+
}
|
|
736
|
+
console.log(`Finished processing all attachments for artifact ID: ${attachmentsMetadataArtifactId}.`);
|
|
737
|
+
attachmentsMetadata.artifactIds.shift();
|
|
738
|
+
attachmentsMetadata.lastProcessed = 0;
|
|
739
|
+
if (attachmentsMetadata.lastProcessedAttachmentsIdsList) {
|
|
740
|
+
attachmentsMetadata.lastProcessedAttachmentsIdsList.length = 0;
|
|
741
|
+
}
|
|
742
|
+
}
|
|
743
|
+
return;
|
|
744
|
+
});
|
|
745
|
+
}
|
|
746
|
+
}
|
|
747
|
+
// Expose the WorkerAdapter class on the module's CommonJS exports.
exports.WorkerAdapter = WorkerAdapter;
|