@devrev/airsync-sdk 2.0.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +31 -0
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts +16 -0
- package/dist/attachments-streaming/attachments-streaming-pool.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.d.ts +9 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.interfaces.js +2 -0
- package/dist/attachments-streaming/attachments-streaming-pool.js +97 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.d.ts +2 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.d.ts.map +1 -0
- package/dist/attachments-streaming/attachments-streaming-pool.test.js +267 -0
- package/dist/common/constants.d.ts +25 -0
- package/dist/common/constants.d.ts.map +1 -0
- package/dist/common/constants.js +58 -0
- package/dist/common/control-protocol.d.ts +10 -0
- package/dist/common/control-protocol.d.ts.map +1 -0
- package/dist/common/control-protocol.js +31 -0
- package/dist/common/errors.d.ts +6 -0
- package/dist/common/errors.d.ts.map +1 -0
- package/dist/common/errors.js +4 -0
- package/dist/common/event-type-translation.d.ts +24 -0
- package/dist/common/event-type-translation.d.ts.map +1 -0
- package/dist/common/event-type-translation.js +117 -0
- package/dist/common/helpers.d.ts +41 -0
- package/dist/common/helpers.d.ts.map +1 -0
- package/dist/common/helpers.js +124 -0
- package/dist/common/install-initial-domain-mapping.d.ts +4 -0
- package/dist/common/install-initial-domain-mapping.d.ts.map +1 -0
- package/dist/common/install-initial-domain-mapping.js +58 -0
- package/dist/common/install-initial-domain-mapping.test.d.ts +2 -0
- package/dist/common/install-initial-domain-mapping.test.d.ts.map +1 -0
- package/dist/common/install-initial-domain-mapping.test.js +207 -0
- package/dist/deprecated/adapter/index.d.ts +62 -0
- package/dist/deprecated/adapter/index.d.ts.map +1 -0
- package/dist/deprecated/adapter/index.js +151 -0
- package/dist/deprecated/common/helpers.d.ts +7 -0
- package/dist/deprecated/common/helpers.d.ts.map +1 -0
- package/dist/deprecated/common/helpers.js +47 -0
- package/dist/deprecated/demo-extractor/external_domain_metadata.json +38 -0
- package/dist/deprecated/demo-extractor/index.d.ts +18 -0
- package/dist/deprecated/demo-extractor/index.d.ts.map +1 -0
- package/dist/deprecated/demo-extractor/index.js +161 -0
- package/dist/deprecated/http/client.d.ts +22 -0
- package/dist/deprecated/http/client.d.ts.map +1 -0
- package/dist/deprecated/http/client.js +161 -0
- package/dist/deprecated/uploader/index.d.ts +35 -0
- package/dist/deprecated/uploader/index.d.ts.map +1 -0
- package/dist/deprecated/uploader/index.js +161 -0
- package/dist/http/axios-client-internal.d.ts +3 -0
- package/dist/http/axios-client-internal.d.ts.map +1 -0
- package/dist/http/axios-client-internal.js +66 -0
- package/dist/http/axios-client-internal.test.d.ts +2 -0
- package/dist/http/axios-client-internal.test.d.ts.map +1 -0
- package/dist/http/axios-client-internal.test.js +154 -0
- package/dist/http/axios-client.d.ts +27 -0
- package/dist/http/axios-client.d.ts.map +1 -0
- package/dist/http/axios-client.js +57 -0
- package/dist/http/constants.d.ts +4 -0
- package/dist/http/constants.d.ts.map +1 -0
- package/dist/http/constants.js +6 -0
- package/dist/http/index.d.ts +3 -0
- package/dist/http/index.d.ts.map +1 -0
- package/dist/http/index.js +18 -0
- package/dist/http/types.d.ts +17 -0
- package/dist/http/types.d.ts.map +1 -0
- package/dist/http/types.js +2 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +34 -0
- package/dist/logger/logger.constants.d.ts +6 -0
- package/dist/logger/logger.constants.d.ts.map +1 -0
- package/dist/logger/logger.constants.js +13 -0
- package/dist/logger/logger.context.d.ts +58 -0
- package/dist/logger/logger.context.d.ts.map +1 -0
- package/dist/logger/logger.context.js +86 -0
- package/dist/logger/logger.d.ts +89 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.interfaces.d.ts +43 -0
- package/dist/logger/logger.interfaces.d.ts.map +1 -0
- package/dist/logger/logger.interfaces.js +9 -0
- package/dist/logger/logger.js +196 -0
- package/dist/logger/logger.test.d.ts +2 -0
- package/dist/logger/logger.test.d.ts.map +1 -0
- package/dist/logger/logger.test.js +490 -0
- package/dist/mappers/mappers.d.ts +52 -0
- package/dist/mappers/mappers.d.ts.map +1 -0
- package/dist/mappers/mappers.interface.d.ts +294 -0
- package/dist/mappers/mappers.interface.d.ts.map +1 -0
- package/dist/mappers/mappers.interface.js +48 -0
- package/dist/mappers/mappers.js +83 -0
- package/dist/mappers/mappers.test.d.ts +2 -0
- package/dist/mappers/mappers.test.d.ts.map +1 -0
- package/dist/mappers/mappers.test.js +107 -0
- package/dist/multithreading/create-worker.d.ts +5 -0
- package/dist/multithreading/create-worker.d.ts.map +1 -0
- package/dist/multithreading/create-worker.js +28 -0
- package/dist/multithreading/create-worker.test.d.ts +2 -0
- package/dist/multithreading/create-worker.test.d.ts.map +1 -0
- package/dist/multithreading/create-worker.test.js +89 -0
- package/dist/multithreading/process-task.d.ts +3 -0
- package/dist/multithreading/process-task.d.ts.map +1 -0
- package/dist/multithreading/process-task.js +58 -0
- package/dist/multithreading/spawn/spawn.d.ts +30 -0
- package/dist/multithreading/spawn/spawn.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.d.ts +21 -0
- package/dist/multithreading/spawn/spawn.helpers.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.js +114 -0
- package/dist/multithreading/spawn/spawn.helpers.test.d.ts +2 -0
- package/dist/multithreading/spawn/spawn.helpers.test.d.ts.map +1 -0
- package/dist/multithreading/spawn/spawn.helpers.test.js +293 -0
- package/dist/multithreading/spawn/spawn.js +249 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.artifacts.test.js +127 -0
- package/dist/multithreading/worker-adapter/worker-adapter.d.ts +91 -0
- package/dist/multithreading/worker-adapter/worker-adapter.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.d.ts +22 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.js +64 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.helpers.test.js +514 -0
- package/dist/multithreading/worker-adapter/worker-adapter.js +747 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts +2 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.d.ts.map +1 -0
- package/dist/multithreading/worker-adapter/worker-adapter.test.js +483 -0
- package/dist/multithreading/worker.d.ts +2 -0
- package/dist/multithreading/worker.d.ts.map +1 -0
- package/dist/multithreading/worker.js +9 -0
- package/dist/repo/repo.d.ts +18 -0
- package/dist/repo/repo.d.ts.map +1 -0
- package/dist/repo/repo.interfaces.d.ts +46 -0
- package/dist/repo/repo.interfaces.d.ts.map +1 -0
- package/dist/repo/repo.interfaces.js +2 -0
- package/dist/repo/repo.js +75 -0
- package/dist/repo/repo.test.d.ts +2 -0
- package/dist/repo/repo.test.d.ts.map +1 -0
- package/dist/repo/repo.test.js +131 -0
- package/dist/state/state.d.ts +30 -0
- package/dist/state/state.d.ts.map +1 -0
- package/dist/state/state.interfaces.d.ts +51 -0
- package/dist/state/state.interfaces.d.ts.map +1 -0
- package/dist/state/state.interfaces.js +21 -0
- package/dist/state/state.js +166 -0
- package/dist/state/state.test.d.ts +2 -0
- package/dist/state/state.test.d.ts.map +1 -0
- package/dist/state/state.test.js +224 -0
- package/dist/types/common.d.ts +50 -0
- package/dist/types/common.d.ts.map +1 -0
- package/dist/types/common.js +25 -0
- package/dist/types/extraction.d.ts +417 -0
- package/dist/types/extraction.d.ts.map +1 -0
- package/dist/types/extraction.js +170 -0
- package/dist/types/extraction.test.d.ts +2 -0
- package/dist/types/extraction.test.d.ts.map +1 -0
- package/dist/types/extraction.test.js +70 -0
- package/dist/types/index.d.ts +9 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +18 -0
- package/dist/types/loading.d.ts +147 -0
- package/dist/types/loading.d.ts.map +1 -0
- package/dist/types/loading.js +48 -0
- package/dist/types/workers.d.ts +161 -0
- package/dist/types/workers.d.ts.map +1 -0
- package/dist/types/workers.js +22 -0
- package/dist/uploader/uploader.d.ts +92 -0
- package/dist/uploader/uploader.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.d.ts +33 -0
- package/dist/uploader/uploader.helpers.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.js +139 -0
- package/dist/uploader/uploader.helpers.test.d.ts +2 -0
- package/dist/uploader/uploader.helpers.test.d.ts.map +1 -0
- package/dist/uploader/uploader.helpers.test.js +267 -0
- package/dist/uploader/uploader.interfaces.d.ts +95 -0
- package/dist/uploader/uploader.interfaces.d.ts.map +1 -0
- package/dist/uploader/uploader.interfaces.js +2 -0
- package/dist/uploader/uploader.js +305 -0
- package/dist/uploader/uploader.test.d.ts +2 -0
- package/dist/uploader/uploader.test.d.ts.map +1 -0
- package/dist/uploader/uploader.test.js +589 -0
- package/package.json +60 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"worker-adapter.test.d.ts","sourceRoot":"","sources":["../../../src/multithreading/worker-adapter/worker-adapter.test.ts"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,483 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const attachments_streaming_pool_1 = require("../../attachments-streaming/attachments-streaming-pool");
|
|
4
|
+
const state_1 = require("../../state/state");
|
|
5
|
+
const test_helpers_1 = require("../../tests/test-helpers");
|
|
6
|
+
const types_1 = require("../../types");
|
|
7
|
+
const worker_adapter_1 = require("./worker-adapter");
|
|
8
|
+
/* eslint-disable @typescript-eslint/no-require-imports */
|
|
9
|
+
// Mock dependencies
|
|
10
|
+
jest.mock('../../common/control-protocol', () => ({
|
|
11
|
+
emit: jest.fn().mockResolvedValue({}),
|
|
12
|
+
}));
|
|
13
|
+
// const mockPostState = jest.spyOn(State.prototype, 'postState').mockResolvedValue(); // Mock to resolve void
|
|
14
|
+
// const mockFetchState = jest.spyOn(State.prototype, 'fetchState').mockResolvedValue({}); // Mock to resolve a default state
|
|
15
|
+
jest.mock('../../mappers/mappers');
|
|
16
|
+
jest.mock('../../uploader/uploader');
|
|
17
|
+
// jest.mock('../../state/state');
|
|
18
|
+
jest.mock('../../repo/repo');
|
|
19
|
+
jest.mock('node:worker_threads', () => ({
|
|
20
|
+
parentPort: {
|
|
21
|
+
postMessage: jest.fn(),
|
|
22
|
+
},
|
|
23
|
+
}));
|
|
24
|
+
jest.mock('../../attachments-streaming/attachments-streaming-pool', () => {
|
|
25
|
+
return {
|
|
26
|
+
AttachmentsStreamingPool: jest.fn().mockImplementation(() => {
|
|
27
|
+
return {
|
|
28
|
+
streamAll: jest.fn().mockResolvedValue(undefined),
|
|
29
|
+
};
|
|
30
|
+
}),
|
|
31
|
+
};
|
|
32
|
+
});
|
|
33
|
+
describe(worker_adapter_1.WorkerAdapter.name, () => {
|
|
34
|
+
let adapter;
|
|
35
|
+
let mockEvent;
|
|
36
|
+
let mockAdapterState;
|
|
37
|
+
beforeEach(() => {
|
|
38
|
+
// Reset all mocks
|
|
39
|
+
jest.clearAllMocks();
|
|
40
|
+
// Create mock objects
|
|
41
|
+
mockEvent = (0, test_helpers_1.createEvent)({ eventType: types_1.EventType.StartExtractingData });
|
|
42
|
+
const initialState = {
|
|
43
|
+
attachments: { completed: false },
|
|
44
|
+
lastSyncStarted: '',
|
|
45
|
+
lastSuccessfulSyncStarted: '',
|
|
46
|
+
snapInVersionId: '',
|
|
47
|
+
toDevRev: {
|
|
48
|
+
attachmentsMetadata: {
|
|
49
|
+
artifactIds: [],
|
|
50
|
+
lastProcessed: 0,
|
|
51
|
+
lastProcessedAttachmentsIdsList: [],
|
|
52
|
+
},
|
|
53
|
+
},
|
|
54
|
+
};
|
|
55
|
+
mockAdapterState = new state_1.State({
|
|
56
|
+
event: mockEvent,
|
|
57
|
+
initialState: initialState,
|
|
58
|
+
});
|
|
59
|
+
// Create the adapter instance
|
|
60
|
+
adapter = new worker_adapter_1.WorkerAdapter({
|
|
61
|
+
event: mockEvent,
|
|
62
|
+
adapterState: mockAdapterState,
|
|
63
|
+
});
|
|
64
|
+
});
|
|
65
|
+
describe(worker_adapter_1.WorkerAdapter.prototype.streamAttachments.name, () => {
|
|
66
|
+
it('should process all artifact batches successfully', async () => {
|
|
67
|
+
const mockStream = jest.fn();
|
|
68
|
+
// Set up adapter state with artifact IDs
|
|
69
|
+
adapter.state.toDevRev = {
|
|
70
|
+
attachmentsMetadata: {
|
|
71
|
+
artifactIds: ['artifact1', 'artifact2'],
|
|
72
|
+
lastProcessed: 0,
|
|
73
|
+
lastProcessedAttachmentsIdsList: [],
|
|
74
|
+
},
|
|
75
|
+
};
|
|
76
|
+
// Mock getting attachments from each artifact
|
|
77
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
78
|
+
.fn()
|
|
79
|
+
.mockResolvedValueOnce({
|
|
80
|
+
attachments: [
|
|
81
|
+
{
|
|
82
|
+
url: 'http://example.com/file1.pdf',
|
|
83
|
+
id: 'attachment1',
|
|
84
|
+
file_name: 'file1.pdf',
|
|
85
|
+
parent_id: 'parent1',
|
|
86
|
+
},
|
|
87
|
+
{
|
|
88
|
+
url: 'http://example.com/file2.pdf',
|
|
89
|
+
id: 'attachment2',
|
|
90
|
+
file_name: 'file2.pdf',
|
|
91
|
+
parent_id: 'parent2',
|
|
92
|
+
},
|
|
93
|
+
],
|
|
94
|
+
})
|
|
95
|
+
.mockResolvedValueOnce({
|
|
96
|
+
attachments: [
|
|
97
|
+
{
|
|
98
|
+
url: 'http://example.com/file3.pdf',
|
|
99
|
+
id: 'attachment3',
|
|
100
|
+
file_name: 'file3.pdf',
|
|
101
|
+
parent_id: 'parent3',
|
|
102
|
+
},
|
|
103
|
+
],
|
|
104
|
+
});
|
|
105
|
+
// Mock the initializeRepos method
|
|
106
|
+
adapter.initializeRepos = jest.fn();
|
|
107
|
+
const result = await adapter.streamAttachments({
|
|
108
|
+
stream: mockStream,
|
|
109
|
+
});
|
|
110
|
+
expect(adapter.initializeRepos).toHaveBeenCalledWith([
|
|
111
|
+
{ itemType: 'ssor_attachment' },
|
|
112
|
+
]);
|
|
113
|
+
expect(adapter.initializeRepos).toHaveBeenCalledTimes(1);
|
|
114
|
+
expect(adapter['uploader'].getAttachmentsFromArtifactId).toHaveBeenCalledTimes(2);
|
|
115
|
+
// Verify state was updated correctly
|
|
116
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
117
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
118
|
+
expect(result).toBeUndefined();
|
|
119
|
+
});
|
|
120
|
+
it('[edge] should handle invalid batch size by using 1 instead', async () => {
|
|
121
|
+
const mockStream = jest.fn();
|
|
122
|
+
// Set up adapter state with artifact IDs
|
|
123
|
+
adapter.state.toDevRev = {
|
|
124
|
+
attachmentsMetadata: {
|
|
125
|
+
artifactIds: ['artifact1'],
|
|
126
|
+
lastProcessed: 0,
|
|
127
|
+
lastProcessedAttachmentsIdsList: [],
|
|
128
|
+
},
|
|
129
|
+
};
|
|
130
|
+
// Mock getting attachments
|
|
131
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
132
|
+
.fn()
|
|
133
|
+
.mockResolvedValue({
|
|
134
|
+
attachments: [
|
|
135
|
+
{
|
|
136
|
+
url: 'http://example.com/file1.pdf',
|
|
137
|
+
id: 'attachment1',
|
|
138
|
+
file_name: 'file1.pdf',
|
|
139
|
+
parent_id: 'parent1',
|
|
140
|
+
},
|
|
141
|
+
],
|
|
142
|
+
});
|
|
143
|
+
adapter.initializeRepos = jest.fn();
|
|
144
|
+
const result = await adapter.streamAttachments({
|
|
145
|
+
stream: mockStream,
|
|
146
|
+
batchSize: 0,
|
|
147
|
+
});
|
|
148
|
+
expect(result).toBeUndefined();
|
|
149
|
+
});
|
|
150
|
+
it('[edge] should cap batch size to 50 when batchSize is greater than 50', async () => {
|
|
151
|
+
const mockStream = jest.fn();
|
|
152
|
+
// Set up adapter state with artifact IDs
|
|
153
|
+
adapter.state.toDevRev = {
|
|
154
|
+
attachmentsMetadata: {
|
|
155
|
+
artifactIds: ['artifact1'],
|
|
156
|
+
lastProcessed: 0,
|
|
157
|
+
lastProcessedAttachmentsIdsList: [],
|
|
158
|
+
},
|
|
159
|
+
};
|
|
160
|
+
// Mock getting attachments
|
|
161
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
162
|
+
.fn()
|
|
163
|
+
.mockResolvedValue({
|
|
164
|
+
attachments: [
|
|
165
|
+
{
|
|
166
|
+
url: 'http://example.com/file1.pdf',
|
|
167
|
+
id: 'attachment1',
|
|
168
|
+
file_name: 'file1.pdf',
|
|
169
|
+
parent_id: 'parent1',
|
|
170
|
+
},
|
|
171
|
+
],
|
|
172
|
+
});
|
|
173
|
+
// Mock the required methods
|
|
174
|
+
adapter.initializeRepos = jest.fn();
|
|
175
|
+
const result = await adapter.streamAttachments({
|
|
176
|
+
stream: mockStream,
|
|
177
|
+
batchSize: 100, // Set batch size greater than 50
|
|
178
|
+
});
|
|
179
|
+
expect(result).toBeUndefined();
|
|
180
|
+
});
|
|
181
|
+
it('[edge] should handle empty attachments metadata artifact IDs', async () => {
|
|
182
|
+
const mockStream = jest.fn();
|
|
183
|
+
// Set up adapter state with no artifact IDs
|
|
184
|
+
adapter.state.toDevRev = {
|
|
185
|
+
attachmentsMetadata: {
|
|
186
|
+
artifactIds: [],
|
|
187
|
+
lastProcessed: 0,
|
|
188
|
+
},
|
|
189
|
+
};
|
|
190
|
+
const result = await adapter.streamAttachments({
|
|
191
|
+
stream: mockStream,
|
|
192
|
+
});
|
|
193
|
+
expect(result).toBeUndefined();
|
|
194
|
+
});
|
|
195
|
+
it('[edge] should handle errors when getting attachments', async () => {
|
|
196
|
+
const mockStream = jest.fn();
|
|
197
|
+
// Set up adapter state with artifact IDs
|
|
198
|
+
adapter.state.toDevRev = {
|
|
199
|
+
attachmentsMetadata: {
|
|
200
|
+
artifactIds: ['artifact1'],
|
|
201
|
+
lastProcessed: 0,
|
|
202
|
+
lastProcessedAttachmentsIdsList: [],
|
|
203
|
+
},
|
|
204
|
+
};
|
|
205
|
+
// Mock error when getting attachments
|
|
206
|
+
const mockError = new Error('Failed to get attachments');
|
|
207
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
208
|
+
.fn()
|
|
209
|
+
.mockResolvedValue({
|
|
210
|
+
error: mockError,
|
|
211
|
+
});
|
|
212
|
+
// Mock methods
|
|
213
|
+
adapter.initializeRepos = jest.fn();
|
|
214
|
+
const result = await adapter.streamAttachments({
|
|
215
|
+
stream: mockStream,
|
|
216
|
+
});
|
|
217
|
+
expect(result).toEqual({
|
|
218
|
+
error: mockError,
|
|
219
|
+
});
|
|
220
|
+
});
|
|
221
|
+
it('[edge] should handle empty attachments array from artifact', async () => {
|
|
222
|
+
const mockStream = jest.fn();
|
|
223
|
+
// Set up adapter state with artifact IDs
|
|
224
|
+
adapter.state.toDevRev = {
|
|
225
|
+
attachmentsMetadata: {
|
|
226
|
+
artifactIds: ['artifact1'],
|
|
227
|
+
lastProcessed: 0,
|
|
228
|
+
lastProcessedAttachmentsIdsList: [],
|
|
229
|
+
},
|
|
230
|
+
};
|
|
231
|
+
// Mock getting empty attachments
|
|
232
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
233
|
+
.fn()
|
|
234
|
+
.mockResolvedValue({
|
|
235
|
+
attachments: [],
|
|
236
|
+
});
|
|
237
|
+
// Mock methods
|
|
238
|
+
adapter.initializeRepos = jest.fn();
|
|
239
|
+
const result = await adapter.streamAttachments({
|
|
240
|
+
stream: mockStream,
|
|
241
|
+
});
|
|
242
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([]);
|
|
243
|
+
expect(result).toBeUndefined();
|
|
244
|
+
});
|
|
245
|
+
it('should use custom processors when provided', async () => {
|
|
246
|
+
const mockStream = jest.fn();
|
|
247
|
+
const mockReducer = jest.fn().mockReturnValue(['custom-reduced']);
|
|
248
|
+
const mockIterator = jest.fn().mockResolvedValue({});
|
|
249
|
+
// Set up adapter state with artifact IDs
|
|
250
|
+
adapter.state.toDevRev = {
|
|
251
|
+
attachmentsMetadata: {
|
|
252
|
+
artifactIds: ['artifact1'],
|
|
253
|
+
lastProcessed: 0,
|
|
254
|
+
lastProcessedAttachmentsIdsList: [],
|
|
255
|
+
},
|
|
256
|
+
};
|
|
257
|
+
// Mock getting attachments
|
|
258
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
259
|
+
.fn()
|
|
260
|
+
.mockResolvedValue({
|
|
261
|
+
attachments: [{ id: 'attachment1' }],
|
|
262
|
+
});
|
|
263
|
+
// Mock methods
|
|
264
|
+
adapter.initializeRepos = jest.fn();
|
|
265
|
+
const result = await adapter.streamAttachments({
|
|
266
|
+
stream: mockStream,
|
|
267
|
+
processors: {
|
|
268
|
+
reducer: mockReducer,
|
|
269
|
+
iterator: mockIterator,
|
|
270
|
+
},
|
|
271
|
+
});
|
|
272
|
+
expect(mockReducer).toHaveBeenCalledWith({
|
|
273
|
+
attachments: [{ id: 'attachment1' }],
|
|
274
|
+
adapter: adapter,
|
|
275
|
+
batchSize: 1,
|
|
276
|
+
});
|
|
277
|
+
expect(mockIterator).toHaveBeenCalledWith({
|
|
278
|
+
reducedAttachments: ['custom-reduced'],
|
|
279
|
+
adapter: adapter,
|
|
280
|
+
stream: mockStream,
|
|
281
|
+
});
|
|
282
|
+
expect(result).toBeUndefined();
|
|
283
|
+
});
|
|
284
|
+
it('should handle rate limiting from iterator', async () => {
|
|
285
|
+
const mockStream = jest.fn();
|
|
286
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => {
|
|
287
|
+
return {
|
|
288
|
+
// Return an object with a `streamAll` method that resolves to your desired value.
|
|
289
|
+
streamAll: jest.fn().mockResolvedValue({ delay: 30 }),
|
|
290
|
+
};
|
|
291
|
+
});
|
|
292
|
+
// Set up adapter state with artifact IDs
|
|
293
|
+
adapter.state.toDevRev = {
|
|
294
|
+
attachmentsMetadata: {
|
|
295
|
+
artifactIds: ['artifact1'],
|
|
296
|
+
lastProcessed: 0,
|
|
297
|
+
lastProcessedAttachmentsIdsList: [],
|
|
298
|
+
},
|
|
299
|
+
};
|
|
300
|
+
// Mock getting attachments
|
|
301
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
302
|
+
.fn()
|
|
303
|
+
.mockResolvedValue({
|
|
304
|
+
attachments: [{ id: 'attachment1' }],
|
|
305
|
+
});
|
|
306
|
+
// Mock methods
|
|
307
|
+
adapter.initializeRepos = jest.fn();
|
|
308
|
+
const result = await adapter.streamAttachments({
|
|
309
|
+
stream: mockStream,
|
|
310
|
+
});
|
|
311
|
+
expect(result).toEqual({
|
|
312
|
+
delay: 30,
|
|
313
|
+
});
|
|
314
|
+
// The artifactIds array should remain unchanged
|
|
315
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
316
|
+
'artifact1',
|
|
317
|
+
]);
|
|
318
|
+
});
|
|
319
|
+
it('should handle error from iterator', async () => {
|
|
320
|
+
const mockStream = jest.fn();
|
|
321
|
+
attachments_streaming_pool_1.AttachmentsStreamingPool.mockImplementationOnce(() => {
|
|
322
|
+
return {
|
|
323
|
+
// Return an object with a `streamAll` method that resolves to your desired value.
|
|
324
|
+
streamAll: jest.fn().mockResolvedValue({
|
|
325
|
+
error: 'Mock error',
|
|
326
|
+
}),
|
|
327
|
+
};
|
|
328
|
+
});
|
|
329
|
+
// Set up adapter state with artifact IDs
|
|
330
|
+
adapter.state.toDevRev = {
|
|
331
|
+
attachmentsMetadata: {
|
|
332
|
+
artifactIds: ['artifact1'],
|
|
333
|
+
lastProcessed: 0,
|
|
334
|
+
lastProcessedAttachmentsIdsList: [],
|
|
335
|
+
},
|
|
336
|
+
};
|
|
337
|
+
// Mock getting attachments
|
|
338
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
339
|
+
.fn()
|
|
340
|
+
.mockResolvedValue({
|
|
341
|
+
attachments: [{ id: 'attachment1' }],
|
|
342
|
+
});
|
|
343
|
+
// Mock methods
|
|
344
|
+
adapter.initializeRepos = jest.fn();
|
|
345
|
+
const result = await adapter.streamAttachments({
|
|
346
|
+
stream: mockStream,
|
|
347
|
+
});
|
|
348
|
+
expect(result).toEqual({
|
|
349
|
+
error: 'Mock error',
|
|
350
|
+
});
|
|
351
|
+
// The artifactIds array should remain unchanged
|
|
352
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toEqual([
|
|
353
|
+
'artifact1',
|
|
354
|
+
]);
|
|
355
|
+
});
|
|
356
|
+
it('should reset lastProcessed and attachment IDs list after processing all artifacts', async () => {
|
|
357
|
+
const mockStream = jest.fn();
|
|
358
|
+
adapter.state.toDevRev = {
|
|
359
|
+
attachmentsMetadata: {
|
|
360
|
+
artifactIds: ['artifact1'],
|
|
361
|
+
lastProcessed: 0,
|
|
362
|
+
lastProcessedAttachmentsIdsList: [],
|
|
363
|
+
},
|
|
364
|
+
};
|
|
365
|
+
adapter['uploader'].getAttachmentsFromArtifactId = jest
|
|
366
|
+
.fn()
|
|
367
|
+
.mockResolvedValueOnce({
|
|
368
|
+
attachments: [
|
|
369
|
+
{
|
|
370
|
+
url: 'http://example.com/file1.pdf',
|
|
371
|
+
id: 'attachment1',
|
|
372
|
+
file_name: 'file1.pdf',
|
|
373
|
+
parent_id: 'parent1',
|
|
374
|
+
},
|
|
375
|
+
{
|
|
376
|
+
url: 'http://example.com/file2.pdf',
|
|
377
|
+
id: 'attachment2',
|
|
378
|
+
file_name: 'file2.pdf',
|
|
379
|
+
parent_id: 'parent2',
|
|
380
|
+
},
|
|
381
|
+
{
|
|
382
|
+
url: 'http://example.com/file3.pdf',
|
|
383
|
+
id: 'attachment3',
|
|
384
|
+
file_name: 'file3.pdf',
|
|
385
|
+
parent_id: 'parent3',
|
|
386
|
+
},
|
|
387
|
+
],
|
|
388
|
+
});
|
|
389
|
+
adapter.processAttachment = jest.fn().mockResolvedValue(null);
|
|
390
|
+
await adapter.streamAttachments({
|
|
391
|
+
stream: mockStream,
|
|
392
|
+
});
|
|
393
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.artifactIds).toHaveLength(0);
|
|
394
|
+
expect(adapter.state.toDevRev.attachmentsMetadata.lastProcessed).toBe(0);
|
|
395
|
+
});
|
|
396
|
+
});
|
|
397
|
+
describe(worker_adapter_1.WorkerAdapter.prototype.emit.name, () => {
|
|
398
|
+
let counter;
|
|
399
|
+
let mockPostMessage;
|
|
400
|
+
beforeEach(() => {
|
|
401
|
+
counter = { counter: 0 };
|
|
402
|
+
// Import the worker_threads module and spy on parentPort.postMessage
|
|
403
|
+
const workerThreads = require('node:worker_threads');
|
|
404
|
+
mockPostMessage = jest.fn().mockImplementation(() => {
|
|
405
|
+
counter.counter += 1;
|
|
406
|
+
});
|
|
407
|
+
// Spy on the parentPort.postMessage method
|
|
408
|
+
if (workerThreads.parentPort) {
|
|
409
|
+
jest
|
|
410
|
+
.spyOn(workerThreads.parentPort, 'postMessage')
|
|
411
|
+
.mockImplementation(mockPostMessage);
|
|
412
|
+
}
|
|
413
|
+
else {
|
|
414
|
+
// If parentPort is null (not in worker context), create a mock
|
|
415
|
+
workerThreads.parentPort = {
|
|
416
|
+
postMessage: mockPostMessage,
|
|
417
|
+
};
|
|
418
|
+
}
|
|
419
|
+
});
|
|
420
|
+
afterEach(() => {
|
|
421
|
+
// Restore all mocks
|
|
422
|
+
jest.restoreAllMocks();
|
|
423
|
+
});
|
|
424
|
+
it('should emit only one event when multiple events of same type are sent', async () => {
|
|
425
|
+
adapter['adapterState'].postState = jest
|
|
426
|
+
.fn()
|
|
427
|
+
.mockResolvedValue(undefined);
|
|
428
|
+
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
429
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
430
|
+
reports: [],
|
|
431
|
+
processed_files: [],
|
|
432
|
+
});
|
|
433
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
434
|
+
reports: [],
|
|
435
|
+
processed_files: [],
|
|
436
|
+
});
|
|
437
|
+
expect(counter.counter).toBe(1);
|
|
438
|
+
});
|
|
439
|
+
it('should emit event when different event type is sent after previous events', async () => {
|
|
440
|
+
adapter['adapterState'].postState = jest
|
|
441
|
+
.fn()
|
|
442
|
+
.mockResolvedValue(undefined);
|
|
443
|
+
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
444
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
445
|
+
reports: [],
|
|
446
|
+
processed_files: [],
|
|
447
|
+
});
|
|
448
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
449
|
+
reports: [],
|
|
450
|
+
processed_files: [],
|
|
451
|
+
});
|
|
452
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
453
|
+
reports: [],
|
|
454
|
+
processed_files: [],
|
|
455
|
+
});
|
|
456
|
+
expect(counter.counter).toBe(1);
|
|
457
|
+
});
|
|
458
|
+
it('should correctly emit one event even if postState errors', async () => {
|
|
459
|
+
adapter['adapterState'].postState = jest
|
|
460
|
+
.fn()
|
|
461
|
+
.mockRejectedValue(new Error('postState error'));
|
|
462
|
+
adapter.uploadAllRepos = jest.fn().mockResolvedValue(undefined);
|
|
463
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
464
|
+
reports: [],
|
|
465
|
+
processed_files: [],
|
|
466
|
+
});
|
|
467
|
+
expect(counter.counter).toBe(1);
|
|
468
|
+
});
|
|
469
|
+
it('should correctly emit one event even if uploadAllRepos errors', async () => {
|
|
470
|
+
adapter['adapterState'].postState = jest
|
|
471
|
+
.fn()
|
|
472
|
+
.mockResolvedValue(undefined);
|
|
473
|
+
adapter.uploadAllRepos = jest
|
|
474
|
+
.fn()
|
|
475
|
+
.mockRejectedValue(new Error('uploadAllRepos error'));
|
|
476
|
+
await adapter.emit(types_1.ExtractorEventType.MetadataExtractionError, {
|
|
477
|
+
reports: [],
|
|
478
|
+
processed_files: [],
|
|
479
|
+
});
|
|
480
|
+
expect(counter.counter).toBe(1);
|
|
481
|
+
});
|
|
482
|
+
});
|
|
483
|
+
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../../src/multithreading/worker.js"],"names":[],"mappings":""}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
const { workerData } = require('node:worker_threads');
|
|
3
|
+
require('ts-node').register();
|
|
4
|
+
const { Logger } = require('../logger/logger');
|
|
5
|
+
const { runWithUserLogContext } = require('../logger/logger.context');
|
|
6
|
+
console = new Logger({ event: workerData.event, options: workerData.options });
|
|
7
|
+
runWithUserLogContext(() => {
|
|
8
|
+
require(workerData.workerPath);
|
|
9
|
+
});
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { Item } from '../repo/repo.interfaces';
|
|
2
|
+
import { ErrorRecord } from '../types/common';
|
|
3
|
+
import { Artifact } from '../uploader/uploader.interfaces';
|
|
4
|
+
import { NormalizedAttachment, NormalizedItem, RepoFactoryInterface } from './repo.interfaces';
|
|
5
|
+
export declare class Repo {
|
|
6
|
+
readonly itemType: string;
|
|
7
|
+
private items;
|
|
8
|
+
private normalize?;
|
|
9
|
+
private uploader;
|
|
10
|
+
private onUpload;
|
|
11
|
+
private options?;
|
|
12
|
+
uploadedArtifacts: Artifact[];
|
|
13
|
+
constructor({ event, itemType, normalize, onUpload, options, }: RepoFactoryInterface);
|
|
14
|
+
getItems(): (NormalizedItem | NormalizedAttachment | Item)[];
|
|
15
|
+
upload(batch?: (NormalizedItem | NormalizedAttachment | Item)[]): Promise<void | ErrorRecord>;
|
|
16
|
+
push(items: Item[]): Promise<boolean>;
|
|
17
|
+
}
|
|
18
|
+
//# sourceMappingURL=repo.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"repo.d.ts","sourceRoot":"","sources":["../../src/repo/repo.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,EAAE,MAAM,yBAAyB,CAAC;AAC/C,OAAO,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAE9C,OAAO,EAAE,QAAQ,EAAE,MAAM,iCAAiC,CAAC;AAG3D,OAAO,EACL,oBAAoB,EACpB,cAAc,EACd,oBAAoB,EACrB,MAAM,mBAAmB,CAAC;AAE3B,qBAAa,IAAI;IACf,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,OAAO,CAAC,KAAK,CAAmD;IAChE,OAAO,CAAC,SAAS,CAAC,CAAwD;IAC1E,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,QAAQ,CAA+B;IAC/C,OAAO,CAAC,OAAO,CAAC,CAAuB;IAChC,iBAAiB,EAAE,QAAQ,EAAE,CAAC;gBAEzB,EACV,KAAK,EACL,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,OAAO,GACR,EAAE,oBAAoB;IAUvB,QAAQ,IAAI,CAAC,cAAc,GAAG,oBAAoB,GAAG,IAAI,CAAC,EAAE;IAItD,MAAM,CACV,KAAK,CAAC,EAAE,CAAC,cAAc,GAAG,oBAAoB,GAAG,IAAI,CAAC,EAAE,GACvD,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC;IAqCxB,IAAI,CAAC,KAAK,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;CAuC5C"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { Artifact } from '../uploader/uploader.interfaces';
|
|
2
|
+
import { AirdropEvent } from '../types/extraction';
|
|
3
|
+
import { WorkerAdapterOptions } from '../types/workers';
|
|
4
|
+
/**
|
|
5
|
+
* RepoInterface is an interface that defines the structure of a repo which is used to store and upload extracted data.
|
|
6
|
+
*/
|
|
7
|
+
export interface RepoInterface {
    /** Type tag for every record stored in the repo. */
    itemType: string;
    /** Optional mapper applied to each raw record before it is buffered. */
    normalize?: (record: object) => NormalizedItem | NormalizedAttachment;
}
|
|
11
|
+
/**
|
|
12
|
+
* RepoFactoryInterface is an interface that defines the structure of a repo factory which is used to create a repo.
|
|
13
|
+
*/
|
|
14
|
+
export interface RepoFactoryInterface {
    /** Airdrop event the repo's uploader is bound to. */
    event: AirdropEvent;
    /** Type tag for every record stored in the repo. */
    itemType: string;
    /** Optional mapper applied to each raw record before it is buffered. */
    normalize?: (record: object) => NormalizedItem | NormalizedAttachment;
    /** Invoked with the artifact produced by each successful upload. */
    onUpload: (artifact: Artifact) => void;
    /** Worker adapter options (e.g. the upload batch size). */
    options?: WorkerAdapterOptions;
}
|
|
21
|
+
/**
|
|
22
|
+
* NormalizedItem is an interface of item after normalization.
|
|
23
|
+
*/
|
|
24
|
+
export interface NormalizedItem {
    /** Unique identifier of the item in the external system. */
    id: string;
    /** Creation timestamp — format not enforced by this type; presumably ISO 8601, confirm against normalizers. */
    created_date: string;
    /** Last-modification timestamp — same format caveat as created_date. */
    modified_date: string;
    /** Normalized payload of the item. */
    data: object;
}
|
|
30
|
+
/**
|
|
31
|
+
* NormalizedAttachment is an interface of attachment after normalization.
|
|
32
|
+
*/
|
|
33
|
+
export interface NormalizedAttachment {
    /** URL the attachment content can be fetched from. */
    url: string;
    /** Unique identifier of the attachment in the external system. */
    id: string;
    file_name: string;
    /** Identifier of the item this attachment belongs to. */
    parent_id: string;
    /** Identifier of the attachment's author, when known. */
    author_id?: string;
    /** Whether the attachment appears inline — presumably within the parent's body; verify against consumers. */
    inline?: boolean;
    /** Identifier one level above parent_id, when the source hierarchy has two levels. */
    grand_parent_id?: number | string;
}
|
|
42
|
+
/**
 * Item is a free-form record of arbitrary key/value pairs, i.e. a raw
 * record before normalization.
 */
export type Item = Record<string, any>;
|
|
46
|
+
//# sourceMappingURL=repo.interfaces.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"repo.interfaces.d.ts","sourceRoot":"","sources":["../../src/repo/repo.interfaces.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,iCAAiC,CAAC;AAE3D,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AACnD,OAAO,EAAE,oBAAoB,EAAE,MAAM,kBAAkB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,cAAc,GAAG,oBAAoB,CAAC;CACvE;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,KAAK,EAAE,YAAY,CAAC;IACpB,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,cAAc,GAAG,oBAAoB,CAAC;IACtE,QAAQ,EAAE,CAAC,QAAQ,EAAE,QAAQ,KAAK,IAAI,CAAC;IACvC,OAAO,CAAC,EAAE,oBAAoB,CAAC;CAChC;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,EAAE,EAAE,MAAM,CAAC;IACX,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,IAAI,EAAE,MAAM,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,GAAG,EAAE,MAAM,CAAC;IACZ,EAAE,EAAE,MAAM,CAAC;IACX,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,OAAO,CAAC;IAIjB,eAAe,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;CACnC;AAED;;GAEG;AAEH,MAAM,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC"}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.Repo = void 0;
|
|
4
|
+
const constants_1 = require("../common/constants");
|
|
5
|
+
const uploader_1 = require("../uploader/uploader");
|
|
6
|
+
/**
 * In-memory repository that buffers extracted records of a single item type
 * and uploads them in batches through the shared Uploader.
 */
class Repo {
    /**
     * @param {object} args
     * @param {object} args.event - Airdrop event forwarded to the Uploader.
     * @param {string} args.itemType - Type tag for every record in this repo.
     * @param {Function} [args.normalize] - Optional per-record normalizer.
     * @param {Function} args.onUpload - Called with each uploaded artifact.
     * @param {object} [args.options] - Worker adapter options (e.g. batchSize).
     */
    constructor({ event, itemType, normalize, onUpload, options, }) {
        this.items = [];
        this.itemType = itemType;
        this.normalize = normalize;
        this.onUpload = onUpload;
        this.uploader = new uploader_1.Uploader({ event, options });
        this.options = options;
        this.uploadedArtifacts = [];
    }
    /** Returns the records currently buffered (not yet uploaded). */
    getItems() {
        return this.items;
    }
    /**
     * Uploads the given batch, or the whole buffer when no batch is given
     * (clearing the buffer on success in that case).
     * Resolves with an error record on upload failure, otherwise undefined.
     */
    async upload(batch) {
        const pending = batch || this.items;
        if (pending.length === 0) {
            console.log(`No items to upload for type ${this.itemType}. Skipping upload.`);
            return;
        }
        console.log(`Uploading ${pending.length} items of type ${this.itemType}. `);
        const { artifact, error } = await this.uploader.upload(this.itemType, pending);
        if (error || !artifact) {
            console.error('Error while uploading batch', error);
            return error;
        }
        this.onUpload(artifact);
        this.uploadedArtifacts.push(artifact);
        // Only an implicit whole-buffer upload consumes the buffer; an explicit
        // batch is owned by the caller (push has already spliced it out).
        if (!batch) {
            this.items = [];
        }
        console.log(`Uploaded ${pending.length} items of type ${this.itemType}. Number of items left in repo: ${this.items.length}.`);
    }
    /**
     * Normalizes (when configured, and the item type is not a reserved
     * default type) and buffers items, then drains the buffer in fixed-size
     * batches while it holds at least one full batch.
     * Resolves to false when a batch upload throws, true otherwise.
     */
    async push(items) {
        if (!items || items.length === 0) {
            console.log(`No items to push for type ${this.itemType}. Skipping push.`);
            return true;
        }
        const shouldNormalize = !!this.normalize &&
            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.EXTERNAL_DOMAIN_METADATA &&
            this.itemType != constants_1.AIRDROP_DEFAULT_ITEM_TYPES.SSOR_ATTACHMENT;
        const records = shouldNormalize
            ? items.map((item) => this.normalize(item))
            : items;
        this.items.push(...records);
        const batchSize = this.options?.batchSize || constants_1.ARTIFACT_BATCH_SIZE;
        while (this.items.length >= batchSize) {
            const chunk = this.items.splice(0, batchSize);
            try {
                await this.upload(chunk);
            }
            catch (err) {
                console.error('Error while uploading batch', err);
                return false;
            }
        }
        return true;
    }
}
|
|
75
|
+
exports.Repo = Repo;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"repo.test.d.ts","sourceRoot":"","sources":["../../src/repo/repo.test.ts"],"names":[],"mappings":""}
|