@webiny/api-page-builder-import-export 0.0.0-mt-2 → 0.0.0-unstable.13771d80a8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +8 -0
- package/client.js +43 -0
- package/client.js.map +1 -0
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +99 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/export/combine/formsHandler.d.ts +6 -0
- package/export/combine/formsHandler.js +99 -0
- package/export/combine/formsHandler.js.map +1 -0
- package/export/combine/index.d.ts +16 -0
- package/export/combine/index.js +40 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +99 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/combine/templatesHandler.d.ts +6 -0
- package/export/combine/templatesHandler.js +99 -0
- package/export/combine/templatesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +162 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/export/process/formsHandler.d.ts +6 -0
- package/export/process/formsHandler.js +187 -0
- package/export/process/formsHandler.js.map +1 -0
- package/export/process/index.d.ts +20 -0
- package/export/process/index.js +37 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +189 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/export/process/templatesHandler.d.ts +6 -0
- package/export/process/templatesHandler.js +166 -0
- package/export/process/templatesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.d.ts +7 -2
- package/{exportPages → export}/s3Stream.js +12 -24
- package/export/s3Stream.js.map +1 -0
- package/export/utils.d.ts +29 -0
- package/export/utils.js +188 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +7 -5
- package/{exportPages → export}/zipper.js +40 -53
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +136 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/forms.crud.d.ts +4 -0
- package/graphql/crud/forms.crud.js +129 -0
- package/graphql/crud/forms.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +87 -118
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +3 -3
- package/graphql/crud/pages.crud.js +64 -70
- package/graphql/crud/pages.crud.js.map +1 -0
- package/graphql/crud/templates.crud.d.ts +4 -0
- package/graphql/crud/templates.crud.js +124 -0
- package/graphql/crud/templates.crud.js.map +1 -0
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +6 -8
- package/graphql/crud.js.map +1 -0
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +52 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/forms.gql.d.ts +4 -0
- package/graphql/graphql/forms.gql.js +60 -0
- package/graphql/graphql/forms.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +19 -24
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +5 -14
- package/graphql/graphql/pages.gql.js.map +1 -0
- package/graphql/graphql/templates.gql.d.ts +4 -0
- package/graphql/graphql/templates.gql.js +52 -0
- package/graphql/graphql/templates.gql.js.map +1 -0
- package/graphql/graphql/utils/resolve.d.ts +1 -1
- package/graphql/graphql/utils/resolve.js +0 -3
- package/graphql/graphql/utils/resolve.js.map +1 -0
- package/graphql/graphql.d.ts +1 -1
- package/graphql/graphql.js +6 -7
- package/graphql/graphql.js.map +1 -0
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js +1 -6
- package/graphql/index.js.map +1 -0
- package/graphql/types.d.ts +86 -27
- package/graphql/types.js.map +1 -0
- package/import/constants.d.ts +3 -0
- package/import/constants.js +14 -0
- package/import/constants.js.map +1 -0
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +101 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/import/create/formsHandler.d.ts +3 -0
- package/import/create/formsHandler.js +103 -0
- package/import/create/formsHandler.js.map +1 -0
- package/import/create/index.d.ts +24 -0
- package/import/create/index.js +40 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +103 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/create/templatesHandler.d.ts +3 -0
- package/import/create/templatesHandler.js +99 -0
- package/import/create/templatesHandler.js.map +1 -0
- package/import/process/blocks/blocksHandler.d.ts +3 -0
- package/import/process/blocks/blocksHandler.js +169 -0
- package/import/process/blocks/blocksHandler.js.map +1 -0
- package/import/process/blocks/importBlock.d.ts +11 -0
- package/import/process/blocks/importBlock.js +89 -0
- package/import/process/blocks/importBlock.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +175 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/import/process/forms/formsHandler.d.ts +3 -0
- package/import/process/forms/formsHandler.js +176 -0
- package/import/process/forms/formsHandler.js.map +1 -0
- package/import/process/forms/importForm.d.ts +9 -0
- package/import/process/forms/importForm.js +43 -0
- package/import/process/forms/importForm.js.map +1 -0
- package/import/process/index.d.ts +20 -0
- package/import/process/index.js +37 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pages/importPage.d.ts +11 -0
- package/import/process/pages/importPage.js +92 -0
- package/import/process/pages/importPage.js.map +1 -0
- package/import/process/pages/pagesHandler.d.ts +3 -0
- package/import/process/pages/pagesHandler.js +183 -0
- package/import/process/pages/pagesHandler.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +180 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/import/process/templates/importTemplate.d.ts +11 -0
- package/import/process/templates/importTemplate.js +66 -0
- package/import/process/templates/importTemplate.js.map +1 -0
- package/import/process/templates/templatesHandler.d.ts +3 -0
- package/import/process/templates/templatesHandler.js +170 -0
- package/import/process/templates/templatesHandler.js.map +1 -0
- package/import/utils/deleteS3Folder.d.ts +1 -0
- package/import/utils/deleteS3Folder.js +19 -0
- package/import/utils/deleteS3Folder.js.map +1 -0
- package/import/utils/extractAndUploadZipFileContents.d.ts +7 -0
- package/import/utils/extractAndUploadZipFileContents.js +122 -0
- package/import/utils/extractAndUploadZipFileContents.js.map +1 -0
- package/import/utils/extractZipAndUploadToS3.d.ts +2 -0
- package/import/utils/extractZipAndUploadToS3.js +98 -0
- package/import/utils/extractZipAndUploadToS3.js.map +1 -0
- package/import/utils/getFileNameWithoutExt.d.ts +1 -0
- package/import/utils/getFileNameWithoutExt.js +11 -0
- package/import/utils/getFileNameWithoutExt.js.map +1 -0
- package/import/utils/index.d.ts +9 -0
- package/import/utils/index.js +104 -0
- package/import/utils/index.js.map +1 -0
- package/import/utils/initialStats.d.ts +7 -0
- package/import/utils/initialStats.js +16 -0
- package/import/utils/initialStats.js.map +1 -0
- package/import/utils/prepareDataDirMap.d.ts +6 -0
- package/import/utils/prepareDataDirMap.js +29 -0
- package/import/utils/prepareDataDirMap.js.map +1 -0
- package/import/utils/updateFilesInData.d.ts +8 -0
- package/import/utils/updateFilesInData.js +48 -0
- package/import/utils/updateFilesInData.js.map +1 -0
- package/import/utils/uploadAssets.d.ts +10 -0
- package/import/utils/uploadAssets.js +58 -0
- package/import/utils/uploadAssets.js.map +1 -0
- package/import/utils/uploadFilesFromS3.d.ts +3 -0
- package/import/utils/uploadFilesFromS3.js +19 -0
- package/import/utils/uploadFilesFromS3.js.map +1 -0
- package/import/utils.d.ts +49 -0
- package/{importPages → import}/utils.js +191 -89
- package/import/utils.js.map +1 -0
- package/mockSecurity.js +0 -2
- package/mockSecurity.js.map +1 -0
- package/package.json +37 -34
- package/types.d.ts +91 -72
- package/types.js +17 -22
- package/types.js.map +1 -0
- package/exportPages/combine/index.d.ts +0 -19
- package/exportPages/combine/index.js +0 -88
- package/exportPages/process/index.d.ts +0 -26
- package/exportPages/process/index.js +0 -204
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -113
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/importPages/client.d.ts +0 -7
- package/importPages/client.js +0 -40
- package/importPages/create/index.d.ts +0 -27
- package/importPages/create/index.js +0 -109
- package/importPages/process/index.d.ts +0 -25
- package/importPages/process/index.js +0 -183
- package/importPages/utils.d.ts +0 -43
package/{importPages → import}/utils.js
@@ -1,14 +1,15 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
 
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.
+exports.uploadAssets = void 0;
 
 var _uniqid = _interopRequireDefault(require("uniqid"));
 
@@ -38,36 +39,52 @@ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/insta
 
 var _types = require("../types");
 
-var _s3Stream = require("../
+var _s3Stream = require("../export/s3Stream");
 
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
 
-const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
 
 const FILES_COUNT_IN_EACH_BATCH = 15;
-const ZIP_CONTENT_TYPE = "application/zip";
 
-function updateImageInPageSettings({
-
-
-
-
+function updateImageInPageSettings(params) {
+const {
+settings,
+fileIdToKeyMap,
+srcPrefix
+} = params;
 let newSettings = settings;
 const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
 
 if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
-
+var _settings$general, _settings$general$ima;
+
+newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 ? void 0 : (_settings$general$ima = _settings$general.image) === null || _settings$general$ima === void 0 ? void 0 : _settings$general$ima.id) || "")}`);
 }
 
 if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
-
+var _settings$social, _settings$social$imag;
+
+newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 ? void 0 : (_settings$social$imag = _settings$social.image) === null || _settings$social$imag === void 0 ? void 0 : _settings$social$imag.id) || "")}`);
 }
 
-return
+return newSettings;
+}
+
+function updateBlockPreviewImage(params) {
+const {
+file,
+fileIdToKeyMap,
+srcPrefix
+} = params;
+const newFile = file;
+const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+return newFile;
 }
 
-function
+function updateFilesInData({
 data,
 fileIdToKeyMap,
 srcPrefix
@@ -81,7 +98,7 @@ function updateFilesInPageData({
 if (Array.isArray(data)) {
 for (let i = 0; i < data.length; i++) {
 const element = data[i];
-
+updateFilesInData({
 data: element,
 fileIdToKeyMap,
 srcPrefix
@@ -102,7 +119,7 @@ function updateFilesInPageData({
 value.name = fileIdToKeyMap.get(value.id);
 value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
 } else {
-
+updateFilesInData({
 data: value,
 srcPrefix,
 fileIdToKeyMap
@@ -111,23 +128,26 @@ function updateFilesInPageData({
 }
 }
 
-const
-
-
-
-
+const uploadAssets = async params => {
+const {
+context,
+filesData,
+fileUploadsData
+} = params; // Save uploaded file key against static id for later use.
+
+const fileIdToKeyMap = new Map();
 /**
 * This function contains logic of file download from S3.
 * Current we're not mocking zip file download from S3 in tests at the moment.
 * So, we're manually mocking it in case of test just by returning an empty object.
 */
-if (process.env.NODE_ENV === "test") {
-return {};
-}
 
-
+if (process.env.NODE_ENV === "test") {
+return {
+fileIdToKeyMap
+};
+} // Save files meta data against old key for later use.
 
-const fileIdToKeyMap = new Map(); // Save files meta data against old key for later use.
 
 const fileKeyToFileMap = new Map(); // Initialize maps.
 
@@ -145,7 +165,12 @@ const uploadPageAssets = async ({
 
 const createFilesInput = fileUploadResults.map(uploadResult => {
 const newKey = uploadResult.Key;
-const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+const file = fileKeyToFileMap.get(getOldFileKey(newKey));
+
+if (!file) {
+return null;
+} // Update the file map with newly uploaded file.
+
 
 fileIdToKeyMap.set(file.id, newKey);
 return {
@@ -156,7 +181,7 @@ const uploadPageAssets = async ({
 meta: file.meta,
 tags: file.tags
 };
-});
+}).filter(Boolean);
 const createFilesPromises = []; // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).
 
 const createFilesInputChunks = (0, _chunk.default)(createFilesInput, FILES_COUNT_IN_EACH_BATCH);
@@ -177,7 +202,7 @@ const uploadPageAssets = async ({
 };
 };
 
-exports.
+exports.uploadAssets = uploadAssets;
 
 async function importPage({
 pageKey,
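The former page-specific asset upload helper is replaced by the exported `uploadAssets`, which is now shared by both the page and block import flows. A minimal usage sketch, assuming deep imports into the published package are allowed and using only the parameter and return names shown in the compiled code above:

// Hypothetical caller; `context`, `files` and `fileUploadsData` come from the surrounding import handler.
const { uploadAssets } = require("@webiny/api-page-builder-import-export/import/utils");

async function uploadExtractedAssets(context, files, fileUploadsData) {
    // Resolves to a Map of original file ids -> newly uploaded S3 keys.
    const { fileIdToKeyMap } = await uploadAssets({
        context,
        filesData: files,
        fileUploadsData
    });
    return fileIdToKeyMap;
}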
@@ -206,25 +231,31 @@ async function importPage({
 files
 } = await (0, _loadJsonFile.default)(PAGE_DATA_FILE_PATH); // Only update page data if there are files.
 
-if (Array.isArray(files) && files.length) {
+if (files && Array.isArray(files) && files.length > 0) {
 // Upload page assets.
 const {
 fileIdToKeyMap
-} = await
+} = await uploadAssets({
 context,
+
+/**
+* TODO @ts-refactor @ashutosh figure out correct types.
+*/
+// @ts-ignore
 filesData: files,
 fileUploadsData
 });
+const settings = await context.fileManager.settings.getSettings();
 const {
-srcPrefix
-} =
-
-data: page.content,
+srcPrefix = ""
+} = settings || {};
+updateFilesInData({
+data: page.content || {},
 fileIdToKeyMap,
 srcPrefix
 });
 page.settings = updateImageInPageSettings({
-settings: page.settings,
+settings: page.settings || {},
 fileIdToKeyMap,
 srcPrefix
 });
@@ -237,6 +268,65 @@ async function importPage({
 return page;
 }
 
+async function importBlock({
+blockKey,
+context,
+fileUploadsData
+}) {
+const log = console.log; // Making Directory for block in which we're going to extract the block data file.
+
+const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
+(0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
+const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+
+log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
+await new Promise((resolve, reject) => {
+_s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+}); // Load the block data file from disk.
+
+log(`Load file ${blockDataFileKey}`);
+const {
+block,
+files
+} = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
+
+if (files && Array.isArray(files) && files.length > 0) {
+// Upload block assets.
+const {
+fileIdToKeyMap
+} = await uploadAssets({
+context,
+filesData: files,
+fileUploadsData
+});
+const settings = await context.fileManager.settings.getSettings();
+const {
+srcPrefix = ""
+} = settings || {};
+updateFilesInData({
+data: block.content || {},
+fileIdToKeyMap,
+srcPrefix
+});
+block.preview = updateBlockPreviewImage({
+file: block.preview || {},
+fileIdToKeyMap,
+srcPrefix
+});
+}
+
+log("Removing Directory for block...");
+await (0, _downloadInstallFiles.deleteFile)(blockKey);
+log(`Remove block contents from S3...`);
+await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+return block;
+}
+
 async function uploadFilesFromS3({
 fileKeyToFileMap,
 oldKeyToNewKeyMap
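The new `importBlock` helper mirrors `importPage`: it downloads the block data file from S3, uploads the block's assets, rewrites file references and the preview image, and cleans up the temporary directory and S3 folder. A hedged sketch of a caller, assuming the same deep import path as above:

// Hypothetical caller; `blockKey`, `context` and `fileUploadsData` are supplied by the import task.
const { importBlock } = require("@webiny/api-page-builder-import-export/import/utils");

async function runBlockImport(blockKey, context, fileUploadsData) {
    // Resolves to the imported block data, with asset references rewritten to the new S3 keys.
    const block = await importBlock({ blockKey, context, fileUploadsData });
    return block;
}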
@@ -270,14 +360,6 @@ async function uploadFilesFromS3({
 return Promise.all(promises);
 }
 
-async function getObjectMetaFromS3(Key) {
-const meta = await _s3Stream.s3Stream.getObjectHead(Key);
-
-if (meta.ContentType !== ZIP_CONTENT_TYPE) {
-throw new _error.default(`Unsupported file type: "${meta.ContentType}"`, "UNSUPPORTED_FILE");
-}
-}
-
 function getOldFileKey(key) {
 /*
 * Because we know the naming convention, we can extract the old key from new key.
@@ -298,32 +380,23 @@ function getFileNameWithoutExt(fileName) {
 
 /**
 * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
-* @param
-* @return
+* @param zipFileUrl
+* @return ImportData S3 file keys for all uploaded assets group by page/block.
 */
-async function readExtractAndUploadZipFileContents(
+async function readExtractAndUploadZipFileContents(zipFileUrl) {
 const log = console.log;
-const
-let readStream; // Check whether it is a URL
+const importDataList = [];
 
-
-const response = await (0, _nodeFetch.default)(zipFileKey);
+const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
 
-
-throw new _error.default(`Unable to downloading file: "${zipFileKey}"`, response.statusText);
-}
+const response = await (0, _nodeFetch.default)(zipFileUrl);
 
-
-
-// We're first retrieving object's meta data, just to check whether the file is available at the given Key
-await getObjectMetaFromS3(zipFileKey);
-readStream = _s3Stream.s3Stream.readStream(zipFileKey);
+if (!response.ok) {
+throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
 }
 
-const
-
-const zipFileName = _path.default.basename(zipFileKey); // Read export file and download it in the disk
-
+const readStream = response.body;
+const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk
 
 const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
 
@@ -332,23 +405,23 @@ async function readExtractAndUploadZipFileContents(zipFileKey) {
 log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`); // Extract the downloaded zip file
 
 const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
-log(`Removing ZIP file "${
-await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
+log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
+await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3
 
 for (let i = 0; i < zipFilePaths.length; i++) {
 const currentPath = zipFilePaths[i];
 const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+importDataList.push(dataMap);
 }
 
 log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
 await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-return
+return importDataList;
 }
 
 const ASSETS_DIR_NAME = "/assets";
 
-function
+function prepareDataDirMap({
 map,
 filePath,
 newKey
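`readExtractAndUploadZipFileContents` now accepts a download URL instead of an S3 key and returns one entry per page/block archive found inside the export ZIP. A sketch of the call, assuming the export ZIP is reachable over HTTP (the URL and import path are illustrative):

// Hypothetical caller; the URL points at a previously generated export ZIP.
const { readExtractAndUploadZipFileContents } = require("@webiny/api-page-builder-import-export/import/utils");

async function listImportData(zipFileUrl) {
    const importDataList = await readExtractAndUploadZipFileContents(zipFileUrl);
    for (const item of importDataList) {
        // Each entry carries the data file key plus the uploaded asset keys.
        console.log(item.key, item.data, Object.keys(item.assets));
    }
    return importDataList;
}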
@@ -382,30 +455,27 @@ async function deleteS3Folder(key) {
 }
 
 const response = await _s3Stream.s3Stream.listObject(key);
-const keys = response.Contents.map(c => c.Key);
+const keys = (response.Contents || []).map(c => c.Key).filter(Boolean);
 console.log(`Found ${keys.length} files.`);
 const deleteFilePromises = keys.map(key => _s3Stream.s3Stream.deleteObject(key));
 await Promise.all(deleteFilePromises);
 console.log(`Successfully deleted ${deleteFilePromises.length} files.`);
-}
-
-const zeroPad = version => `${version}`.padStart(5, "0");
+} // export const zeroPad = version => `${version}`.padStart(5, "0");
 
-exports.zeroPad = zeroPad;
 
 function initialStats(total) {
 return {
-[_types.
-[_types.
-[_types.
-[_types.
+[_types.ImportExportTaskStatus.PENDING]: total,
+[_types.ImportExportTaskStatus.PROCESSING]: 0,
+[_types.ImportExportTaskStatus.COMPLETED]: 0,
+[_types.ImportExportTaskStatus.FAILED]: 0,
 total
 };
 }
 
 function extractZipToDisk(exportFileZipPath) {
 return new Promise((resolve, reject) => {
-const
+const zipFilePaths = [];
 const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
 
 const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
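`initialStats` now keys its counters by the `ImportExportTaskStatus` enum from the package types. A hedged illustration of the returned shape, assuming the enum is exported from the package's compiled types module:

// Hypothetical illustration of the object returned by initialStats(5).
const { initialStats } = require("@webiny/api-page-builder-import-export/import/utils");
const { ImportExportTaskStatus } = require("@webiny/api-page-builder-import-export/types");

const stats = initialStats(5);
// stats is equivalent to:
// {
//     [ImportExportTaskStatus.PENDING]: 5,
//     [ImportExportTaskStatus.PROCESSING]: 0,
//     [ImportExportTaskStatus.COMPLETED]: 0,
//     [ImportExportTaskStatus.FAILED]: 0,
//     total: 5
// }
console.log(stats[ImportExportTaskStatus.PENDING]); // 5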
@@ -419,6 +489,13 @@ function extractZipToDisk(exportFileZipPath) {
 if (err) {
 console.warn("ERROR: Failed to extract zip: ", exportFileZipPath, err);
 reject(err);
+return;
+}
+
+if (!zipFile) {
+console.log("ERROR: Missing zip file resource for path: " + exportFileZipPath);
+reject("Missing Zip File Resource.");
+return;
 }
 
 console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
@@ -428,7 +505,7 @@ function extractZipToDisk(exportFileZipPath) {
 reject(err);
 }
 
-resolve(
+resolve(zipFilePaths);
 });
 zipFile.readEntry();
 zipFile.on("entry", function (entry) {
@@ -445,15 +522,24 @@ function extractZipToDisk(exportFileZipPath) {
 if (err) {
 console.warn("ERROR: Failed to openReadStream for file: ", entry.fileName, err);
 reject(err);
+return;
+}
+
+if (!readStream) {
+console.log("ERROR: Missing Read Stream Resource when extracting to disk.");
+reject("Missing Read Stream Resource.");
+return;
 }
 
 const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
 
 readStream.on("end", function () {
-
+zipFilePaths.push(filePath);
 zipFile.readEntry();
 });
-streamPipeline(readStream, (0, _fs.createWriteStream)(filePath))
+streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
+reject(error);
+});
 });
 }
 });
@@ -461,29 +547,36 @@ function extractZipToDisk(exportFileZipPath) {
 });
 }
 
-function extractZipAndUploadToS3(
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
 return new Promise((resolve, reject) => {
 const filePaths = [];
 const fileUploadPromises = [];
-const
+const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
 let dataMap = {
-key:
+key: uniqueKey,
 assets: {},
 data: ""
 };
 
-_yauzl.default.open(
+_yauzl.default.open(dataZipFilePath, {
 lazyEntries: true
 }, function (err, zipFile) {
 if (err) {
-console.warn("ERROR: Failed to extract zip: ",
+console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
 reject(err);
+return;
+}
+
+if (!zipFile) {
+console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
+reject("Missing Zip File Resource.");
+return;
 }
 
 console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
 zipFile.on("end", function (err) {
 if (err) {
-console.warn('ERROR: Failed on "END" for file: ',
+console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
 reject(err);
 }
 
@@ -509,15 +602,22 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
 if (err) {
 console.warn("ERROR: Failed while performing [openReadStream] for file: ", entry.fileName, err);
 reject(err);
+return;
+}
+
+if (!readStream) {
+console.log("ERROR: Missing Read Stream while importing.");
+reject("Missing Read Strea Resource.");
+return;
 }
 
 readStream.on("end", function () {
 filePaths.push(entry.fileName);
 zipFile.readEntry();
 });
-const newKey = `${uniquePath}/${
+const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place
 
-dataMap =
+dataMap = prepareDataDirMap({
 map: dataMap,
 filePath: entry.fileName,
 newKey
|
|
530
630
|
|
531
631
|
streamPipeline(readStream, streamPassThrough).then(() => {
|
532
632
|
fileUploadPromises.push(promise);
|
633
|
+
}).catch(error => {
|
634
|
+
reject(error);
|
533
635
|
});
|
534
636
|
});
|
535
637
|
}
|
package/import/utils.js.map
@@ -0,0 +1 @@
+{"version":3,"names":[…],"sources":["utils.ts"],"sourcesContent":[…],"mappings":"…"} (generated single-line source map; contents truncated in this view)
ACA;;;EACI,MAAMnC,MAAM,GAAGa,QAAQ,CAACC,OAAT,CAAiB,KAAjB,EAAwB,KAAxB,CAAf;EAEA,MAAMsB,OAAO,GAAG/C,OAAO,CAACpF,QAAR,CAAiBgI,eAAjB,CAAhB;;EAEA,IAAIG,OAAJ,EAAa;IACTzF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,UAASqD,MAAO,EAAlC,EAAqCnD,MAArC,CAAN;EACH,CAFD,MAEO;IACH;IACAF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,MAAlB,EAAyBE,MAAzB,CAAN;EACH;;EAED,OAAOF,GAAP;AACH;;AAED,eAAeyC,cAAf,CAA8B5D,GAA9B,EAA0D;EACtD;EACA,IAAI,CAACA,GAAG,CAACvB,QAAJ,CAAa,GAAb,CAAL,EAAwB;IACpBuB,GAAG,GAAI,GAAEA,GAAI,GAAb;EACH;;EAED,MAAM4F,QAAQ,GAAG,MAAM1C,kBAAA,CAAS2D,UAAT,CAAoB7G,GAApB,CAAvB;EACA,MAAMsE,IAAI,GAAG,CAACsB,QAAQ,CAACkB,QAAT,IAAqB,EAAtB,EAA0B3F,GAA1B,CAA8B4F,CAAC,IAAIA,CAAC,CAACzF,GAArC,EAA0CK,MAA1C,CAAiDC,OAAjD,CAAb;EACAc,OAAO,CAACD,GAAR,CAAa,SAAQ6B,IAAI,CAAC3E,MAAO,SAAjC;EAEA,MAAMqH,kBAAkB,GAAG1C,IAAI,CAACnD,GAAL,CAASnB,GAAG,IAAIkD,kBAAA,CAAS+D,YAAT,CAAsBjH,GAAtB,CAAhB,CAA3B;EAEA,MAAMqC,OAAO,CAACC,GAAR,CAAY0E,kBAAZ,CAAN;EACAtE,OAAO,CAACD,GAAR,CAAa,wBAAuBuE,kBAAkB,CAACrH,MAAO,SAA9D;AACH,C,CAED;;;AAEO,SAASuH,YAAT,CAAsBC,KAAtB,EAAqC;EACxC,OAAO;IACH,CAACC,6BAAA,CAAuBC,OAAxB,GAAkCF,KAD/B;IAEH,CAACC,6BAAA,CAAuBE,UAAxB,GAAqC,CAFlC;IAGH,CAACF,6BAAA,CAAuBG,SAAxB,GAAoC,CAHjC;IAIH,CAACH,6BAAA,CAAuBI,MAAxB,GAAiC,CAJ9B;IAKHL;EALG,CAAP;AAOH;;AAED,SAASd,gBAAT,CAA0BoB,iBAA1B,EAAwE;EACpE,OAAO,IAAIpF,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAMmD,YAAsB,GAAG,EAA/B;IACA,MAAMsB,yBAAyB,GAAGtC,qBAAqB,CAACqC,iBAAD,CAAvD;;IACA,MAAME,2BAA2B,GAAG5J,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB6J,yBAAvB,CAApC,CAHoC,CAIpC;;;IACA,IAAA9E,sBAAA,EAAc+E,2BAAd;;IAEAC,cAAA,CAAMC,IAAN,CAAWJ,iBAAX,EAA8B;MAAEK,WAAW,EAAE;IAAf,CAA9B,EAAqD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACzE,IAAID,GAAJ,EAAS;QACLrF,OAAO,CAACuF,IAAR,CAAa,gCAAb,EAA+CR,iBAA/C,EAAkEM,GAAlE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,gDAAgDgF,iBAA5D;QACAxE,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MAEDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MAEAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,uCAAb,EAAsDR,iBAAtD,EAAyEM,GAAzE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QACD/E,OAAO,CAACoD,YAAD,CAAP;MACH,CAND;MAQA4B,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4CADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CACI,8DADJ;cAGAQ,MAAM,CAAC,+BAAD,CAAN;cACA;YACH;;YAED,MAAM0D,QAAQ,GAAG5I,aAAA,CAAKC,IAAL,CAAU2J,2BAAV,EAAuCU,KAAK,CAAChD,QAA7C,CAAjB;;YAEAlC,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BgD,YAAY,CAACnE,IAAb,CAAkB0E,QAAlB;cACAqB,OAAO,CAACI,SAAR;YACH,CAHD;YAKA1K,cAAc,CAACyF,UAAD,EAAa,IAAAG,qBAAA,EAAkBqD,QAAlB,CAAb,CAAd,CAAwD6B,KAAxD,CAA8DC,KAAK,IAAI;cACnExF,MAAM,CAACwF,KAAD,CAAN;YACH,CAFD;UAGH,CA5BD;QA6BH;MACJ,CAvCD;IAwCH,CAhED;EAiEH,CAxEM,CAAP;AAyEH;;AAED,SAASjC,uBAAT,CAAiCkC,eAAjC,EAA0DxC,UAA1D,EAAmG;EAC/F,OAAO,IAAI7D,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAM0F,SAAS,GAAG,EAAlB;IACA,MAAMC,kBAAwD,GAAG,EAAjE;IACA,MAAMC,SAAS,GAAGzD,qBAAqB,CAACsD,eAAD,CAAvC;IACA,IAAInC,OAAmB,GAAG;MACtBvG,GAAG,EAAE6I,SADiB;MAEtB5H,MAAM,EAAE,EAFc;MAGtB1B,IAAI,EAAE;IAHgB,CAA1B;;IAKAqI,cAAA,CAAMC,IAAN,CAAWa,eAAX,EAA4B;MAAEZ,WAAW,EAAE;IAAf,CAA5B,EAAmD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACvE,IAAID,GAAJ,EAAS;QACLrF,OAAO,
CAACuF,IAAR,CAAa,gCAAb,EAA+CS,eAA/C,EAAgEX,GAAhE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,4CAA4CiG,eAAxD;QACAzF,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MACDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MACAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,mCAAb,EAAkDS,eAAlD,EAAmEX,GAAnE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QAED1F,OAAO,CAACC,GAAR,CAAYsG,kBAAZ,EAAgCE,IAAhC,CAAqCC,GAAG,IAAI;UACxCA,GAAG,CAACC,OAAJ,CAAYC,CAAC,IAAI;YACbvG,OAAO,CAACwF,IAAR,CAAa,oBAAb,EAAmCe,CAAnC;UACH,CAFD;UAGAjG,OAAO,CAACuD,OAAD,CAAP;QACH,CALD;MAMH,CAZD;MAcAyB,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4DADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CAAY,6CAAZ;cACAQ,MAAM,CAAC,8BAAD,CAAN;cACA;YACH;;YACDE,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BuF,SAAS,CAAC1G,IAAV,CAAeoG,KAAK,CAAChD,QAArB;cACA2C,OAAO,CAACI,SAAR;YACH,CAHD;YAKA,MAAM/G,MAAM,GAAI,GAAE6E,UAAW,IAAG2C,SAAU,IAAGR,KAAK,CAAChD,QAAS,EAA5D,CApBqD,CAqBrD;;YACAkB,OAAO,GAAGG,iBAAiB,CAAC;cACxBvF,GAAG,EAAEoF,OADmB;cAExBI,QAAQ,EAAE0B,KAAK,CAAChD,QAFQ;cAGxBhE;YAHwB,CAAD,CAA3B;;YAMA,MAAM;cAAEuD,iBAAF;cAAqBC,8BAA8B,EAAEC;YAArD,IACF5B,kBAAA,CAAS6B,WAAT,CAAqB1D,MAArB,EAA6B8D,iBAA7B,CADJ;;YAGAzH,cAAc,CAACyF,UAAD,EAAayB,iBAAb,CAAd,CACKkE,IADL,CACU,MAAM;cACRF,kBAAkB,CAAC3G,IAAnB,CAAwB6C,OAAxB;YACH,CAHL,EAIK0D,KAJL,CAIWC,KAAK,IAAI;cACZxF,MAAM,CAACwF,KAAD,CAAN;YACH,CANL;UAOH,CAtCD;QAuCH;MACJ,CAjDD;IAkDH,CA9ED;EA+EH,CAxFM,CAAP;AAyFH"}
|
package/mockSecurity.js
CHANGED
@@ -4,10 +4,8 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.mockSecurity = void 0;
-
 const mockSecurity = (identity, context) => {
 context.security.disableAuthorization();
 context.security.setIdentity(identity);
 };
-
 exports.mockSecurity = mockSecurity;
@@ -0,0 +1 @@
+{"version":3,"names":["mockSecurity","identity","context","security","disableAuthorization","setIdentity"],"sources":["mockSecurity.ts"],"sourcesContent":["import { SecurityContext, SecurityIdentity } from \"@webiny/api-security/types\";\n\nexport const mockSecurity = (identity: SecurityIdentity, context: SecurityContext) => {\n context.security.disableAuthorization();\n context.security.setIdentity(identity);\n};\n"],"mappings":";;;;;;AAEO,MAAMA,YAAY,GAAG,CAACC,QAA0B,EAAEC,OAAwB,KAAK;EAClFA,OAAO,CAACC,QAAQ,CAACC,oBAAoB,EAAE;EACvCF,OAAO,CAACC,QAAQ,CAACE,WAAW,CAACJ,QAAQ,CAAC;AAC1C,CAAC;AAAC"}
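
Note: the mockSecurity helper shown in the diff above only disables authorization and sets an identity on the handler's security context. The following TypeScript snippet is a minimal usage sketch, not code from this package: the import path, the wrapper function name, and the identity field values are illustrative assumptions.

// Minimal sketch of invoking mockSecurity from a background-task handler.
// Assumptions: the subpath import and the identity values below are placeholders.
import type { SecurityContext, SecurityIdentity } from "@webiny/api-security/types";
import { mockSecurity } from "@webiny/api-page-builder-import-export/mockSecurity";

export function runTaskAsMockedIdentity(context: SecurityContext): void {
    // Placeholder identity; a real task would typically receive this in its payload.
    const identity: SecurityIdentity = {
        id: "mock-identity-id",
        type: "admin",
        displayName: "Import/Export task"
    };

    // Skip authorization checks and attribute subsequent operations to `identity`.
    mockSecurity(identity, context);
}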