@webiny/api-page-builder-import-export 0.0.0-unstable.97a151f74d → 0.0.0-unstable.aad28a72ae
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/client.d.ts +2 -2
- package/client.js.map +1 -1
- package/export/combine/blocksHandler.d.ts +6 -0
- package/export/combine/blocksHandler.js +106 -0
- package/export/combine/blocksHandler.js.map +1 -0
- package/{exportPages → export}/combine/index.d.ts +3 -2
- package/export/combine/index.js +30 -0
- package/export/combine/index.js.map +1 -0
- package/export/combine/pagesHandler.d.ts +6 -0
- package/export/combine/pagesHandler.js +106 -0
- package/export/combine/pagesHandler.js.map +1 -0
- package/export/process/blocksHandler.d.ts +6 -0
- package/export/process/blocksHandler.js +176 -0
- package/export/process/blocksHandler.js.map +1 -0
- package/{exportPages → export}/process/index.d.ts +4 -6
- package/export/process/index.js +27 -0
- package/export/process/index.js.map +1 -0
- package/export/process/pagesHandler.d.ts +6 -0
- package/export/process/pagesHandler.js +204 -0
- package/export/process/pagesHandler.js.map +1 -0
- package/{exportPages → export}/s3Stream.d.ts +0 -0
- package/{exportPages → export}/s3Stream.js +0 -0
- package/{exportPages → export}/s3Stream.js.map +0 -0
- package/export/utils.d.ts +16 -0
- package/export/utils.js +135 -0
- package/export/utils.js.map +1 -0
- package/{exportPages → export}/zipper.d.ts +6 -5
- package/{exportPages → export}/zipper.js +8 -7
- package/export/zipper.js.map +1 -0
- package/graphql/crud/blocks.crud.d.ts +4 -0
- package/graphql/crud/blocks.crud.js +155 -0
- package/graphql/crud/blocks.crud.js.map +1 -0
- package/graphql/crud/importExportTasks.crud.d.ts +5 -0
- package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +48 -48
- package/graphql/crud/importExportTasks.crud.js.map +1 -0
- package/graphql/crud/pages.crud.d.ts +2 -2
- package/graphql/crud/pages.crud.js +14 -12
- package/graphql/crud/pages.crud.js.map +1 -1
- package/graphql/crud.d.ts +2 -2
- package/graphql/crud.js +4 -2
- package/graphql/crud.js.map +1 -1
- package/graphql/graphql/blocks.gql.d.ts +4 -0
- package/graphql/graphql/blocks.gql.js +57 -0
- package/graphql/graphql/blocks.gql.js.map +1 -0
- package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
- package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +17 -17
- package/graphql/graphql/importExportTasks.gql.js.map +1 -0
- package/graphql/graphql/pages.gql.d.ts +2 -2
- package/graphql/graphql/pages.gql.js +2 -8
- package/graphql/graphql/pages.gql.js.map +1 -1
- package/graphql/graphql.js +4 -2
- package/graphql/graphql.js.map +1 -1
- package/graphql/index.d.ts +2 -2
- package/graphql/index.js.map +1 -1
- package/graphql/types.d.ts +43 -23
- package/graphql/types.js.map +1 -1
- package/import/create/blocksHandler.d.ts +3 -0
- package/import/create/blocksHandler.js +110 -0
- package/import/create/blocksHandler.js.map +1 -0
- package/{importPages → import}/create/index.d.ts +5 -4
- package/import/create/index.js +30 -0
- package/import/create/index.js.map +1 -0
- package/import/create/pagesHandler.d.ts +3 -0
- package/import/create/pagesHandler.js +110 -0
- package/import/create/pagesHandler.js.map +1 -0
- package/import/process/blocksHandler.d.ts +3 -0
- package/import/process/blocksHandler.js +175 -0
- package/import/process/blocksHandler.js.map +1 -0
- package/{importPages → import}/process/index.d.ts +4 -3
- package/import/process/index.js +27 -0
- package/import/process/index.js.map +1 -0
- package/import/process/pagesHandler.d.ts +3 -0
- package/import/process/pagesHandler.js +180 -0
- package/import/process/pagesHandler.js.map +1 -0
- package/{importPages → import}/utils.d.ts +19 -20
- package/{importPages → import}/utils.js +107 -36
- package/import/utils.js.map +1 -0
- package/package.json +22 -22
- package/types.d.ts +62 -65
- package/types.js +17 -17
- package/types.js.map +1 -1
- package/exportPages/combine/index.js +0 -114
- package/exportPages/combine/index.js.map +0 -1
- package/exportPages/process/index.js +0 -208
- package/exportPages/process/index.js.map +0 -1
- package/exportPages/utils.d.ts +0 -13
- package/exportPages/utils.js +0 -113
- package/exportPages/utils.js.map +0 -1
- package/exportPages/zipper.js.map +0 -1
- package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
- package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
- package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
- package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
- package/importPages/create/index.js +0 -118
- package/importPages/create/index.js.map +0 -1
- package/importPages/process/index.js +0 -185
- package/importPages/process/index.js.map +0 -1
- package/importPages/utils.js.map +0 -1
@@ -5,10 +5,11 @@ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
+exports.importBlock = importBlock;
 exports.importPage = importPage;
 exports.initialStats = initialStats;
 exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
-exports.uploadPageAssets = void 0;
+exports.uploadAssets = void 0;
 
 var _uniqid = _interopRequireDefault(require("uniqid"));
 
@@ -38,12 +39,12 @@ var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
 
 var _types = require("../types");
 
-var _s3Stream = require("../exportPages/s3Stream");
+var _s3Stream = require("../export/s3Stream");
 
 const streamPipeline = (0, _util.promisify)(_stream.pipeline);
 const INSTALL_DIR = "/tmp";
 
-const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "
+const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
 
 const FILES_COUNT_IN_EACH_BATCH = 15;
 
@@ -71,7 +72,19 @@ function updateImageInPageSettings(params) {
   return newSettings;
 }
 
-function updateFilesInPageData({
+function updateBlockPreviewImage(params) {
+  const {
+    file,
+    fileIdToKeyMap,
+    srcPrefix
+  } = params;
+  const newFile = file;
+  const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+  newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+  return newFile;
+}
+
+function updateFilesInData({
   data,
   fileIdToKeyMap,
   srcPrefix
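
The new `updateBlockPreviewImage` helper above normalizes a trailing slash on `srcPrefix` and repoints the preview file's `src` at the re-uploaded asset key. A minimal TypeScript sketch of that behavior; the prefix, file id, and map entry below are illustrative values, not taken from the package:

```ts
// Sketch of the trailing-slash handling in updateBlockPreviewImage.
// All concrete values here are made up for illustration.
interface ImageFile {
    id?: string;
    src?: string;
}

function updateBlockPreviewImageSketch(
    file: ImageFile,
    fileIdToKeyMap: Map<string, string>,
    srcPrefix: string
): ImageFile {
    const prefix = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
    file.src = `${prefix}/${fileIdToKeyMap.get(file.id || "")}`;
    return file;
}

const map = new Map([["preview-file-id", "8ld3kf-preview.png"]]);
// Both calls yield src === "https://cdn.example.com/files/8ld3kf-preview.png".
updateBlockPreviewImageSketch({ id: "preview-file-id" }, map, "https://cdn.example.com/files/");
updateBlockPreviewImageSketch({ id: "preview-file-id" }, map, "https://cdn.example.com/files");
```
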
@@ -85,7 +98,7 @@ function updateFilesInPageData({
   if (Array.isArray(data)) {
     for (let i = 0; i < data.length; i++) {
       const element = data[i];
-      updateFilesInPageData({
+      updateFilesInData({
         data: element,
         fileIdToKeyMap,
         srcPrefix
@@ -106,7 +119,7 @@ function updateFilesInPageData({
       value.name = fileIdToKeyMap.get(value.id);
       value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
     } else {
-      updateFilesInPageData({
+      updateFilesInData({
         data: value,
         srcPrefix,
         fileIdToKeyMap
@@ -115,7 +128,7 @@ function updateFilesInPageData({
   }
 }
 
-const uploadPageAssets = async params => {
+const uploadAssets = async params => {
   const {
     context,
     filesData,
@@ -133,9 +146,8 @@ const uploadPageAssets = async params => {
     return {
       fileIdToKeyMap
     };
-  }
+  } // Save files meta data against old key for later use.
 
-  console.log("INSIDE uploadPageAssets"); // Save files meta data against old key for later use.
 
   const fileKeyToFileMap = new Map(); // Initialize maps.
 
@@ -190,7 +202,7 @@ const uploadPageAssets = async params => {
   };
 };
 
-exports.uploadPageAssets = uploadPageAssets;
+exports.uploadAssets = uploadAssets;
 
 async function importPage({
   pageKey,
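
Aside from the page-specific name being dropped, the helper's contract is unchanged: `uploadAssets` re-uploads the assets listed in the data file, registers them with the File Manager, and returns a map from each file's original id to its new key. The shapes below are lifted from the TypeScript source embedded in the source map further down; the context type is stubbed here for brevity:

```ts
// Parameter and return shapes of uploadAssets (FileItem, FileUploadsData,
// UploadAssetsParams and UploadAssetsReturnType in the embedded source).
// PbImportExportContext is stubbed; the real type lives in ~/graphql/types.
type PbImportExportContext = Record<string, any>;

interface FileItem {
    id: string;
    key: string;
    name: string;
    type: string;
    size: number;
    meta: Record<string, any>;
    tags: string[];
}

interface FileUploadsData {
    data: string;                    // S3 key of the page/block data JSON file
    assets: Record<string, string>;  // original asset key -> key it was re-uploaded under
}

interface UploadAssetsParams {
    context: PbImportExportContext;
    filesData: FileItem[];
    fileUploadsData: FileUploadsData;
}

interface UploadAssetsReturnType {
    // Maps a file's original id to the S3 key of the newly created File Manager entry.
    fileIdToKeyMap: Map<string, string>;
}
```
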
@@ -223,7 +235,7 @@ async function importPage({
   // Upload page assets.
   const {
     fileIdToKeyMap
-  } = await uploadPageAssets({
+  } = await uploadAssets({
     context,
 
     /**
@@ -237,7 +249,7 @@ async function importPage({
   const {
     srcPrefix = ""
   } = settings || {};
-  updateFilesInPageData({
+  updateFilesInData({
     data: page.content || {},
     fileIdToKeyMap,
     srcPrefix
@@ -256,6 +268,65 @@ async function importPage({
   return page;
 }
 
+async function importBlock({
+  blockKey,
+  context,
+  fileUploadsData
+}) {
+  const log = console.log; // Making Directory for block in which we're going to extract the block data file.
+
+  const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+
+  (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+
+  const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+
+  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+
+  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`); // Download and save block data file in disk.
+
+  await new Promise((resolve, reject) => {
+    _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+  }); // Load the block data file from disk.
+
+  log(`Load file ${blockDataFileKey}`);
+  const {
+    block,
+    files
+  } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH); // Only update block data if there are files.
+
+  if (files && Array.isArray(files) && files.length > 0) {
+    // Upload block assets.
+    const {
+      fileIdToKeyMap
+    } = await uploadAssets({
+      context,
+      filesData: files,
+      fileUploadsData
+    });
+    const settings = await context.fileManager.settings.getSettings();
+    const {
+      srcPrefix = ""
+    } = settings || {};
+    updateFilesInData({
+      data: block.content || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+    block.preview = updateBlockPreviewImage({
+      file: block.preview || {},
+      fileIdToKeyMap,
+      srcPrefix
+    });
+  }
+
+  log("Removing Directory for block...");
+  await (0, _downloadInstallFiles.deleteFile)(blockKey);
+  log(`Remove block contents from S3...`);
+  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+  return block;
+}
+
 async function uploadFilesFromS3({
   fileKeyToFileMap,
   oldKeyToNewKeyMap
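
The block counterpart to `importPage`: `importBlock` downloads the block's data file from S3 into a temp directory, loads it with `load-json-file`, re-uploads any referenced assets via `uploadAssets`, and rewrites `block.content` and `block.preview` before returning the block. Based on how the function reads the file, the JSON it expects looks roughly like the sketch below; only the fields this diff actually touches are listed, and the concrete values are illustrative:

```ts
// Rough shape of the block data file importBlock loads (ExportedBlockData in
// ~/export/utils). The real type may carry more fields than shown here.
interface ExportedBlockDataSketch {
    block: {
        content: Record<string, any>;            // page-builder element tree
        preview: { id?: string; src?: string };  // preview image, rewritten by updateBlockPreviewImage
        [key: string]: any;
    };
    files: Array<{
        id: string;
        key: string;
        name: string;
        type: string;
        size: number;
        meta: Record<string, any>;
        tags: string[];
    }>;
}

const example: ExportedBlockDataSketch = {
    block: {
        content: { type: "block", elements: [] },
        preview: { id: "preview-file-id" }
    },
    files: [
        {
            id: "preview-file-id",
            key: "demo/block-preview.png",
            name: "block-preview.png",
            type: "image/png",
            size: 12345,
            meta: {},
            tags: []
        }
    ]
};
```
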
@@ -310,11 +381,11 @@ function getFileNameWithoutExt(fileName) {
 /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
  * @param zipFileUrl
- * @return
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
  */
 async function readExtractAndUploadZipFileContents(zipFileUrl) {
   const log = console.log;
-  const
+  const importDataList = [];
 
   const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
 
@@ -325,7 +396,7 @@ async function readExtractAndUploadZipFileContents(zipFileUrl) {
   }
 
   const readStream = response.body;
-  const uniquePath = (0, _uniqid.default)("
+  const uniquePath = (0, _uniqid.default)("IMPORTS/"); // Read export file and download it in the disk
 
   const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
 
@@ -335,22 +406,22 @@ async function readExtractAndUploadZipFileContents(zipFileUrl) {
 
   const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);
   log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
-  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page zip and upload their content's to S3
+  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH); // Extract each page/block zip and upload their content's to S3
 
   for (let i = 0; i < zipFilePaths.length; i++) {
     const currentPath = zipFilePaths[i];
     const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
-
+    importDataList.push(dataMap);
   }
 
   log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
   await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
-  return
+  return importDataList;
 }
 
 const ASSETS_DIR_NAME = "/assets";
 
-function
+function prepareDataDirMap({
   map,
   filePath,
   newKey
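
`readExtractAndUploadZipFileContents` now returns one `ImportData` entry per page/block zip found in the export archive, and `prepareDataDirMap` routes each extracted file into that entry: anything under an `/assets` directory is recorded in `assets`, while the single data JSON file becomes `data`. A sketch of the resulting structure; the file names and the "IMPORTS/<id>" id are illustrative (the prefix itself comes from `uniqid("IMPORTS/")`):

```ts
// Shape of each entry pushed onto importDataList (ImportData in the embedded source).
interface ImportData {
    key: string;                    // base name of the extracted page/block zip
    data: string;                   // new S3 key of the page/block data JSON file
    assets: Record<string, string>; // extracted asset file name -> new S3 key
}

// Illustrative entry for a single block zip. New keys follow
// `${uniquePath}/${uniqueKey}/${entry.fileName}`.
const entry: ImportData = {
    key: "my-block",
    data: "IMPORTS/l2abc123/my-block/my-block.json",
    assets: {
        // File name as recorded by prepareDataDirMap (dots are escaped with "\\."
        // when the key is written through dot-prop), mapped to the new S3 key.
        "block-preview.png": "IMPORTS/l2abc123/my-block/assets/block-preview.png"
    }
};
```
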
@@ -394,17 +465,17 @@ async function deleteS3Folder(key) {
 
 function initialStats(total) {
   return {
-    [_types.
-    [_types.
-    [_types.
-    [_types.
+    [_types.ImportExportTaskStatus.PENDING]: total,
+    [_types.ImportExportTaskStatus.PROCESSING]: 0,
+    [_types.ImportExportTaskStatus.COMPLETED]: 0,
+    [_types.ImportExportTaskStatus.FAILED]: 0,
     total
   };
 }
 
 function extractZipToDisk(exportFileZipPath) {
   return new Promise((resolve, reject) => {
-    const
+    const zipFilePaths = [];
     const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
 
     const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport); // Make sure DIR exists
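
Task statistics are now keyed by the renamed `ImportExportTaskStatus` enum rather than the old page-specific status type. A sketch of what `initialStats(total)` produces; the literal status strings are an assumption (only the member names PENDING, PROCESSING, COMPLETED, and FAILED appear in this diff):

```ts
// initialStats as it appears in the diff, with assumed enum string values.
enum ImportExportTaskStatus {
    PENDING = "pending",
    PROCESSING = "processing",
    COMPLETED = "completed",
    FAILED = "failed"
}

function initialStats(total: number) {
    return {
        [ImportExportTaskStatus.PENDING]: total,
        [ImportExportTaskStatus.PROCESSING]: 0,
        [ImportExportTaskStatus.COMPLETED]: 0,
        [ImportExportTaskStatus.FAILED]: 0,
        total
    };
}

// initialStats(7) -> { pending: 7, processing: 0, completed: 0, failed: 0, total: 7 }
```
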
@@ -434,7 +505,7 @@ function extractZipToDisk(exportFileZipPath) {
         reject(err);
       }
 
-      resolve(
+      resolve(zipFilePaths);
     });
     zipFile.readEntry();
     zipFile.on("entry", function (entry) {
@@ -463,7 +534,7 @@ function extractZipToDisk(exportFileZipPath) {
           const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
 
           readStream.on("end", function () {
-
+            zipFilePaths.push(filePath);
             zipFile.readEntry();
           });
           streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
@@ -476,28 +547,28 @@ function extractZipToDisk(exportFileZipPath) {
   });
 }
 
-function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
   return new Promise((resolve, reject) => {
     const filePaths = [];
     const fileUploadPromises = [];
-    const
+    const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
     let dataMap = {
-      key:
+      key: uniqueKey,
       assets: {},
       data: ""
     };
 
-    _yauzl.default.open(
+    _yauzl.default.open(dataZipFilePath, {
       lazyEntries: true
     }, function (err, zipFile) {
       if (err) {
-        console.warn("ERROR: Failed to extract zip: ",
+        console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
         reject(err);
         return;
       }
 
       if (!zipFile) {
-        console.log("ERROR: Probably failed to extract zip: " +
+        console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
         reject("Missing Zip File Resource.");
         return;
       }
@@ -505,7 +576,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
       console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
       zipFile.on("end", function (err) {
         if (err) {
-          console.warn('ERROR: Failed on "END" for file: ',
+          console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
           reject(err);
         }
 
@@ -535,7 +606,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
           }
 
           if (!readStream) {
-            console.log("ERROR: Missing Read Stream while importing
+            console.log("ERROR: Missing Read Stream while importing.");
             reject("Missing Read Strea Resource.");
             return;
           }
@@ -544,9 +615,9 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
             filePaths.push(entry.fileName);
             zipFile.readEntry();
           });
-          const newKey = `${uniquePath}/${
+          const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`; // Modify in place
 
-          dataMap =
+          dataMap = prepareDataDirMap({
             map: dataMap,
             filePath: entry.fileName,
             newKey

package/import/utils.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["streamPipeline","promisify","pipeline","INSTALL_DIR","INSTALL_EXTRACT_DIR","path","join","FILES_COUNT_IN_EACH_BATCH","updateImageInPageSettings","params","settings","fileIdToKeyMap","srcPrefix","newSettings","srcPrefixWithoutTrailingSlash","endsWith","slice","dotProp","get","set","general","image","id","social","updateBlockPreviewImage","file","newFile","src","updateFilesInData","data","Array","isArray","i","length","element","tuple","Object","entries","key","value","has","name","uploadAssets","context","filesData","fileUploadsData","Map","process","env","NODE_ENV","fileKeyToFileMap","type","fileUploadResults","uploadFilesFromS3","oldKeyToNewKeyMap","assets","createFilesInput","map","uploadResult","newKey","Key","getOldFileKey","size","meta","tags","filter","Boolean","createFilesPromises","createFilesInputChunks","chunk","createFilesInputChunk","push","fileManager","files","createFilesInBatch","Promise","all","importPage","pageKey","log","console","PAGE_EXTRACT_DIR","ensureDirSync","pageDataFileKey","PAGE_DATA_FILE_PATH","basename","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","page","loadJson","getSettings","content","deleteFile","deleteS3Folder","dirname","importBlock","blockKey","BLOCK_EXTRACT_DIR","blockDataFileKey","BLOCK_DATA_FILE_PATH","block","preview","oldKeysForAssets","keys","promises","oldKey","tempNewKey","fileMetaData","uniqueId","streamPassThrough","streamPassThroughUploadPromise","promise","writeStream","rest","split","e","FILE_CONTENT_TYPE","getFileNameWithoutExt","fileName","replace","extname","readExtractAndUploadZipFileContents","zipFileUrl","importDataList","zipFileName","response","fetch","ok","WebinyError","statusText","body","uniquePath","ZIP_FILE_PATH","zipFilePaths","extractZipToDisk","currentPath","dataMap","extractZipAndUploadToS3","ASSETS_DIR_NAME","prepareDataDirMap","filePath","isAsset","listObject","Contents","c","deleteFilePromises","deleteObject","initialStats","total","ImportExportTaskStatus","PENDING","PROCESSING","COMPLETED","FAILED","exportFileZipPath","uniqueFolderNameForExport","EXPORT_FILE_EXTRACTION_PATH","yauzl","open","lazyEntries","err","zipFile","warn","info","entryCount","readEntry","entry","test","openReadStream","catch","error","dataZipFilePath","filePaths","fileUploadPromises","uniqueKey","then","res","forEach","r"],"sources":["utils.ts"],"sourcesContent":["import uniqueId from \"uniqid\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport dotProp from \"dot-prop-immutable\";\nimport { createWriteStream } from \"fs\";\nimport { ensureDirSync } from \"fs-extra\";\nimport { promisify } from \"util\";\nimport { pipeline } from \"stream\";\nimport fetch from \"node-fetch\";\nimport path from \"path\";\nimport yauzl from \"yauzl\";\nimport chunk from \"lodash/chunk\";\nimport loadJson from \"load-json-file\";\nimport { FileInput, File } from \"@webiny/api-file-manager/types\";\nimport WebinyError from \"@webiny/error\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { File as ImageFile, ImportExportTaskStatus } from \"~/types\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { ExportedPageData, ExportedBlockData } from \"~/export/utils\";\nimport { PageSettings } from \"@webiny/api-page-builder/types\";\n\ninterface FileItem extends File {\n key: string;\n type: string;\n name: string;\n size: number;\n meta: Record<string, any>;\n tags: string[];\n}\n\nconst 
streamPipeline = promisify(pipeline);\n\nconst INSTALL_DIR = \"/tmp\";\nconst INSTALL_EXTRACT_DIR = path.join(INSTALL_DIR, \"apiPageBuilderImport\");\nconst FILES_COUNT_IN_EACH_BATCH = 15;\n\ninterface UpdateFilesInDataParams {\n data: Record<string, any>;\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n}\n\ninterface UpdateImageInPageSettingsParams {\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n settings: PageSettings;\n}\n\nfunction updateImageInPageSettings(\n params: UpdateImageInPageSettingsParams\n): UpdateImageInPageSettingsParams[\"settings\"] {\n const { settings, fileIdToKeyMap, srcPrefix } = params;\n let newSettings = settings;\n\n const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith(\"/\")\n ? srcPrefix.slice(0, -1)\n : srcPrefix;\n\n if (dotProp.get(newSettings, \"general.image.src\")) {\n newSettings = dotProp.set(\n newSettings,\n \"general.image.src\",\n `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(\n settings.general?.image?.id || \"\"\n )}`\n );\n }\n if (dotProp.get(newSettings, \"social.image.src\")) {\n newSettings = dotProp.set(\n newSettings,\n \"social.image.src\",\n `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(\n settings.social?.image?.id || \"\"\n )}`\n );\n }\n\n return newSettings;\n}\n\ninterface UpdateBlockPreviewImage {\n fileIdToKeyMap: Map<string, string>;\n srcPrefix: string;\n file: ImageFile;\n}\n\nfunction updateBlockPreviewImage(params: UpdateBlockPreviewImage): ImageFile {\n const { file, fileIdToKeyMap, srcPrefix } = params;\n const newFile = file;\n\n const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith(\"/\")\n ? srcPrefix.slice(0, -1)\n : srcPrefix;\n\n newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || \"\")}`;\n\n return newFile;\n}\n\nfunction updateFilesInData({ data, fileIdToKeyMap, srcPrefix }: UpdateFilesInDataParams) {\n // BASE CASE: Termination point\n if (!data || typeof data !== \"object\") {\n return;\n }\n // Recursively call function if data is array\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n updateFilesInData({ data: element, fileIdToKeyMap, srcPrefix });\n }\n return;\n }\n // Main logic\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n\n if (key === \"file\" && value && fileIdToKeyMap.has(value.id)) {\n value.key = fileIdToKeyMap.get(value.id);\n value.name = fileIdToKeyMap.get(value.id);\n value.src = `${srcPrefix}${srcPrefix.endsWith(\"/\") ? 
\"\" : \"/\"}${fileIdToKeyMap.get(\n value.id\n )}`;\n } else {\n updateFilesInData({ data: value, srcPrefix, fileIdToKeyMap });\n }\n }\n}\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n filesData: File[];\n fileUploadsData: FileUploadsData;\n}\n\ninterface UploadAssetsReturnType {\n fileIdToKeyMap: Map<string, string>;\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams): Promise<UploadAssetsReturnType> => {\n const { context, filesData, fileUploadsData } = params;\n // Save uploaded file key against static id for later use.\n const fileIdToKeyMap = new Map<string, string>();\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return {\n fileIdToKeyMap\n };\n }\n\n // Save files meta data against old key for later use.\n const fileKeyToFileMap = new Map<string, FileItem>();\n // Initialize maps.\n for (let i = 0; i < filesData.length; i++) {\n const file = filesData[i];\n fileKeyToFileMap.set(file.key, file);\n\n // Initialize the value\n fileIdToKeyMap.set(file.id, file.type);\n }\n\n const fileUploadResults = await uploadFilesFromS3({\n fileKeyToFileMap,\n oldKeyToNewKeyMap: fileUploadsData.assets\n });\n\n // Create files in File Manager\n const createFilesInput = fileUploadResults\n .map((uploadResult): FileInput | null => {\n const newKey = uploadResult.Key;\n const file = fileKeyToFileMap.get(getOldFileKey(newKey));\n if (!file) {\n return null;\n }\n\n // Update the file map with newly uploaded file.\n fileIdToKeyMap.set(file.id, newKey);\n\n return {\n key: newKey,\n name: file.name,\n size: file.size,\n type: file.type,\n meta: file.meta,\n tags: file.tags\n };\n })\n .filter(Boolean) as FileInput[];\n\n const createFilesPromises = [];\n // Gives an array of chunks (each consists of FILES_COUNT_IN_EACH_BATCH items).\n const createFilesInputChunks = chunk(createFilesInput, FILES_COUNT_IN_EACH_BATCH);\n for (let i = 0; i < createFilesInputChunks.length; i++) {\n const createFilesInputChunk = createFilesInputChunks[i];\n createFilesPromises.push(\n /*\n * We need to break down files into chunks because\n * `createFilesInBatch` operation has a limit on number of files it can handle at once.\n */\n context.fileManager.files.createFilesInBatch(createFilesInputChunk)\n );\n }\n\n await Promise.all(createFilesPromises);\n\n return {\n fileIdToKeyMap\n };\n};\n\ninterface FileUploadsData {\n data: string;\n assets: Record<string, string>;\n}\n\ninterface ImportPageParams {\n key: string;\n pageKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importPage({\n pageKey,\n context,\n fileUploadsData\n}: ImportPageParams): Promise<ExportedPageData[\"page\"]> {\n const log = console.log;\n\n // Making Directory for page in which we're going to extract the page data file.\n const PAGE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, pageKey);\n ensureDirSync(PAGE_EXTRACT_DIR);\n\n const pageDataFileKey = dotProp.get(fileUploadsData, `data`);\n const PAGE_DATA_FILE_PATH = path.join(PAGE_EXTRACT_DIR, path.basename(pageDataFileKey));\n\n log(`Downloading Page data file: ${pageDataFileKey} at \"${PAGE_DATA_FILE_PATH}\"`);\n // Download and save page data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(pageDataFileKey)\n .on(\"error\", reject)\n 
.pipe(createWriteStream(PAGE_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the page data file from disk.\n log(`Load file ${pageDataFileKey}`);\n const { page, files } = await loadJson<ExportedPageData>(PAGE_DATA_FILE_PATH);\n\n // Only update page data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload page assets.\n const { fileIdToKeyMap } = await uploadAssets({\n context,\n /**\n * TODO @ts-refactor @ashutosh figure out correct types.\n */\n // @ts-ignore\n filesData: files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.settings.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: page.content || {},\n fileIdToKeyMap,\n srcPrefix\n });\n\n page.settings = updateImageInPageSettings({\n settings: page.settings || {},\n fileIdToKeyMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for page...\");\n await deleteFile(pageKey);\n\n log(`Remove page contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return page;\n}\n\ninterface ImportBlockParams {\n key: string;\n blockKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importBlock({\n blockKey,\n context,\n fileUploadsData\n}: ImportBlockParams): Promise<ExportedBlockData[\"block\"]> {\n const log = console.log;\n\n // Making Directory for block in which we're going to extract the block data file.\n const BLOCK_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, blockKey);\n ensureDirSync(BLOCK_EXTRACT_DIR);\n\n const blockDataFileKey = dotProp.get(fileUploadsData, `data`);\n const BLOCK_DATA_FILE_PATH = path.join(BLOCK_EXTRACT_DIR, path.basename(blockDataFileKey));\n\n log(`Downloading Block data file: ${blockDataFileKey} at \"${BLOCK_DATA_FILE_PATH}\"`);\n // Download and save block data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(blockDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(BLOCK_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the block data file from disk.\n log(`Load file ${blockDataFileKey}`);\n const { block, files } = await loadJson<ExportedBlockData>(BLOCK_DATA_FILE_PATH);\n\n // Only update block data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload block assets.\n const { fileIdToKeyMap } = await uploadAssets({\n context,\n filesData: files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.settings.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: block.content || {},\n fileIdToKeyMap,\n srcPrefix\n });\n\n block.preview = updateBlockPreviewImage({\n file: block.preview || {},\n fileIdToKeyMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for block...\");\n await deleteFile(blockKey);\n\n log(`Remove block contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return block;\n}\n\ninterface UploadFilesFromZipParams {\n fileKeyToFileMap: Map<string, any>;\n oldKeyToNewKeyMap: Record<string, string>;\n}\n\nasync function uploadFilesFromS3({\n fileKeyToFileMap,\n oldKeyToNewKeyMap\n}: UploadFilesFromZipParams): Promise<S3.ManagedUpload.SendData[]> {\n const oldKeysForAssets = Object.keys(oldKeyToNewKeyMap);\n\n const promises = [];\n // Upload all assets.\n for (let i = 0; i < oldKeysForAssets.length; i++) {\n const oldKey = oldKeysForAssets[i];\n const 
tempNewKey = oldKeyToNewKeyMap[oldKey];\n\n // Read file.\n const readStream = s3Stream.readStream(tempNewKey);\n // Get file meta data.\n const fileMetaData = fileKeyToFileMap.get(oldKey);\n\n if (fileMetaData) {\n const newKey = uniqueId(\"\", `-${fileMetaData.key}`);\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, fileMetaData.type);\n readStream.pipe(streamPassThrough);\n promises.push(promise);\n\n console.log(`Successfully queued file \"${newKey}\"`);\n }\n }\n\n return Promise.all(promises);\n}\n\nfunction getOldFileKey(key: string) {\n /*\n * Because we know the naming convention, we can extract the old key from new key.\n */\n try {\n const [, ...rest] = key.split(\"-\");\n return rest.join(\"-\");\n } catch (e) {\n return key;\n }\n}\n\nconst FILE_CONTENT_TYPE = \"application/octet-stream\";\n\nfunction getFileNameWithoutExt(fileName: string): string {\n return path.basename(fileName).replace(path.extname(fileName), \"\");\n}\n\ninterface ImportData {\n assets: Record<string, string>;\n data: string;\n key: string;\n}\n\n/**\n * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.\n * @param zipFileUrl\n * @return ImportData S3 file keys for all uploaded assets group by page/block.\n */\nexport async function readExtractAndUploadZipFileContents(\n zipFileUrl: string\n): Promise<ImportData[]> {\n const log = console.log;\n const importDataList = [];\n\n const zipFileName = path.basename(zipFileUrl).split(\"?\")[0];\n\n const response = await fetch(zipFileUrl);\n if (!response.ok) {\n throw new WebinyError(`Unable to downloading file: \"${zipFileUrl}\"`, response.statusText);\n }\n\n const readStream = response.body;\n\n const uniquePath = uniqueId(\"IMPORTS/\");\n // Read export file and download it in the disk\n const ZIP_FILE_PATH = path.join(INSTALL_DIR, zipFileName);\n\n const writeStream = createWriteStream(ZIP_FILE_PATH);\n await streamPipeline(readStream, writeStream);\n log(`Downloaded file \"${zipFileName}\" at ${ZIP_FILE_PATH}`);\n\n // Extract the downloaded zip file\n const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);\n\n log(`Removing ZIP file \"${zipFileUrl}\" from ${ZIP_FILE_PATH}`);\n await deleteFile(ZIP_FILE_PATH);\n\n // Extract each page/block zip and upload their content's to S3\n for (let i = 0; i < zipFilePaths.length; i++) {\n const currentPath = zipFilePaths[i];\n const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);\n importDataList.push(dataMap);\n }\n log(\"Removing all ZIP files located at \", path.dirname(zipFilePaths[0]));\n await deleteFile(path.dirname(zipFilePaths[0]));\n\n return importDataList;\n}\n\nconst ASSETS_DIR_NAME = \"/assets\";\n\nfunction prepareDataDirMap({\n map,\n filePath,\n newKey\n}: {\n map: ImportData;\n filePath: string;\n newKey: string;\n}): ImportData {\n const dirname = path.dirname(filePath);\n const fileName = path.basename(filePath);\n /*\n * We want to use dot (.) 
as part of object key rather than creating nested object(s).\n * Also, the file name might contain dots in it beside the extension, so, we are escaping them all.\n */\n const oldKey = fileName.replace(/\\./g, \"\\\\.\");\n\n const isAsset = dirname.endsWith(ASSETS_DIR_NAME);\n\n if (isAsset) {\n map = dotProp.set(map, `assets.${oldKey}`, newKey);\n } else {\n // We only need to know the newKey for data file.\n map = dotProp.set(map, `data`, newKey);\n }\n\n return map;\n}\n\nasync function deleteS3Folder(key: string): Promise<void> {\n // Append trailing slash i.e \"/\" to key to make sure we only delete a specific folder.\n if (!key.endsWith(\"/\")) {\n key = `${key}/`;\n }\n\n const response = await s3Stream.listObject(key);\n const keys = (response.Contents || []).map(c => c.Key).filter(Boolean) as string[];\n console.log(`Found ${keys.length} files.`);\n\n const deleteFilePromises = keys.map(key => s3Stream.deleteObject(key));\n\n await Promise.all(deleteFilePromises);\n console.log(`Successfully deleted ${deleteFilePromises.length} files.`);\n}\n\n// export const zeroPad = version => `${version}`.padStart(5, \"0\");\n\nexport function initialStats(total: number) {\n return {\n [ImportExportTaskStatus.PENDING]: total,\n [ImportExportTaskStatus.PROCESSING]: 0,\n [ImportExportTaskStatus.COMPLETED]: 0,\n [ImportExportTaskStatus.FAILED]: 0,\n total\n };\n}\n\nfunction extractZipToDisk(exportFileZipPath: string): Promise<string[]> {\n return new Promise((resolve, reject) => {\n const zipFilePaths: string[] = [];\n const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);\n const EXPORT_FILE_EXTRACTION_PATH = path.join(INSTALL_DIR, uniqueFolderNameForExport);\n // Make sure DIR exists\n ensureDirSync(EXPORT_FILE_EXTRACTION_PATH);\n\n yauzl.open(exportFileZipPath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", exportFileZipPath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Missing zip file resource for path: \" + exportFileZipPath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn(\"ERROR: Failed on END event for file: \", exportFileZipPath, err);\n reject(err);\n }\n resolve(zipFilePaths);\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed to openReadStream for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\n \"ERROR: Missing Read Stream Resource when extracting to disk.\"\n );\n reject(\"Missing Read Stream Resource.\");\n return;\n }\n\n const filePath = path.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);\n\n readStream.on(\"end\", function () {\n zipFilePaths.push(filePath);\n zipFile.readEntry();\n });\n\n streamPipeline(readStream, createWriteStream(filePath)).catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n });\n}\n\nfunction extractZipAndUploadToS3(dataZipFilePath: string, uniquePath: string): 
Promise<ImportData> {\n return new Promise((resolve, reject) => {\n const filePaths = [];\n const fileUploadPromises: Promise<S3.ManagedUpload.SendData>[] = [];\n const uniqueKey = getFileNameWithoutExt(dataZipFilePath);\n let dataMap: ImportData = {\n key: uniqueKey,\n assets: {},\n data: \"\"\n };\n yauzl.open(dataZipFilePath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", dataZipFilePath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Probably failed to extract zip: \" + dataZipFilePath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn('ERROR: Failed on \"END\" for file: ', dataZipFilePath, err);\n reject(err);\n }\n\n Promise.all(fileUploadPromises).then(res => {\n res.forEach(r => {\n console.info(\"Done uploading... \", r);\n });\n resolve(dataMap);\n });\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed while performing [openReadStream] for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\"ERROR: Missing Read Stream while importing.\");\n reject(\"Missing Read Strea Resource.\");\n return;\n }\n readStream.on(\"end\", function () {\n filePaths.push(entry.fileName);\n zipFile.readEntry();\n });\n\n const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;\n // Modify in place\n dataMap = prepareDataDirMap({\n map: dataMap,\n filePath: entry.fileName,\n newKey\n });\n\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);\n\n streamPipeline(readStream, streamPassThrough)\n .then(() => {\n fileUploadPromises.push(promise);\n })\n .catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n 
});\n}\n"],"mappings":";;;;;;;;;;;;;AAAA;;AAEA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AACA;;AAEA;;AACA;;AACA;;AAEA;;AAaA,MAAMA,cAAc,GAAG,IAAAC,eAAA,EAAUC,gBAAV,CAAvB;AAEA,MAAMC,WAAW,GAAG,MAApB;;AACA,MAAMC,mBAAmB,GAAGC,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB,sBAAvB,CAA5B;;AACA,MAAMI,yBAAyB,GAAG,EAAlC;;AAcA,SAASC,yBAAT,CACIC,MADJ,EAE+C;EAC3C,MAAM;IAAEC,QAAF;IAAYC,cAAZ;IAA4BC;EAA5B,IAA0CH,MAAhD;EACA,IAAII,WAAW,GAAGH,QAAlB;EAEA,MAAMI,6BAA6B,GAAGF,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAChCH,SAAS,CAACI,KAAV,CAAgB,CAAhB,EAAmB,CAAC,CAApB,CADgC,GAEhCJ,SAFN;;EAIA,IAAIK,yBAAA,CAAQC,GAAR,CAAYL,WAAZ,EAAyB,mBAAzB,CAAJ,EAAmD;IAAA;;IAC/CA,WAAW,GAAGI,yBAAA,CAAQE,GAAR,CACVN,WADU,EAEV,mBAFU,EAGT,GAAEC,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAChC,sBAAAR,QAAQ,CAACU,OAAT,iGAAkBC,KAAlB,gFAAyBC,EAAzB,KAA+B,EADC,CAElC,EALQ,CAAd;EAOH;;EACD,IAAIL,yBAAA,CAAQC,GAAR,CAAYL,WAAZ,EAAyB,kBAAzB,CAAJ,EAAkD;IAAA;;IAC9CA,WAAW,GAAGI,yBAAA,CAAQE,GAAR,CACVN,WADU,EAEV,kBAFU,EAGT,GAAEC,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAChC,qBAAAR,QAAQ,CAACa,MAAT,+FAAiBF,KAAjB,gFAAwBC,EAAxB,KAA8B,EADE,CAElC,EALQ,CAAd;EAOH;;EAED,OAAOT,WAAP;AACH;;AAQD,SAASW,uBAAT,CAAiCf,MAAjC,EAA6E;EACzE,MAAM;IAAEgB,IAAF;IAAQd,cAAR;IAAwBC;EAAxB,IAAsCH,MAA5C;EACA,MAAMiB,OAAO,GAAGD,IAAhB;EAEA,MAAMX,6BAA6B,GAAGF,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAChCH,SAAS,CAACI,KAAV,CAAgB,CAAhB,EAAmB,CAAC,CAApB,CADgC,GAEhCJ,SAFN;EAIAc,OAAO,CAACC,GAAR,GAAe,GAAEb,6BAA8B,IAAGH,cAAc,CAACO,GAAf,CAAmBO,IAAI,CAACH,EAAL,IAAW,EAA9B,CAAkC,EAApF;EAEA,OAAOI,OAAP;AACH;;AAED,SAASE,iBAAT,CAA2B;EAAEC,IAAF;EAAQlB,cAAR;EAAwBC;AAAxB,CAA3B,EAAyF;EACrF;EACA,IAAI,CAACiB,IAAD,IAAS,OAAOA,IAAP,KAAgB,QAA7B,EAAuC;IACnC;EACH,CAJoF,CAKrF;;;EACA,IAAIC,KAAK,CAACC,OAAN,CAAcF,IAAd,CAAJ,EAAyB;IACrB,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGH,IAAI,CAACI,MAAzB,EAAiCD,CAAC,EAAlC,EAAsC;MAClC,MAAME,OAAO,GAAGL,IAAI,CAACG,CAAD,CAApB;MACAJ,iBAAiB,CAAC;QAAEC,IAAI,EAAEK,OAAR;QAAiBvB,cAAjB;QAAiCC;MAAjC,CAAD,CAAjB;IACH;;IACD;EACH,CAZoF,CAarF;;;EACA,MAAMuB,KAAK,GAAGC,MAAM,CAACC,OAAP,CAAeR,IAAf,CAAd;;EACA,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGG,KAAK,CAACF,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAM,CAACM,GAAD,EAAMC,KAAN,IAAeJ,KAAK,CAACH,CAAD,CAA1B;;IAEA,IAAIM,GAAG,KAAK,MAAR,IAAkBC,KAAlB,IAA2B5B,cAAc,CAAC6B,GAAf,CAAmBD,KAAK,CAACjB,EAAzB,CAA/B,EAA6D;MACzDiB,KAAK,CAACD,GAAN,GAAY3B,cAAc,CAACO,GAAf,CAAmBqB,KAAK,CAACjB,EAAzB,CAAZ;MACAiB,KAAK,CAACE,IAAN,GAAa9B,cAAc,CAACO,GAAf,CAAmBqB,KAAK,CAACjB,EAAzB,CAAb;MACAiB,KAAK,CAACZ,GAAN,GAAa,GAAEf,SAAU,GAAEA,SAAS,CAACG,QAAV,CAAmB,GAAnB,IAA0B,EAA1B,GAA+B,GAAI,GAAEJ,cAAc,CAACO,GAAf,CAC5DqB,KAAK,CAACjB,EADsD,CAE9D,EAFF;IAGH,CAND,MAMO;MACHM,iBAAiB,CAAC;QAAEC,IAAI,EAAEU,KAAR;QAAe3B,SAAf;QAA0BD;MAA1B,CAAD,CAAjB;IACH;EACJ;AACJ;;AAYM,MAAM+B,YAAY,GAAG,MAAOjC,MAAP,IAAuE;EAC/F,MAAM;IAAEkC,OAAF;IAAWC,SAAX;IAAsBC;EAAtB,IAA0CpC,MAAhD,CAD+F,CAE/F;;EACA,MAAME,cAAc,GAAG,IAAImC,GAAJ,EAAvB;EACA;AACJ;AACA;AACA;AACA;;EACI,IAAIC,OAAO,CAACC,GAAR,CAAYC,QAAZ,KAAyB,MAA7B,EAAqC;IACjC,OAAO;MACHtC;IADG,CAAP;EAGH,CAb8F,CAe/F;;;EACA,MAAMuC,gBAAgB,GAAG,IAAIJ,GAAJ,EAAzB,CAhB+F,CAiB/F;;EACA,KAAK,IAAId,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGY,SAAS,CAACX,MAA9B,EAAsCD,CAAC,EAAvC,EAA2C;IACvC,MAAMP,IAAI,GAAGmB,SAAS,CAACZ,CAAD,CAAtB;IACAkB,gBAAgB,CAAC/B,GAAjB,CAAqBM,IAAI,CAACa,GAA1B,EAA+Bb,IAA/B,EAFuC,CAIvC;;IACAd,cAAc,CAACQ,GAAf,CAAmBM,IAAI,CAACH,EAAxB,EAA4BG,IAAI,CAAC0B,IAAjC;EACH;;EAED,MAAMC,iBAAiB,GAAG,MAAMC,iBAAiB,CAAC;IAC9CH,gBAD8C;IAE9CI,iBAAiB,EAAET,eAAe,CAACU;EAFW,CAAD,CAAjD,CA1B+F,CA+B/F;;EACA,MAAMC,gBAAgB,GAAGJ,iBAAiB,CACrCK,GADoB,CACfC,YAAD,IAAoC;IACrC,MAAMC,MAAM,GAAGD,YAAY,CAACE,GAA5B;IACA,MAAMnC,IAAI,GAAGyB,gBAAgB,CAAChC,GAAjB,CAAqB2C,aAAa,CAACF,MAAD,CAAl
C,CAAb;;IACA,IAAI,CAAClC,IAAL,EAAW;MACP,OAAO,IAAP;IACH,CALoC,CAOrC;;;IACAd,cAAc,CAACQ,GAAf,CAAmBM,IAAI,CAACH,EAAxB,EAA4BqC,MAA5B;IAEA,OAAO;MACHrB,GAAG,EAAEqB,MADF;MAEHlB,IAAI,EAAEhB,IAAI,CAACgB,IAFR;MAGHqB,IAAI,EAAErC,IAAI,CAACqC,IAHR;MAIHX,IAAI,EAAE1B,IAAI,CAAC0B,IAJR;MAKHY,IAAI,EAAEtC,IAAI,CAACsC,IALR;MAMHC,IAAI,EAAEvC,IAAI,CAACuC;IANR,CAAP;EAQH,CAnBoB,EAoBpBC,MApBoB,CAoBbC,OApBa,CAAzB;EAsBA,MAAMC,mBAAmB,GAAG,EAA5B,CAtD+F,CAuD/F;;EACA,MAAMC,sBAAsB,GAAG,IAAAC,cAAA,EAAMb,gBAAN,EAAwBjD,yBAAxB,CAA/B;;EACA,KAAK,IAAIyB,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGoC,sBAAsB,CAACnC,MAA3C,EAAmDD,CAAC,EAApD,EAAwD;IACpD,MAAMsC,qBAAqB,GAAGF,sBAAsB,CAACpC,CAAD,CAApD;IACAmC,mBAAmB,CAACI,IAApB;IACI;AACZ;AACA;AACA;IACY5B,OAAO,CAAC6B,WAAR,CAAoBC,KAApB,CAA0BC,kBAA1B,CAA6CJ,qBAA7C,CALJ;EAOH;;EAED,MAAMK,OAAO,CAACC,GAAR,CAAYT,mBAAZ,CAAN;EAEA,OAAO;IACHxD;EADG,CAAP;AAGH,CAzEM;;;;AAuFA,eAAekE,UAAf,CAA0B;EAC7BC,OAD6B;EAE7BnC,OAF6B;EAG7BE;AAH6B,CAA1B,EAIiD;EACpD,MAAMkC,GAAG,GAAGC,OAAO,CAACD,GAApB,CADoD,CAGpD;;EACA,MAAME,gBAAgB,GAAG5E,aAAA,CAAKC,IAAL,CAAUF,mBAAV,EAA+B0E,OAA/B,CAAzB;;EACA,IAAAI,sBAAA,EAAcD,gBAAd;;EAEA,MAAME,eAAe,GAAGlE,yBAAA,CAAQC,GAAR,CAAY2B,eAAZ,EAA8B,MAA9B,CAAxB;;EACA,MAAMuC,mBAAmB,GAAG/E,aAAA,CAAKC,IAAL,CAAU2E,gBAAV,EAA4B5E,aAAA,CAAKgF,QAAL,CAAcF,eAAd,CAA5B,CAA5B;;EAEAJ,GAAG,CAAE,+BAA8BI,eAAgB,QAAOC,mBAAoB,GAA3E,CAAH,CAVoD,CAWpD;;EACA,MAAM,IAAIT,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACnCC,kBAAA,CACKC,UADL,CACgBN,eADhB,EAEKO,EAFL,CAEQ,OAFR,EAEiBH,MAFjB,EAGKI,IAHL,CAGU,IAAAC,qBAAA,EAAkBR,mBAAlB,CAHV,EAIKM,EAJL,CAIQ,OAJR,EAIiBH,MAJjB,EAKKG,EALL,CAKQ,QALR,EAKkBJ,OALlB;EAMH,CAPK,CAAN,CAZoD,CAqBpD;;EACAP,GAAG,CAAE,aAAYI,eAAgB,EAA9B,CAAH;EACA,MAAM;IAAEU,IAAF;IAAQpB;EAAR,IAAkB,MAAM,IAAAqB,qBAAA,EAA2BV,mBAA3B,CAA9B,CAvBoD,CAyBpD;;EACA,IAAIX,KAAK,IAAI3C,KAAK,CAACC,OAAN,CAAc0C,KAAd,CAAT,IAAiCA,KAAK,CAACxC,MAAN,GAAe,CAApD,EAAuD;IACnD;IACA,MAAM;MAAEtB;IAAF,IAAqB,MAAM+B,YAAY,CAAC;MAC1CC,OAD0C;;MAE1C;AACZ;AACA;MACY;MACAC,SAAS,EAAE6B,KAN+B;MAO1C5B;IAP0C,CAAD,CAA7C;IAUA,MAAMnC,QAAQ,GAAG,MAAMiC,OAAO,CAAC6B,WAAR,CAAoB9D,QAApB,CAA6BqF,WAA7B,EAAvB;IAEA,MAAM;MAAEnF,SAAS,GAAG;IAAd,IAAqBF,QAAQ,IAAI,EAAvC;IACAkB,iBAAiB,CAAC;MACdC,IAAI,EAAEgE,IAAI,CAACG,OAAL,IAAgB,EADR;MAEdrF,cAFc;MAGdC;IAHc,CAAD,CAAjB;IAMAiF,IAAI,CAACnF,QAAL,GAAgBF,yBAAyB,CAAC;MACtCE,QAAQ,EAAEmF,IAAI,CAACnF,QAAL,IAAiB,EADW;MAEtCC,cAFsC;MAGtCC;IAHsC,CAAD,CAAzC;EAKH;;EAEDmE,GAAG,CAAC,gCAAD,CAAH;EACA,MAAM,IAAAkB,gCAAA,EAAWnB,OAAX,CAAN;EAEAC,GAAG,CAAE,iCAAF,CAAH;EACA,MAAMmB,cAAc,CAAC7F,aAAA,CAAK8F,OAAL,CAAatD,eAAe,CAAChB,IAA7B,CAAD,CAApB;EAEA,OAAOgE,IAAP;AACH;;AASM,eAAeO,WAAf,CAA2B;EAC9BC,QAD8B;EAE9B1D,OAF8B;EAG9BE;AAH8B,CAA3B,EAIoD;EACvD,MAAMkC,GAAG,GAAGC,OAAO,CAACD,GAApB,CADuD,CAGvD;;EACA,MAAMuB,iBAAiB,GAAGjG,aAAA,CAAKC,IAAL,CAAUF,mBAAV,EAA+BiG,QAA/B,CAA1B;;EACA,IAAAnB,sBAAA,EAAcoB,iBAAd;;EAEA,MAAMC,gBAAgB,GAAGtF,yBAAA,CAAQC,GAAR,CAAY2B,eAAZ,EAA8B,MAA9B,CAAzB;;EACA,MAAM2D,oBAAoB,GAAGnG,aAAA,CAAKC,IAAL,CAAUgG,iBAAV,EAA6BjG,aAAA,CAAKgF,QAAL,CAAckB,gBAAd,CAA7B,CAA7B;;EAEAxB,GAAG,CAAE,gCAA+BwB,gBAAiB,QAAOC,oBAAqB,GAA9E,CAAH,CAVuD,CAWvD;;EACA,MAAM,IAAI7B,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACnCC,kBAAA,CACKC,UADL,CACgBc,gBADhB,EAEKb,EAFL,CAEQ,OAFR,EAEiBH,MAFjB,EAGKI,IAHL,CAGU,IAAAC,qBAAA,EAAkBY,oBAAlB,CAHV,EAIKd,EAJL,CAIQ,OAJR,EAIiBH,MAJjB,EAKKG,EALL,CAKQ,QALR,EAKkBJ,OALlB;EAMH,CAPK,CAAN,CAZuD,CAqBvD;;EACAP,GAAG,CAAE,aAAYwB,gBAAiB,EAA/B,CAAH;EACA,MAAM;IAAEE,KAAF;IAAShC;EAAT,IAAmB,MAAM,IAAAqB,qBAAA,EAA4BU,oBAA5B,CAA/B,CAvBuD,CAyBvD;;EACA,IAAI/B,KAAK,IAAI3C,KAAK,CAACC,OAAN,CAAc0C,KAAd,CAAT,IAAiCA,KAAK,CAACxC,MAAN,GAAe,CAApD,EAAuD;IACnD;IACA,MAAM;MAAEtB;IAAF,IAAqB,MAAM+B,YA
AY,CAAC;MAC1CC,OAD0C;MAE1CC,SAAS,EAAE6B,KAF+B;MAG1C5B;IAH0C,CAAD,CAA7C;IAMA,MAAMnC,QAAQ,GAAG,MAAMiC,OAAO,CAAC6B,WAAR,CAAoB9D,QAApB,CAA6BqF,WAA7B,EAAvB;IAEA,MAAM;MAAEnF,SAAS,GAAG;IAAd,IAAqBF,QAAQ,IAAI,EAAvC;IACAkB,iBAAiB,CAAC;MACdC,IAAI,EAAE4E,KAAK,CAACT,OAAN,IAAiB,EADT;MAEdrF,cAFc;MAGdC;IAHc,CAAD,CAAjB;IAMA6F,KAAK,CAACC,OAAN,GAAgBlF,uBAAuB,CAAC;MACpCC,IAAI,EAAEgF,KAAK,CAACC,OAAN,IAAiB,EADa;MAEpC/F,cAFoC;MAGpCC;IAHoC,CAAD,CAAvC;EAKH;;EAEDmE,GAAG,CAAC,iCAAD,CAAH;EACA,MAAM,IAAAkB,gCAAA,EAAWI,QAAX,CAAN;EAEAtB,GAAG,CAAE,kCAAF,CAAH;EACA,MAAMmB,cAAc,CAAC7F,aAAA,CAAK8F,OAAL,CAAatD,eAAe,CAAChB,IAA7B,CAAD,CAApB;EAEA,OAAO4E,KAAP;AACH;;AAOD,eAAepD,iBAAf,CAAiC;EAC7BH,gBAD6B;EAE7BI;AAF6B,CAAjC,EAGmE;EAC/D,MAAMqD,gBAAgB,GAAGvE,MAAM,CAACwE,IAAP,CAAYtD,iBAAZ,CAAzB;EAEA,MAAMuD,QAAQ,GAAG,EAAjB,CAH+D,CAI/D;;EACA,KAAK,IAAI7E,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG2E,gBAAgB,CAAC1E,MAArC,EAA6CD,CAAC,EAA9C,EAAkD;IAC9C,MAAM8E,MAAM,GAAGH,gBAAgB,CAAC3E,CAAD,CAA/B;IACA,MAAM+E,UAAU,GAAGzD,iBAAiB,CAACwD,MAAD,CAApC,CAF8C,CAI9C;;IACA,MAAMrB,UAAU,GAAGD,kBAAA,CAASC,UAAT,CAAoBsB,UAApB,CAAnB,CAL8C,CAM9C;;;IACA,MAAMC,YAAY,GAAG9D,gBAAgB,CAAChC,GAAjB,CAAqB4F,MAArB,CAArB;;IAEA,IAAIE,YAAJ,EAAkB;MACd,MAAMrD,MAAM,GAAG,IAAAsD,eAAA,EAAS,EAAT,EAAc,IAAGD,YAAY,CAAC1E,GAAI,EAAlC,CAAf;;MACA,MAAM;QAAE4E,iBAAF;QAAqBC,8BAA8B,EAAEC;MAArD,IACF5B,kBAAA,CAAS6B,WAAT,CAAqB1D,MAArB,EAA6BqD,YAAY,CAAC7D,IAA1C,CADJ;;MAEAsC,UAAU,CAACE,IAAX,CAAgBuB,iBAAhB;MACAL,QAAQ,CAACtC,IAAT,CAAc6C,OAAd;MAEApC,OAAO,CAACD,GAAR,CAAa,6BAA4BpB,MAAO,GAAhD;IACH;EACJ;;EAED,OAAOgB,OAAO,CAACC,GAAR,CAAYiC,QAAZ,CAAP;AACH;;AAED,SAAShD,aAAT,CAAuBvB,GAAvB,EAAoC;EAChC;AACJ;AACA;EACI,IAAI;IACA,MAAM,GAAG,GAAGgF,IAAN,IAAchF,GAAG,CAACiF,KAAJ,CAAU,GAAV,CAApB;IACA,OAAOD,IAAI,CAAChH,IAAL,CAAU,GAAV,CAAP;EACH,CAHD,CAGE,OAAOkH,CAAP,EAAU;IACR,OAAOlF,GAAP;EACH;AACJ;;AAED,MAAMmF,iBAAiB,GAAG,0BAA1B;;AAEA,SAASC,qBAAT,CAA+BC,QAA/B,EAAyD;EACrD,OAAOtH,aAAA,CAAKgF,QAAL,CAAcsC,QAAd,EAAwBC,OAAxB,CAAgCvH,aAAA,CAAKwH,OAAL,CAAaF,QAAb,CAAhC,EAAwD,EAAxD,CAAP;AACH;;AAQD;AACA;AACA;AACA;AACA;AACO,eAAeG,mCAAf,CACHC,UADG,EAEkB;EACrB,MAAMhD,GAAG,GAAGC,OAAO,CAACD,GAApB;EACA,MAAMiD,cAAc,GAAG,EAAvB;;EAEA,MAAMC,WAAW,GAAG5H,aAAA,CAAKgF,QAAL,CAAc0C,UAAd,EAA0BR,KAA1B,CAAgC,GAAhC,EAAqC,CAArC,CAApB;;EAEA,MAAMW,QAAQ,GAAG,MAAM,IAAAC,kBAAA,EAAMJ,UAAN,CAAvB;;EACA,IAAI,CAACG,QAAQ,CAACE,EAAd,EAAkB;IACd,MAAM,IAAIC,cAAJ,CAAiB,gCAA+BN,UAAW,GAA3D,EAA+DG,QAAQ,CAACI,UAAxE,CAAN;EACH;;EAED,MAAM7C,UAAU,GAAGyC,QAAQ,CAACK,IAA5B;EAEA,MAAMC,UAAU,GAAG,IAAAvB,eAAA,EAAS,UAAT,CAAnB,CAbqB,CAcrB;;EACA,MAAMwB,aAAa,GAAGpI,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB8H,WAAvB,CAAtB;;EAEA,MAAMZ,WAAW,GAAG,IAAAzB,qBAAA,EAAkB6C,aAAlB,CAApB;EACA,MAAMzI,cAAc,CAACyF,UAAD,EAAa4B,WAAb,CAApB;EACAtC,GAAG,CAAE,oBAAmBkD,WAAY,QAAOQ,aAAc,EAAtD,CAAH,CAnBqB,CAqBrB;;EACA,MAAMC,YAAY,GAAG,MAAMC,gBAAgB,CAACF,aAAD,CAA3C;EAEA1D,GAAG,CAAE,sBAAqBgD,UAAW,UAASU,aAAc,EAAzD,CAAH;EACA,MAAM,IAAAxC,gCAAA,EAAWwC,aAAX,CAAN,CAzBqB,CA2BrB;;EACA,KAAK,IAAIzG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG0G,YAAY,CAACzG,MAAjC,EAAyCD,CAAC,EAA1C,EAA8C;IAC1C,MAAM4G,WAAW,GAAGF,YAAY,CAAC1G,CAAD,CAAhC;IACA,MAAM6G,OAAO,GAAG,MAAMC,uBAAuB,CAACF,WAAD,EAAcJ,UAAd,CAA7C;IACAR,cAAc,CAACzD,IAAf,CAAoBsE,OAApB;EACH;;EACD9D,GAAG,CAAC,oCAAD,EAAuC1E,aAAA,CAAK8F,OAAL,CAAauC,YAAY,CAAC,CAAD,CAAzB,CAAvC,CAAH;EACA,MAAM,IAAAzC,gCAAA,EAAW5F,aAAA,CAAK8F,OAAL,CAAauC,YAAY,CAAC,CAAD,CAAzB,CAAX,CAAN;EAEA,OAAOV,cAAP;AACH;;AAED,MAAMe,eAAe,GAAG,SAAxB;;AAEA,SAASC,iBAAT,CAA2B;EACvBvF,GADuB;EAEvBwF,QAFuB;EAGvBtF;AAHuB,CAA3B,EAQe;EACX,MAAMwC,OAAO,GAAG9F,aAAA,CAAK8F,OAAL,CAAa8C,QAAb,CAAhB;;EACA,MAAMtB,QAAQ,GAAGtH,aAAA,CAAKgF,QAAL,CAAc4D,QAAd,CAAjB;EACA;AACJ;AACA;A
ACA;;;EACI,MAAMnC,MAAM,GAAGa,QAAQ,CAACC,OAAT,CAAiB,KAAjB,EAAwB,KAAxB,CAAf;EAEA,MAAMsB,OAAO,GAAG/C,OAAO,CAACpF,QAAR,CAAiBgI,eAAjB,CAAhB;;EAEA,IAAIG,OAAJ,EAAa;IACTzF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,UAASqD,MAAO,EAAlC,EAAqCnD,MAArC,CAAN;EACH,CAFD,MAEO;IACH;IACAF,GAAG,GAAGxC,yBAAA,CAAQE,GAAR,CAAYsC,GAAZ,EAAkB,MAAlB,EAAyBE,MAAzB,CAAN;EACH;;EAED,OAAOF,GAAP;AACH;;AAED,eAAeyC,cAAf,CAA8B5D,GAA9B,EAA0D;EACtD;EACA,IAAI,CAACA,GAAG,CAACvB,QAAJ,CAAa,GAAb,CAAL,EAAwB;IACpBuB,GAAG,GAAI,GAAEA,GAAI,GAAb;EACH;;EAED,MAAM4F,QAAQ,GAAG,MAAM1C,kBAAA,CAAS2D,UAAT,CAAoB7G,GAApB,CAAvB;EACA,MAAMsE,IAAI,GAAG,CAACsB,QAAQ,CAACkB,QAAT,IAAqB,EAAtB,EAA0B3F,GAA1B,CAA8B4F,CAAC,IAAIA,CAAC,CAACzF,GAArC,EAA0CK,MAA1C,CAAiDC,OAAjD,CAAb;EACAc,OAAO,CAACD,GAAR,CAAa,SAAQ6B,IAAI,CAAC3E,MAAO,SAAjC;EAEA,MAAMqH,kBAAkB,GAAG1C,IAAI,CAACnD,GAAL,CAASnB,GAAG,IAAIkD,kBAAA,CAAS+D,YAAT,CAAsBjH,GAAtB,CAAhB,CAA3B;EAEA,MAAMqC,OAAO,CAACC,GAAR,CAAY0E,kBAAZ,CAAN;EACAtE,OAAO,CAACD,GAAR,CAAa,wBAAuBuE,kBAAkB,CAACrH,MAAO,SAA9D;AACH,C,CAED;;;AAEO,SAASuH,YAAT,CAAsBC,KAAtB,EAAqC;EACxC,OAAO;IACH,CAACC,6BAAA,CAAuBC,OAAxB,GAAkCF,KAD/B;IAEH,CAACC,6BAAA,CAAuBE,UAAxB,GAAqC,CAFlC;IAGH,CAACF,6BAAA,CAAuBG,SAAxB,GAAoC,CAHjC;IAIH,CAACH,6BAAA,CAAuBI,MAAxB,GAAiC,CAJ9B;IAKHL;EALG,CAAP;AAOH;;AAED,SAASd,gBAAT,CAA0BoB,iBAA1B,EAAwE;EACpE,OAAO,IAAIpF,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAMmD,YAAsB,GAAG,EAA/B;IACA,MAAMsB,yBAAyB,GAAGtC,qBAAqB,CAACqC,iBAAD,CAAvD;;IACA,MAAME,2BAA2B,GAAG5J,aAAA,CAAKC,IAAL,CAAUH,WAAV,EAAuB6J,yBAAvB,CAApC,CAHoC,CAIpC;;;IACA,IAAA9E,sBAAA,EAAc+E,2BAAd;;IAEAC,cAAA,CAAMC,IAAN,CAAWJ,iBAAX,EAA8B;MAAEK,WAAW,EAAE;IAAf,CAA9B,EAAqD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACzE,IAAID,GAAJ,EAAS;QACLrF,OAAO,CAACuF,IAAR,CAAa,gCAAb,EAA+CR,iBAA/C,EAAkEM,GAAlE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,gDAAgDgF,iBAA5D;QACAxE,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MAEDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MAEAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,uCAAb,EAAsDR,iBAAtD,EAAyEM,GAAzE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QACD/E,OAAO,CAACoD,YAAD,CAAP;MACH,CAND;MAQA4B,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4CADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CACI,8DADJ;cAGAQ,MAAM,CAAC,+BAAD,CAAN;cACA;YACH;;YAED,MAAM0D,QAAQ,GAAG5I,aAAA,CAAKC,IAAL,CAAU2J,2BAAV,EAAuCU,KAAK,CAAChD,QAA7C,CAAjB;;YAEAlC,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BgD,YAAY,CAACnE,IAAb,CAAkB0E,QAAlB;cACAqB,OAAO,CAACI,SAAR;YACH,CAHD;YAKA1K,cAAc,CAACyF,UAAD,EAAa,IAAAG,qBAAA,EAAkBqD,QAAlB,CAAb,CAAd,CAAwD6B,KAAxD,CAA8DC,KAAK,IAAI;cACnExF,MAAM,CAACwF,KAAD,CAAN;YACH,CAFD;UAGH,CA5BD;QA6BH;MACJ,CAvCD;IAwCH,CAhED;EAiEH,CAxEM,CAAP;AAyEH;;AAED,SAASjC,uBAAT,CAAiCkC,eAAjC,EAA0DxC,UAA1D,EAAmG;EAC/F,OAAO,IAAI7D,OAAJ,CAAY,CAACW,OAAD,EAAUC,MAAV,KAAqB;IACpC,MAAM0F,SAAS,GAAG,EAAlB;IACA,MAAMC,kBAAwD,GAAG,EAAjE;IACA,MAAMC,SAAS,GAAGzD,qBAAqB,CAACsD,eAAD,CAAvC;IACA,IAAInC,OAAmB,GAAG;MACtBvG,GAAG,EAAE6I,SADiB;MAEtB5H,MAAM,EAAE,EAFc;MAGtB1B,IAAI,EAAE;IAHgB,CAA1B;;IAKAqI,cAAA,CAAMC,IAAN,CAAWa,eAAX,EAA4B;MAAEZ,WAAW,EAAE;IAAf,CAA5B,EAAmD,UAAUC,GAAV,EAAeC,OAAf,EAAwB;MACvE,IAAID,GAAJ,EAAS;QACLrF,OAAO,
CAACuF,IAAR,CAAa,gCAAb,EAA+CS,eAA/C,EAAgEX,GAAhE;QACA9E,MAAM,CAAC8E,GAAD,CAAN;QACA;MACH;;MACD,IAAI,CAACC,OAAL,EAAc;QACVtF,OAAO,CAACD,GAAR,CAAY,4CAA4CiG,eAAxD;QACAzF,MAAM,CAAC,4BAAD,CAAN;QACA;MACH;;MACDP,OAAO,CAACwF,IAAR,CAAc,yBAAwBF,OAAO,CAACG,UAAW,WAAzD;MACAH,OAAO,CAAC5E,EAAR,CAAW,KAAX,EAAkB,UAAU2E,GAAV,EAAe;QAC7B,IAAIA,GAAJ,EAAS;UACLrF,OAAO,CAACuF,IAAR,CAAa,mCAAb,EAAkDS,eAAlD,EAAmEX,GAAnE;UACA9E,MAAM,CAAC8E,GAAD,CAAN;QACH;;QAED1F,OAAO,CAACC,GAAR,CAAYsG,kBAAZ,EAAgCE,IAAhC,CAAqCC,GAAG,IAAI;UACxCA,GAAG,CAACC,OAAJ,CAAYC,CAAC,IAAI;YACbvG,OAAO,CAACwF,IAAR,CAAa,oBAAb,EAAmCe,CAAnC;UACH,CAFD;UAGAjG,OAAO,CAACuD,OAAD,CAAP;QACH,CALD;MAMH,CAZD;MAcAyB,OAAO,CAACI,SAAR;MAEAJ,OAAO,CAAC5E,EAAR,CAAW,OAAX,EAAoB,UAAUiF,KAAV,EAAiB;QACjC3F,OAAO,CAACwF,IAAR,CAAc,sBAAqBG,KAAK,CAAChD,QAAS,GAAlD;;QACA,IAAI,MAAMiD,IAAN,CAAWD,KAAK,CAAChD,QAAjB,CAAJ,EAAgC;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAR;QACH,CALD,MAKO;UACH;UACAJ,OAAO,CAACO,cAAR,CAAuBF,KAAvB,EAA8B,UAAUN,GAAV,EAAe5E,UAAf,EAA2B;YACrD,IAAI4E,GAAJ,EAAS;cACLrF,OAAO,CAACuF,IAAR,CACI,4DADJ,EAEII,KAAK,CAAChD,QAFV,EAGI0C,GAHJ;cAKA9E,MAAM,CAAC8E,GAAD,CAAN;cACA;YACH;;YACD,IAAI,CAAC5E,UAAL,EAAiB;cACbT,OAAO,CAACD,GAAR,CAAY,6CAAZ;cACAQ,MAAM,CAAC,8BAAD,CAAN;cACA;YACH;;YACDE,UAAU,CAACC,EAAX,CAAc,KAAd,EAAqB,YAAY;cAC7BuF,SAAS,CAAC1G,IAAV,CAAeoG,KAAK,CAAChD,QAArB;cACA2C,OAAO,CAACI,SAAR;YACH,CAHD;YAKA,MAAM/G,MAAM,GAAI,GAAE6E,UAAW,IAAG2C,SAAU,IAAGR,KAAK,CAAChD,QAAS,EAA5D,CApBqD,CAqBrD;;YACAkB,OAAO,GAAGG,iBAAiB,CAAC;cACxBvF,GAAG,EAAEoF,OADmB;cAExBI,QAAQ,EAAE0B,KAAK,CAAChD,QAFQ;cAGxBhE;YAHwB,CAAD,CAA3B;;YAMA,MAAM;cAAEuD,iBAAF;cAAqBC,8BAA8B,EAAEC;YAArD,IACF5B,kBAAA,CAAS6B,WAAT,CAAqB1D,MAArB,EAA6B8D,iBAA7B,CADJ;;YAGAzH,cAAc,CAACyF,UAAD,EAAayB,iBAAb,CAAd,CACKkE,IADL,CACU,MAAM;cACRF,kBAAkB,CAAC3G,IAAnB,CAAwB6C,OAAxB;YACH,CAHL,EAIK0D,KAJL,CAIWC,KAAK,IAAI;cACZxF,MAAM,CAACwF,KAAD,CAAN;YACH,CANL;UAOH,CAtCD;QAuCH;MACJ,CAjDD;IAkDH,CA9ED;EA+EH,CAxFM,CAAP;AAyFH"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@webiny/api-page-builder-import-export",
-  "version": "0.0.0-unstable.97a151f74d",
+  "version": "0.0.0-unstable.aad28a72ae",
   "main": "index.js",
   "keywords": [
     "pbie:base"
@@ -16,16 +16,16 @@
   "dependencies": {
     "@babel/runtime": "7.19.0",
     "@commodo/fields": "1.1.2-beta.20",
-    "@webiny/api": "0.0.0-unstable.
-    "@webiny/api-file-manager": "0.0.0-unstable.
-    "@webiny/api-page-builder": "0.0.0-unstable.
-    "@webiny/api-security": "0.0.0-unstable.
-    "@webiny/error": "0.0.0-unstable.
-    "@webiny/handler": "0.0.0-unstable.
-    "@webiny/handler-aws": "0.0.0-unstable.
-    "@webiny/handler-graphql": "0.0.0-unstable.
-    "@webiny/utils": "0.0.0-unstable.
-    "@webiny/validation": "0.0.0-unstable.
+    "@webiny/api": "0.0.0-unstable.aad28a72ae",
+    "@webiny/api-file-manager": "0.0.0-unstable.aad28a72ae",
+    "@webiny/api-page-builder": "0.0.0-unstable.aad28a72ae",
+    "@webiny/api-security": "0.0.0-unstable.aad28a72ae",
+    "@webiny/error": "0.0.0-unstable.aad28a72ae",
+    "@webiny/handler": "0.0.0-unstable.aad28a72ae",
+    "@webiny/handler-aws": "0.0.0-unstable.aad28a72ae",
+    "@webiny/handler-graphql": "0.0.0-unstable.aad28a72ae",
+    "@webiny/utils": "0.0.0-unstable.aad28a72ae",
+    "@webiny/validation": "0.0.0-unstable.aad28a72ae",
     "archiver": "5.3.1",
     "commodo-fields-object": "1.0.6",
     "dot-prop-immutable": "2.1.1",
@@ -47,16 +47,16 @@
     "@types/archiver": "^5.3.1",
     "@types/node-fetch": "^2.6.1",
     "@types/yauzl": "^2.9.2",
-    "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-unstable.
-    "@webiny/api-file-manager-ddb-es": "^0.0.0-unstable.
-    "@webiny/api-i18n-ddb": "^0.0.0-unstable.
-    "@webiny/api-security-so-ddb": "^0.0.0-unstable.
-    "@webiny/api-tenancy": "^0.0.0-unstable.
-    "@webiny/api-tenancy-so-ddb": "^0.0.0-unstable.
-    "@webiny/api-wcp": "^0.0.0-unstable.
-    "@webiny/cli": "^0.0.0-unstable.
-    "@webiny/db": "^0.0.0-unstable.
-    "@webiny/project-utils": "^0.0.0-unstable.
+    "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-file-manager-ddb-es": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-i18n-ddb": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-security-so-ddb": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-tenancy": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-tenancy-so-ddb": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/api-wcp": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/cli": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/db": "^0.0.0-unstable.aad28a72ae",
+    "@webiny/project-utils": "^0.0.0-unstable.aad28a72ae",
     "jest": "^28.1.0",
     "jest-dynalite": "^3.2.0",
     "rimraf": "^3.0.2",
@@ -78,5 +78,5 @@
       ]
     }
   },
-  "gitHead": "
+  "gitHead": "aad28a72ae72f19b80a3196d2b4439399acc67ad"
 }