@webiny/api-page-builder-import-export 0.0.0-unstable.990c3ab1b6 → 0.0.0-unstable.cc58a6566b

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. package/client.d.ts +2 -2
  2. package/client.js.map +1 -1
  3. package/export/combine/blocksHandler.d.ts +6 -0
  4. package/export/combine/blocksHandler.js +99 -0
  5. package/export/combine/blocksHandler.js.map +1 -0
  6. package/{exportPages → export}/combine/index.d.ts +3 -2
  7. package/export/combine/index.js +35 -0
  8. package/export/combine/index.js.map +1 -0
  9. package/export/combine/pagesHandler.d.ts +6 -0
  10. package/export/combine/pagesHandler.js +99 -0
  11. package/export/combine/pagesHandler.js.map +1 -0
  12. package/export/combine/templatesHandler.d.ts +6 -0
  13. package/export/combine/templatesHandler.js +99 -0
  14. package/export/combine/templatesHandler.js.map +1 -0
  15. package/export/process/blocksHandler.d.ts +6 -0
  16. package/export/process/blocksHandler.js +162 -0
  17. package/export/process/blocksHandler.js.map +1 -0
  18. package/{exportPages → export}/process/index.d.ts +4 -6
  19. package/export/process/index.js +32 -0
  20. package/export/process/index.js.map +1 -0
  21. package/export/process/pagesHandler.d.ts +6 -0
  22. package/export/process/pagesHandler.js +189 -0
  23. package/export/process/pagesHandler.js.map +1 -0
  24. package/export/process/templatesHandler.d.ts +6 -0
  25. package/export/process/templatesHandler.js +166 -0
  26. package/export/process/templatesHandler.js.map +1 -0
  27. package/{exportPages → export}/s3Stream.d.ts +0 -0
  28. package/{exportPages → export}/s3Stream.js +0 -0
  29. package/{exportPages → export}/s3Stream.js.map +0 -0
  30. package/export/utils.d.ts +22 -0
  31. package/export/utils.js +160 -0
  32. package/export/utils.js.map +1 -0
  33. package/{exportPages → export}/zipper.d.ts +6 -5
  34. package/{exportPages → export}/zipper.js +8 -7
  35. package/export/zipper.js.map +1 -0
  36. package/graphql/crud/blocks.crud.d.ts +4 -0
  37. package/graphql/crud/blocks.crud.js +137 -0
  38. package/graphql/crud/blocks.crud.js.map +1 -0
  39. package/graphql/crud/importExportTasks.crud.d.ts +5 -0
  40. package/graphql/crud/{pageImportExportTasks.crud.js → importExportTasks.crud.js} +48 -48
  41. package/graphql/crud/importExportTasks.crud.js.map +1 -0
  42. package/graphql/crud/pages.crud.d.ts +2 -2
  43. package/graphql/crud/pages.crud.js +18 -14
  44. package/graphql/crud/pages.crud.js.map +1 -1
  45. package/graphql/crud/templates.crud.d.ts +4 -0
  46. package/graphql/crud/templates.crud.js +124 -0
  47. package/graphql/crud/templates.crud.js.map +1 -0
  48. package/graphql/crud.d.ts +2 -2
  49. package/graphql/crud.js +4 -2
  50. package/graphql/crud.js.map +1 -1
  51. package/graphql/graphql/blocks.gql.d.ts +4 -0
  52. package/graphql/graphql/blocks.gql.js +52 -0
  53. package/graphql/graphql/blocks.gql.js.map +1 -0
  54. package/graphql/graphql/importExportTasks.gql.d.ts +4 -0
  55. package/graphql/graphql/{pageImportExportTasks.gql.js → importExportTasks.gql.js} +17 -17
  56. package/graphql/graphql/importExportTasks.gql.js.map +1 -0
  57. package/graphql/graphql/pages.gql.d.ts +2 -2
  58. package/graphql/graphql/pages.gql.js +3 -9
  59. package/graphql/graphql/pages.gql.js.map +1 -1
  60. package/graphql/graphql/templates.gql.d.ts +4 -0
  61. package/graphql/graphql/templates.gql.js +52 -0
  62. package/graphql/graphql/templates.gql.js.map +1 -0
  63. package/graphql/graphql/utils/resolve.d.ts +1 -1
  64. package/graphql/graphql.js +4 -2
  65. package/graphql/graphql.js.map +1 -1
  66. package/graphql/index.d.ts +2 -2
  67. package/graphql/index.js.map +1 -1
  68. package/graphql/types.d.ts +60 -23
  69. package/graphql/types.js.map +1 -1
  70. package/import/create/blocksHandler.d.ts +3 -0
  71. package/import/create/blocksHandler.js +100 -0
  72. package/import/create/blocksHandler.js.map +1 -0
  73. package/{importPages → import}/create/index.d.ts +7 -5
  74. package/import/create/index.js +35 -0
  75. package/import/create/index.js.map +1 -0
  76. package/import/create/pagesHandler.d.ts +3 -0
  77. package/import/create/pagesHandler.js +102 -0
  78. package/import/create/pagesHandler.js.map +1 -0
  79. package/import/create/templatesHandler.d.ts +3 -0
  80. package/import/create/templatesHandler.js +98 -0
  81. package/import/create/templatesHandler.js.map +1 -0
  82. package/import/process/blocksHandler.d.ts +3 -0
  83. package/import/process/blocksHandler.js +169 -0
  84. package/import/process/blocksHandler.js.map +1 -0
  85. package/{importPages → import}/process/index.d.ts +5 -3
  86. package/import/process/index.js +32 -0
  87. package/import/process/index.js.map +1 -0
  88. package/import/process/pagesHandler.d.ts +3 -0
  89. package/import/process/pagesHandler.js +177 -0
  90. package/import/process/pagesHandler.js.map +1 -0
  91. package/import/process/templatesHandler.d.ts +3 -0
  92. package/import/process/templatesHandler.js +169 -0
  93. package/import/process/templatesHandler.js.map +1 -0
  94. package/import/utils.d.ts +56 -0
  95. package/{importPages → import}/utils.js +154 -35
  96. package/import/utils.js.map +1 -0
  97. package/package.json +24 -24
  98. package/types.d.ts +62 -65
  99. package/types.js +17 -17
  100. package/types.js.map +1 -1
  101. package/exportPages/combine/index.js +0 -106
  102. package/exportPages/combine/index.js.map +0 -1
  103. package/exportPages/process/index.js +0 -192
  104. package/exportPages/process/index.js.map +0 -1
  105. package/exportPages/utils.d.ts +0 -13
  106. package/exportPages/utils.js +0 -100
  107. package/exportPages/utils.js.map +0 -1
  108. package/exportPages/zipper.js.map +0 -1
  109. package/graphql/crud/pageImportExportTasks.crud.d.ts +0 -5
  110. package/graphql/crud/pageImportExportTasks.crud.js.map +0 -1
  111. package/graphql/graphql/pageImportExportTasks.gql.d.ts +0 -4
  112. package/graphql/graphql/pageImportExportTasks.gql.js.map +0 -1
  113. package/importPages/create/index.js +0 -107
  114. package/importPages/create/index.js.map +0 -1
  115. package/importPages/process/index.js +0 -180
  116. package/importPages/process/index.js.map +0 -1
  117. package/importPages/utils.d.ts +0 -50
  118. package/importPages/utils.js.map +0 -1
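The bulk of this release generalizes the page-only import/export pipeline: the exportPages/importPages modules move to export/import, uploadPageAssets becomes uploadAssets, and block and template handlers are added alongside the page ones. For orientation, below is a minimal consumer-side sketch of the renamed helpers from import/utils; the deep import path, the context wiring, and the single-block flow are illustrative assumptions and are not part of this diff.

// Illustrative sketch only — module specifier and `context` are assumed, not taken from this diff.
import {
    importBlock, // new in this version; importPage and importTemplate follow the same shape
    initialStats,
    readExtractAndUploadZipFileContents
} from "@webiny/api-page-builder-import-export/import/utils";

declare const context: any; // PbImportExportContext in the package's own typings

async function importSingleBlock(zipFileUrl: string) {
    // One ImportData entry ({ key, assets, data }) per exported page/block/template in the ZIP.
    const [fileUploadsData] = await readExtractAndUploadZipFileContents(zipFileUrl);

    // Task stats are now keyed by ImportExportTaskStatus (previously PageImportExportTaskStatus).
    const stats = initialStats(1);

    // importPage/importTemplate take pageKey/templateKey instead of blockKey.
    const block = await importBlock({
        key: fileUploadsData.key,
        blockKey: fileUploadsData.key,
        context,
        fileUploadsData
    });

    return { block, stats };
}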
@@ -4,10 +4,12 @@ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefau
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
+ exports.importBlock = importBlock;
  exports.importPage = importPage;
+ exports.importTemplate = importTemplate;
  exports.initialStats = initialStats;
  exports.readExtractAndUploadZipFileContents = readExtractAndUploadZipFileContents;
- exports.uploadPageAssets = void 0;
+ exports.uploadAssets = void 0;
  var _uniqid = _interopRequireDefault(require("uniqid"));
  var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
  var _fs = require("fs");
@@ -22,10 +24,10 @@ var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
  var _error = _interopRequireDefault(require("@webiny/error"));
  var _downloadInstallFiles = require("@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles");
  var _types = require("../types");
- var _s3Stream = require("../exportPages/s3Stream");
+ var _s3Stream = require("../export/s3Stream");
  const streamPipeline = (0, _util.promisify)(_stream.pipeline);
  const INSTALL_DIR = "/tmp";
- const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImportPage");
+ const INSTALL_EXTRACT_DIR = _path.default.join(INSTALL_DIR, "apiPageBuilderImport");
  const FILES_COUNT_IN_EACH_BATCH = 15;
  function updateImageInPageSettings(params) {
  const {
@@ -45,7 +47,18 @@ function updateImageInPageSettings(params) {
  }
  return newSettings;
  }
- function updateFilesInPageData({
+ function updateBlockPreviewImage(params) {
+ const {
+ file,
+ fileIdToKeyMap,
+ srcPrefix
+ } = params;
+ const newFile = file;
+ const srcPrefixWithoutTrailingSlash = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
+ newFile.src = `${srcPrefixWithoutTrailingSlash}/${fileIdToKeyMap.get(file.id || "")}`;
+ return newFile;
+ }
+ function updateFilesInData({
  data,
  fileIdToKeyMap,
  srcPrefix
@@ -58,7 +71,7 @@ function updateFilesInPageData({
  if (Array.isArray(data)) {
  for (let i = 0; i < data.length; i++) {
  const element = data[i];
- updateFilesInPageData({
+ updateFilesInData({
  data: element,
  fileIdToKeyMap,
  srcPrefix
@@ -75,7 +88,7 @@ function updateFilesInPageData({
  value.name = fileIdToKeyMap.get(value.id);
  value.src = `${srcPrefix}${srcPrefix.endsWith("/") ? "" : "/"}${fileIdToKeyMap.get(value.id)}`;
  } else {
- updateFilesInPageData({
+ updateFilesInData({
  data: value,
  srcPrefix,
  fileIdToKeyMap
@@ -83,7 +96,7 @@ function updateFilesInPageData({
  }
  }
  }
- const uploadPageAssets = async params => {
+ const uploadAssets = async params => {
  const {
  context,
  filesData,
@@ -101,7 +114,6 @@ const uploadPageAssets = async params => {
  fileIdToKeyMap
  };
  }
- console.log("INSIDE uploadPageAssets");

  // Save files meta data against old key for later use.
  const fileKeyToFileMap = new Map();
@@ -154,7 +166,7 @@ const uploadPageAssets = async params => {
  fileIdToKeyMap
  };
  };
- exports.uploadPageAssets = uploadPageAssets;
+ exports.uploadAssets = uploadAssets;
  async function importPage({
  pageKey,
  context,
@@ -185,7 +197,7 @@ async function importPage({
  // Upload page assets.
  const {
  fileIdToKeyMap
- } = await uploadPageAssets({
+ } = await uploadAssets({
  context,
  /**
  * TODO @ts-refactor @ashutosh figure out correct types.
@@ -198,7 +210,7 @@ async function importPage({
  const {
  srcPrefix = ""
  } = settings || {};
- updateFilesInPageData({
+ updateFilesInData({
  data: page.content || {},
  fileIdToKeyMap,
  srcPrefix
@@ -215,6 +227,113 @@ async function importPage({
  await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
  return page;
  }
+ async function importBlock({
+ blockKey,
+ context,
+ fileUploadsData
+ }) {
+ const log = console.log;
+
+ // Making Directory for block in which we're going to extract the block data file.
+ const BLOCK_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, blockKey);
+ (0, _fsExtra.ensureDirSync)(BLOCK_EXTRACT_DIR);
+ const blockDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+ const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
+ log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`);
+ // Download and save block data file in disk.
+ await new Promise((resolve, reject) => {
+ _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ });
+
+ // Load the block data file from disk.
+ log(`Load file ${blockDataFileKey}`);
+ const {
+ block,
+ files
+ } = await (0, _loadJsonFile.default)(BLOCK_DATA_FILE_PATH);
+
+ // Only update block data if there are files.
+ if (files && Array.isArray(files) && files.length > 0) {
+ // Upload block assets.
+ const {
+ fileIdToKeyMap
+ } = await uploadAssets({
+ context,
+ filesData: files,
+ fileUploadsData
+ });
+ const settings = await context.fileManager.settings.getSettings();
+ const {
+ srcPrefix = ""
+ } = settings || {};
+ updateFilesInData({
+ data: block.content || {},
+ fileIdToKeyMap,
+ srcPrefix
+ });
+ block.preview = updateBlockPreviewImage({
+ file: block.preview || {},
+ fileIdToKeyMap,
+ srcPrefix
+ });
+ }
+ log("Removing Directory for block...");
+ await (0, _downloadInstallFiles.deleteFile)(blockKey);
+ log(`Remove block contents from S3...`);
+ await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+ return block;
+ }
+ async function importTemplate({
+ templateKey,
+ context,
+ fileUploadsData
+ }) {
+ const log = console.log;
+
+ // Making Directory for template in which we're going to extract the template data file.
+ const TEMPLATE_EXTRACT_DIR = _path.default.join(INSTALL_EXTRACT_DIR, templateKey);
+ (0, _fsExtra.ensureDirSync)(TEMPLATE_EXTRACT_DIR);
+ const templateDataFileKey = _dotPropImmutable.default.get(fileUploadsData, `data`);
+ const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
+ log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
+ // Download and save template data file in disk.
+ await new Promise((resolve, reject) => {
+ _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fs.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ });
+
+ // Load the template data file from disk.
+ log(`Load file ${templateDataFileKey}`);
+ const {
+ template,
+ files
+ } = await (0, _loadJsonFile.default)(TEMPLATE_DATA_FILE_PATH);
+
+ // Only update template data if there are files.
+ if (files && Array.isArray(files) && files.length > 0) {
+ // Upload template assets.
+ const {
+ fileIdToKeyMap
+ } = await uploadAssets({
+ context,
+ filesData: files,
+ fileUploadsData
+ });
+ const settings = await context.fileManager.settings.getSettings();
+ const {
+ srcPrefix = ""
+ } = settings || {};
+ updateFilesInData({
+ data: template.content || {},
+ fileIdToKeyMap,
+ srcPrefix
+ });
+ }
+ log("Removing Directory for template...");
+ await (0, _downloadInstallFiles.deleteFile)(templateKey);
+ log(`Remove template contents from S3...`);
+ await deleteS3Folder(_path.default.dirname(fileUploadsData.data));
+ return template;
+ }
  async function uploadFilesFromS3({
  fileKeyToFileMap,
  oldKeyToNewKeyMap
@@ -261,18 +380,18 @@ function getFileNameWithoutExt(fileName) {
  /**
  * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.
  * @param zipFileUrl
- * @return PageImportData S3 file keys for all uploaded assets group by page.
+ * @return ImportData S3 file keys for all uploaded assets group by page/block.
  */
  async function readExtractAndUploadZipFileContents(zipFileUrl) {
  const log = console.log;
- const pageImportDataList = [];
+ const importDataList = [];
  const zipFileName = _path.default.basename(zipFileUrl).split("?")[0];
  const response = await (0, _nodeFetch.default)(zipFileUrl);
  if (!response.ok) {
  throw new _error.default(`Unable to downloading file: "${zipFileUrl}"`, response.statusText);
  }
  const readStream = response.body;
- const uniquePath = (0, _uniqid.default)("IMPORT_PAGES/");
+ const uniquePath = (0, _uniqid.default)("IMPORTS/");
  // Read export file and download it in the disk
  const ZIP_FILE_PATH = _path.default.join(INSTALL_DIR, zipFileName);
  const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
@@ -284,18 +403,18 @@ async function readExtractAndUploadZipFileContents(zipFileUrl) {
  log(`Removing ZIP file "${zipFileUrl}" from ${ZIP_FILE_PATH}`);
  await (0, _downloadInstallFiles.deleteFile)(ZIP_FILE_PATH);

- // Extract each page zip and upload their content's to S3
+ // Extract each page/block zip and upload their content's to S3
  for (let i = 0; i < zipFilePaths.length; i++) {
  const currentPath = zipFilePaths[i];
  const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);
- pageImportDataList.push(dataMap);
+ importDataList.push(dataMap);
  }
  log("Removing all ZIP files located at ", _path.default.dirname(zipFilePaths[0]));
  await (0, _downloadInstallFiles.deleteFile)(_path.default.dirname(zipFilePaths[0]));
- return pageImportDataList;
+ return importDataList;
  }
  const ASSETS_DIR_NAME = "/assets";
- function preparePageDataDirMap({
+ function prepareDataDirMap({
  map,
  filePath,
  newKey
@@ -333,16 +452,16 @@ async function deleteS3Folder(key) {

  function initialStats(total) {
  return {
- [_types.PageImportExportTaskStatus.PENDING]: total,
- [_types.PageImportExportTaskStatus.PROCESSING]: 0,
- [_types.PageImportExportTaskStatus.COMPLETED]: 0,
- [_types.PageImportExportTaskStatus.FAILED]: 0,
+ [_types.ImportExportTaskStatus.PENDING]: total,
+ [_types.ImportExportTaskStatus.PROCESSING]: 0,
+ [_types.ImportExportTaskStatus.COMPLETED]: 0,
+ [_types.ImportExportTaskStatus.FAILED]: 0,
  total
  };
  }
  function extractZipToDisk(exportFileZipPath) {
  return new Promise((resolve, reject) => {
- const pageZipFilePaths = [];
+ const zipFilePaths = [];
  const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);
  const EXPORT_FILE_EXTRACTION_PATH = _path.default.join(INSTALL_DIR, uniqueFolderNameForExport);
  // Make sure DIR exists
@@ -366,7 +485,7 @@ function extractZipToDisk(exportFileZipPath) {
  console.warn("ERROR: Failed on END event for file: ", exportFileZipPath, err);
  reject(err);
  }
- resolve(pageZipFilePaths);
+ resolve(zipFilePaths);
  });
  zipFile.readEntry();
  zipFile.on("entry", function (entry) {
@@ -391,7 +510,7 @@ function extractZipToDisk(exportFileZipPath) {
  }
  const filePath = _path.default.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);
  readStream.on("end", function () {
- pageZipFilePaths.push(filePath);
+ zipFilePaths.push(filePath);
  zipFile.readEntry();
  });
  streamPipeline(readStream, (0, _fs.createWriteStream)(filePath)).catch(error => {
@@ -403,33 +522,33 @@ function extractZipToDisk(exportFileZipPath) {
  });
  });
  }
- function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
+ function extractZipAndUploadToS3(dataZipFilePath, uniquePath) {
  return new Promise((resolve, reject) => {
  const filePaths = [];
  const fileUploadPromises = [];
- const uniquePageKey = getFileNameWithoutExt(pageDataZipFilePath);
+ const uniqueKey = getFileNameWithoutExt(dataZipFilePath);
  let dataMap = {
- key: uniquePageKey,
+ key: uniqueKey,
  assets: {},
  data: ""
  };
- _yauzl.default.open(pageDataZipFilePath, {
+ _yauzl.default.open(dataZipFilePath, {
  lazyEntries: true
  }, function (err, zipFile) {
  if (err) {
- console.warn("ERROR: Failed to extract zip: ", pageDataZipFilePath, err);
+ console.warn("ERROR: Failed to extract zip: ", dataZipFilePath, err);
  reject(err);
  return;
  }
  if (!zipFile) {
- console.log("ERROR: Probably failed to extract zip: " + pageDataZipFilePath);
+ console.log("ERROR: Probably failed to extract zip: " + dataZipFilePath);
  reject("Missing Zip File Resource.");
  return;
  }
  console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);
  zipFile.on("end", function (err) {
  if (err) {
- console.warn('ERROR: Failed on "END" for file: ', pageDataZipFilePath, err);
+ console.warn('ERROR: Failed on "END" for file: ', dataZipFilePath, err);
  reject(err);
  }
  Promise.all(fileUploadPromises).then(res => {
@@ -456,7 +575,7 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
  return;
  }
  if (!readStream) {
- console.log("ERROR: Missing Read Stream while importing pages.");
+ console.log("ERROR: Missing Read Stream while importing.");
  reject("Missing Read Strea Resource.");
  return;
  }
@@ -464,9 +583,9 @@ function extractZipAndUploadToS3(pageDataZipFilePath, uniquePath) {
  filePaths.push(entry.fileName);
  zipFile.readEntry();
  });
- const newKey = `${uniquePath}/${uniquePageKey}/${entry.fileName}`;
+ const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;
  // Modify in place
- dataMap = preparePageDataDirMap({
+ dataMap = prepareDataDirMap({
  map: dataMap,
  filePath: entry.fileName,
  newKey
@@ -0,0 +1 @@
+ {"version":3,"names":[…],"sources":["utils.ts"],"sourcesContent":[…],"mappings":"…"} (single-line minified source map for import/utils.js; truncated in this view)
AK,CAAChD,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACiD,IAAI,CAACD,KAAK,CAAChD,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACA2C,OAAO,CAACI,SAAS,EAAE;QACvB,CAAC,MAAM;UACH;UACAJ,OAAO,CAACO,cAAc,CAACF,KAAK,EAAE,UAAUN,GAAG,EAAElF,UAAU,EAAE;YACrD,IAAIkF,GAAG,EAAE;cACL3F,OAAO,CAAC6F,IAAI,CACR,4DAA4D,EAC5DI,KAAK,CAAChD,QAAQ,EACd0C,GAAG,CACN;cACDpF,MAAM,CAACoF,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAAClF,UAAU,EAAE;cACbT,OAAO,CAACD,GAAG,CAAC,6CAA6C,CAAC;cAC1DQ,MAAM,CAAC,8BAA8B,CAAC;cACtC;YACJ;YACAE,UAAU,CAACC,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7B6F,SAAS,CAAChH,IAAI,CAAC0G,KAAK,CAAChD,QAAQ,CAAC;cAC9B2C,OAAO,CAACI,SAAS,EAAE;YACvB,CAAC,CAAC;YAEF,MAAMrH,MAAM,GAAI,GAAEmF,UAAW,IAAG2C,SAAU,IAAGR,KAAK,CAAChD,QAAS,EAAC;YAC7D;YACAkB,OAAO,GAAGG,iBAAiB,CAAC;cACxB7F,GAAG,EAAE0F,OAAO;cACZI,QAAQ,EAAE0B,KAAK,CAAChD,QAAQ;cACxBtE;YACJ,CAAC,CAAC;YAEF,MAAM;cAAE6D,iBAAiB;cAAEC,8BAA8B,EAAEC;YAAQ,CAAC,GAChElC,kBAAQ,CAACmC,WAAW,CAAChE,MAAM,EAAEoE,iBAAiB,CAAC;YAEnD/H,cAAc,CAACyF,UAAU,EAAE+B,iBAAiB,CAAC,CACxCkE,IAAI,CAAC,MAAM;cACRF,kBAAkB,CAACjH,IAAI,CAACmD,OAAO,CAAC;YACpC,CAAC,CAAC,CACD0D,KAAK,CAACC,KAAK,IAAI;cACZ9F,MAAM,CAAC8F,KAAK,CAAC;YACjB,CAAC,CAAC;UACV,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@webiny/api-page-builder-import-export",
-  "version": "0.0.0-unstable.990c3ab1b6",
+  "version": "0.0.0-unstable.cc58a6566b",
   "main": "index.js",
   "keywords": [
     "pbie:base"
@@ -14,18 +14,18 @@
   "author": "Webiny Ltd",
   "license": "MIT",
   "dependencies": {
-    "@babel/runtime": "7.20.1",
+    "@babel/runtime": "7.20.13",
     "@commodo/fields": "1.1.2-beta.20",
-    "@webiny/api": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-file-manager": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-page-builder": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-security": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/error": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/handler": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/handler-aws": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/handler-graphql": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/utils": "0.0.0-unstable.990c3ab1b6",
-    "@webiny/validation": "0.0.0-unstable.990c3ab1b6",
+    "@webiny/api": "0.0.0-unstable.cc58a6566b",
+    "@webiny/api-file-manager": "0.0.0-unstable.cc58a6566b",
+    "@webiny/api-page-builder": "0.0.0-unstable.cc58a6566b",
+    "@webiny/api-security": "0.0.0-unstable.cc58a6566b",
+    "@webiny/error": "0.0.0-unstable.cc58a6566b",
+    "@webiny/handler": "0.0.0-unstable.cc58a6566b",
+    "@webiny/handler-aws": "0.0.0-unstable.cc58a6566b",
+    "@webiny/handler-graphql": "0.0.0-unstable.cc58a6566b",
+    "@webiny/utils": "0.0.0-unstable.cc58a6566b",
+    "@webiny/validation": "0.0.0-unstable.cc58a6566b",
     "archiver": "5.3.1",
     "commodo-fields-object": "1.0.6",
     "dot-prop-immutable": "2.1.1",
@@ -33,7 +33,7 @@
     "load-json-file": "6.2.0",
     "lodash": "4.17.21",
     "mdbid": "1.0.0",
-    "node-fetch": "2.6.7",
+    "node-fetch": "2.6.9",
     "stream": "0.0.2",
     "uniqid": "5.4.0",
     "yauzl": "2.10.0"
@@ -47,16 +47,16 @@
     "@types/archiver": "^5.3.1",
     "@types/node-fetch": "^2.6.1",
     "@types/yauzl": "^2.9.2",
-    "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-file-manager-ddb-es": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-i18n-ddb": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-security-so-ddb": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-tenancy": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-tenancy-so-ddb": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/api-wcp": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/cli": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/db": "^0.0.0-unstable.990c3ab1b6",
-    "@webiny/project-utils": "^0.0.0-unstable.990c3ab1b6",
+    "@webiny/api-dynamodb-to-elasticsearch": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-file-manager-ddb-es": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-i18n-ddb": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-security-so-ddb": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-tenancy": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-tenancy-so-ddb": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/api-wcp": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/cli": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/db": "^0.0.0-unstable.cc58a6566b",
+    "@webiny/project-utils": "^0.0.0-unstable.cc58a6566b",
     "jest": "^28.1.0",
     "jest-dynalite": "^3.2.0",
     "rimraf": "^3.0.2",
@@ -78,5 +78,5 @@
     ]
   }
 },
-  "gitHead": "87e7b4d0a643f65b31d029d6bf2a81902fb940a8"
+  "gitHead": "cc58a6566be0baabe335f20bd22137818565241a"
 }
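
The package.json changes in this release are version pins only: every internal @webiny/* dependency and devDependency moves from the 990c3ab1b6 build to the cc58a6566b build, @babel/runtime is bumped from 7.20.1 to 7.20.13, and node-fetch from 2.6.7 to 2.6.9. A minimal, hypothetical consumer manifest pinning the new build could look like the sketch below; the project name and the exact set of sibling packages are placeholders, not part of this package:

```json
{
  "name": "my-webiny-api-project",
  "dependencies": {
    "@webiny/api-page-builder-import-export": "0.0.0-unstable.cc58a6566b",
    "@webiny/api-page-builder": "0.0.0-unstable.cc58a6566b"
  }
}
```

Note that the published runtime dependencies use exact pins (no ^ range) for @webiny/* packages, so keeping all of them on the same build hash avoids mixing artifacts from different prerelease builds.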