@webiny/api-page-builder-import-export 5.39.0-beta.0 → 5.39.0-beta.2

Files changed (48)
  1. package/client.js +9 -20
  2. package/client.js.map +1 -1
  3. package/export/combine/blocksHandler.js +1 -1
  4. package/export/combine/blocksHandler.js.map +1 -1
  5. package/export/combine/formsHandler.js +1 -1
  6. package/export/combine/formsHandler.js.map +1 -1
  7. package/export/combine/pagesHandler.js +1 -1
  8. package/export/combine/pagesHandler.js.map +1 -1
  9. package/export/combine/templatesHandler.js +1 -1
  10. package/export/combine/templatesHandler.js.map +1 -1
  11. package/export/process/exporters/BlockExporter.d.ts +1 -1
  12. package/export/process/exporters/BlockExporter.js +0 -2
  13. package/export/process/exporters/BlockExporter.js.map +1 -1
  14. package/export/process/exporters/FormExporter.d.ts +1 -1
  15. package/export/process/exporters/PageExporter.d.ts +1 -1
  16. package/export/process/exporters/PageExporter.js +0 -2
  17. package/export/process/exporters/PageExporter.js.map +1 -1
  18. package/export/process/exporters/PageTemplateExporter.d.ts +1 -1
  19. package/export/process/exporters/PageTemplateExporter.js +0 -2
  20. package/export/process/exporters/PageTemplateExporter.js.map +1 -1
  21. package/export/s3Stream.d.ts +8 -8
  22. package/export/s3Stream.js +24 -21
  23. package/export/s3Stream.js.map +1 -1
  24. package/export/utils.d.ts +4 -4
  25. package/export/utils.js.map +1 -1
  26. package/export/zipper.d.ts +5 -5
  27. package/export/zipper.js +15 -21
  28. package/export/zipper.js.map +1 -1
  29. package/graphql/crud/importExportTasks.crud.js +40 -25
  30. package/graphql/crud/importExportTasks.crud.js.map +1 -1
  31. package/graphql/graphql/utils/resolve.d.ts +1 -1
  32. package/import/process/blocks/importBlock.js +7 -5
  33. package/import/process/blocks/importBlock.js.map +1 -1
  34. package/import/process/forms/importForm.js +3 -1
  35. package/import/process/forms/importForm.js.map +1 -1
  36. package/import/process/pages/importPage.js +5 -5
  37. package/import/process/pages/importPage.js.map +1 -1
  38. package/import/process/templates/importTemplate.js +3 -1
  39. package/import/process/templates/importTemplate.js.map +1 -1
  40. package/import/utils/extractAndUploadZipFileContents.js +0 -1
  41. package/import/utils/extractAndUploadZipFileContents.js.map +1 -1
  42. package/import/utils/extractZipAndUploadToS3.js.map +1 -1
  43. package/import/utils/uploadAssets.js +6 -6
  44. package/import/utils/uploadAssets.js.map +1 -1
  45. package/import/utils/uploadFilesFromS3.d.ts +1 -1
  46. package/import/utils/uploadFilesFromS3.js +1 -1
  47. package/import/utils/uploadFilesFromS3.js.map +1 -1
  48. package/package.json +22 -28
@@ -5,7 +5,6 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.importBlock = importBlock;
- var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
  var _path = _interopRequireDefault(require("path"));
  var _dotPropImmutable = _interopRequireDefault(require("dot-prop-immutable"));
  var _loadJsonFile = _interopRequireDefault(require("load-json-file"));
@@ -30,8 +29,10 @@ async function importBlock({
  const BLOCK_DATA_FILE_PATH = _path.default.join(BLOCK_EXTRACT_DIR, _path.default.basename(blockDataFileKey));
  log(`Downloading Block data file: ${blockDataFileKey} at "${BLOCK_DATA_FILE_PATH}"`);
  // Download and save block data file in disk.
+ const readStream = await _s3Stream.s3Stream.readStream(blockDataFileKey);
+ const writeStream = (0, _fsExtra.createWriteStream)(BLOCK_DATA_FILE_PATH);
  await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(blockDataFileKey).on("error", reject).pipe((0, _fsExtra.createWriteStream)(BLOCK_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ readStream.on("error", reject).pipe(writeStream).on("finish", resolve).on("error", reject);
  });

  // Load the block data file from disk.
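The hunk above moves the stream setup out of the Promise executor: the compiled code now awaits `s3Stream.readStream(...)` (so the method is assumed to resolve to a Node.js readable stream) and wires `finish` on the write stream. A minimal source-level sketch of the new download step, reconstructed from this output and the updated sourcesContent further down; the wrapper function name is hypothetical:

```ts
import { createWriteStream } from "fs-extra";
import { s3Stream } from "~/export/s3Stream";

// Hypothetical helper mirroring the compiled output above: download an exported
// data file from S3 and write it to a local path before it is parsed.
async function downloadDataFile(fileKey: string, targetPath: string): Promise<void> {
    // readStream is awaited, i.e. s3Stream.readStream() is assumed to resolve to a Readable.
    const readStream = await s3Stream.readStream(fileKey);
    const writeStream = createWriteStream(targetPath);

    await new Promise<void>((resolve, reject) => {
        readStream
            .on("error", reject)   // propagate read errors
            .pipe(writeStream)     // stream the S3 object to disk
            .on("finish", resolve) // resolve once the file is fully written
            .on("error", reject);  // propagate write errors
    });
}
```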
@@ -63,7 +64,7 @@ async function importBlock({
  }
  let loadedCategory;
  if (category) {
- loadedCategory = await context.pageBuilder.getBlockCategory(category === null || category === void 0 ? void 0 : category.slug);
+ loadedCategory = await context.pageBuilder.getBlockCategory(category?.slug);
  if (!loadedCategory) {
  loadedCategory = await context.pageBuilder.createBlockCategory({
  name: category.name,
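The only substantive change in this hunk is that the Babel-lowered null checks are replaced by native optional chaining in the emitted code; behaviour is unchanged when the category is missing. A hypothetical side-by-side of the two emitted forms:

```ts
declare const category: { slug: string } | null | undefined;

// 5.39.0-beta.0 output (lowered by Babel):
const slugOld = category === null || category === void 0 ? void 0 : category.slug;

// 5.39.0-beta.2 output (native optional chaining, assuming a newer runtime target):
const slugNew = category?.slug;

// Both evaluate to undefined when `category` is null or undefined,
// and to `category.slug` otherwise.
```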
@@ -88,9 +89,10 @@ async function importBlock({
  await (0, _downloadInstallFiles.deleteFile)(blockKey);
  log(`Remove block contents from S3...`);
  await (0, _deleteS3Folder.deleteS3Folder)(_path.default.dirname(fileUploadsData.data));
- return (0, _objectSpread2.default)((0, _objectSpread2.default)({}, block), {}, {
+ return {
+ ...block,
  blockCategory: loadedCategory.slug
- });
+ };
  }

  //# sourceMappingURL=importBlock.js.map
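The final hunk drops the `@babel/runtime` objectSpread2 helper (whose require was removed in the first hunk) in favour of native object spread; for the plain data object returned here the result is the same shallow copy. A hypothetical illustration with made-up types:

```ts
interface ImportedBlock {
    name: string;
    content: Record<string, unknown>;
    blockCategory?: string;
}

declare const block: ImportedBlock;
declare const loadedCategory: { slug: string };

// Equivalent, for plain data objects, to what the removed helper calls produced:
const viaAssign = Object.assign({}, block, { blockCategory: loadedCategory.slug });

// What the new compiled output emits directly:
const viaSpread = { ...block, blockCategory: loadedCategory.slug };
// Both are shallow copies of `block` with `blockCategory` overridden.
```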
@@ -1 +1 @@
- {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_s3Stream","_uploadAssets","_downloadInstallFiles","_deleteS3Folder","_updateFilesInData","_constants","importBlock","blockKey","context","fileUploadsData","log","console","BLOCK_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","blockDataFileKey","dotProp","get","BLOCK_DATA_FILE_PATH","basename","Promise","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","block","category","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","JSON","stringify","Object","fromEntries","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","loadedCategory","pageBuilder","getBlockCategory","slug","createBlockCategory","name","icon","description","importedBlocksCategory","deleteFile","deleteS3Folder","dirname","_objectSpread2","default","blockCategory"],"sources":["importBlock.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { ensureDirSync, createWriteStream } from \"fs-extra\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { PageBlock } from \"@webiny/api-page-builder/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { uploadAssets } from \"~/import/utils/uploadAssets\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { updateFilesInData } from \"~/import/utils/updateFilesInData\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { ExportedBlockData } from \"~/export/process/exporters/BlockExporter\";\n\ninterface ImportBlockParams {\n key: string;\n blockKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importBlock({\n blockKey,\n context,\n fileUploadsData\n}: ImportBlockParams): Promise<Pick<PageBlock, \"name\" | \"content\" | \"blockCategory\">> {\n const log = console.log;\n\n // Making Directory for block in which we're going to extract the block data file.\n const BLOCK_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, blockKey);\n ensureDirSync(BLOCK_EXTRACT_DIR);\n\n const blockDataFileKey = dotProp.get(fileUploadsData, `data`);\n const BLOCK_DATA_FILE_PATH = path.join(BLOCK_EXTRACT_DIR, path.basename(blockDataFileKey));\n\n log(`Downloading Block data file: ${blockDataFileKey} at \"${BLOCK_DATA_FILE_PATH}\"`);\n // Download and save block data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(blockDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(BLOCK_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the block data file from disk.\n log(`Load file ${blockDataFileKey}`);\n const { block, category, files } = await loadJson<ExportedBlockData>(BLOCK_DATA_FILE_PATH);\n\n // Only update block data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload block assets.\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n console.log(\n \"After uploadAssets:fileIdToNewFileMap\",\n JSON.stringify(Object.fromEntries(fileIdToNewFileMap))\n );\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n 
updateFilesInData({\n data: block.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n let loadedCategory;\n if (category) {\n loadedCategory = await context.pageBuilder.getBlockCategory(category?.slug);\n if (!loadedCategory) {\n loadedCategory = await context.pageBuilder.createBlockCategory({\n name: category.name,\n slug: category.slug,\n icon: category.icon,\n description: category.description\n });\n }\n } else {\n let importedBlocksCategory = await context.pageBuilder.getBlockCategory(\"imported-blocks\");\n\n if (!importedBlocksCategory) {\n importedBlocksCategory = await context.pageBuilder.createBlockCategory({\n name: \"Imported Blocks\",\n slug: \"imported-blocks\",\n description: \"Imported blocks\",\n icon: \"fas/star\"\n });\n }\n\n loadedCategory = importedBlocksCategory;\n }\n\n log(\"Removing Directory for block...\");\n await deleteFile(blockKey);\n\n log(`Remove block contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return { ...block, blockCategory: loadedCategory!.slug };\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AAIA,IAAAI,SAAA,GAAAJ,OAAA;AACA,IAAAK,aAAA,GAAAL,OAAA;AACA,IAAAM,qBAAA,GAAAN,OAAA;AACA,IAAAO,eAAA,GAAAP,OAAA;AACA,IAAAQ,kBAAA,GAAAR,OAAA;AACA,IAAAS,UAAA,GAAAT,OAAA;AAUO,eAAeU,WAAWA,CAAC;EAC9BC,QAAQ;EACRC,OAAO;EACPC;AACe,CAAC,EAAkE;EAClF,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,iBAAiB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,QAAQ,CAAC;EAClE,IAAAS,sBAAa,EAACJ,iBAAiB,CAAC;EAEhC,MAAMK,gBAAgB,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC7D,MAAMW,oBAAoB,GAAGP,aAAI,CAACC,IAAI,CAACF,iBAAiB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,gBAAgB,CAAC,CAAC;EAE1FP,GAAG,CAAE,gCAA+BO,gBAAiB,QAAOG,oBAAqB,GAAE,CAAC;EACpF;EACA,MAAM,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCC,kBAAQ,CACHC,UAAU,CAACT,gBAAgB,CAAC,CAC5BU,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBI,IAAI,CAAC,IAAAC,0BAAiB,EAACT,oBAAoB,CAAC,CAAC,CAC7CO,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBG,EAAE,CAAC,QAAQ,EAAEJ,OAAO,CAAC;EAC9B,CAAC,CAAC;;EAEF;EACAb,GAAG,CAAE,aAAYO,gBAAiB,EAAC,CAAC;EACpC,MAAM;IAAEa,KAAK;IAAEC,QAAQ;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAoBb,oBAAoB,CAAC;;EAE1F;EACA,IAAIY,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD;IACA,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,0BAAY,EAAC;MAC1C9B,OAAO;MACPwB,KAAK;MACLvB;IACJ,CAAC,CAAC;IAEFE,OAAO,CAACD,GAAG,CACP,uCAAuC,EACvC6B,IAAI,CAACC,SAAS,CAACC,MAAM,CAACC,WAAW,CAACL,kBAAkB,CAAC,CACzD,CAAC;IAED,MAAMM,QAAQ,GAAG,MAAMnC,OAAO,CAACoC,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,oCAAiB,EAAC;MACdC,IAAI,EAAElB,KAAK,CAACmB,OAAO,IAAI,CAAC,CAAC;MACzBZ,kBAAkB;MAClBS;IACJ,CAAC,CAAC;EACN;EAEA,IAAII,cAAc;EAClB,IAAInB,QAAQ,EAAE;IACVmB,cAAc,GAAG,MAAM1C,OAAO,CAAC2C,WAAW,CAACC,gBAAgB,CAACrB,QAAQ,aAARA,QAAQ,uBAARA,QAAQ,CAAEsB,IAAI,CAAC;IAC3E,IAAI,CAACH,cAAc,EAAE;MACjBA,cAAc,GAAG,MAAM1C,OAAO,CAAC2C,WAAW,CAACG,mBAAmB,CAAC;QAC3DC,IAAI,EAAExB,QAAQ,CAACwB,IAAI;QACnBF,IAAI,EAAEtB,QAAQ,CAACsB,IAAI;QACnBG,IAAI,EAAEzB,QAAQ,CAACyB,IAAI;QACnBC,WAAW,EAAE1B,QAAQ,CAAC0B;MAC1B,CAAC,CAAC;IACN;EACJ,CAAC,MAAM;IACH,IAAIC,sBAAsB,GAAG,MAAMlD,OAAO,CAAC2C,WAAW,CAACC,gBAAgB,CAAC,iBAAiB,CAAC;IAE1F,IAAI,CAACM,sBAAsB,EAAE;MACzBA,sBAAsB,GAAG,MAAMlD,OAAO,CAAC2C,WAAW,CAACG,mBAAmB,CAAC;QACnEC,IAAI,EAAE,iBAAiB;QACvBF,IAAI,EAAE,iBAAiB;QACvBI,WAAW,EAAE,iBAAiB;QAC9BD,IAAI,EAAE;MACV,CAAC,CAAC;IACN;IAEAN,cAAc,GAAGQ,sBAAsB;EAC3C;EAEAhD,GAAG,CAAC,iCAAiC,CAAC;EACtC,MAAM,IAAA
iD,gCAAU,EAACpD,QAAQ,CAAC;EAE1BG,GAAG,CAAE,kCAAiC,CAAC;EACvC,MAAM,IAAAkD,8BAAc,EAAC/C,aAAI,CAACgD,OAAO,CAACpD,eAAe,CAACuC,IAAI,CAAC,CAAC;EAExD,WAAAc,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MAAYjC,KAAK;IAAEkC,aAAa,EAAEd,cAAc,CAAEG;EAAI;AAC1D"}
+ {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_s3Stream","_uploadAssets","_downloadInstallFiles","_deleteS3Folder","_updateFilesInData","_constants","importBlock","blockKey","context","fileUploadsData","log","console","BLOCK_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","blockDataFileKey","dotProp","get","BLOCK_DATA_FILE_PATH","basename","readStream","s3Stream","writeStream","createWriteStream","Promise","resolve","reject","on","pipe","block","category","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","JSON","stringify","Object","fromEntries","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","loadedCategory","pageBuilder","getBlockCategory","slug","createBlockCategory","name","icon","description","importedBlocksCategory","deleteFile","deleteS3Folder","dirname","blockCategory"],"sources":["importBlock.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { ensureDirSync, createWriteStream } from \"fs-extra\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { PageBlock } from \"@webiny/api-page-builder/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { uploadAssets } from \"~/import/utils/uploadAssets\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { updateFilesInData } from \"~/import/utils/updateFilesInData\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { ExportedBlockData } from \"~/export/process/exporters/BlockExporter\";\n\ninterface ImportBlockParams {\n key: string;\n blockKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importBlock({\n blockKey,\n context,\n fileUploadsData\n}: ImportBlockParams): Promise<Pick<PageBlock, \"name\" | \"content\" | \"blockCategory\">> {\n const log = console.log;\n\n // Making Directory for block in which we're going to extract the block data file.\n const BLOCK_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, blockKey);\n ensureDirSync(BLOCK_EXTRACT_DIR);\n\n const blockDataFileKey = dotProp.get(fileUploadsData, `data`);\n const BLOCK_DATA_FILE_PATH = path.join(BLOCK_EXTRACT_DIR, path.basename(blockDataFileKey));\n\n log(`Downloading Block data file: ${blockDataFileKey} at \"${BLOCK_DATA_FILE_PATH}\"`);\n // Download and save block data file in disk.\n const readStream = await s3Stream.readStream(blockDataFileKey);\n const writeStream = createWriteStream(BLOCK_DATA_FILE_PATH);\n\n await new Promise((resolve, reject) => {\n readStream.on(\"error\", reject).pipe(writeStream).on(\"finish\", resolve).on(\"error\", reject);\n });\n\n // Load the block data file from disk.\n log(`Load file ${blockDataFileKey}`);\n const { block, category, files } = await loadJson<ExportedBlockData>(BLOCK_DATA_FILE_PATH);\n\n // Only update block data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload block assets.\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n console.log(\n \"After uploadAssets:fileIdToNewFileMap\",\n JSON.stringify(Object.fromEntries(fileIdToNewFileMap))\n );\n\n const settings = await context.fileManager.getSettings();\n\n 
const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: block.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n let loadedCategory;\n if (category) {\n loadedCategory = await context.pageBuilder.getBlockCategory(category?.slug);\n if (!loadedCategory) {\n loadedCategory = await context.pageBuilder.createBlockCategory({\n name: category.name,\n slug: category.slug,\n icon: category.icon,\n description: category.description\n });\n }\n } else {\n let importedBlocksCategory = await context.pageBuilder.getBlockCategory(\"imported-blocks\");\n\n if (!importedBlocksCategory) {\n importedBlocksCategory = await context.pageBuilder.createBlockCategory({\n name: \"Imported Blocks\",\n slug: \"imported-blocks\",\n description: \"Imported blocks\",\n icon: \"fas/star\"\n });\n }\n\n loadedCategory = importedBlocksCategory;\n }\n\n log(\"Removing Directory for block...\");\n await deleteFile(blockKey);\n\n log(`Remove block contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return { ...block, blockCategory: loadedCategory!.slug };\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AAIA,IAAAI,SAAA,GAAAJ,OAAA;AACA,IAAAK,aAAA,GAAAL,OAAA;AACA,IAAAM,qBAAA,GAAAN,OAAA;AACA,IAAAO,eAAA,GAAAP,OAAA;AACA,IAAAQ,kBAAA,GAAAR,OAAA;AACA,IAAAS,UAAA,GAAAT,OAAA;AAUO,eAAeU,WAAWA,CAAC;EAC9BC,QAAQ;EACRC,OAAO;EACPC;AACe,CAAC,EAAkE;EAClF,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,iBAAiB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,QAAQ,CAAC;EAClE,IAAAS,sBAAa,EAACJ,iBAAiB,CAAC;EAEhC,MAAMK,gBAAgB,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC7D,MAAMW,oBAAoB,GAAGP,aAAI,CAACC,IAAI,CAACF,iBAAiB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,gBAAgB,CAAC,CAAC;EAE1FP,GAAG,CAAE,gCAA+BO,gBAAiB,QAAOG,oBAAqB,GAAE,CAAC;EACpF;EACA,MAAME,UAAU,GAAG,MAAMC,kBAAQ,CAACD,UAAU,CAACL,gBAAgB,CAAC;EAC9D,MAAMO,WAAW,GAAG,IAAAC,0BAAiB,EAACL,oBAAoB,CAAC;EAE3D,MAAM,IAAIM,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCN,UAAU,CAACO,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC,CAACE,IAAI,CAACN,WAAW,CAAC,CAACK,EAAE,CAAC,QAAQ,EAAEF,OAAO,CAAC,CAACE,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC;EAC9F,CAAC,CAAC;;EAEF;EACAlB,GAAG,CAAE,aAAYO,gBAAiB,EAAC,CAAC;EACpC,MAAM;IAAEc,KAAK;IAAEC,QAAQ;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAoBd,oBAAoB,CAAC;;EAE1F;EACA,IAAIa,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD;IACA,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,0BAAY,EAAC;MAC1C/B,OAAO;MACPyB,KAAK;MACLxB;IACJ,CAAC,CAAC;IAEFE,OAAO,CAACD,GAAG,CACP,uCAAuC,EACvC8B,IAAI,CAACC,SAAS,CAACC,MAAM,CAACC,WAAW,CAACL,kBAAkB,CAAC,CACzD,CAAC;IAED,MAAMM,QAAQ,GAAG,MAAMpC,OAAO,CAACqC,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,oCAAiB,EAAC;MACdC,IAAI,EAAElB,KAAK,CAACmB,OAAO,IAAI,CAAC,CAAC;MACzBZ,kBAAkB;MAClBS;IACJ,CAAC,CAAC;EACN;EAEA,IAAII,cAAc;EAClB,IAAInB,QAAQ,EAAE;IACVmB,cAAc,GAAG,MAAM3C,OAAO,CAAC4C,WAAW,CAACC,gBAAgB,CAACrB,QAAQ,EAAEsB,IAAI,CAAC;IAC3E,IAAI,CAACH,cAAc,EAAE;MACjBA,cAAc,GAAG,MAAM3C,OAAO,CAAC4C,WAAW,CAACG,mBAAmB,CAAC;QAC3DC,IAAI,EAAExB,QAAQ,CAACwB,IAAI;QACnBF,IAAI,EAAEtB,QAAQ,CAACsB,IAAI;QACnBG,IAAI,EAAEzB,QAAQ,CAACyB,IAAI;QACnBC,WAAW,EAAE1B,QAAQ,CAAC0B;MAC1B,CAAC,CAAC;IACN;EACJ,CAAC,MAAM;IACH,IAAIC,sBAAsB,GAAG,MAAMnD,OAAO,CAAC4C,WAAW,CAACC,gBAAgB,CAAC,iBAAiB,CAAC;IAE1F,IAAI,CAACM,sBAAsB,EAAE;MACzBA,sBAAsB,GAAG,MAAMnD,OAAO,CAAC4C,WAAW,CAACG,mBAAmB,CAAC;QACnEC,IAAI,EAAE,iBAAiB;QACvBF,IAAI,EAAE,iBAAiB;QACvBI,WAAW,EAAE,iBAAiB;QAC9BD,IAAI,EAAE;MACV,CAAC,CAA
C;IACN;IAEAN,cAAc,GAAGQ,sBAAsB;EAC3C;EAEAjD,GAAG,CAAC,iCAAiC,CAAC;EACtC,MAAM,IAAAkD,gCAAU,EAACrD,QAAQ,CAAC;EAE1BG,GAAG,CAAE,kCAAiC,CAAC;EACvC,MAAM,IAAAmD,8BAAc,EAAChD,aAAI,CAACiD,OAAO,CAACrD,eAAe,CAACwC,IAAI,CAAC,CAAC;EAExD,OAAO;IAAE,GAAGlB,KAAK;IAAEgC,aAAa,EAAEZ,cAAc,CAAEG;EAAK,CAAC;AAC5D"}
@@ -26,8 +26,10 @@ async function importForm({
  const FORM_DATA_FILE_PATH = _path.default.join(FORM_EXTRACT_DIR, _path.default.basename(formDataFileKey));
  log(`Downloading Form data file: ${formDataFileKey} at "${FORM_DATA_FILE_PATH}"`);
  // Download and save form data file in disk.
+ const readStream = await _s3Stream.s3Stream.readStream(formDataFileKey);
+ const writeStream = (0, _fsExtra.createWriteStream)(FORM_DATA_FILE_PATH);
  await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(formDataFileKey).on("error", reject).pipe((0, _fsExtra.createWriteStream)(FORM_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ readStream.on("error", reject).pipe(writeStream).on("finish", resolve).on("error", reject);
  });

  // Load the form data file from disk.
@@ -1 +1 @@
- {"version":3,"names":["_dotPropImmutable","_interopRequireDefault","require","_fsExtra","_path","_loadJsonFile","_downloadInstallFiles","_s3Stream","_deleteS3Folder","_constants","importForm","formKey","fileUploadsData","log","console","FORM_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","formDataFileKey","dotProp","get","FORM_DATA_FILE_PATH","basename","Promise","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","form","loadJson","deleteFile","deleteS3Folder","dirname","data"],"sources":["importForm.ts"],"sourcesContent":["import dotProp from \"dot-prop-immutable\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport path from \"path\";\nimport loadJson from \"load-json-file\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { FileUploadsData } from \"~/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { ExportedFormData } from \"~/export/process/exporters/FormExporter\";\n\ninterface ImportFormParams {\n key: string;\n formKey: string;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importForm({\n formKey,\n fileUploadsData\n}: ImportFormParams): Promise<ExportedFormData[\"form\"]> {\n const log = console.log;\n\n // Making Directory for form in which we're going to extract the form data file.\n const FORM_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, formKey);\n ensureDirSync(FORM_EXTRACT_DIR);\n\n const formDataFileKey = dotProp.get(fileUploadsData, `data`);\n const FORM_DATA_FILE_PATH = path.join(FORM_EXTRACT_DIR, path.basename(formDataFileKey));\n\n log(`Downloading Form data file: ${formDataFileKey} at \"${FORM_DATA_FILE_PATH}\"`);\n // Download and save form data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(formDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(FORM_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the form data file from disk.\n log(`Load file ${formDataFileKey}`);\n const { form } = await loadJson<ExportedFormData>(FORM_DATA_FILE_PATH);\n\n log(\"Removing Directory for form...\");\n await deleteFile(formKey);\n\n log(`Remove form contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return 
form;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,iBAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,KAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,aAAA,GAAAJ,sBAAA,CAAAC,OAAA;AACA,IAAAI,qBAAA,GAAAJ,OAAA;AAEA,IAAAK,SAAA,GAAAL,OAAA;AACA,IAAAM,eAAA,GAAAN,OAAA;AACA,IAAAO,UAAA,GAAAP,OAAA;AASO,eAAeQ,UAAUA,CAAC;EAC7BC,OAAO;EACPC;AACc,CAAC,EAAqC;EACpD,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,gBAAgB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAEP,OAAO,CAAC;EAChE,IAAAQ,sBAAa,EAACJ,gBAAgB,CAAC;EAE/B,MAAMK,eAAe,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC5D,MAAMW,mBAAmB,GAAGP,aAAI,CAACC,IAAI,CAACF,gBAAgB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,eAAe,CAAC,CAAC;EAEvFP,GAAG,CAAE,+BAA8BO,eAAgB,QAAOG,mBAAoB,GAAE,CAAC;EACjF;EACA,MAAM,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCC,kBAAQ,CACHC,UAAU,CAACT,eAAe,CAAC,CAC3BU,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBI,IAAI,CAAC,IAAAC,0BAAiB,EAACT,mBAAmB,CAAC,CAAC,CAC5CO,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBG,EAAE,CAAC,QAAQ,EAAEJ,OAAO,CAAC;EAC9B,CAAC,CAAC;;EAEF;EACAb,GAAG,CAAE,aAAYO,eAAgB,EAAC,CAAC;EACnC,MAAM;IAAEa;EAAK,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAmBX,mBAAmB,CAAC;EAEtEV,GAAG,CAAC,gCAAgC,CAAC;EACrC,MAAM,IAAAsB,gCAAU,EAACxB,OAAO,CAAC;EAEzBE,GAAG,CAAE,iCAAgC,CAAC;EACtC,MAAM,IAAAuB,8BAAc,EAACpB,aAAI,CAACqB,OAAO,CAACzB,eAAe,CAAC0B,IAAI,CAAC,CAAC;EAExD,OAAOL,IAAI;AACf"}
+ {"version":3,"names":["_dotPropImmutable","_interopRequireDefault","require","_fsExtra","_path","_loadJsonFile","_downloadInstallFiles","_s3Stream","_deleteS3Folder","_constants","importForm","formKey","fileUploadsData","log","console","FORM_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","formDataFileKey","dotProp","get","FORM_DATA_FILE_PATH","basename","readStream","s3Stream","writeStream","createWriteStream","Promise","resolve","reject","on","pipe","form","loadJson","deleteFile","deleteS3Folder","dirname","data"],"sources":["importForm.ts"],"sourcesContent":["import dotProp from \"dot-prop-immutable\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport path from \"path\";\nimport loadJson from \"load-json-file\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { FileUploadsData } from \"~/types\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { ExportedFormData } from \"~/export/process/exporters/FormExporter\";\n\ninterface ImportFormParams {\n key: string;\n formKey: string;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importForm({\n formKey,\n fileUploadsData\n}: ImportFormParams): Promise<ExportedFormData[\"form\"]> {\n const log = console.log;\n\n // Making Directory for form in which we're going to extract the form data file.\n const FORM_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, formKey);\n ensureDirSync(FORM_EXTRACT_DIR);\n\n const formDataFileKey = dotProp.get(fileUploadsData, `data`);\n const FORM_DATA_FILE_PATH = path.join(FORM_EXTRACT_DIR, path.basename(formDataFileKey));\n\n log(`Downloading Form data file: ${formDataFileKey} at \"${FORM_DATA_FILE_PATH}\"`);\n // Download and save form data file in disk.\n const readStream = await s3Stream.readStream(formDataFileKey);\n const writeStream = createWriteStream(FORM_DATA_FILE_PATH);\n\n await new Promise((resolve, reject) => {\n readStream.on(\"error\", reject).pipe(writeStream).on(\"finish\", resolve).on(\"error\", reject);\n });\n\n // Load the form data file from disk.\n log(`Load file ${formDataFileKey}`);\n const { form } = await loadJson<ExportedFormData>(FORM_DATA_FILE_PATH);\n\n log(\"Removing Directory for form...\");\n await deleteFile(formKey);\n\n log(`Remove form contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return 
form;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,iBAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,KAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,aAAA,GAAAJ,sBAAA,CAAAC,OAAA;AACA,IAAAI,qBAAA,GAAAJ,OAAA;AAEA,IAAAK,SAAA,GAAAL,OAAA;AACA,IAAAM,eAAA,GAAAN,OAAA;AACA,IAAAO,UAAA,GAAAP,OAAA;AASO,eAAeQ,UAAUA,CAAC;EAC7BC,OAAO;EACPC;AACc,CAAC,EAAqC;EACpD,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,gBAAgB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAEP,OAAO,CAAC;EAChE,IAAAQ,sBAAa,EAACJ,gBAAgB,CAAC;EAE/B,MAAMK,eAAe,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC5D,MAAMW,mBAAmB,GAAGP,aAAI,CAACC,IAAI,CAACF,gBAAgB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,eAAe,CAAC,CAAC;EAEvFP,GAAG,CAAE,+BAA8BO,eAAgB,QAAOG,mBAAoB,GAAE,CAAC;EACjF;EACA,MAAME,UAAU,GAAG,MAAMC,kBAAQ,CAACD,UAAU,CAACL,eAAe,CAAC;EAC7D,MAAMO,WAAW,GAAG,IAAAC,0BAAiB,EAACL,mBAAmB,CAAC;EAE1D,MAAM,IAAIM,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCN,UAAU,CAACO,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC,CAACE,IAAI,CAACN,WAAW,CAAC,CAACK,EAAE,CAAC,QAAQ,EAAEF,OAAO,CAAC,CAACE,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC;EAC9F,CAAC,CAAC;;EAEF;EACAlB,GAAG,CAAE,aAAYO,eAAgB,EAAC,CAAC;EACnC,MAAM;IAAEc;EAAK,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAmBZ,mBAAmB,CAAC;EAEtEV,GAAG,CAAC,gCAAgC,CAAC;EACrC,MAAM,IAAAuB,gCAAU,EAACzB,OAAO,CAAC;EAEzBE,GAAG,CAAE,iCAAgC,CAAC;EACtC,MAAM,IAAAwB,8BAAc,EAACrB,aAAI,CAACsB,OAAO,CAAC1B,eAAe,CAAC2B,IAAI,CAAC,CAAC;EAExD,OAAOL,IAAI;AACf"}
@@ -27,8 +27,10 @@ async function importPage({
  const PAGE_DATA_FILE_PATH = _path.default.join(PAGE_EXTRACT_DIR, _path.default.basename(pageDataFileKey));
  log(`Downloading Page data file: ${pageDataFileKey} at "${PAGE_DATA_FILE_PATH}"`);
  // Download and save page data file in disk.
+ const readStream = await _s3Stream.s3Stream.readStream(pageDataFileKey);
+ const writeStream = (0, _fsExtra.createWriteStream)(PAGE_DATA_FILE_PATH);
  await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(pageDataFileKey).on("error", reject).pipe((0, _fsExtra.createWriteStream)(PAGE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ readStream.on("error", reject).pipe(writeStream).on("finish", resolve).on("error", reject);
  });

  // Load the page data file from disk.
@@ -75,15 +77,13 @@ function updateImageInPageSettings(params) {
  let newSettings = settings;
  const cleanSrcPrefix = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;
  if (_dotPropImmutable.default.get(newSettings, "general.image.src")) {
- var _settings$general;
- const newFile = fileIdToNewFileMap.get(((_settings$general = settings.general) === null || _settings$general === void 0 || (_settings$general = _settings$general.image) === null || _settings$general === void 0 ? void 0 : _settings$general.id) || "");
+ const newFile = fileIdToNewFileMap.get(settings.general?.image?.id || "");
  if (newFile) {
  newSettings = _dotPropImmutable.default.set(newSettings, "general.image.src", `${cleanSrcPrefix}/${newFile.key}`);
  }
  }
  if (_dotPropImmutable.default.get(newSettings, "social.image.src")) {
- var _settings$social;
- const newFile = fileIdToNewFileMap.get(((_settings$social = settings.social) === null || _settings$social === void 0 || (_settings$social = _settings$social.image) === null || _settings$social === void 0 ? void 0 : _settings$social.id) || "");
+ const newFile = fileIdToNewFileMap.get(settings.social?.image?.id || "");
  if (newFile) {
  newSettings = _dotPropImmutable.default.set(newSettings, "social.image.src", `${cleanSrcPrefix}/${newFile.key}`);
  }
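Here the same de-sugaring applies to the page settings rewrite: the map of old file id to newly uploaded file is consulted via `settings.general?.image?.id` / `settings.social?.image?.id`, and a matching entry rebuilds the image `src` from the File Manager `srcPrefix` plus the new file key. A small, purely hypothetical walkthrough of that lookup (sample ids, keys, and prefix are made up):

```ts
// Sample data, not taken from the package.
const fileIdToNewFileMap = new Map<string, { key: string }>([
    ["old-image-id", { key: "imports/abc123/hero.png" }]
]);

const settings = {
    general: { image: { id: "old-image-id", src: "https://old.example.com/hero.png" } }
};

const srcPrefix = "https://cdn.example.com/files/";
// The trailing slash is stripped, exactly as in the hunk above.
const cleanSrcPrefix = srcPrefix.endsWith("/") ? srcPrefix.slice(0, -1) : srcPrefix;

const newFile = fileIdToNewFileMap.get(settings.general?.image?.id || "");
const newSrc = newFile ? `${cleanSrcPrefix}/${newFile.key}` : settings.general.image.src;
// newSrc === "https://cdn.example.com/files/imports/abc123/hero.png"
```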
@@ -1 +1 @@
- {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_downloadInstallFiles","_constants","_s3Stream","_utils","importPage","pageKey","context","fileUploadsData","log","console","PAGE_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","pageDataFileKey","dotProp","get","PAGE_DATA_FILE_PATH","basename","Promise","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","page","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","updateImageInPageSettings","deleteFile","deleteS3Folder","dirname","params","newSettings","cleanSrcPrefix","endsWith","slice","_settings$general","newFile","general","image","id","set","key","_settings$social","social"],"sources":["importPage.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\nimport { PageSettings } from \"@webiny/api-page-builder/types\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { deleteS3Folder, updateFilesInData, uploadAssets } from \"~/import/utils\";\nimport { ExportedPageData } from \"~/export/process/exporters/PageExporter\";\n\ninterface ImportPageParams {\n key: string;\n pageKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importPage({\n pageKey,\n context,\n fileUploadsData\n}: ImportPageParams): Promise<ExportedPageData[\"page\"]> {\n const log = console.log;\n\n // Making Directory for page in which we're going to extract the page data file.\n const PAGE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, pageKey);\n ensureDirSync(PAGE_EXTRACT_DIR);\n\n const pageDataFileKey = dotProp.get(fileUploadsData, `data`);\n const PAGE_DATA_FILE_PATH = path.join(PAGE_EXTRACT_DIR, path.basename(pageDataFileKey));\n\n log(`Downloading Page data file: ${pageDataFileKey} at \"${PAGE_DATA_FILE_PATH}\"`);\n // Download and save page data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(pageDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(PAGE_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the page data file from disk.\n log(`Load file ${pageDataFileKey}`);\n const { page, files } = await loadJson<ExportedPageData>(PAGE_DATA_FILE_PATH);\n\n // Only update page data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: page.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n\n page.settings = updateImageInPageSettings({\n settings: page.settings || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for page...\");\n await deleteFile(pageKey);\n\n log(`Remove page contents from S3...`);\n await 
deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return page;\n}\n\ninterface UpdateImageInPageSettingsParams {\n fileIdToNewFileMap: Map<string, FileInput>;\n srcPrefix: string;\n settings: PageSettings;\n}\n\nfunction updateImageInPageSettings(\n params: UpdateImageInPageSettingsParams\n): UpdateImageInPageSettingsParams[\"settings\"] {\n const { settings, fileIdToNewFileMap, srcPrefix } = params;\n let newSettings = settings;\n\n const cleanSrcPrefix = srcPrefix.endsWith(\"/\") ? srcPrefix.slice(0, -1) : srcPrefix;\n\n if (dotProp.get(newSettings, \"general.image.src\")) {\n const newFile = fileIdToNewFileMap.get(settings.general?.image?.id || \"\");\n if (newFile) {\n newSettings = dotProp.set(\n newSettings,\n \"general.image.src\",\n `${cleanSrcPrefix}/${newFile.key}`\n );\n }\n }\n\n if (dotProp.get(newSettings, \"social.image.src\")) {\n const newFile = fileIdToNewFileMap.get(settings.social?.image?.id || \"\");\n\n if (newFile) {\n newSettings = dotProp.set(\n newSettings,\n \"social.image.src\",\n `${cleanSrcPrefix}/${newFile.key}`\n );\n }\n }\n\n return newSettings;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AACA,IAAAI,qBAAA,GAAAJ,OAAA;AAKA,IAAAK,UAAA,GAAAL,OAAA;AACA,IAAAM,SAAA,GAAAN,OAAA;AACA,IAAAO,MAAA,GAAAP,OAAA;AAUO,eAAeQ,UAAUA,CAAC;EAC7BC,OAAO;EACPC,OAAO;EACPC;AACc,CAAC,EAAqC;EACpD,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,gBAAgB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,OAAO,CAAC;EAChE,IAAAS,sBAAa,EAACJ,gBAAgB,CAAC;EAE/B,MAAMK,eAAe,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC5D,MAAMW,mBAAmB,GAAGP,aAAI,CAACC,IAAI,CAACF,gBAAgB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,eAAe,CAAC,CAAC;EAEvFP,GAAG,CAAE,+BAA8BO,eAAgB,QAAOG,mBAAoB,GAAE,CAAC;EACjF;EACA,MAAM,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCC,kBAAQ,CACHC,UAAU,CAACT,eAAe,CAAC,CAC3BU,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBI,IAAI,CAAC,IAAAC,0BAAiB,EAACT,mBAAmB,CAAC,CAAC,CAC5CO,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBG,EAAE,CAAC,QAAQ,EAAEJ,OAAO,CAAC;EAC9B,CAAC,CAAC;;EAEF;EACAb,GAAG,CAAE,aAAYO,eAAgB,EAAC,CAAC;EACnC,MAAM;IAAEa,IAAI;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAmBZ,mBAAmB,CAAC;;EAE7E;EACA,IAAIW,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,mBAAY,EAAC;MAC1C7B,OAAO;MACPuB,KAAK;MACLtB;IACJ,CAAC,CAAC;IAEF,MAAM6B,QAAQ,GAAG,MAAM9B,OAAO,CAAC+B,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,wBAAiB,EAAC;MACdC,IAAI,EAAEb,IAAI,CAACc,OAAO,IAAI,CAAC,CAAC;MACxBR,kBAAkB;MAClBK;IACJ,CAAC,CAAC;IAEFX,IAAI,CAACQ,QAAQ,GAAGO,yBAAyB,CAAC;MACtCP,QAAQ,EAAER,IAAI,CAACQ,QAAQ,IAAI,CAAC,CAAC;MAC7BF,kBAAkB;MAClBK;IACJ,CAAC,CAAC;EACN;EAEA/B,GAAG,CAAC,gCAAgC,CAAC;EACrC,MAAM,IAAAoC,gCAAU,EAACvC,OAAO,CAAC;EAEzBG,GAAG,CAAE,iCAAgC,CAAC;EACtC,MAAM,IAAAqC,qBAAc,EAAClC,aAAI,CAACmC,OAAO,CAACvC,eAAe,CAACkC,IAAI,CAAC,CAAC;EAExD,OAAOb,IAAI;AACf;AAQA,SAASe,yBAAyBA,CAC9BI,MAAuC,EACI;EAC3C,MAAM;IAAEX,QAAQ;IAAEF,kBAAkB;IAAEK;EAAU,CAAC,GAAGQ,MAAM;EAC1D,IAAIC,WAAW,GAAGZ,QAAQ;EAE1B,MAAMa,cAAc,GAAGV,SAAS,CAACW,QAAQ,CAAC,GAAG,CAAC,GAAGX,SAAS,CAACY,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAGZ,SAAS;EAEnF,IAAIvB,yBAAO,CAACC,GAAG,CAAC+B,WAAW,EAAE,mBAAmB,CAAC,EAAE;IAAA,IAAAI,iBAAA;IAC/C,MAAMC,OAAO,GAAGnB,kBAAkB,CAACjB,GAAG,CAAC,EAAAmC,iBAAA,GAAAhB,QAAQ,CAACkB,OAAO,cAAAF,iBAAA,gBAAAA,iBAAA,GAAhBA,iBAAA,CAAkBG,KAAK,cAAAH,iBAAA,uBAAvBA,iBAAA,CAAyBI,EAAE,KAAI,EAAE,CAAC;IACzE,IAAIH,OAAO,EAAE;MACTL,WAAW,GAAGhC,yBAAO,CAACyC,GAAG,
CACrBT,WAAW,EACX,mBAAmB,EAClB,GAAEC,cAAe,IAAGI,OAAO,CAACK,GAAI,EACrC,CAAC;IACL;EACJ;EAEA,IAAI1C,yBAAO,CAACC,GAAG,CAAC+B,WAAW,EAAE,kBAAkB,CAAC,EAAE;IAAA,IAAAW,gBAAA;IAC9C,MAAMN,OAAO,GAAGnB,kBAAkB,CAACjB,GAAG,CAAC,EAAA0C,gBAAA,GAAAvB,QAAQ,CAACwB,MAAM,cAAAD,gBAAA,gBAAAA,gBAAA,GAAfA,gBAAA,CAAiBJ,KAAK,cAAAI,gBAAA,uBAAtBA,gBAAA,CAAwBH,EAAE,KAAI,EAAE,CAAC;IAExE,IAAIH,OAAO,EAAE;MACTL,WAAW,GAAGhC,yBAAO,CAACyC,GAAG,CACrBT,WAAW,EACX,kBAAkB,EACjB,GAAEC,cAAe,IAAGI,OAAO,CAACK,GAAI,EACrC,CAAC;IACL;EACJ;EAEA,OAAOV,WAAW;AACtB"}
+ {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_downloadInstallFiles","_constants","_s3Stream","_utils","importPage","pageKey","context","fileUploadsData","log","console","PAGE_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","pageDataFileKey","dotProp","get","PAGE_DATA_FILE_PATH","basename","readStream","s3Stream","writeStream","createWriteStream","Promise","resolve","reject","on","pipe","page","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","updateImageInPageSettings","deleteFile","deleteS3Folder","dirname","params","newSettings","cleanSrcPrefix","endsWith","slice","newFile","general","image","id","set","key","social"],"sources":["importPage.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\nimport { PageSettings } from \"@webiny/api-page-builder/types\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { deleteS3Folder, updateFilesInData, uploadAssets } from \"~/import/utils\";\nimport { ExportedPageData } from \"~/export/process/exporters/PageExporter\";\n\ninterface ImportPageParams {\n key: string;\n pageKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importPage({\n pageKey,\n context,\n fileUploadsData\n}: ImportPageParams): Promise<ExportedPageData[\"page\"]> {\n const log = console.log;\n\n // Making Directory for page in which we're going to extract the page data file.\n const PAGE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, pageKey);\n ensureDirSync(PAGE_EXTRACT_DIR);\n\n const pageDataFileKey = dotProp.get(fileUploadsData, `data`);\n const PAGE_DATA_FILE_PATH = path.join(PAGE_EXTRACT_DIR, path.basename(pageDataFileKey));\n\n log(`Downloading Page data file: ${pageDataFileKey} at \"${PAGE_DATA_FILE_PATH}\"`);\n // Download and save page data file in disk.\n const readStream = await s3Stream.readStream(pageDataFileKey);\n const writeStream = createWriteStream(PAGE_DATA_FILE_PATH);\n\n await new Promise((resolve, reject) => {\n readStream.on(\"error\", reject).pipe(writeStream).on(\"finish\", resolve).on(\"error\", reject);\n });\n\n // Load the page data file from disk.\n log(`Load file ${pageDataFileKey}`);\n const { page, files } = await loadJson<ExportedPageData>(PAGE_DATA_FILE_PATH);\n\n // Only update page data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: page.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n\n page.settings = updateImageInPageSettings({\n settings: page.settings || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for page...\");\n await deleteFile(pageKey);\n\n log(`Remove page contents from S3...`);\n await 
deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return page;\n}\n\ninterface UpdateImageInPageSettingsParams {\n fileIdToNewFileMap: Map<string, FileInput>;\n srcPrefix: string;\n settings: PageSettings;\n}\n\nfunction updateImageInPageSettings(\n params: UpdateImageInPageSettingsParams\n): UpdateImageInPageSettingsParams[\"settings\"] {\n const { settings, fileIdToNewFileMap, srcPrefix } = params;\n let newSettings = settings;\n\n const cleanSrcPrefix = srcPrefix.endsWith(\"/\") ? srcPrefix.slice(0, -1) : srcPrefix;\n\n if (dotProp.get(newSettings, \"general.image.src\")) {\n const newFile = fileIdToNewFileMap.get(settings.general?.image?.id || \"\");\n if (newFile) {\n newSettings = dotProp.set(\n newSettings,\n \"general.image.src\",\n `${cleanSrcPrefix}/${newFile.key}`\n );\n }\n }\n\n if (dotProp.get(newSettings, \"social.image.src\")) {\n const newFile = fileIdToNewFileMap.get(settings.social?.image?.id || \"\");\n\n if (newFile) {\n newSettings = dotProp.set(\n newSettings,\n \"social.image.src\",\n `${cleanSrcPrefix}/${newFile.key}`\n );\n }\n }\n\n return newSettings;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AACA,IAAAI,qBAAA,GAAAJ,OAAA;AAKA,IAAAK,UAAA,GAAAL,OAAA;AACA,IAAAM,SAAA,GAAAN,OAAA;AACA,IAAAO,MAAA,GAAAP,OAAA;AAUO,eAAeQ,UAAUA,CAAC;EAC7BC,OAAO;EACPC,OAAO;EACPC;AACc,CAAC,EAAqC;EACpD,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,gBAAgB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,OAAO,CAAC;EAChE,IAAAS,sBAAa,EAACJ,gBAAgB,CAAC;EAE/B,MAAMK,eAAe,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAC5D,MAAMW,mBAAmB,GAAGP,aAAI,CAACC,IAAI,CAACF,gBAAgB,EAAEC,aAAI,CAACQ,QAAQ,CAACJ,eAAe,CAAC,CAAC;EAEvFP,GAAG,CAAE,+BAA8BO,eAAgB,QAAOG,mBAAoB,GAAE,CAAC;EACjF;EACA,MAAME,UAAU,GAAG,MAAMC,kBAAQ,CAACD,UAAU,CAACL,eAAe,CAAC;EAC7D,MAAMO,WAAW,GAAG,IAAAC,0BAAiB,EAACL,mBAAmB,CAAC;EAE1D,MAAM,IAAIM,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCN,UAAU,CAACO,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC,CAACE,IAAI,CAACN,WAAW,CAAC,CAACK,EAAE,CAAC,QAAQ,EAAEF,OAAO,CAAC,CAACE,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC;EAC9F,CAAC,CAAC;;EAEF;EACAlB,GAAG,CAAE,aAAYO,eAAgB,EAAC,CAAC;EACnC,MAAM;IAAEc,IAAI;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAmBb,mBAAmB,CAAC;;EAE7E;EACA,IAAIY,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,mBAAY,EAAC;MAC1C9B,OAAO;MACPwB,KAAK;MACLvB;IACJ,CAAC,CAAC;IAEF,MAAM8B,QAAQ,GAAG,MAAM/B,OAAO,CAACgC,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,wBAAiB,EAAC;MACdC,IAAI,EAAEb,IAAI,CAACc,OAAO,IAAI,CAAC,CAAC;MACxBR,kBAAkB;MAClBK;IACJ,CAAC,CAAC;IAEFX,IAAI,CAACQ,QAAQ,GAAGO,yBAAyB,CAAC;MACtCP,QAAQ,EAAER,IAAI,CAACQ,QAAQ,IAAI,CAAC,CAAC;MAC7BF,kBAAkB;MAClBK;IACJ,CAAC,CAAC;EACN;EAEAhC,GAAG,CAAC,gCAAgC,CAAC;EACrC,MAAM,IAAAqC,gCAAU,EAACxC,OAAO,CAAC;EAEzBG,GAAG,CAAE,iCAAgC,CAAC;EACtC,MAAM,IAAAsC,qBAAc,EAACnC,aAAI,CAACoC,OAAO,CAACxC,eAAe,CAACmC,IAAI,CAAC,CAAC;EAExD,OAAOb,IAAI;AACf;AAQA,SAASe,yBAAyBA,CAC9BI,MAAuC,EACI;EAC3C,MAAM;IAAEX,QAAQ;IAAEF,kBAAkB;IAAEK;EAAU,CAAC,GAAGQ,MAAM;EAC1D,IAAIC,WAAW,GAAGZ,QAAQ;EAE1B,MAAMa,cAAc,GAAGV,SAAS,CAACW,QAAQ,CAAC,GAAG,CAAC,GAAGX,SAAS,CAACY,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAGZ,SAAS;EAEnF,IAAIxB,yBAAO,CAACC,GAAG,CAACgC,WAAW,EAAE,mBAAmB,CAAC,EAAE;IAC/C,MAAMI,OAAO,GAAGlB,kBAAkB,CAAClB,GAAG,CAACoB,QAAQ,CAACiB,OAAO,EAAEC,KAAK,EAAEC,EAAE,IAAI,EAAE,CAAC;IACzE,IAAIH,OAAO,EAAE;MACTJ,WAAW,GAAGjC,yBAAO,CAACyC,GAAG,CACrBR,WAAW,EACX,mBAAmB,EAClB,GAAEC,cAAe,IA
AGG,OAAO,CAACK,GAAI,EACrC,CAAC;IACL;EACJ;EAEA,IAAI1C,yBAAO,CAACC,GAAG,CAACgC,WAAW,EAAE,kBAAkB,CAAC,EAAE;IAC9C,MAAMI,OAAO,GAAGlB,kBAAkB,CAAClB,GAAG,CAACoB,QAAQ,CAACsB,MAAM,EAAEJ,KAAK,EAAEC,EAAE,IAAI,EAAE,CAAC;IAExE,IAAIH,OAAO,EAAE;MACTJ,WAAW,GAAGjC,yBAAO,CAACyC,GAAG,CACrBR,WAAW,EACX,kBAAkB,EACjB,GAAEC,cAAe,IAAGG,OAAO,CAACK,GAAI,EACrC,CAAC;IACL;EACJ;EAEA,OAAOT,WAAW;AACtB"}
@@ -29,8 +29,10 @@ async function importTemplate({
  const TEMPLATE_DATA_FILE_PATH = _path.default.join(TEMPLATE_EXTRACT_DIR, _path.default.basename(templateDataFileKey));
  log(`Downloading Template data file: ${templateDataFileKey} at "${TEMPLATE_DATA_FILE_PATH}"`);
  // Download and save template data file in disk.
+ const readStream = await _s3Stream.s3Stream.readStream(templateDataFileKey);
+ const writeStream = (0, _fsExtra.createWriteStream)(TEMPLATE_DATA_FILE_PATH);
  await new Promise((resolve, reject) => {
- _s3Stream.s3Stream.readStream(templateDataFileKey).on("error", reject).pipe((0, _fsExtra.createWriteStream)(TEMPLATE_DATA_FILE_PATH)).on("error", reject).on("finish", resolve);
+ readStream.on("error", reject).pipe(writeStream).on("finish", resolve).on("error", reject);
  });

  // Load the template data file from disk.
@@ -1 +1 @@
- {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_constants","_s3Stream","_uploadAssets","_updateFilesInData","_downloadInstallFiles","_deleteS3Folder","importTemplate","templateKey","context","fileUploadsData","log","console","TEMPLATE_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","templateDataFileKey","dotProp","get","TEMPLATE_DATA_FILE_PATH","basename","Promise","resolve","reject","s3Stream","readStream","on","pipe","createWriteStream","template","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","deleteFile","deleteS3Folder","dirname"],"sources":["importTemplate.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { uploadAssets } from \"~/import/utils/uploadAssets\";\nimport { updateFilesInData } from \"~/import/utils/updateFilesInData\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { ExportedTemplateData } from \"~/export/process/exporters/PageTemplateExporter\";\n\ninterface ImportTemplateParams {\n key: string;\n templateKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importTemplate({\n templateKey,\n context,\n fileUploadsData\n}: ImportTemplateParams): Promise<ExportedTemplateData[\"template\"]> {\n const log = console.log;\n\n // Making Directory for template in which we're going to extract the template data file.\n const TEMPLATE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, templateKey);\n ensureDirSync(TEMPLATE_EXTRACT_DIR);\n\n const templateDataFileKey = dotProp.get(fileUploadsData, `data`);\n const TEMPLATE_DATA_FILE_PATH = path.join(\n TEMPLATE_EXTRACT_DIR,\n path.basename(templateDataFileKey)\n );\n\n log(`Downloading Template data file: ${templateDataFileKey} at \"${TEMPLATE_DATA_FILE_PATH}\"`);\n // Download and save template data file in disk.\n await new Promise((resolve, reject) => {\n s3Stream\n .readStream(templateDataFileKey)\n .on(\"error\", reject)\n .pipe(createWriteStream(TEMPLATE_DATA_FILE_PATH))\n .on(\"error\", reject)\n .on(\"finish\", resolve);\n });\n\n // Load the template data file from disk.\n log(`Load file ${templateDataFileKey}`);\n const { template, files } = await loadJson<ExportedTemplateData>(TEMPLATE_DATA_FILE_PATH);\n\n // Only update template data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload template assets.\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: template.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for template...\");\n await deleteFile(templateKey);\n\n log(`Remove template contents from S3...`);\n await deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return 
template;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AAGA,IAAAI,UAAA,GAAAJ,OAAA;AACA,IAAAK,SAAA,GAAAL,OAAA;AACA,IAAAM,aAAA,GAAAN,OAAA;AACA,IAAAO,kBAAA,GAAAP,OAAA;AACA,IAAAQ,qBAAA,GAAAR,OAAA;AACA,IAAAS,eAAA,GAAAT,OAAA;AAUO,eAAeU,cAAcA,CAAC;EACjCC,WAAW;EACXC,OAAO;EACPC;AACkB,CAAC,EAA6C;EAChE,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,oBAAoB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,WAAW,CAAC;EACxE,IAAAS,sBAAa,EAACJ,oBAAoB,CAAC;EAEnC,MAAMK,mBAAmB,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAChE,MAAMW,uBAAuB,GAAGP,aAAI,CAACC,IAAI,CACrCF,oBAAoB,EACpBC,aAAI,CAACQ,QAAQ,CAACJ,mBAAmB,CACrC,CAAC;EAEDP,GAAG,CAAE,mCAAkCO,mBAAoB,QAAOG,uBAAwB,GAAE,CAAC;EAC7F;EACA,MAAM,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCC,kBAAQ,CACHC,UAAU,CAACT,mBAAmB,CAAC,CAC/BU,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBI,IAAI,CAAC,IAAAC,0BAAiB,EAACT,uBAAuB,CAAC,CAAC,CAChDO,EAAE,CAAC,OAAO,EAAEH,MAAM,CAAC,CACnBG,EAAE,CAAC,QAAQ,EAAEJ,OAAO,CAAC;EAC9B,CAAC,CAAC;;EAEF;EACAb,GAAG,CAAE,aAAYO,mBAAoB,EAAC,CAAC;EACvC,MAAM;IAAEa,QAAQ;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAuBZ,uBAAuB,CAAC;;EAEzF;EACA,IAAIW,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD;IACA,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,0BAAY,EAAC;MAC1C7B,OAAO;MACPuB,KAAK;MACLtB;IACJ,CAAC,CAAC;IAEF,MAAM6B,QAAQ,GAAG,MAAM9B,OAAO,CAAC+B,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,oCAAiB,EAAC;MACdC,IAAI,EAAEb,QAAQ,CAACc,OAAO,IAAI,CAAC,CAAC;MAC5BR,kBAAkB;MAClBK;IACJ,CAAC,CAAC;EACN;EAEA/B,GAAG,CAAC,oCAAoC,CAAC;EACzC,MAAM,IAAAmC,gCAAU,EAACtC,WAAW,CAAC;EAE7BG,GAAG,CAAE,qCAAoC,CAAC;EAC1C,MAAM,IAAAoC,8BAAc,EAACjC,aAAI,CAACkC,OAAO,CAACtC,eAAe,CAACkC,IAAI,CAAC,CAAC;EAExD,OAAOb,QAAQ;AACnB"}
+ {"version":3,"names":["_path","_interopRequireDefault","require","_dotPropImmutable","_loadJsonFile","_fsExtra","_constants","_s3Stream","_uploadAssets","_updateFilesInData","_downloadInstallFiles","_deleteS3Folder","importTemplate","templateKey","context","fileUploadsData","log","console","TEMPLATE_EXTRACT_DIR","path","join","INSTALL_EXTRACT_DIR","ensureDirSync","templateDataFileKey","dotProp","get","TEMPLATE_DATA_FILE_PATH","basename","readStream","s3Stream","writeStream","createWriteStream","Promise","resolve","reject","on","pipe","template","files","loadJson","Array","isArray","length","fileIdToNewFileMap","uploadAssets","settings","fileManager","getSettings","srcPrefix","updateFilesInData","data","content","deleteFile","deleteS3Folder","dirname"],"sources":["importTemplate.ts"],"sourcesContent":["import path from \"path\";\nimport dotProp from \"dot-prop-immutable\";\nimport loadJson from \"load-json-file\";\nimport { createWriteStream, ensureDirSync } from \"fs-extra\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { FileUploadsData } from \"~/types\";\nimport { INSTALL_EXTRACT_DIR } from \"~/import/constants\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { uploadAssets } from \"~/import/utils/uploadAssets\";\nimport { updateFilesInData } from \"~/import/utils/updateFilesInData\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { deleteS3Folder } from \"~/import/utils/deleteS3Folder\";\nimport { ExportedTemplateData } from \"~/export/process/exporters/PageTemplateExporter\";\n\ninterface ImportTemplateParams {\n key: string;\n templateKey: string;\n context: PbImportExportContext;\n fileUploadsData: FileUploadsData;\n}\n\nexport async function importTemplate({\n templateKey,\n context,\n fileUploadsData\n}: ImportTemplateParams): Promise<ExportedTemplateData[\"template\"]> {\n const log = console.log;\n\n // Making Directory for template in which we're going to extract the template data file.\n const TEMPLATE_EXTRACT_DIR = path.join(INSTALL_EXTRACT_DIR, templateKey);\n ensureDirSync(TEMPLATE_EXTRACT_DIR);\n\n const templateDataFileKey = dotProp.get(fileUploadsData, `data`);\n const TEMPLATE_DATA_FILE_PATH = path.join(\n TEMPLATE_EXTRACT_DIR,\n path.basename(templateDataFileKey)\n );\n\n log(`Downloading Template data file: ${templateDataFileKey} at \"${TEMPLATE_DATA_FILE_PATH}\"`);\n // Download and save template data file in disk.\n const readStream = await s3Stream.readStream(templateDataFileKey);\n const writeStream = createWriteStream(TEMPLATE_DATA_FILE_PATH);\n\n await new Promise((resolve, reject) => {\n readStream.on(\"error\", reject).pipe(writeStream).on(\"finish\", resolve).on(\"error\", reject);\n });\n\n // Load the template data file from disk.\n log(`Load file ${templateDataFileKey}`);\n const { template, files } = await loadJson<ExportedTemplateData>(TEMPLATE_DATA_FILE_PATH);\n\n // Only update template data if there are files.\n if (files && Array.isArray(files) && files.length > 0) {\n // Upload template assets.\n const fileIdToNewFileMap = await uploadAssets({\n context,\n files,\n fileUploadsData\n });\n\n const settings = await context.fileManager.getSettings();\n\n const { srcPrefix = \"\" } = settings || {};\n updateFilesInData({\n data: template.content || {},\n fileIdToNewFileMap,\n srcPrefix\n });\n }\n\n log(\"Removing Directory for template...\");\n await deleteFile(templateKey);\n\n log(`Remove template contents from S3...`);\n await 
deleteS3Folder(path.dirname(fileUploadsData.data));\n\n return template;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAF,sBAAA,CAAAC,OAAA;AACA,IAAAE,aAAA,GAAAH,sBAAA,CAAAC,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AAGA,IAAAI,UAAA,GAAAJ,OAAA;AACA,IAAAK,SAAA,GAAAL,OAAA;AACA,IAAAM,aAAA,GAAAN,OAAA;AACA,IAAAO,kBAAA,GAAAP,OAAA;AACA,IAAAQ,qBAAA,GAAAR,OAAA;AACA,IAAAS,eAAA,GAAAT,OAAA;AAUO,eAAeU,cAAcA,CAAC;EACjCC,WAAW;EACXC,OAAO;EACPC;AACkB,CAAC,EAA6C;EAChE,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;;EAEvB;EACA,MAAME,oBAAoB,GAAGC,aAAI,CAACC,IAAI,CAACC,8BAAmB,EAAER,WAAW,CAAC;EACxE,IAAAS,sBAAa,EAACJ,oBAAoB,CAAC;EAEnC,MAAMK,mBAAmB,GAAGC,yBAAO,CAACC,GAAG,CAACV,eAAe,EAAG,MAAK,CAAC;EAChE,MAAMW,uBAAuB,GAAGP,aAAI,CAACC,IAAI,CACrCF,oBAAoB,EACpBC,aAAI,CAACQ,QAAQ,CAACJ,mBAAmB,CACrC,CAAC;EAEDP,GAAG,CAAE,mCAAkCO,mBAAoB,QAAOG,uBAAwB,GAAE,CAAC;EAC7F;EACA,MAAME,UAAU,GAAG,MAAMC,kBAAQ,CAACD,UAAU,CAACL,mBAAmB,CAAC;EACjE,MAAMO,WAAW,GAAG,IAAAC,0BAAiB,EAACL,uBAAuB,CAAC;EAE9D,MAAM,IAAIM,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACnCN,UAAU,CAACO,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC,CAACE,IAAI,CAACN,WAAW,CAAC,CAACK,EAAE,CAAC,QAAQ,EAAEF,OAAO,CAAC,CAACE,EAAE,CAAC,OAAO,EAAED,MAAM,CAAC;EAC9F,CAAC,CAAC;;EAEF;EACAlB,GAAG,CAAE,aAAYO,mBAAoB,EAAC,CAAC;EACvC,MAAM;IAAEc,QAAQ;IAAEC;EAAM,CAAC,GAAG,MAAM,IAAAC,qBAAQ,EAAuBb,uBAAuB,CAAC;;EAEzF;EACA,IAAIY,KAAK,IAAIE,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,IAAIA,KAAK,CAACI,MAAM,GAAG,CAAC,EAAE;IACnD;IACA,MAAMC,kBAAkB,GAAG,MAAM,IAAAC,0BAAY,EAAC;MAC1C9B,OAAO;MACPwB,KAAK;MACLvB;IACJ,CAAC,CAAC;IAEF,MAAM8B,QAAQ,GAAG,MAAM/B,OAAO,CAACgC,WAAW,CAACC,WAAW,CAAC,CAAC;IAExD,MAAM;MAAEC,SAAS,GAAG;IAAG,CAAC,GAAGH,QAAQ,IAAI,CAAC,CAAC;IACzC,IAAAI,oCAAiB,EAAC;MACdC,IAAI,EAAEb,QAAQ,CAACc,OAAO,IAAI,CAAC,CAAC;MAC5BR,kBAAkB;MAClBK;IACJ,CAAC,CAAC;EACN;EAEAhC,GAAG,CAAC,oCAAoC,CAAC;EACzC,MAAM,IAAAoC,gCAAU,EAACvC,WAAW,CAAC;EAE7BG,GAAG,CAAE,qCAAoC,CAAC;EAC1C,MAAM,IAAAqC,8BAAc,EAAClC,aAAI,CAACmC,OAAO,CAACvC,eAAe,CAACmC,IAAI,CAAC,CAAC;EAExD,OAAOb,QAAQ;AACnB"}
package/import/utils/extractAndUploadZipFileContents.js CHANGED
@@ -38,7 +38,6 @@ async function extractAndUploadZipFileContents(zipFileUrl) {
38
38
  // Read export file and download it in the disk
39
39
  const ZIP_FILE_PATH = _path.default.join(_constants.INSTALL_DIR, zipFileName);
40
40
  const writeStream = (0, _fs.createWriteStream)(ZIP_FILE_PATH);
41
- // @ts-ignore
42
41
  await streamPipeline(readStream, writeStream);
43
42
  log(`Downloaded file "${zipFileName}" at ${ZIP_FILE_PATH}`);
44
43
 
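The removed `// @ts-ignore` sat above an ordinary fetch-and-pipe download step. For reference, a minimal, self-contained sketch of that pattern, assuming node-fetch v2 (where `response.body` is a Node.js readable stream); `downloadToDisk` is an illustrative name, not a helper from this package:

import { pipeline } from "stream";
import { promisify } from "util";
import { createWriteStream } from "fs";
import fetch from "node-fetch";

const streamPipeline = promisify(pipeline);

// Stream a remote file straight to disk, without buffering it in memory.
async function downloadToDisk(url: string, targetPath: string): Promise<void> {
    const response = await fetch(url);
    if (!response.ok || !response.body) {
        throw new Error(`Unable to download file: "${url}" (${response.statusText})`);
    }
    // node-fetch v2 exposes `body` as a Node.js readable stream, so it pipes directly.
    await streamPipeline(response.body, createWriteStream(targetPath));
}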
package/import/utils/extractAndUploadZipFileContents.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"names":["_stream","require","_util","_nodeFetch","_interopRequireDefault","_path","_yauzl","_fs","_uniqid","_error","_downloadInstallFiles","_extractZipAndUploadToS","_getFileNameWithoutExt","_constants","_fsExtra","streamPipeline","promisify","pipeline","extractAndUploadZipFileContents","zipFileUrl","log","console","importDataList","zipFileName","path","basename","split","response","fetch","readStream","body","ok","WebinyError","statusText","uniquePath","uniqueId","ZIP_FILE_PATH","join","INSTALL_DIR","writeStream","createWriteStream","zipFilePaths","extractZipToDisk","deleteFile","i","length","currentPath","dataMap","extractZipAndUploadToS3","push","dirname","exportFileZipPath","Promise","resolve","reject","uniqueFolderNameForExport","getFileNameWithoutExt","EXPORT_FILE_EXTRACTION_PATH","ensureDirSync","yauzl","open","lazyEntries","err","zipFile","warn","info","entryCount","on","readEntry","entry","fileName","test","openReadStream","filePath","catch","error"],"sources":["extractAndUploadZipFileContents.ts"],"sourcesContent":["import { pipeline } from \"stream\";\nimport { promisify } from \"util\";\nimport fetch from \"node-fetch\";\nimport path from \"path\";\nimport yauzl from \"yauzl\";\nimport { createWriteStream } from \"fs\";\nimport uniqueId from \"uniqid\";\nimport WebinyError from \"@webiny/error\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { extractZipAndUploadToS3 } from \"~/import/utils/extractZipAndUploadToS3\";\nimport { getFileNameWithoutExt } from \"~/import/utils/getFileNameWithoutExt\";\nimport { ImportData } from \"~/types\";\nimport { INSTALL_DIR } from \"~/import/constants\";\nimport { ensureDirSync } from \"fs-extra\";\n\nconst streamPipeline = promisify(pipeline);\n\n/**\n * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.\n * @param zipFileUrl\n * @return ImportData S3 file keys for all uploaded assets group by page/block.\n */\nexport async function extractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]> {\n const log = console.log;\n const importDataList = [];\n\n const zipFileName = path.basename(zipFileUrl).split(\"?\")[0];\n\n const response = await fetch(zipFileUrl);\n const readStream = response.body;\n if (!response.ok || !readStream) {\n throw new WebinyError(`Unable to downloading file: \"${zipFileUrl}\"`, response.statusText);\n }\n\n const uniquePath = uniqueId(\"IMPORTS/\");\n // Read export file and download it in the disk\n const ZIP_FILE_PATH = path.join(INSTALL_DIR, zipFileName);\n\n const writeStream = createWriteStream(ZIP_FILE_PATH);\n // @ts-ignore\n await streamPipeline(readStream, writeStream);\n log(`Downloaded file \"${zipFileName}\" at ${ZIP_FILE_PATH}`);\n\n // Extract the downloaded zip file\n const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);\n\n log(`Removing ZIP file \"${zipFileUrl}\" from ${ZIP_FILE_PATH}`);\n await deleteFile(ZIP_FILE_PATH);\n\n // Extract each page/block zip and upload their content's to S3\n for (let i = 0; i < zipFilePaths.length; i++) {\n const currentPath = zipFilePaths[i];\n const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);\n importDataList.push(dataMap);\n }\n log(\"Removing all ZIP files located at \", path.dirname(zipFilePaths[0]));\n await deleteFile(path.dirname(zipFilePaths[0]));\n\n return importDataList;\n}\n\nfunction extractZipToDisk(exportFileZipPath: string): Promise<string[]> {\n return new 
Promise((resolve, reject) => {\n const zipFilePaths: string[] = [];\n const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);\n const EXPORT_FILE_EXTRACTION_PATH = path.join(INSTALL_DIR, uniqueFolderNameForExport);\n // Make sure DIR exists\n ensureDirSync(EXPORT_FILE_EXTRACTION_PATH);\n\n yauzl.open(exportFileZipPath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", exportFileZipPath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Missing zip file resource for path: \" + exportFileZipPath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn(\"ERROR: Failed on END event for file: \", exportFileZipPath, err);\n reject(err);\n }\n resolve(zipFilePaths);\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed to openReadStream for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\n \"ERROR: Missing Read Stream Resource when extracting to disk.\"\n );\n reject(\"Missing Read Stream Resource.\");\n return;\n }\n\n const filePath = path.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);\n\n readStream.on(\"end\", function () {\n zipFilePaths.push(filePath);\n zipFile.readEntry();\n });\n\n streamPipeline(readStream, createWriteStream(filePath)).catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n 
});\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,KAAA,GAAAD,OAAA;AACA,IAAAE,UAAA,GAAAC,sBAAA,CAAAH,OAAA;AACA,IAAAI,KAAA,GAAAD,sBAAA,CAAAH,OAAA;AACA,IAAAK,MAAA,GAAAF,sBAAA,CAAAH,OAAA;AACA,IAAAM,GAAA,GAAAN,OAAA;AACA,IAAAO,OAAA,GAAAJ,sBAAA,CAAAH,OAAA;AACA,IAAAQ,MAAA,GAAAL,sBAAA,CAAAH,OAAA;AACA,IAAAS,qBAAA,GAAAT,OAAA;AACA,IAAAU,uBAAA,GAAAV,OAAA;AACA,IAAAW,sBAAA,GAAAX,OAAA;AAEA,IAAAY,UAAA,GAAAZ,OAAA;AACA,IAAAa,QAAA,GAAAb,OAAA;AAEA,MAAMc,cAAc,GAAG,IAAAC,eAAS,EAACC,gBAAQ,CAAC;;AAE1C;AACA;AACA;AACA;AACA;AACO,eAAeC,+BAA+BA,CAACC,UAAkB,EAAyB;EAC7F,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,MAAME,cAAc,GAAG,EAAE;EAEzB,MAAMC,WAAW,GAAGC,aAAI,CAACC,QAAQ,CAACN,UAAU,CAAC,CAACO,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAE3D,MAAMC,QAAQ,GAAG,MAAM,IAAAC,kBAAK,EAACT,UAAU,CAAC;EACxC,MAAMU,UAAU,GAAGF,QAAQ,CAACG,IAAI;EAChC,IAAI,CAACH,QAAQ,CAACI,EAAE,IAAI,CAACF,UAAU,EAAE;IAC7B,MAAM,IAAIG,cAAW,CAAE,gCAA+Bb,UAAW,GAAE,EAAEQ,QAAQ,CAACM,UAAU,CAAC;EAC7F;EAEA,MAAMC,UAAU,GAAG,IAAAC,eAAQ,EAAC,UAAU,CAAC;EACvC;EACA,MAAMC,aAAa,GAAGZ,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEf,WAAW,CAAC;EAEzD,MAAMgB,WAAW,GAAG,IAAAC,qBAAiB,EAACJ,aAAa,CAAC;EACpD;EACA,MAAMrB,cAAc,CAACc,UAAU,EAAEU,WAAW,CAAC;EAC7CnB,GAAG,CAAE,oBAAmBG,WAAY,QAAOa,aAAc,EAAC,CAAC;;EAE3D;EACA,MAAMK,YAAY,GAAG,MAAMC,gBAAgB,CAACN,aAAa,CAAC;EAE1DhB,GAAG,CAAE,sBAAqBD,UAAW,UAASiB,aAAc,EAAC,CAAC;EAC9D,MAAM,IAAAO,gCAAU,EAACP,aAAa,CAAC;;EAE/B;EACA,KAAK,IAAIQ,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,YAAY,CAACI,MAAM,EAAED,CAAC,EAAE,EAAE;IAC1C,MAAME,WAAW,GAAGL,YAAY,CAACG,CAAC,CAAC;IACnC,MAAMG,OAAO,GAAG,MAAM,IAAAC,+CAAuB,EAACF,WAAW,EAAEZ,UAAU,CAAC;IACtEZ,cAAc,CAAC2B,IAAI,CAACF,OAAO,CAAC;EAChC;EACA3B,GAAG,CAAC,oCAAoC,EAAEI,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EACxE,MAAM,IAAAE,gCAAU,EAACnB,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EAE/C,OAAOnB,cAAc;AACzB;AAEA,SAASoB,gBAAgBA,CAACS,iBAAyB,EAAqB;EACpE,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACpC,MAAMb,YAAsB,GAAG,EAAE;IACjC,MAAMc,yBAAyB,GAAG,IAAAC,4CAAqB,EAACL,iBAAiB,CAAC;IAC1E,MAAMM,2BAA2B,GAAGjC,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEiB,yBAAyB,CAAC;IACrF;IACA,IAAAG,sBAAa,EAACD,2BAA2B,CAAC;IAE1CE,cAAK,CAACC,IAAI,CAACT,iBAAiB,EAAE;MAAEU,WAAW,EAAE;IAAK,CAAC,EAAE,UAAUC,GAAG,EAAEC,OAAO,EAAE;MACzE,IAAID,GAAG,EAAE;QACLzC,OAAO,CAAC2C,IAAI,CAAC,gCAAgC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;QACtER,MAAM,CAACQ,GAAG,CAAC;QACX;MACJ;MACA,IAAI,CAACC,OAAO,EAAE;QACV1C,OAAO,CAACD,GAAG,CAAC,6CAA6C,GAAG+B,iBAAiB,CAAC;QAC9EG,MAAM,CAAC,4BAA4B,CAAC;QACpC;MACJ;MAEAjC,OAAO,CAAC4C,IAAI,CAAE,yBAAwBF,OAAO,CAACG,UAAW,WAAU,CAAC;MAEpEH,OAAO,CAACI,EAAE,CAAC,KAAK,EAAE,UAAUL,GAAG,EAAE;QAC7B,IAAIA,GAAG,EAAE;UACLzC,OAAO,CAAC2C,IAAI,CAAC,uCAAuC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;UAC7ER,MAAM,CAACQ,GAAG,CAAC;QACf;QACAT,OAAO,CAACZ,YAAY,CAAC;MACzB,CAAC,CAAC;MAEFsB,OAAO,CAACK,SAAS,CAAC,CAAC;MAEnBL,OAAO,CAACI,EAAE,CAAC,OAAO,EAAE,UAAUE,KAAK,EAAE;QACjChD,OAAO,CAAC4C,IAAI,CAAE,sBAAqBI,KAAK,CAACC,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACC,IAAI,CAACF,KAAK,CAACC,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACAP,OAAO,CAACK,SAAS,CAAC,CAAC;QACvB,CAAC,MAAM;UACH;UACAL,OAAO,CAACS,cAAc,CAACH,KAAK,EAAE,UAAUP,GAAG,EAAEjC,UAAU,EAAE;YACrD,IAAIiC,GAAG,EAAE;cACLzC,OAAO,CAAC2C,IAAI,CACR,4CAA4C,EAC5CK,KAAK,CAACC,QAAQ,EACdR,GACJ,CAAC;cACDR,MAAM,CAACQ,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAACjC,UAAU,EAAE;cACbR,OAAO,CAACD,GAAG,CACP,8DACJ,CAAC;cACDkC,MAAM,CAAC,+BAA+B,CAAC;cACvC;YACJ;YAEA,MAAMmB,QAAQ,GAAGjD,aAAI,CAACa,IAAI,CAACoB,2BAA2B,EAAEY,KAAK,CAACC,QAAQ,CAAC;YAEvEzC,UAAU,CAACsC,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7B1B,YAAY,CAACQ,IAAI,CAACwB,QAAQ,CAAC;cAC3BV,OAAO,CAACK,SAAS,CAAC,CAAC;YACvB,CAAC,CAAC;YAEFrD,cAAc,CAACc,UAAU,EAAE,IAAAW,
qBAAiB,EAACiC,QAAQ,CAAC,CAAC,CAACC,KAAK,CAACC,KAAK,IAAI;cACnErB,MAAM,CAACqB,KAAK,CAAC;YACjB,CAAC,CAAC;UACN,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
1
+ {"version":3,"names":["_stream","require","_util","_nodeFetch","_interopRequireDefault","_path","_yauzl","_fs","_uniqid","_error","_downloadInstallFiles","_extractZipAndUploadToS","_getFileNameWithoutExt","_constants","_fsExtra","streamPipeline","promisify","pipeline","extractAndUploadZipFileContents","zipFileUrl","log","console","importDataList","zipFileName","path","basename","split","response","fetch","readStream","body","ok","WebinyError","statusText","uniquePath","uniqueId","ZIP_FILE_PATH","join","INSTALL_DIR","writeStream","createWriteStream","zipFilePaths","extractZipToDisk","deleteFile","i","length","currentPath","dataMap","extractZipAndUploadToS3","push","dirname","exportFileZipPath","Promise","resolve","reject","uniqueFolderNameForExport","getFileNameWithoutExt","EXPORT_FILE_EXTRACTION_PATH","ensureDirSync","yauzl","open","lazyEntries","err","zipFile","warn","info","entryCount","on","readEntry","entry","fileName","test","openReadStream","filePath","catch","error"],"sources":["extractAndUploadZipFileContents.ts"],"sourcesContent":["import { pipeline } from \"stream\";\nimport { promisify } from \"util\";\nimport fetch from \"node-fetch\";\nimport path from \"path\";\nimport yauzl from \"yauzl\";\nimport { createWriteStream } from \"fs\";\nimport uniqueId from \"uniqid\";\nimport WebinyError from \"@webiny/error\";\nimport { deleteFile } from \"@webiny/api-page-builder/graphql/crud/install/utils/downloadInstallFiles\";\nimport { extractZipAndUploadToS3 } from \"~/import/utils/extractZipAndUploadToS3\";\nimport { getFileNameWithoutExt } from \"~/import/utils/getFileNameWithoutExt\";\nimport { ImportData } from \"~/types\";\nimport { INSTALL_DIR } from \"~/import/constants\";\nimport { ensureDirSync } from \"fs-extra\";\n\nconst streamPipeline = promisify(pipeline);\n\n/**\n * Function will read the given zip file from S3 via stream, extract its content and upload it to S3 bucket.\n * @param zipFileUrl\n * @return ImportData S3 file keys for all uploaded assets group by page/block.\n */\nexport async function extractAndUploadZipFileContents(zipFileUrl: string): Promise<ImportData[]> {\n const log = console.log;\n const importDataList = [];\n\n const zipFileName = path.basename(zipFileUrl).split(\"?\")[0];\n\n const response = await fetch(zipFileUrl);\n const readStream = response.body;\n if (!response.ok || !readStream) {\n throw new WebinyError(`Unable to downloading file: \"${zipFileUrl}\"`, response.statusText);\n }\n\n const uniquePath = uniqueId(\"IMPORTS/\");\n // Read export file and download it in the disk\n const ZIP_FILE_PATH = path.join(INSTALL_DIR, zipFileName);\n\n const writeStream = createWriteStream(ZIP_FILE_PATH);\n\n await streamPipeline(readStream, writeStream);\n log(`Downloaded file \"${zipFileName}\" at ${ZIP_FILE_PATH}`);\n\n // Extract the downloaded zip file\n const zipFilePaths = await extractZipToDisk(ZIP_FILE_PATH);\n\n log(`Removing ZIP file \"${zipFileUrl}\" from ${ZIP_FILE_PATH}`);\n await deleteFile(ZIP_FILE_PATH);\n\n // Extract each page/block zip and upload their content's to S3\n for (let i = 0; i < zipFilePaths.length; i++) {\n const currentPath = zipFilePaths[i];\n const dataMap = await extractZipAndUploadToS3(currentPath, uniquePath);\n importDataList.push(dataMap);\n }\n log(\"Removing all ZIP files located at \", path.dirname(zipFilePaths[0]));\n await deleteFile(path.dirname(zipFilePaths[0]));\n\n return importDataList;\n}\n\nfunction extractZipToDisk(exportFileZipPath: string): Promise<string[]> {\n return new Promise((resolve, reject) => 
{\n const zipFilePaths: string[] = [];\n const uniqueFolderNameForExport = getFileNameWithoutExt(exportFileZipPath);\n const EXPORT_FILE_EXTRACTION_PATH = path.join(INSTALL_DIR, uniqueFolderNameForExport);\n // Make sure DIR exists\n ensureDirSync(EXPORT_FILE_EXTRACTION_PATH);\n\n yauzl.open(exportFileZipPath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", exportFileZipPath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Missing zip file resource for path: \" + exportFileZipPath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn(\"ERROR: Failed on END event for file: \", exportFileZipPath, err);\n reject(err);\n }\n resolve(zipFilePaths);\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed to openReadStream for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\n \"ERROR: Missing Read Stream Resource when extracting to disk.\"\n );\n reject(\"Missing Read Stream Resource.\");\n return;\n }\n\n const filePath = path.join(EXPORT_FILE_EXTRACTION_PATH, entry.fileName);\n\n readStream.on(\"end\", function () {\n zipFilePaths.push(filePath);\n zipFile.readEntry();\n });\n\n streamPipeline(readStream, createWriteStream(filePath)).catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n 
});\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,OAAA,GAAAC,OAAA;AACA,IAAAC,KAAA,GAAAD,OAAA;AACA,IAAAE,UAAA,GAAAC,sBAAA,CAAAH,OAAA;AACA,IAAAI,KAAA,GAAAD,sBAAA,CAAAH,OAAA;AACA,IAAAK,MAAA,GAAAF,sBAAA,CAAAH,OAAA;AACA,IAAAM,GAAA,GAAAN,OAAA;AACA,IAAAO,OAAA,GAAAJ,sBAAA,CAAAH,OAAA;AACA,IAAAQ,MAAA,GAAAL,sBAAA,CAAAH,OAAA;AACA,IAAAS,qBAAA,GAAAT,OAAA;AACA,IAAAU,uBAAA,GAAAV,OAAA;AACA,IAAAW,sBAAA,GAAAX,OAAA;AAEA,IAAAY,UAAA,GAAAZ,OAAA;AACA,IAAAa,QAAA,GAAAb,OAAA;AAEA,MAAMc,cAAc,GAAG,IAAAC,eAAS,EAACC,gBAAQ,CAAC;;AAE1C;AACA;AACA;AACA;AACA;AACO,eAAeC,+BAA+BA,CAACC,UAAkB,EAAyB;EAC7F,MAAMC,GAAG,GAAGC,OAAO,CAACD,GAAG;EACvB,MAAME,cAAc,GAAG,EAAE;EAEzB,MAAMC,WAAW,GAAGC,aAAI,CAACC,QAAQ,CAACN,UAAU,CAAC,CAACO,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAE3D,MAAMC,QAAQ,GAAG,MAAM,IAAAC,kBAAK,EAACT,UAAU,CAAC;EACxC,MAAMU,UAAU,GAAGF,QAAQ,CAACG,IAAI;EAChC,IAAI,CAACH,QAAQ,CAACI,EAAE,IAAI,CAACF,UAAU,EAAE;IAC7B,MAAM,IAAIG,cAAW,CAAE,gCAA+Bb,UAAW,GAAE,EAAEQ,QAAQ,CAACM,UAAU,CAAC;EAC7F;EAEA,MAAMC,UAAU,GAAG,IAAAC,eAAQ,EAAC,UAAU,CAAC;EACvC;EACA,MAAMC,aAAa,GAAGZ,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEf,WAAW,CAAC;EAEzD,MAAMgB,WAAW,GAAG,IAAAC,qBAAiB,EAACJ,aAAa,CAAC;EAEpD,MAAMrB,cAAc,CAACc,UAAU,EAAEU,WAAW,CAAC;EAC7CnB,GAAG,CAAE,oBAAmBG,WAAY,QAAOa,aAAc,EAAC,CAAC;;EAE3D;EACA,MAAMK,YAAY,GAAG,MAAMC,gBAAgB,CAACN,aAAa,CAAC;EAE1DhB,GAAG,CAAE,sBAAqBD,UAAW,UAASiB,aAAc,EAAC,CAAC;EAC9D,MAAM,IAAAO,gCAAU,EAACP,aAAa,CAAC;;EAE/B;EACA,KAAK,IAAIQ,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,YAAY,CAACI,MAAM,EAAED,CAAC,EAAE,EAAE;IAC1C,MAAME,WAAW,GAAGL,YAAY,CAACG,CAAC,CAAC;IACnC,MAAMG,OAAO,GAAG,MAAM,IAAAC,+CAAuB,EAACF,WAAW,EAAEZ,UAAU,CAAC;IACtEZ,cAAc,CAAC2B,IAAI,CAACF,OAAO,CAAC;EAChC;EACA3B,GAAG,CAAC,oCAAoC,EAAEI,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EACxE,MAAM,IAAAE,gCAAU,EAACnB,aAAI,CAAC0B,OAAO,CAACT,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC;EAE/C,OAAOnB,cAAc;AACzB;AAEA,SAASoB,gBAAgBA,CAACS,iBAAyB,EAAqB;EACpE,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACpC,MAAMb,YAAsB,GAAG,EAAE;IACjC,MAAMc,yBAAyB,GAAG,IAAAC,4CAAqB,EAACL,iBAAiB,CAAC;IAC1E,MAAMM,2BAA2B,GAAGjC,aAAI,CAACa,IAAI,CAACC,sBAAW,EAAEiB,yBAAyB,CAAC;IACrF;IACA,IAAAG,sBAAa,EAACD,2BAA2B,CAAC;IAE1CE,cAAK,CAACC,IAAI,CAACT,iBAAiB,EAAE;MAAEU,WAAW,EAAE;IAAK,CAAC,EAAE,UAAUC,GAAG,EAAEC,OAAO,EAAE;MACzE,IAAID,GAAG,EAAE;QACLzC,OAAO,CAAC2C,IAAI,CAAC,gCAAgC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;QACtER,MAAM,CAACQ,GAAG,CAAC;QACX;MACJ;MACA,IAAI,CAACC,OAAO,EAAE;QACV1C,OAAO,CAACD,GAAG,CAAC,6CAA6C,GAAG+B,iBAAiB,CAAC;QAC9EG,MAAM,CAAC,4BAA4B,CAAC;QACpC;MACJ;MAEAjC,OAAO,CAAC4C,IAAI,CAAE,yBAAwBF,OAAO,CAACG,UAAW,WAAU,CAAC;MAEpEH,OAAO,CAACI,EAAE,CAAC,KAAK,EAAE,UAAUL,GAAG,EAAE;QAC7B,IAAIA,GAAG,EAAE;UACLzC,OAAO,CAAC2C,IAAI,CAAC,uCAAuC,EAAEb,iBAAiB,EAAEW,GAAG,CAAC;UAC7ER,MAAM,CAACQ,GAAG,CAAC;QACf;QACAT,OAAO,CAACZ,YAAY,CAAC;MACzB,CAAC,CAAC;MAEFsB,OAAO,CAACK,SAAS,CAAC,CAAC;MAEnBL,OAAO,CAACI,EAAE,CAAC,OAAO,EAAE,UAAUE,KAAK,EAAE;QACjChD,OAAO,CAAC4C,IAAI,CAAE,sBAAqBI,KAAK,CAACC,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACC,IAAI,CAACF,KAAK,CAACC,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACAP,OAAO,CAACK,SAAS,CAAC,CAAC;QACvB,CAAC,MAAM;UACH;UACAL,OAAO,CAACS,cAAc,CAACH,KAAK,EAAE,UAAUP,GAAG,EAAEjC,UAAU,EAAE;YACrD,IAAIiC,GAAG,EAAE;cACLzC,OAAO,CAAC2C,IAAI,CACR,4CAA4C,EAC5CK,KAAK,CAACC,QAAQ,EACdR,GACJ,CAAC;cACDR,MAAM,CAACQ,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAACjC,UAAU,EAAE;cACbR,OAAO,CAACD,GAAG,CACP,8DACJ,CAAC;cACDkC,MAAM,CAAC,+BAA+B,CAAC;cACvC;YACJ;YAEA,MAAMmB,QAAQ,GAAGjD,aAAI,CAACa,IAAI,CAACoB,2BAA2B,EAAEY,KAAK,CAACC,QAAQ,CAAC;YAEvEzC,UAAU,CAACsC,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7B1B,YAAY,CAACQ,IAAI,CAACwB,QAAQ,CAAC;cAC3BV,OAAO,CAACK,SAAS,CAAC,CAAC;YACvB,CAAC,CAAC;YAEFrD,cAAc,CAACc,UAAU,EAAE,IAAAW,qBAAi
B,EAACiC,QAAQ,CAAC,CAAC,CAACC,KAAK,CAACC,KAAK,IAAI;cACnErB,MAAM,CAACqB,KAAK,CAAC;YACjB,CAAC,CAAC;UACN,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
package/import/utils/extractZipAndUploadToS3.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"names":["_yauzl","_interopRequireDefault","require","_stream","_util","_s3Stream","_getFileNameWithoutExt","_prepareDataDirMap","FILE_CONTENT_TYPE","streamPipeline","promisify","pipeline","extractZipAndUploadToS3","dataZipFilePath","uniquePath","Promise","resolve","reject","filePaths","fileUploadPromises","uniqueKey","getFileNameWithoutExt","dataMap","key","assets","data","yauzl","open","lazyEntries","err","zipFile","console","warn","log","info","entryCount","on","all","then","res","forEach","r","readEntry","entry","fileName","test","openReadStream","readStream","push","newKey","prepareDataDirMap","map","filePath","streamPassThrough","streamPassThroughUploadPromise","promise","s3Stream","writeStream","catch","error"],"sources":["extractZipAndUploadToS3.ts"],"sourcesContent":["import yauzl from \"yauzl\";\nimport { pipeline } from \"stream\";\nimport { promisify } from \"util\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { getFileNameWithoutExt } from \"./getFileNameWithoutExt\";\nimport { ImportData } from \"~/types\";\nimport { prepareDataDirMap } from \"~/import/utils/prepareDataDirMap\";\n\nconst FILE_CONTENT_TYPE = \"application/octet-stream\";\nconst streamPipeline = promisify(pipeline);\n\nexport function extractZipAndUploadToS3(\n dataZipFilePath: string,\n uniquePath: string\n): Promise<ImportData> {\n return new Promise((resolve, reject) => {\n const filePaths = [];\n const fileUploadPromises: Promise<S3.ManagedUpload.SendData>[] = [];\n const uniqueKey = getFileNameWithoutExt(dataZipFilePath);\n let dataMap: ImportData = {\n key: uniqueKey,\n assets: {},\n data: \"\"\n };\n yauzl.open(dataZipFilePath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", dataZipFilePath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Probably failed to extract zip: \" + dataZipFilePath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn('ERROR: Failed on \"END\" for file: ', dataZipFilePath, err);\n reject(err);\n }\n\n Promise.all(fileUploadPromises).then(res => {\n res.forEach(r => {\n console.info(\"Done uploading... 
\", r);\n });\n resolve(dataMap);\n });\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed while performing [openReadStream] for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\"ERROR: Missing Read Stream while importing.\");\n reject(\"Missing Read Strea Resource.\");\n return;\n }\n readStream.on(\"end\", function () {\n filePaths.push(entry.fileName);\n zipFile.readEntry();\n });\n\n const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;\n // Modify in place\n dataMap = prepareDataDirMap({\n map: dataMap,\n filePath: entry.fileName,\n newKey\n });\n\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);\n\n streamPipeline(readStream, streamPassThrough)\n .then(() => {\n fileUploadPromises.push(promise);\n })\n .catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n });\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,MAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,KAAA,GAAAF,OAAA;AAEA,IAAAG,SAAA,GAAAH,OAAA;AACA,IAAAI,sBAAA,GAAAJ,OAAA;AAEA,IAAAK,kBAAA,GAAAL,OAAA;AAEA,MAAMM,iBAAiB,GAAG,0BAA0B;AACpD,MAAMC,cAAc,GAAG,IAAAC,eAAS,EAACC,gBAAQ,CAAC;AAEnC,SAASC,uBAAuBA,CACnCC,eAAuB,EACvBC,UAAkB,EACC;EACnB,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACpC,MAAMC,SAAS,GAAG,EAAE;IACpB,MAAMC,kBAAwD,GAAG,EAAE;IACnE,MAAMC,SAAS,GAAG,IAAAC,4CAAqB,EAACR,eAAe,CAAC;IACxD,IAAIS,OAAmB,GAAG;MACtBC,GAAG,EAAEH,SAAS;MACdI,MAAM,EAAE,CAAC,CAAC;MACVC,IAAI,EAAE;IACV,CAAC;IACDC,cAAK,CAACC,IAAI,CAACd,eAAe,EAAE;MAAEe,WAAW,EAAE;IAAK,CAAC,EAAE,UAAUC,GAAG,EAAEC,OAAO,EAAE;MACvE,IAAID,GAAG,EAAE;QACLE,OAAO,CAACC,IAAI,CAAC,gCAAgC,EAAEnB,eAAe,EAAEgB,GAAG,CAAC;QACpEZ,MAAM,CAACY,GAAG,CAAC;QACX;MACJ;MACA,IAAI,CAACC,OAAO,EAAE;QACVC,OAAO,CAACE,GAAG,CAAC,yCAAyC,GAAGpB,eAAe,CAAC;QACxEI,MAAM,CAAC,4BAA4B,CAAC;QACpC;MACJ;MACAc,OAAO,CAACG,IAAI,CAAE,yBAAwBJ,OAAO,CAACK,UAAW,WAAU,CAAC;MACpEL,OAAO,CAACM,EAAE,CAAC,KAAK,EAAE,UAAUP,GAAG,EAAE;QAC7B,IAAIA,GAAG,EAAE;UACLE,OAAO,CAACC,IAAI,CAAC,mCAAmC,EAAEnB,eAAe,EAAEgB,GAAG,CAAC;UACvEZ,MAAM,CAACY,GAAG,CAAC;QACf;QAEAd,OAAO,CAACsB,GAAG,CAAClB,kBAAkB,CAAC,CAACmB,IAAI,CAACC,GAAG,IAAI;UACxCA,GAAG,CAACC,OAAO,CAACC,CAAC,IAAI;YACbV,OAAO,CAACG,IAAI,CAAC,oBAAoB,EAAEO,CAAC,CAAC;UACzC,CAAC,CAAC;UACFzB,OAAO,CAACM,OAAO,CAAC;QACpB,CAAC,CAAC;MACN,CAAC,CAAC;MAEFQ,OAAO,CAACY,SAAS,CAAC,CAAC;MAEnBZ,OAAO,CAACM,EAAE,CAAC,OAAO,EAAE,UAAUO,KAAK,EAAE;QACjCZ,OAAO,CAACG,IAAI,CAAE,sBAAqBS,KAAK,CAACC,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACC,IAAI,CAACF,KAAK,CAACC,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACAd,OAAO,CAACY,SAAS,CAAC,CAAC;QACvB,CAAC,MAAM;UACH;UACAZ,OAAO,CAACgB,cAAc,CAACH,KAAK,EAAE,UAAUd,GAAG,EAAEkB,UAAU,EAAE;YACrD,IAAIlB,GAAG,EAAE;cACLE,OAAO,CAACC,IAAI,CACR,4DAA4D,EAC5DW,KAAK,CAACC,QAAQ,EACdf,GACJ,CAAC;cACDZ,MAAM,CAACY,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAACkB,UAAU,EAAE;cACbhB,OAAO,CAACE,GAAG,CAAC,6CAA6C,CAAC;cAC1DhB,MAAM,CAAC,8BAA8B,CAAC;cACtC;YACJ;YACA8B,UAAU,CAACX,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7BlB,SAAS,CAAC8B,IAAI,CAACL,KAAK,CAACC,QAAQ,CAAC;cAC9Bd,OAAO,CAACY,SAAS,CAAC,CAAC;YACvB,CAAC,CAAC;YAEF,MAAMO,MAAM,GAAI,GAAEnC,UAAW,IAAGM,SAAU,IA
AGuB,KAAK,CAACC,QAAS,EAAC;YAC7D;YACAtB,OAAO,GAAG,IAAA4B,oCAAiB,EAAC;cACxBC,GAAG,EAAE7B,OAAO;cACZ8B,QAAQ,EAAET,KAAK,CAACC,QAAQ;cACxBK;YACJ,CAAC,CAAC;YAEF,MAAM;cAAEI,iBAAiB;cAAEC,8BAA8B,EAAEC;YAAQ,CAAC,GAChEC,kBAAQ,CAACC,WAAW,CAACR,MAAM,EAAEzC,iBAAiB,CAAC;YAEnDC,cAAc,CAACsC,UAAU,EAAEM,iBAAiB,CAAC,CACxCf,IAAI,CAAC,MAAM;cACRnB,kBAAkB,CAAC6B,IAAI,CAACO,OAAO,CAAC;YACpC,CAAC,CAAC,CACDG,KAAK,CAACC,KAAK,IAAI;cACZ1C,MAAM,CAAC0C,KAAK,CAAC;YACjB,CAAC,CAAC;UACV,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
1
+ {"version":3,"names":["_yauzl","_interopRequireDefault","require","_stream","_util","_s3Stream","_getFileNameWithoutExt","_prepareDataDirMap","FILE_CONTENT_TYPE","streamPipeline","promisify","pipeline","extractZipAndUploadToS3","dataZipFilePath","uniquePath","Promise","resolve","reject","filePaths","fileUploadPromises","uniqueKey","getFileNameWithoutExt","dataMap","key","assets","data","yauzl","open","lazyEntries","err","zipFile","console","warn","log","info","entryCount","on","all","then","res","forEach","r","readEntry","entry","fileName","test","openReadStream","readStream","push","newKey","prepareDataDirMap","map","filePath","streamPassThrough","streamPassThroughUploadPromise","promise","s3Stream","writeStream","catch","error"],"sources":["extractZipAndUploadToS3.ts"],"sourcesContent":["import yauzl from \"yauzl\";\nimport { pipeline } from \"stream\";\nimport { promisify } from \"util\";\nimport { CompleteMultipartUploadOutput } from \"@webiny/aws-sdk/client-s3\";\nimport { s3Stream } from \"~/export/s3Stream\";\nimport { getFileNameWithoutExt } from \"./getFileNameWithoutExt\";\nimport { ImportData } from \"~/types\";\nimport { prepareDataDirMap } from \"~/import/utils/prepareDataDirMap\";\n\nconst FILE_CONTENT_TYPE = \"application/octet-stream\";\nconst streamPipeline = promisify(pipeline);\n\nexport function extractZipAndUploadToS3(\n dataZipFilePath: string,\n uniquePath: string\n): Promise<ImportData> {\n return new Promise((resolve, reject) => {\n const filePaths = [];\n const fileUploadPromises: Promise<CompleteMultipartUploadOutput>[] = [];\n const uniqueKey = getFileNameWithoutExt(dataZipFilePath);\n let dataMap: ImportData = {\n key: uniqueKey,\n assets: {},\n data: \"\"\n };\n yauzl.open(dataZipFilePath, { lazyEntries: true }, function (err, zipFile) {\n if (err) {\n console.warn(\"ERROR: Failed to extract zip: \", dataZipFilePath, err);\n reject(err);\n return;\n }\n if (!zipFile) {\n console.log(\"ERROR: Probably failed to extract zip: \" + dataZipFilePath);\n reject(\"Missing Zip File Resource.\");\n return;\n }\n console.info(`The ZIP file contains ${zipFile.entryCount} entries.`);\n zipFile.on(\"end\", function (err) {\n if (err) {\n console.warn('ERROR: Failed on \"END\" for file: ', dataZipFilePath, err);\n reject(err);\n }\n\n Promise.all(fileUploadPromises).then(res => {\n res.forEach(r => {\n console.info(\"Done uploading... 
\", r);\n });\n resolve(dataMap);\n });\n });\n\n zipFile.readEntry();\n\n zipFile.on(\"entry\", function (entry) {\n console.info(`Processing entry: \"${entry.fileName}\"`);\n if (/\\/$/.test(entry.fileName)) {\n // Directory file names end with '/'.\n // Note that entries for directories themselves are optional.\n // An entry's fileName implicitly requires its parent directories to exist.\n zipFile.readEntry();\n } else {\n // file entry\n zipFile.openReadStream(entry, function (err, readStream) {\n if (err) {\n console.warn(\n \"ERROR: Failed while performing [openReadStream] for file: \",\n entry.fileName,\n err\n );\n reject(err);\n return;\n }\n if (!readStream) {\n console.log(\"ERROR: Missing Read Stream while importing.\");\n reject(\"Missing Read Strea Resource.\");\n return;\n }\n readStream.on(\"end\", function () {\n filePaths.push(entry.fileName);\n zipFile.readEntry();\n });\n\n const newKey = `${uniquePath}/${uniqueKey}/${entry.fileName}`;\n // Modify in place\n dataMap = prepareDataDirMap({\n map: dataMap,\n filePath: entry.fileName,\n newKey\n });\n\n const { streamPassThrough, streamPassThroughUploadPromise: promise } =\n s3Stream.writeStream(newKey, FILE_CONTENT_TYPE);\n\n streamPipeline(readStream, streamPassThrough)\n .then(() => {\n fileUploadPromises.push(promise);\n })\n .catch(error => {\n reject(error);\n });\n });\n }\n });\n });\n });\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,MAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,KAAA,GAAAF,OAAA;AAEA,IAAAG,SAAA,GAAAH,OAAA;AACA,IAAAI,sBAAA,GAAAJ,OAAA;AAEA,IAAAK,kBAAA,GAAAL,OAAA;AAEA,MAAMM,iBAAiB,GAAG,0BAA0B;AACpD,MAAMC,cAAc,GAAG,IAAAC,eAAS,EAACC,gBAAQ,CAAC;AAEnC,SAASC,uBAAuBA,CACnCC,eAAuB,EACvBC,UAAkB,EACC;EACnB,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACpC,MAAMC,SAAS,GAAG,EAAE;IACpB,MAAMC,kBAA4D,GAAG,EAAE;IACvE,MAAMC,SAAS,GAAG,IAAAC,4CAAqB,EAACR,eAAe,CAAC;IACxD,IAAIS,OAAmB,GAAG;MACtBC,GAAG,EAAEH,SAAS;MACdI,MAAM,EAAE,CAAC,CAAC;MACVC,IAAI,EAAE;IACV,CAAC;IACDC,cAAK,CAACC,IAAI,CAACd,eAAe,EAAE;MAAEe,WAAW,EAAE;IAAK,CAAC,EAAE,UAAUC,GAAG,EAAEC,OAAO,EAAE;MACvE,IAAID,GAAG,EAAE;QACLE,OAAO,CAACC,IAAI,CAAC,gCAAgC,EAAEnB,eAAe,EAAEgB,GAAG,CAAC;QACpEZ,MAAM,CAACY,GAAG,CAAC;QACX;MACJ;MACA,IAAI,CAACC,OAAO,EAAE;QACVC,OAAO,CAACE,GAAG,CAAC,yCAAyC,GAAGpB,eAAe,CAAC;QACxEI,MAAM,CAAC,4BAA4B,CAAC;QACpC;MACJ;MACAc,OAAO,CAACG,IAAI,CAAE,yBAAwBJ,OAAO,CAACK,UAAW,WAAU,CAAC;MACpEL,OAAO,CAACM,EAAE,CAAC,KAAK,EAAE,UAAUP,GAAG,EAAE;QAC7B,IAAIA,GAAG,EAAE;UACLE,OAAO,CAACC,IAAI,CAAC,mCAAmC,EAAEnB,eAAe,EAAEgB,GAAG,CAAC;UACvEZ,MAAM,CAACY,GAAG,CAAC;QACf;QAEAd,OAAO,CAACsB,GAAG,CAAClB,kBAAkB,CAAC,CAACmB,IAAI,CAACC,GAAG,IAAI;UACxCA,GAAG,CAACC,OAAO,CAACC,CAAC,IAAI;YACbV,OAAO,CAACG,IAAI,CAAC,oBAAoB,EAAEO,CAAC,CAAC;UACzC,CAAC,CAAC;UACFzB,OAAO,CAACM,OAAO,CAAC;QACpB,CAAC,CAAC;MACN,CAAC,CAAC;MAEFQ,OAAO,CAACY,SAAS,CAAC,CAAC;MAEnBZ,OAAO,CAACM,EAAE,CAAC,OAAO,EAAE,UAAUO,KAAK,EAAE;QACjCZ,OAAO,CAACG,IAAI,CAAE,sBAAqBS,KAAK,CAACC,QAAS,GAAE,CAAC;QACrD,IAAI,KAAK,CAACC,IAAI,CAACF,KAAK,CAACC,QAAQ,CAAC,EAAE;UAC5B;UACA;UACA;UACAd,OAAO,CAACY,SAAS,CAAC,CAAC;QACvB,CAAC,MAAM;UACH;UACAZ,OAAO,CAACgB,cAAc,CAACH,KAAK,EAAE,UAAUd,GAAG,EAAEkB,UAAU,EAAE;YACrD,IAAIlB,GAAG,EAAE;cACLE,OAAO,CAACC,IAAI,CACR,4DAA4D,EAC5DW,KAAK,CAACC,QAAQ,EACdf,GACJ,CAAC;cACDZ,MAAM,CAACY,GAAG,CAAC;cACX;YACJ;YACA,IAAI,CAACkB,UAAU,EAAE;cACbhB,OAAO,CAACE,GAAG,CAAC,6CAA6C,CAAC;cAC1DhB,MAAM,CAAC,8BAA8B,CAAC;cACtC;YACJ;YACA8B,UAAU,CAACX,EAAE,CAAC,KAAK,EAAE,YAAY;cAC7BlB,SAAS,CAAC8B,IAAI,CAACL,KAAK,CAACC,QAAQ,CAAC;cAC9Bd,OAAO,CAACY,SAAS,CAAC,CAAC;YACvB,CAAC,CAAC;YAEF,MAAMO,MAAM,GAAI,GAAEnC,UAAW,IAAGM,SAAU,IA
AGuB,KAAK,CAACC,QAAS,EAAC;YAC7D;YACAtB,OAAO,GAAG,IAAA4B,oCAAiB,EAAC;cACxBC,GAAG,EAAE7B,OAAO;cACZ8B,QAAQ,EAAET,KAAK,CAACC,QAAQ;cACxBK;YACJ,CAAC,CAAC;YAEF,MAAM;cAAEI,iBAAiB;cAAEC,8BAA8B,EAAEC;YAAQ,CAAC,GAChEC,kBAAQ,CAACC,WAAW,CAACR,MAAM,EAAEzC,iBAAiB,CAAC;YAEnDC,cAAc,CAACsC,UAAU,EAAEM,iBAAiB,CAAC,CACxCf,IAAI,CAAC,MAAM;cACRnB,kBAAkB,CAAC6B,IAAI,CAACO,OAAO,CAAC;YACpC,CAAC,CAAC,CACDG,KAAK,CAACC,KAAK,IAAI;cACZ1C,MAAM,CAAC0C,KAAK,CAAC;YACjB,CAAC,CAAC;UACV,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN,CAAC,CAAC;AACN"}
package/import/utils/uploadAssets.js CHANGED
@@ -1,11 +1,9 @@
1
1
  "use strict";
2
2
 
3
- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
4
3
  Object.defineProperty(exports, "__esModule", {
5
4
  value: true
6
5
  });
7
6
  exports.uploadAssets = void 0;
8
- var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
9
7
  var _utils = require("@webiny/utils");
10
8
  var _uploadFilesFromS = require("./uploadFilesFromS3");
11
9
  function notAPreviouslyImportedFile(importedImages) {
@@ -69,16 +67,18 @@ const uploadAssets = async params => {
69
67
  // We generate a new file id, key, and add `meta.originalKey` property to prevent duplicates on future imports.
70
68
  const id = (0, _utils.mdbid)();
71
69
  const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, "")}`;
72
- const newFile = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, toImport), {}, {
70
+ const newFile = {
71
+ ...toImport,
73
72
  id,
74
73
  location: {
75
74
  folderId: "root"
76
75
  },
77
76
  key: newKey,
78
- meta: (0, _objectSpread2.default)((0, _objectSpread2.default)({}, toImport.meta), {}, {
77
+ meta: {
78
+ ...toImport.meta,
79
79
  originalKey: toImport.key
80
- })
81
- });
80
+ }
81
+ };
82
82
  createFilesInput.push(newFile);
83
83
  oldIdToNewFileMap.set(toImport.id, newFile);
84
84
  uploadFileMap.set(fileUploadsData.assets[toImport.key], newFile);
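The only change in the two hunks above is that Babel's `objectSpread2` helper has been replaced by native object spread in the compiled output; the logic that builds the new file record is unchanged. A condensed sketch of that record construction, using a simplified stand-in for Webiny's `FileInput` type (the real type lives in `@webiny/api-file-manager/types`):

import { mdbid } from "@webiny/utils";

// Simplified stand-in for the FileInput shape used by the package.
interface ImportedFile {
    id: string;
    key: string;
    meta: { originalKey?: string; [key: string]: unknown };
    [key: string]: unknown;
}

function buildNewFileInput(toImport: ImportedFile): ImportedFile {
    // New id and key, so the imported asset never collides with the exported one.
    const id = mdbid();
    const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, "")}`;
    return {
        ...toImport,
        id,
        location: { folderId: "root" },
        key: newKey,
        // `meta.originalKey` is what later imports check to avoid re-importing the same asset.
        meta: { ...toImport.meta, originalKey: toImport.key }
    };
}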
package/import/utils/uploadAssets.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"names":["_utils","require","_uploadFilesFromS","notAPreviouslyImportedFile","importedImages","file","some","existingImportedImage","meta","originalKey","key","notAnExistingFile","existingFiles","existingFile","uploadAssets","params","context","files","fileUploadsData","oldIdToNewFileMap","Map","process","env","NODE_ENV","fileManager","listFiles","where","originalKey_in","map","importedImage","fileBeingImported","find","set","id","id_in","newFilesToImport","filter","uploadFileMap","createFilesInput","toImport","mdbid","newKey","replace","newFile","_objectSpread2","default","location","folderId","push","assets","uploadFilesFromS3","createFilesInBatch","exports"],"sources":["uploadAssets.ts"],"sourcesContent":["import { mdbid } from \"@webiny/utils\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { File, FileInput } from \"@webiny/api-file-manager/types\";\nimport { UploadFileMap, uploadFilesFromS3 } from \"~/import/utils/uploadFilesFromS3\";\nimport { FileUploadsData } from \"~/types\";\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n files: File[];\n fileUploadsData: FileUploadsData;\n}\n\nfunction notAPreviouslyImportedFile(importedImages: File[]) {\n return (file: File) => {\n return !importedImages.some(\n existingImportedImage => existingImportedImage.meta.originalKey === file.key\n );\n };\n}\n\nfunction notAnExistingFile(existingFiles: File[]) {\n return (file: File) => {\n return !existingFiles.some(existingFile => existingFile.key === file.key);\n };\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams) => {\n const { context, files, fileUploadsData } = params;\n\n const oldIdToNewFileMap = new Map<string, FileInput>();\n\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return oldIdToNewFileMap;\n }\n\n // Check if the requested files were already imported in the past.\n const [importedImages] = await context.fileManager.listFiles({\n where: { meta: { originalKey_in: files.map(file => file.key) } }\n });\n\n // Link files that were already imported.\n for (const importedImage of importedImages) {\n const fileBeingImported = files.find(file => file.key === importedImage.meta.originalKey);\n\n if (fileBeingImported) {\n oldIdToNewFileMap.set(fileBeingImported.id, importedImage);\n }\n }\n\n // Check if files with such IDs already exist.\n const [existingFiles] = await context.fileManager.listFiles({\n where: { id_in: files.map(file => file.id) }\n });\n\n const newFilesToImport = files\n .filter(notAnExistingFile(existingFiles))\n .filter(notAPreviouslyImportedFile(importedImages));\n\n // A map of temporary file keys (created during ZIP upload) to permanent file keys.\n const uploadFileMap: UploadFileMap = new Map();\n\n // Array of file inputs, to insert into the DB.\n const createFilesInput: FileInput[] = [];\n\n for (const toImport of newFilesToImport) {\n // We generate a new file id, key, and add `meta.originalKey` property to prevent duplicates on future imports.\n const id = mdbid();\n const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, \"\")}`;\n const newFile: FileInput = {\n ...toImport,\n id,\n location: {\n folderId: \"root\"\n },\n key: newKey,\n meta: { ...toImport.meta, originalKey: toImport.key }\n };\n\n createFilesInput.push(newFile);\n 
oldIdToNewFileMap.set(toImport.id, newFile);\n uploadFileMap.set(fileUploadsData.assets[toImport.key], newFile);\n }\n\n await uploadFilesFromS3(uploadFileMap);\n\n await context.fileManager.createFilesInBatch(createFilesInput);\n\n return oldIdToNewFileMap;\n};\n"],"mappings":";;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAGA,IAAAC,iBAAA,GAAAD,OAAA;AASA,SAASE,0BAA0BA,CAACC,cAAsB,EAAE;EACxD,OAAQC,IAAU,IAAK;IACnB,OAAO,CAACD,cAAc,CAACE,IAAI,CACvBC,qBAAqB,IAAIA,qBAAqB,CAACC,IAAI,CAACC,WAAW,KAAKJ,IAAI,CAACK,GAC7E,CAAC;EACL,CAAC;AACL;AAEA,SAASC,iBAAiBA,CAACC,aAAqB,EAAE;EAC9C,OAAQP,IAAU,IAAK;IACnB,OAAO,CAACO,aAAa,CAACN,IAAI,CAACO,YAAY,IAAIA,YAAY,CAACH,GAAG,KAAKL,IAAI,CAACK,GAAG,CAAC;EAC7E,CAAC;AACL;AAEO,MAAMI,YAAY,GAAG,MAAOC,MAA0B,IAAK;EAC9D,MAAM;IAAEC,OAAO;IAAEC,KAAK;IAAEC;EAAgB,CAAC,GAAGH,MAAM;EAElD,MAAMI,iBAAiB,GAAG,IAAIC,GAAG,CAAoB,CAAC;;EAEtD;AACJ;AACA;AACA;AACA;EACI,IAAIC,OAAO,CAACC,GAAG,CAACC,QAAQ,KAAK,MAAM,EAAE;IACjC,OAAOJ,iBAAiB;EAC5B;;EAEA;EACA,MAAM,CAACf,cAAc,CAAC,GAAG,MAAMY,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACzDC,KAAK,EAAE;MAAElB,IAAI,EAAE;QAAEmB,cAAc,EAAEV,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAACK,GAAG;MAAE;IAAE;EACnE,CAAC,CAAC;;EAEF;EACA,KAAK,MAAMmB,aAAa,IAAIzB,cAAc,EAAE;IACxC,MAAM0B,iBAAiB,GAAGb,KAAK,CAACc,IAAI,CAAC1B,IAAI,IAAIA,IAAI,CAACK,GAAG,KAAKmB,aAAa,CAACrB,IAAI,CAACC,WAAW,CAAC;IAEzF,IAAIqB,iBAAiB,EAAE;MACnBX,iBAAiB,CAACa,GAAG,CAACF,iBAAiB,CAACG,EAAE,EAAEJ,aAAa,CAAC;IAC9D;EACJ;;EAEA;EACA,MAAM,CAACjB,aAAa,CAAC,GAAG,MAAMI,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACxDC,KAAK,EAAE;MAAEQ,KAAK,EAAEjB,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAAC4B,EAAE;IAAE;EAC/C,CAAC,CAAC;EAEF,MAAME,gBAAgB,GAAGlB,KAAK,CACzBmB,MAAM,CAACzB,iBAAiB,CAACC,aAAa,CAAC,CAAC,CACxCwB,MAAM,CAACjC,0BAA0B,CAACC,cAAc,CAAC,CAAC;;EAEvD;EACA,MAAMiC,aAA4B,GAAG,IAAIjB,GAAG,CAAC,CAAC;;EAE9C;EACA,MAAMkB,gBAA6B,GAAG,EAAE;EAExC,KAAK,MAAMC,QAAQ,IAAIJ,gBAAgB,EAAE;IACrC;IACA,MAAMF,EAAE,GAAG,IAAAO,YAAK,EAAC,CAAC;IAClB,MAAMC,MAAM,GAAI,GAAER,EAAG,IAAGM,QAAQ,CAAC7B,GAAG,CAACgC,OAAO,CAAE,GAAEH,QAAQ,CAACN,EAAG,GAAE,EAAE,EAAE,CAAE,EAAC;IACrE,MAAMU,OAAkB,OAAAC,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MACjBN,QAAQ;MACXN,EAAE;MACFa,QAAQ,EAAE;QACNC,QAAQ,EAAE;MACd,CAAC;MACDrC,GAAG,EAAE+B,MAAM;MACXjC,IAAI,MAAAoC,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MAAON,QAAQ,CAAC/B,IAAI;QAAEC,WAAW,EAAE8B,QAAQ,CAAC7B;MAAG;IAAE,EACxD;IAED4B,gBAAgB,CAACU,IAAI,CAACL,OAAO,CAAC;IAC9BxB,iBAAiB,CAACa,GAAG,CAACO,QAAQ,CAACN,EAAE,EAAEU,OAAO,CAAC;IAC3CN,aAAa,CAACL,GAAG,CAACd,eAAe,CAAC+B,MAAM,CAACV,QAAQ,CAAC7B,GAAG,CAAC,EAAEiC,OAAO,CAAC;EACpE;EAEA,MAAM,IAAAO,mCAAiB,EAACb,aAAa,CAAC;EAEtC,MAAMrB,OAAO,CAACQ,WAAW,CAAC2B,kBAAkB,CAACb,gBAAgB,CAAC;EAE9D,OAAOnB,iBAAiB;AAC5B,CAAC;AAACiC,OAAA,CAAAtC,YAAA,GAAAA,YAAA"}
1
+ {"version":3,"names":["_utils","require","_uploadFilesFromS","notAPreviouslyImportedFile","importedImages","file","some","existingImportedImage","meta","originalKey","key","notAnExistingFile","existingFiles","existingFile","uploadAssets","params","context","files","fileUploadsData","oldIdToNewFileMap","Map","process","env","NODE_ENV","fileManager","listFiles","where","originalKey_in","map","importedImage","fileBeingImported","find","set","id","id_in","newFilesToImport","filter","uploadFileMap","createFilesInput","toImport","mdbid","newKey","replace","newFile","location","folderId","push","assets","uploadFilesFromS3","createFilesInBatch","exports"],"sources":["uploadAssets.ts"],"sourcesContent":["import { mdbid } from \"@webiny/utils\";\nimport { PbImportExportContext } from \"~/graphql/types\";\nimport { File, FileInput } from \"@webiny/api-file-manager/types\";\nimport { UploadFileMap, uploadFilesFromS3 } from \"~/import/utils/uploadFilesFromS3\";\nimport { FileUploadsData } from \"~/types\";\n\ninterface UploadAssetsParams {\n context: PbImportExportContext;\n files: File[];\n fileUploadsData: FileUploadsData;\n}\n\nfunction notAPreviouslyImportedFile(importedImages: File[]) {\n return (file: File) => {\n return !importedImages.some(\n existingImportedImage => existingImportedImage.meta.originalKey === file.key\n );\n };\n}\n\nfunction notAnExistingFile(existingFiles: File[]) {\n return (file: File) => {\n return !existingFiles.some(existingFile => existingFile.key === file.key);\n };\n}\n\nexport const uploadAssets = async (params: UploadAssetsParams) => {\n const { context, files, fileUploadsData } = params;\n\n const oldIdToNewFileMap = new Map<string, FileInput>();\n\n /**\n * This function contains logic of file download from S3.\n * Current we're not mocking zip file download from S3 in tests at the moment.\n * So, we're manually mocking it in case of test just by returning an empty object.\n */\n if (process.env.NODE_ENV === \"test\") {\n return oldIdToNewFileMap;\n }\n\n // Check if the requested files were already imported in the past.\n const [importedImages] = await context.fileManager.listFiles({\n where: { meta: { originalKey_in: files.map(file => file.key) } }\n });\n\n // Link files that were already imported.\n for (const importedImage of importedImages) {\n const fileBeingImported = files.find(file => file.key === importedImage.meta.originalKey);\n\n if (fileBeingImported) {\n oldIdToNewFileMap.set(fileBeingImported.id, importedImage);\n }\n }\n\n // Check if files with such IDs already exist.\n const [existingFiles] = await context.fileManager.listFiles({\n where: { id_in: files.map(file => file.id) }\n });\n\n const newFilesToImport = files\n .filter(notAnExistingFile(existingFiles))\n .filter(notAPreviouslyImportedFile(importedImages));\n\n // A map of temporary file keys (created during ZIP upload) to permanent file keys.\n const uploadFileMap: UploadFileMap = new Map();\n\n // Array of file inputs, to insert into the DB.\n const createFilesInput: FileInput[] = [];\n\n for (const toImport of newFilesToImport) {\n // We generate a new file id, key, and add `meta.originalKey` property to prevent duplicates on future imports.\n const id = mdbid();\n const newKey = `${id}/${toImport.key.replace(`${toImport.id}/`, \"\")}`;\n const newFile: FileInput = {\n ...toImport,\n id,\n location: {\n folderId: \"root\"\n },\n key: newKey,\n meta: { ...toImport.meta, originalKey: toImport.key }\n };\n\n createFilesInput.push(newFile);\n oldIdToNewFileMap.set(toImport.id, 
newFile);\n uploadFileMap.set(fileUploadsData.assets[toImport.key], newFile);\n }\n\n await uploadFilesFromS3(uploadFileMap);\n\n await context.fileManager.createFilesInBatch(createFilesInput);\n\n return oldIdToNewFileMap;\n};\n"],"mappings":";;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAGA,IAAAC,iBAAA,GAAAD,OAAA;AASA,SAASE,0BAA0BA,CAACC,cAAsB,EAAE;EACxD,OAAQC,IAAU,IAAK;IACnB,OAAO,CAACD,cAAc,CAACE,IAAI,CACvBC,qBAAqB,IAAIA,qBAAqB,CAACC,IAAI,CAACC,WAAW,KAAKJ,IAAI,CAACK,GAC7E,CAAC;EACL,CAAC;AACL;AAEA,SAASC,iBAAiBA,CAACC,aAAqB,EAAE;EAC9C,OAAQP,IAAU,IAAK;IACnB,OAAO,CAACO,aAAa,CAACN,IAAI,CAACO,YAAY,IAAIA,YAAY,CAACH,GAAG,KAAKL,IAAI,CAACK,GAAG,CAAC;EAC7E,CAAC;AACL;AAEO,MAAMI,YAAY,GAAG,MAAOC,MAA0B,IAAK;EAC9D,MAAM;IAAEC,OAAO;IAAEC,KAAK;IAAEC;EAAgB,CAAC,GAAGH,MAAM;EAElD,MAAMI,iBAAiB,GAAG,IAAIC,GAAG,CAAoB,CAAC;;EAEtD;AACJ;AACA;AACA;AACA;EACI,IAAIC,OAAO,CAACC,GAAG,CAACC,QAAQ,KAAK,MAAM,EAAE;IACjC,OAAOJ,iBAAiB;EAC5B;;EAEA;EACA,MAAM,CAACf,cAAc,CAAC,GAAG,MAAMY,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACzDC,KAAK,EAAE;MAAElB,IAAI,EAAE;QAAEmB,cAAc,EAAEV,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAACK,GAAG;MAAE;IAAE;EACnE,CAAC,CAAC;;EAEF;EACA,KAAK,MAAMmB,aAAa,IAAIzB,cAAc,EAAE;IACxC,MAAM0B,iBAAiB,GAAGb,KAAK,CAACc,IAAI,CAAC1B,IAAI,IAAIA,IAAI,CAACK,GAAG,KAAKmB,aAAa,CAACrB,IAAI,CAACC,WAAW,CAAC;IAEzF,IAAIqB,iBAAiB,EAAE;MACnBX,iBAAiB,CAACa,GAAG,CAACF,iBAAiB,CAACG,EAAE,EAAEJ,aAAa,CAAC;IAC9D;EACJ;;EAEA;EACA,MAAM,CAACjB,aAAa,CAAC,GAAG,MAAMI,OAAO,CAACQ,WAAW,CAACC,SAAS,CAAC;IACxDC,KAAK,EAAE;MAAEQ,KAAK,EAAEjB,KAAK,CAACW,GAAG,CAACvB,IAAI,IAAIA,IAAI,CAAC4B,EAAE;IAAE;EAC/C,CAAC,CAAC;EAEF,MAAME,gBAAgB,GAAGlB,KAAK,CACzBmB,MAAM,CAACzB,iBAAiB,CAACC,aAAa,CAAC,CAAC,CACxCwB,MAAM,CAACjC,0BAA0B,CAACC,cAAc,CAAC,CAAC;;EAEvD;EACA,MAAMiC,aAA4B,GAAG,IAAIjB,GAAG,CAAC,CAAC;;EAE9C;EACA,MAAMkB,gBAA6B,GAAG,EAAE;EAExC,KAAK,MAAMC,QAAQ,IAAIJ,gBAAgB,EAAE;IACrC;IACA,MAAMF,EAAE,GAAG,IAAAO,YAAK,EAAC,CAAC;IAClB,MAAMC,MAAM,GAAI,GAAER,EAAG,IAAGM,QAAQ,CAAC7B,GAAG,CAACgC,OAAO,CAAE,GAAEH,QAAQ,CAACN,EAAG,GAAE,EAAE,EAAE,CAAE,EAAC;IACrE,MAAMU,OAAkB,GAAG;MACvB,GAAGJ,QAAQ;MACXN,EAAE;MACFW,QAAQ,EAAE;QACNC,QAAQ,EAAE;MACd,CAAC;MACDnC,GAAG,EAAE+B,MAAM;MACXjC,IAAI,EAAE;QAAE,GAAG+B,QAAQ,CAAC/B,IAAI;QAAEC,WAAW,EAAE8B,QAAQ,CAAC7B;MAAI;IACxD,CAAC;IAED4B,gBAAgB,CAACQ,IAAI,CAACH,OAAO,CAAC;IAC9BxB,iBAAiB,CAACa,GAAG,CAACO,QAAQ,CAACN,EAAE,EAAEU,OAAO,CAAC;IAC3CN,aAAa,CAACL,GAAG,CAACd,eAAe,CAAC6B,MAAM,CAACR,QAAQ,CAAC7B,GAAG,CAAC,EAAEiC,OAAO,CAAC;EACpE;EAEA,MAAM,IAAAK,mCAAiB,EAACX,aAAa,CAAC;EAEtC,MAAMrB,OAAO,CAACQ,WAAW,CAACyB,kBAAkB,CAACX,gBAAgB,CAAC;EAE9D,OAAOnB,iBAAiB;AAC5B,CAAC;AAAC+B,OAAA,CAAApC,YAAA,GAAAA,YAAA"}
package/import/utils/uploadFilesFromS3.d.ts CHANGED
@@ -1,3 +1,3 @@
1
1
  import { FileInput } from "@webiny/api-file-manager/types";
2
2
  export declare type UploadFileMap = Map<string, FileInput>;
3
- export declare function uploadFilesFromS3(fileMap: UploadFileMap): Promise<import("aws-sdk/clients/s3").ManagedUpload.SendData[]>;
3
+ export declare function uploadFilesFromS3(fileMap: UploadFileMap): Promise<(import("@aws-sdk/client-s3").AbortMultipartUploadCommandOutput | import("@aws-sdk/client-s3").CompleteMultipartUploadCommandOutput)[]>;
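The declaration change above swaps the AWS SDK v2 `ManagedUpload.SendData[]` result for the SDK v3 upload output union. Callers that inspected fields of the old result now need to narrow the union first; one possible type guard (illustrative only, not part of the package):

import type {
    AbortMultipartUploadCommandOutput,
    CompleteMultipartUploadCommandOutput
} from "@aws-sdk/client-s3";

type UploadOutput =
    | AbortMultipartUploadCommandOutput
    | CompleteMultipartUploadCommandOutput;

// A completed multipart upload reports the object's `Key` (and usually `Location`/`ETag`),
// while an aborted upload does not, so the presence of `Key` distinguishes the two cases.
function isCompletedUpload(output: UploadOutput): output is CompleteMultipartUploadCommandOutput {
    return "Key" in output && output.Key !== undefined;
}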
package/import/utils/uploadFilesFromS3.js CHANGED
@@ -9,7 +9,7 @@ async function uploadFilesFromS3(fileMap) {
9
9
  const promises = [];
10
10
  for (const [source, target] of Array.from(fileMap.entries())) {
11
11
  // Read file.
12
- const readStream = _s3Stream.s3Stream.readStream(source);
12
+ const readStream = await _s3Stream.s3Stream.readStream(source);
13
13
  const ws = _s3Stream.s3Stream.writeStream(target.key, target.type);
14
14
  readStream.pipe(ws.streamPassThrough);
15
15
  promises.push(ws.streamPassThroughUploadPromise);
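The one-line change above reflects that `s3Stream.readStream` now resolves asynchronously. That is the usual consequence of moving from AWS SDK v2, where `getObject(...).createReadStream()` returns a stream synchronously, to SDK v3, where the object has to be fetched before its body stream is available. A hedged sketch of the v3 pattern, assuming a Node.js runtime where the response `Body` is a readable stream (the s3Stream internals themselves are not shown in this hunk):

import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
import type { Readable } from "stream";

// Fetch an S3 object and hand back its body as a readable stream (Node.js runtime).
async function readS3Object(client: S3Client, bucket: string, key: string): Promise<Readable> {
    const { Body } = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
    if (!Body) {
        throw new Error(`No body returned for s3://${bucket}/${key}`);
    }
    return Body as Readable;
}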
package/import/utils/uploadFilesFromS3.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"names":["_s3Stream","require","uploadFilesFromS3","fileMap","promises","source","target","Array","from","entries","readStream","s3Stream","ws","writeStream","key","type","pipe","streamPassThrough","push","streamPassThroughUploadPromise","console","log","Promise","all"],"sources":["uploadFilesFromS3.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\n\nexport type UploadFileMap = Map<string, FileInput>;\n\nexport async function uploadFilesFromS3(fileMap: UploadFileMap) {\n const promises = [];\n for (const [source, target] of Array.from(fileMap.entries())) {\n // Read file.\n const readStream = s3Stream.readStream(source);\n\n const ws = s3Stream.writeStream(target.key, target.type);\n readStream.pipe(ws.streamPassThrough);\n promises.push(ws.streamPassThroughUploadPromise);\n\n console.log(`Successfully queued file \"${target.key}\"`);\n }\n\n return Promise.all(promises);\n}\n"],"mappings":";;;;;;AAAA,IAAAA,SAAA,GAAAC,OAAA;AAKO,eAAeC,iBAAiBA,CAACC,OAAsB,EAAE;EAC5D,MAAMC,QAAQ,GAAG,EAAE;EACnB,KAAK,MAAM,CAACC,MAAM,EAAEC,MAAM,CAAC,IAAIC,KAAK,CAACC,IAAI,CAACL,OAAO,CAACM,OAAO,CAAC,CAAC,CAAC,EAAE;IAC1D;IACA,MAAMC,UAAU,GAAGC,kBAAQ,CAACD,UAAU,CAACL,MAAM,CAAC;IAE9C,MAAMO,EAAE,GAAGD,kBAAQ,CAACE,WAAW,CAACP,MAAM,CAACQ,GAAG,EAAER,MAAM,CAACS,IAAI,CAAC;IACxDL,UAAU,CAACM,IAAI,CAACJ,EAAE,CAACK,iBAAiB,CAAC;IACrCb,QAAQ,CAACc,IAAI,CAACN,EAAE,CAACO,8BAA8B,CAAC;IAEhDC,OAAO,CAACC,GAAG,CAAE,6BAA4Bf,MAAM,CAACQ,GAAI,GAAE,CAAC;EAC3D;EAEA,OAAOQ,OAAO,CAACC,GAAG,CAACnB,QAAQ,CAAC;AAChC"}
1
+ {"version":3,"names":["_s3Stream","require","uploadFilesFromS3","fileMap","promises","source","target","Array","from","entries","readStream","s3Stream","ws","writeStream","key","type","pipe","streamPassThrough","push","streamPassThroughUploadPromise","console","log","Promise","all"],"sources":["uploadFilesFromS3.ts"],"sourcesContent":["import { s3Stream } from \"~/export/s3Stream\";\nimport { FileInput } from \"@webiny/api-file-manager/types\";\n\nexport type UploadFileMap = Map<string, FileInput>;\n\nexport async function uploadFilesFromS3(fileMap: UploadFileMap) {\n const promises = [];\n for (const [source, target] of Array.from(fileMap.entries())) {\n // Read file.\n const readStream = await s3Stream.readStream(source);\n\n const ws = s3Stream.writeStream(target.key, target.type);\n readStream.pipe(ws.streamPassThrough);\n promises.push(ws.streamPassThroughUploadPromise);\n\n console.log(`Successfully queued file \"${target.key}\"`);\n }\n\n return Promise.all(promises);\n}\n"],"mappings":";;;;;;AAAA,IAAAA,SAAA,GAAAC,OAAA;AAKO,eAAeC,iBAAiBA,CAACC,OAAsB,EAAE;EAC5D,MAAMC,QAAQ,GAAG,EAAE;EACnB,KAAK,MAAM,CAACC,MAAM,EAAEC,MAAM,CAAC,IAAIC,KAAK,CAACC,IAAI,CAACL,OAAO,CAACM,OAAO,CAAC,CAAC,CAAC,EAAE;IAC1D;IACA,MAAMC,UAAU,GAAG,MAAMC,kBAAQ,CAACD,UAAU,CAACL,MAAM,CAAC;IAEpD,MAAMO,EAAE,GAAGD,kBAAQ,CAACE,WAAW,CAACP,MAAM,CAACQ,GAAG,EAAER,MAAM,CAACS,IAAI,CAAC;IACxDL,UAAU,CAACM,IAAI,CAACJ,EAAE,CAACK,iBAAiB,CAAC;IACrCb,QAAQ,CAACc,IAAI,CAACN,EAAE,CAACO,8BAA8B,CAAC;IAEhDC,OAAO,CAACC,GAAG,CAAE,6BAA4Bf,MAAM,CAACQ,GAAI,GAAE,CAAC;EAC3D;EAEA,OAAOQ,OAAO,CAACC,GAAG,CAACnB,QAAQ,CAAC;AAChC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@webiny/api-page-builder-import-export",
3
- "version": "5.39.0-beta.0",
3
+ "version": "5.39.0-beta.2",
4
4
  "main": "index.js",
5
5
  "keywords": [
6
6
  "pbie:base"
@@ -16,18 +16,19 @@
16
16
  "dependencies": {
17
17
  "@babel/runtime": "7.22.6",
18
18
  "@commodo/fields": "1.1.2-beta.20",
19
- "@webiny/api": "5.39.0-beta.0",
20
- "@webiny/api-file-manager": "5.39.0-beta.0",
21
- "@webiny/api-form-builder": "5.39.0-beta.0",
22
- "@webiny/api-page-builder": "5.39.0-beta.0",
23
- "@webiny/api-security": "5.39.0-beta.0",
24
- "@webiny/error": "5.39.0-beta.0",
25
- "@webiny/handler": "5.39.0-beta.0",
26
- "@webiny/handler-aws": "5.39.0-beta.0",
27
- "@webiny/handler-graphql": "5.39.0-beta.0",
28
- "@webiny/pubsub": "5.39.0-beta.0",
29
- "@webiny/utils": "5.39.0-beta.0",
30
- "@webiny/validation": "5.39.0-beta.0",
19
+ "@webiny/api": "5.39.0-beta.2",
20
+ "@webiny/api-file-manager": "5.39.0-beta.2",
21
+ "@webiny/api-form-builder": "5.39.0-beta.2",
22
+ "@webiny/api-page-builder": "5.39.0-beta.2",
23
+ "@webiny/api-security": "5.39.0-beta.2",
24
+ "@webiny/aws-sdk": "5.39.0-beta.2",
25
+ "@webiny/error": "5.39.0-beta.2",
26
+ "@webiny/handler": "5.39.0-beta.2",
27
+ "@webiny/handler-aws": "5.39.0-beta.2",
28
+ "@webiny/handler-graphql": "5.39.0-beta.2",
29
+ "@webiny/pubsub": "5.39.0-beta.2",
30
+ "@webiny/utils": "5.39.0-beta.2",
31
+ "@webiny/validation": "5.39.0-beta.2",
31
32
  "archiver": "5.3.1",
32
33
  "commodo-fields-object": "1.0.6",
33
34
  "dot-prop-immutable": "2.1.1",
@@ -48,13 +49,13 @@
48
49
  "@types/archiver": "5.3.1",
49
50
  "@types/node-fetch": "2.6.2",
50
51
  "@types/yauzl": "2.10.0",
51
- "@webiny/api-authentication": "5.39.0-beta.0",
52
- "@webiny/api-dynamodb-to-elasticsearch": "5.39.0-beta.0",
53
- "@webiny/api-tenancy": "5.39.0-beta.0",
54
- "@webiny/api-wcp": "5.39.0-beta.0",
55
- "@webiny/cli": "5.39.0-beta.0",
56
- "@webiny/db": "5.39.0-beta.0",
57
- "@webiny/project-utils": "5.39.0-beta.0",
52
+ "@webiny/api-authentication": "5.39.0-beta.2",
53
+ "@webiny/api-dynamodb-to-elasticsearch": "5.39.0-beta.2",
54
+ "@webiny/api-tenancy": "5.39.0-beta.2",
55
+ "@webiny/api-wcp": "5.39.0-beta.2",
56
+ "@webiny/cli": "5.39.0-beta.2",
57
+ "@webiny/db": "5.39.0-beta.2",
58
+ "@webiny/project-utils": "5.39.0-beta.2",
58
59
  "jest": "29.5.0",
59
60
  "jest-dynalite": "3.6.1",
60
61
  "rimraf": "3.0.2",
@@ -69,12 +70,5 @@
69
70
  "build": "yarn webiny run build",
70
71
  "watch": "yarn webiny run watch"
71
72
  },
72
- "adio": {
73
- "ignore": {
74
- "src": [
75
- "aws-sdk"
76
- ]
77
- }
78
- },
79
- "gitHead": "df94742fba6658ed3507e1e17ab53dc77bb66330"
73
+ "gitHead": "193039382160557448f23f43685f29136f58f87a"
80
74
  }
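Taken together, the `package.json` changes line up with the source changes above: the new `@webiny/aws-sdk` dependency replaces direct use of the v2 `aws-sdk` package, which is presumably also why the `adio.ignore` entry for `aws-sdk` could be dropped. For consuming code the switch is mostly an import-path and type-name change; a sketch, with the v2 form shown as comments for comparison (the v3 import path and type are the ones visible in the updated source maps above):

// AWS SDK v2 (previous releases):
// import S3 from "aws-sdk/clients/s3";
// type UploadResult = S3.ManagedUpload.SendData;

// AWS SDK v3 via the @webiny/aws-sdk wrapper:
import { CompleteMultipartUploadOutput } from "@webiny/aws-sdk/client-s3";

export type UploadResult = CompleteMultipartUploadOutput;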