@webiny/api-page-builder-import-export 0.0.0-unstable.990c3ab1b6 → 0.0.0-unstable.d4f203fa97

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/client.js +6 -2
  2. package/client.js.map +1 -1
  3. package/exportPages/combine/index.js +19 -11
  4. package/exportPages/combine/index.js.map +1 -1
  5. package/exportPages/process/index.js +35 -19
  6. package/exportPages/process/index.js.map +1 -1
  7. package/exportPages/s3Stream.js +20 -1
  8. package/exportPages/s3Stream.js.map +1 -1
  9. package/exportPages/utils.js +31 -18
  10. package/exportPages/utils.js.map +1 -1
  11. package/exportPages/zipper.js +41 -29
  12. package/exportPages/zipper.js.map +1 -1
  13. package/graphql/crud/pageImportExportTasks.crud.js +57 -9
  14. package/graphql/crud/pageImportExportTasks.crud.js.map +1 -1
  15. package/graphql/crud/pages.crud.js +39 -21
  16. package/graphql/crud/pages.crud.js.map +1 -1
  17. package/graphql/crud.js +5 -0
  18. package/graphql/crud.js.map +1 -1
  19. package/graphql/graphql/pageImportExportTasks.gql.js +6 -1
  20. package/graphql/graphql/pageImportExportTasks.gql.js.map +1 -1
  21. package/graphql/graphql/pages.gql.js +6 -1
  22. package/graphql/graphql/pages.gql.js.map +1 -1
  23. package/graphql/graphql/utils/resolve.js +3 -0
  24. package/graphql/graphql/utils/resolve.js.map +1 -1
  25. package/graphql/graphql.js +4 -0
  26. package/graphql/graphql.js.map +1 -1
  27. package/graphql/index.js +5 -0
  28. package/graphql/index.js.map +1 -1
  29. package/importPages/create/index.js +20 -9
  30. package/importPages/create/index.js.map +1 -1
  31. package/importPages/process/index.js +29 -24
  32. package/importPages/process/index.js.map +1 -1
  33. package/importPages/utils.js +126 -44
  34. package/importPages/utils.js.map +1 -1
  35. package/mockSecurity.js +2 -0
  36. package/mockSecurity.js.map +1 -1
  37. package/package.json +23 -23
  38. package/types.js +5 -0
  39. package/types.js.map +1 -1
package/exportPages/utils.js
@@ -1,51 +1,59 @@
  "use strict";
  
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.EXPORT_PAGES_FOLDER_KEY = void 0;
  exports.exportPage = exportPage;
  exports.extractFilesFromPageData = extractFilesFromPageData;
+
  var _get = _interopRequireDefault(require("lodash/get"));
+
  var _s3Stream = require("./s3Stream");
+
  var _zipper = _interopRequireDefault(require("./zipper"));
+
  const EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
  exports.EXPORT_PAGES_FOLDER_KEY = EXPORT_PAGES_FOLDER_KEY;
+
  async function getFilteredFiles(files) {
  const uniqueFileKeys = new Map();
  const promises = files.map(file => _s3Stream.s3Stream.isFileAccessible(file.key));
  const isFileAvailableResults = await Promise.all(promises);
- const filesAvailableForDownload = [];
- // Filter files
+ const filesAvailableForDownload = []; // Filter files
+
  for (let i = 0; i < files.length; i++) {
- const file = files[i];
- // Check file accessibility
+ const file = files[i]; // Check file accessibility
+
  if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {
  filesAvailableForDownload.push(file);
  uniqueFileKeys.set(file.key, true);
  }
  }
+
  return filesAvailableForDownload;
  }
+
  async function exportPage(page, exportPagesDataKey, fileManager) {
  // Extract all files
- const files = extractFilesFromPageData(page.content || {});
- // Filter files
- const filesAvailableForDownload = await getFilteredFiles(files);
- // Extract images from page settings
+ const files = extractFilesFromPageData(page.content || {}); // Filter files
+
+ const filesAvailableForDownload = await getFilteredFiles(files); // Extract images from page settings
+
  const pageSettingsImages = [(0, _get.default)(page, "settings.general.image"), (0, _get.default)(page, "settings.social.image")].filter(image => image && image.src);
- const pageSettingsImagesData = [];
- // Get file data for all images inside "page.settings"
+ const pageSettingsImagesData = []; // Get file data for all images inside "page.settings"
+
  for (let i = 0; i < pageSettingsImages.length; i++) {
  const {
  id
  } = pageSettingsImages[i];
  const file = await fileManager.files.getFile(id);
  pageSettingsImagesData.push(file);
- }
+ } // Extract the page data in a json file and upload it to S3
+
  
- // Extract the page data in a json file and upload it to S3
  const pageData = {
  page: {
  content: page.content,
@@ -68,25 +76,29 @@ async function exportPage(page, exportPagesDataKey, fileManager) {
  });
  return zipper.process();
  }
+
  function extractFilesFromPageData(data, files = []) {
  // Base case: termination
  if (!data || typeof data !== "object") {
  return files;
- }
- // Recursively call function for each element
+ } // Recursively call function for each element
+
+
  if (Array.isArray(data)) {
  for (let i = 0; i < data.length; i++) {
  const element = data[i];
  extractFilesFromPageData(element, files);
  }
+
  return files;
- }
+ } // Main
+
  
- // Main
  const tuple = Object.entries(data);
+
  for (let i = 0; i < tuple.length; i++) {
- const [key, value] = tuple[i];
- // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
+ const [key, value] = tuple[i]; // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
+
  if (key === "file" && value) {
  files.push(value);
  } else if (key === "images" && Array.isArray(value)) {
@@ -96,5 +108,6 @@ function extractFilesFromPageData(data, files = []) {
  extractFilesFromPageData(value, files);
  }
  }
+
  return files;
  }
package/exportPages/utils.js.map
@@ -1 +1 @@
- {"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","getFilteredFiles","files","uniqueFileKeys","Map","promises","map","file","s3Stream","isFileAccessible","key","isFileAvailableResults","Promise","all","filesAvailableForDownload","i","length","has","push","set","exportPage","page","exportPagesDataKey","fileManager","extractFilesFromPageData","content","pageSettingsImages","get","filter","image","src","pageSettingsImagesData","id","getFile","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","pageTitle","archiveFileKey","process","data","Array","isArray","element","tuple","Object","entries","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, File } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport { s3Stream } from \"./s3Stream\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nasync function getFilteredFiles(files: ImageFile[]) {\n const uniqueFileKeys = new Map<string, boolean>();\n const promises = files.map(file => s3Stream.isFileAccessible(file.key));\n const isFileAvailableResults = await Promise.all(promises);\n\n const filesAvailableForDownload = [];\n // Filter files\n for (let i = 0; i < files.length; i++) {\n const file = files[i];\n // Check file accessibility\n if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {\n filesAvailableForDownload.push(file);\n uniqueFileKeys.set(file.key, true);\n }\n }\n return filesAvailableForDownload;\n}\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: ImageFile[];\n}\n\nexport async function exportPage(\n page: Page,\n exportPagesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromPageData(page.content || {});\n // Filter files\n const filesAvailableForDownload = await getFilteredFiles(files);\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n const pageSettingsImagesData = [];\n // Get file data for all images inside \"page.settings\"\n for (let i = 0; i < pageSettingsImages.length; i++) {\n const { id } = pageSettingsImages[i];\n const file = await fileManager.files.getFile(id);\n pageSettingsImagesData.push(file);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: [...filesAvailableForDownload, ...pageSettingsImagesData]\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [...filesAvailableForDownload, ...pageSettingsImagesData],\n pageTitle: page.title,\n pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ImageFile extends Omit<File, \"src\"> {\n key: string;\n}\n\nexport function extractFilesFromPageData(\n data: Record<string, any>,\n files: any[] = []\n): ImageFile[] {\n // Base case: termination\n if (!data || 
typeof data !== \"object\") {\n return files;\n }\n // Recursively call function for each element\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n extractFilesFromPageData(element, files);\n }\n return files;\n }\n\n // Main\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n if (key === \"file\" && value) {\n files.push(value);\n } else if (key === \"images\" && Array.isArray(value)) {\n // Handle case for \"images-list\" component\n files.push(...value);\n } else {\n extractFilesFromPageData(value, files);\n }\n }\n return files;\n}\n"],"mappings":";;;;;;;;;AAGA;AACA;AACA;AAEO,MAAMA,uBAAuB,GAAG,wBAAwB;AAAC;AAEhE,eAAeC,gBAAgB,CAACC,KAAkB,EAAE;EAChD,MAAMC,cAAc,GAAG,IAAIC,GAAG,EAAmB;EACjD,MAAMC,QAAQ,GAAGH,KAAK,CAACI,GAAG,CAACC,IAAI,IAAIC,kBAAQ,CAACC,gBAAgB,CAACF,IAAI,CAACG,GAAG,CAAC,CAAC;EACvE,MAAMC,sBAAsB,GAAG,MAAMC,OAAO,CAACC,GAAG,CAACR,QAAQ,CAAC;EAE1D,MAAMS,yBAAyB,GAAG,EAAE;EACpC;EACA,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGb,KAAK,CAACc,MAAM,EAAED,CAAC,EAAE,EAAE;IACnC,MAAMR,IAAI,GAAGL,KAAK,CAACa,CAAC,CAAC;IACrB;IACA,IAAIJ,sBAAsB,CAACI,CAAC,CAAC,IAAI,CAACZ,cAAc,CAACc,GAAG,CAACV,IAAI,CAACG,GAAG,CAAC,EAAE;MAC5DI,yBAAyB,CAACI,IAAI,CAACX,IAAI,CAAC;MACpCJ,cAAc,CAACgB,GAAG,CAACZ,IAAI,CAACG,GAAG,EAAE,IAAI,CAAC;IACtC;EACJ;EACA,OAAOI,yBAAyB;AACpC;AAOO,eAAeM,UAAU,CAC5BC,IAAU,EACVC,kBAA0B,EAC1BC,WAA8C,EACZ;EAClC;EACA,MAAMrB,KAAK,GAAGsB,wBAAwB,CAACH,IAAI,CAACI,OAAO,IAAI,CAAC,CAAC,CAAC;EAC1D;EACA,MAAMX,yBAAyB,GAAG,MAAMb,gBAAgB,CAACC,KAAK,CAAC;EAC/D;EACA,MAAMwB,kBAAkB,GAAG,CACvB,IAAAC,YAAG,EAACN,IAAI,EAAE,wBAAwB,CAAC,EACnC,IAAAM,YAAG,EAACN,IAAI,EAAE,uBAAuB,CAAC,CACrC,CAACO,MAAM,CAACC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAAG,CAAC;EACrC,MAAMC,sBAAsB,GAAG,EAAE;EACjC;EACA,KAAK,IAAIhB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGW,kBAAkB,CAACV,MAAM,EAAED,CAAC,EAAE,EAAE;IAChD,MAAM;MAAEiB;IAAG,CAAC,GAAGN,kBAAkB,CAACX,CAAC,CAAC;IACpC,MAAMR,IAAI,GAAG,MAAMgB,WAAW,CAACrB,KAAK,CAAC+B,OAAO,CAACD,EAAE,CAAC;IAChDD,sBAAsB,CAACb,IAAI,CAACX,IAAI,CAAC;EACrC;;EAEA;EACA,MAAM2B,QAAQ,GAAG;IACbb,IAAI,EAAE;MACFI,OAAO,EAAEJ,IAAI,CAACI,OAAO;MACrBU,KAAK,EAAEd,IAAI,CAACc,KAAK;MACjBC,IAAI,EAAEf,IAAI,CAACe,IAAI;MACfC,OAAO,EAAEhB,IAAI,CAACgB,OAAO;MACrBC,MAAM,EAAEjB,IAAI,CAACiB,MAAM;MACnBC,QAAQ,EAAElB,IAAI,CAACkB;IACnB,CAAC;IACDrC,KAAK,EAAE,CAAC,GAAGY,yBAAyB,EAAE,GAAGiB,sBAAsB;EACnE,CAAC;EACD,MAAMS,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,QAAQ,CAAC,CAAC;EAE5D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR7C,KAAK,EAAE,CAAC,GAAGY,yBAAyB,EAAE,GAAGiB,sBAAsB,CAAC;MAChEiB,SAAS,EAAE3B,IAAI,CAACc,KAAK;MACrBK;IACJ,CAAC;IACDS,cAAc,EAAE3B;EACpB,CAAC,CAAC;EAEF,OAAOuB,MAAM,CAACK,OAAO,EAAE;AAC3B;AAMO,SAAS1B,wBAAwB,CACpC2B,IAAyB,EACzBjD,KAAY,GAAG,EAAE,EACN;EACX;EACA,IAAI,CAACiD,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IACnC,OAAOjD,KAAK;EAChB;EACA;EACA,IAAIkD,KAAK,CAACC,OAAO,CAACF,IAAI,CAAC,EAAE;IACrB,KAAK,IAAIpC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGoC,IAAI,CAACnC,MAAM,EAAED,CAAC,EAAE,EAAE;MAClC,MAAMuC,OAAO,GAAGH,IAAI,CAACpC,CAAC,CAAC;MACvBS,wBAAwB,CAAC8B,OAAO,EAAEpD,KAAK,CAAC;IAC5C;IACA,OAAOA,KAAK;EAChB;;EAEA;EACA,MAAMqD,KAAK,GAAGC,MAAM,CAACC,OAAO,CAACN,IAAI,CAAC;EAClC,KAAK,IAAIpC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGwC,KAAK,CAACvC,MAAM,EAAED,CAAC,EAAE,EAAE;IACnC,MAAM,CAACL,GAAG,EAAEgD,KAAK,CAAC,GAAGH,KAAK,CAACxC,CAAC,CAAC;IAC7B;IACA,IAAIL,GAAG,KAAK,MAAM,IAAIgD,KAAK,EAAE;MACzBxD,KAAK,CAACgB,IAAI,CAACwC,KAAK,CAAC;IACrB,CAAC,MAAM,IAAIhD,GAAG,KAAK,QAAQ,IAAI0
C,KAAK,CAACC,OAAO,CAACK,KAAK,CAAC,EAAE;MACjD;MACAxD,KAAK,CAACgB,IAAI,CAAC,GAAGwC,KAAK,CAAC;IACxB,CAAC,MAAM;MACHlC,wBAAwB,CAACkC,KAAK,EAAExD,KAAK,CAAC;IAC1C;EACJ;EACA,OAAOA,KAAK;AAChB"}
+ {"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","getFilteredFiles","files","uniqueFileKeys","Map","promises","map","file","s3Stream","isFileAccessible","key","isFileAvailableResults","Promise","all","filesAvailableForDownload","i","length","has","push","set","exportPage","page","exportPagesDataKey","fileManager","extractFilesFromPageData","content","pageSettingsImages","get","filter","image","src","pageSettingsImagesData","id","getFile","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","pageTitle","archiveFileKey","process","data","Array","isArray","element","tuple","Object","entries","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, File } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport { s3Stream } from \"./s3Stream\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\n\nasync function getFilteredFiles(files: ImageFile[]) {\n const uniqueFileKeys = new Map<string, boolean>();\n const promises = files.map(file => s3Stream.isFileAccessible(file.key));\n const isFileAvailableResults = await Promise.all(promises);\n\n const filesAvailableForDownload = [];\n // Filter files\n for (let i = 0; i < files.length; i++) {\n const file = files[i];\n // Check file accessibility\n if (isFileAvailableResults[i] && !uniqueFileKeys.has(file.key)) {\n filesAvailableForDownload.push(file);\n uniqueFileKeys.set(file.key, true);\n }\n }\n return filesAvailableForDownload;\n}\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: ImageFile[];\n}\n\nexport async function exportPage(\n page: Page,\n exportPagesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromPageData(page.content || {});\n // Filter files\n const filesAvailableForDownload = await getFilteredFiles(files);\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n const pageSettingsImagesData = [];\n // Get file data for all images inside \"page.settings\"\n for (let i = 0; i < pageSettingsImages.length; i++) {\n const { id } = pageSettingsImages[i];\n const file = await fileManager.files.getFile(id);\n pageSettingsImagesData.push(file);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: [...filesAvailableForDownload, ...pageSettingsImagesData]\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [...filesAvailableForDownload, ...pageSettingsImagesData],\n pageTitle: page.title,\n pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ImageFile extends Omit<File, \"src\"> {\n key: string;\n}\n\nexport function extractFilesFromPageData(\n data: Record<string, any>,\n files: any[] = []\n): ImageFile[] {\n // Base case: termination\n if (!data || 
typeof data !== \"object\") {\n return files;\n }\n // Recursively call function for each element\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n extractFilesFromPageData(element, files);\n }\n return files;\n }\n\n // Main\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n if (key === \"file\" && value) {\n files.push(value);\n } else if (key === \"images\" && Array.isArray(value)) {\n // Handle case for \"images-list\" component\n files.push(...value);\n } else {\n extractFilesFromPageData(value, files);\n }\n }\n return files;\n}\n"],"mappings":";;;;;;;;;;;AAGA;;AACA;;AACA;;AAEO,MAAMA,uBAAuB,GAAG,wBAAhC;;;AAEP,eAAeC,gBAAf,CAAgCC,KAAhC,EAAoD;EAChD,MAAMC,cAAc,GAAG,IAAIC,GAAJ,EAAvB;EACA,MAAMC,QAAQ,GAAGH,KAAK,CAACI,GAAN,CAAUC,IAAI,IAAIC,kBAAA,CAASC,gBAAT,CAA0BF,IAAI,CAACG,GAA/B,CAAlB,CAAjB;EACA,MAAMC,sBAAsB,GAAG,MAAMC,OAAO,CAACC,GAAR,CAAYR,QAAZ,CAArC;EAEA,MAAMS,yBAAyB,GAAG,EAAlC,CALgD,CAMhD;;EACA,KAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGb,KAAK,CAACc,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAMR,IAAI,GAAGL,KAAK,CAACa,CAAD,CAAlB,CADmC,CAEnC;;IACA,IAAIJ,sBAAsB,CAACI,CAAD,CAAtB,IAA6B,CAACZ,cAAc,CAACc,GAAf,CAAmBV,IAAI,CAACG,GAAxB,CAAlC,EAAgE;MAC5DI,yBAAyB,CAACI,IAA1B,CAA+BX,IAA/B;MACAJ,cAAc,CAACgB,GAAf,CAAmBZ,IAAI,CAACG,GAAxB,EAA6B,IAA7B;IACH;EACJ;;EACD,OAAOI,yBAAP;AACH;;AAOM,eAAeM,UAAf,CACHC,IADG,EAEHC,kBAFG,EAGHC,WAHG,EAI+B;EAClC;EACA,MAAMrB,KAAK,GAAGsB,wBAAwB,CAACH,IAAI,CAACI,OAAL,IAAgB,EAAjB,CAAtC,CAFkC,CAGlC;;EACA,MAAMX,yBAAyB,GAAG,MAAMb,gBAAgB,CAACC,KAAD,CAAxD,CAJkC,CAKlC;;EACA,MAAMwB,kBAAkB,GAAG,CACvB,IAAAC,YAAA,EAAIN,IAAJ,EAAU,wBAAV,CADuB,EAEvB,IAAAM,YAAA,EAAIN,IAAJ,EAAU,uBAAV,CAFuB,EAGzBO,MAHyB,CAGlBC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAHN,CAA3B;EAIA,MAAMC,sBAAsB,GAAG,EAA/B,CAVkC,CAWlC;;EACA,KAAK,IAAIhB,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGW,kBAAkB,CAACV,MAAvC,EAA+CD,CAAC,EAAhD,EAAoD;IAChD,MAAM;MAAEiB;IAAF,IAASN,kBAAkB,CAACX,CAAD,CAAjC;IACA,MAAMR,IAAI,GAAG,MAAMgB,WAAW,CAACrB,KAAZ,CAAkB+B,OAAlB,CAA0BD,EAA1B,CAAnB;IACAD,sBAAsB,CAACb,IAAvB,CAA4BX,IAA5B;EACH,CAhBiC,CAkBlC;;;EACA,MAAM2B,QAAQ,GAAG;IACbb,IAAI,EAAE;MACFI,OAAO,EAAEJ,IAAI,CAACI,OADZ;MAEFU,KAAK,EAAEd,IAAI,CAACc,KAFV;MAGFC,IAAI,EAAEf,IAAI,CAACe,IAHT;MAIFC,OAAO,EAAEhB,IAAI,CAACgB,OAJZ;MAKFC,MAAM,EAAEjB,IAAI,CAACiB,MALX;MAMFC,QAAQ,EAAElB,IAAI,CAACkB;IANb,CADO;IASbrC,KAAK,EAAE,CAAC,GAAGY,yBAAJ,EAA+B,GAAGiB,sBAAlC;EATM,CAAjB;EAWA,MAAMS,cAAc,GAAGC,MAAM,CAACC,IAAP,CAAYC,IAAI,CAACC,SAAL,CAAeV,QAAf,CAAZ,CAAvB;EAEA,MAAMW,MAAM,GAAG,IAAIC,eAAJ,CAAW;IACtBC,UAAU,EAAE;MACR7C,KAAK,EAAE,CAAC,GAAGY,yBAAJ,EAA+B,GAAGiB,sBAAlC,CADC;MAERiB,SAAS,EAAE3B,IAAI,CAACc,KAFR;MAGRK;IAHQ,CADU;IAMtBS,cAAc,EAAE3B;EANM,CAAX,CAAf;EASA,OAAOuB,MAAM,CAACK,OAAP,EAAP;AACH;;AAMM,SAAS1B,wBAAT,CACH2B,IADG,EAEHjD,KAAY,GAAG,EAFZ,EAGQ;EACX;EACA,IAAI,CAACiD,IAAD,IAAS,OAAOA,IAAP,KAAgB,QAA7B,EAAuC;IACnC,OAAOjD,KAAP;EACH,CAJU,CAKX;;;EACA,IAAIkD,KAAK,CAACC,OAAN,CAAcF,IAAd,CAAJ,EAAyB;IACrB,KAAK,IAAIpC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGoC,IAAI,CAACnC,MAAzB,EAAiCD,CAAC,EAAlC,EAAsC;MAClC,MAAMuC,OAAO,GAAGH,IAAI,CAACpC,CAAD,CAApB;MACAS,wBAAwB,CAAC8B,OAAD,EAAUpD,KAAV,CAAxB;IACH;;IACD,OAAOA,KAAP;EACH,CAZU,CAcX;;;EACA,MAAMqD,KAAK,GAAGC,MAAM,CAACC,OAAP,CAAeN,IAAf,CAAd;;EACA,KAAK,IAAIpC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGwC,KAAK,CAACvC,MAA1B,EAAkCD,CAAC,EAAnC,EAAuC;IACnC,MAAM,CAACL,GAAD,EAAMgD,KAAN,IAAeH,KAAK,CAACxC,CAAD,CAA1B,CADmC,CAEnC;;IACA,IAAIL,GAAG,KAAK,MAAR,IAAkBgD,KAAtB,EAA6B;MACzBxD,KAAK,
CAACgB,IAAN,CAAWwC,KAAX;IACH,CAFD,MAEO,IAAIhD,GAAG,KAAK,QAAR,IAAoB0C,KAAK,CAACC,OAAN,CAAcK,KAAd,CAAxB,EAA8C;MACjD;MACAxD,KAAK,CAACgB,IAAN,CAAW,GAAGwC,KAAd;IACH,CAHM,MAGA;MACHlC,wBAAwB,CAACkC,KAAD,EAAQxD,KAAR,CAAxB;IACH;EACJ;;EACD,OAAOA,KAAP;AACH"}
package/exportPages/zipper.js
@@ -1,20 +1,29 @@
  "use strict";
  
  var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard").default;
+
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = exports.ZipOfZip = void 0;
+
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
  var _archiver = _interopRequireDefault(require("archiver"));
+
  var _stream = require("stream");
+
  var path = _interopRequireWildcard(require("path"));
+
  var _kebabCase = _interopRequireDefault(require("lodash/kebabCase"));
+
  var _uniqid = _interopRequireDefault(require("uniqid"));
+
  var _s3Stream = require("./s3Stream");
- // TODO: Move "archive" in layer
  
+ // TODO: Move "archive" in layer
  class Zipper {
  constructor(config) {
  (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -24,6 +33,7 @@ class Zipper {
  this.config = config;
  this.archiveFileName = (0, _uniqid.default)(`${this.config.archiveFileKey}/`, `-${(0, _kebabCase.default)(this.config.exportInfo.pageTitle)}.zip`);
  }
+
  s3DownloadStreams() {
  const exportInfo = this.config.exportInfo;
  const prefix = (0, _uniqid.default)("", `-${(0, _kebabCase.default)(exportInfo.pageTitle)}`);
@@ -40,39 +50,40 @@ class Zipper {
  filename: `${prefix}\\${exportInfo.pageTitle}.json`
  }];
  }
+
  process() {
  const {
  streamPassThrough,
  streamPassThroughUploadPromise
- } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
+ } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
+
+
+ const s3FilesStreams = this.s3DownloadStreams(); // 2. Prepare zip from the file stream.
+
+ const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
  
- // 1. Read all files from S3 using stream.
- const s3FilesStreams = this.s3DownloadStreams();
  
- // 2. Prepare zip from the file stream.
- const archive = _archiver.default.create(this.archiveFormat);
- // Handle archive events.
  archive.on("error", error => {
  throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
- });
+ }); // Append all file streams to archive.
  
- // Append all file streams to archive.
  s3FilesStreams.forEach(streamDetails => archive.append(streamDetails.stream, {
  name: streamDetails.filename
- }));
+ })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
  
- // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
- archive.pipe(streamPassThrough);
- // Finalize the archive (ie we are done appending files but streams have to finish yet)
+ archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
  // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
- archive.finalize();
  
- // 3. Return upload stream promise.
+ archive.finalize(); // 3. Return upload stream promise.
+
  return streamPassThroughUploadPromise;
  }
+
  }
+
  exports.default = Zipper;
  const PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;
+
  class ZipOfZip {
  constructor(keys) {
  (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -81,6 +92,7 @@ class ZipOfZip {
  this.keys = keys;
  this.archiveFileName = (0, _uniqid.default)("", `-${PAGE_EXPORT_BASENAME}`);
  }
+
  getFileStreams() {
  return this.keys.map(key => {
  return {
@@ -89,35 +101,35 @@ class ZipOfZip {
  };
  });
  }
+
  process() {
  const {
  streamPassThrough,
  streamPassThroughUploadPromise
- } = _s3Stream.s3Stream.writeStream(this.archiveFileName);
+ } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
+
+
+ const fileStreamDetails = this.getFileStreams(); // 2. Prepare zip from the file stream.
+
+ const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
  
- // 1. Read all files from S3 using stream.
- const fileStreamDetails = this.getFileStreams();
  
- // 2. Prepare zip from the file stream.
- const archive = _archiver.default.create(this.archiveFormat);
- // Handle archive events.
  archive.on("error", error => {
  throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
- });
+ }); // Append all file streams to archive.
  
- // Append all file streams to archive.
  fileStreamDetails.forEach(streamDetails => archive.append(streamDetails.stream, {
  name: streamDetails.filename
- }));
+ })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
  
- // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
- archive.pipe(streamPassThrough);
- // Finalize the archive (ie we are done appending files but streams have to finish yet)
+ archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
  // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
- archive.finalize();
  
- // 3. Return upload stream promise.
+ archive.finalize(); // 3. Return upload stream promise.
+
  return streamPassThroughUploadPromise;
  }
+
  }
+
  exports.ZipOfZip = ZipOfZip;
package/exportPages/zipper.js.map
@@ -1 +1 @@
- {"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","pageTitle","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","pageDataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","name","code","message","stack","forEach","streamDetails","append","pipe","finalize","PAGE_EXPORT_BASENAME","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { ImageFile } from \"./utils\";\n\ninterface FileStreamDetails {\n stream: Readable;\n filename: string;\n}\n\ninterface ExportInfo {\n files: ImageFile[];\n pageTitle: string;\n pageDataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n exportInfo: ExportInfo;\n archiveFileKey: string;\n}\n\nexport default class Zipper {\n private readonly archiveFormat = \"zip\";\n private readonly filesDirName = \"assets\";\n private readonly archiveFileName: string;\n config: ZipperConfig;\n\n constructor(config: ZipperConfig) {\n this.config = config;\n this.archiveFileName = uniqueId(\n `${this.config.archiveFileKey}/`,\n `-${kebabCase(this.config.exportInfo.pageTitle)}.zip`\n );\n }\n\n s3DownloadStreams(): FileStreamDetails[] {\n const exportInfo = this.config.exportInfo;\n const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.pageTitle)}`);\n const files = exportInfo.files.map(({ key }) => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n };\n });\n\n return [\n ...files,\n {\n stream: Readable.from(exportInfo.pageDataBuffer),\n filename: `${prefix}\\\\${exportInfo.pageTitle}.json`\n }\n ];\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const s3FilesStreams = this.s3DownloadStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. 
Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n\nconst PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;\n\nexport class ZipOfZip {\n private readonly archiveFormat = \"zip\";\n private readonly archiveFileName: string;\n keys: string[];\n\n constructor(keys: string[]) {\n this.keys = keys;\n this.archiveFileName = uniqueId(\"\", `-${PAGE_EXPORT_BASENAME}`);\n }\n\n getFileStreams(): FileStreamDetails[] {\n return this.keys.map(key => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${path.basename(key)}`\n };\n });\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const fileStreamDetails = this.getFileStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n"],"mappings":";;;;;;;;;AACA;AAEA;AACA;AACA;AACA;AACA;AAPA;;AA0Be,MAAMA,MAAM,CAAC;EAMxBC,WAAW,CAACC,MAAoB,EAAE;IAAA,qDALD,KAAK;IAAA,oDACN,QAAQ;IAAA;IAAA;IAKpC,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,eAAe,GAAG,IAAAC,eAAQ,EAC1B,GAAE,IAAI,CAACF,MAAM,CAACG,cAAe,GAAE,EAC/B,IAAG,IAAAC,kBAAS,EAAC,IAAI,CAACJ,MAAM,CAACK,UAAU,CAACC,SAAS,CAAE,MAAK,CACxD;EACL;EAEAC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,IAAI,CAACL,MAAM,CAACK,UAAU;IACzC,MAAMG,MAAM,GAAG,IAAAN,eAAQ,EAAC,EAAE,EAAG,IAAG,IAAAE,kBAAS,EAACC,UAAU,CAACC,SAAS,CAAE,EAAC,CAAC;IAClE,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAK,CAACC,GAAG,CAAC,CAAC;MAAEC;IAAI,CAAC,KAAK;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEP,MAAO,KAAI,IAAI,CAACQ,YAAa,KAAIC,IAAI,CAACC,QAAQ,CAACP,GAAG,CAAE;MACrE,CAAC;IACL,CAAC,CAAC;IAEF,OAAO,CACH,GAAGF,KAAK,EACR;MACIG,MAAM,EAAEO,gBAAQ,CAACC,IAAI,CAACf,UAAU,CAACgB,cAAc,CAAC;MAChDN,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,SAAU;IACjD,CAAC,CACJ;EACL;EAEAgB,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGX,kBAAQ,CAACY,WAAW,CAC9E,IAAI,CAACxB,eAAe,CACvB;;IAED;IACA,MAAMyB,cAAc,GAAG,IAAI,CAACnB,iBAAiB,EAAE;;IAE/C;IACA,MAAMoB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAX,cAAc,CAACY,OAAO,CAAEC,aAAgC,IACpDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAAC3B,MAAM,EAAE;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAY,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC;AAED,MAAMmB,oBAAoB,GAAI,wBAAuB;AAE9C,MAAMC,QAAQ,CAAC;EAKlB7C,WAAW,CAAC8C,IAAc,EAAE;IAAA,qDAJK,KAAK;IAAA;IAAA;IAKlC,IAAI,CAACA,IAAI,GAAGA,IAAI
;IAChB,IAAI,CAAC5C,eAAe,GAAG,IAAAC,eAAQ,EAAC,EAAE,EAAG,IAAGyC,oBAAqB,EAAC,CAAC;EACnE;EAEAG,cAAc,GAAwB;IAClC,OAAO,IAAI,CAACD,IAAI,CAACnC,GAAG,CAACC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEE,IAAI,CAACC,QAAQ,CAACP,GAAG,CAAE;MACpC,CAAC;IACL,CAAC,CAAC;EACN;EAEAW,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGX,kBAAQ,CAACY,WAAW,CAC9E,IAAI,CAACxB,eAAe,CACvB;;IAED;IACA,MAAM8C,iBAAiB,GAAG,IAAI,CAACD,cAAc,EAAE;;IAE/C;IACA,MAAMnB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAU,iBAAiB,CAACT,OAAO,CAAEC,aAAgC,IACvDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAAC3B,MAAM,EAAE;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAY,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC"}
+ {"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","pageTitle","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","pageDataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","name","code","message","stack","forEach","streamDetails","append","pipe","finalize","PAGE_EXPORT_BASENAME","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { ImageFile } from \"./utils\";\n\ninterface FileStreamDetails {\n stream: Readable;\n filename: string;\n}\n\ninterface ExportInfo {\n files: ImageFile[];\n pageTitle: string;\n pageDataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n exportInfo: ExportInfo;\n archiveFileKey: string;\n}\n\nexport default class Zipper {\n private readonly archiveFormat = \"zip\";\n private readonly filesDirName = \"assets\";\n private readonly archiveFileName: string;\n config: ZipperConfig;\n\n constructor(config: ZipperConfig) {\n this.config = config;\n this.archiveFileName = uniqueId(\n `${this.config.archiveFileKey}/`,\n `-${kebabCase(this.config.exportInfo.pageTitle)}.zip`\n );\n }\n\n s3DownloadStreams(): FileStreamDetails[] {\n const exportInfo = this.config.exportInfo;\n const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.pageTitle)}`);\n const files = exportInfo.files.map(({ key }) => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n };\n });\n\n return [\n ...files,\n {\n stream: Readable.from(exportInfo.pageDataBuffer),\n filename: `${prefix}\\\\${exportInfo.pageTitle}.json`\n }\n ];\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const s3FilesStreams = this.s3DownloadStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. 
Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n\nconst PAGE_EXPORT_BASENAME = `WEBINY_PAGE_EXPORT.zip`;\n\nexport class ZipOfZip {\n private readonly archiveFormat = \"zip\";\n private readonly archiveFileName: string;\n keys: string[];\n\n constructor(keys: string[]) {\n this.keys = keys;\n this.archiveFileName = uniqueId(\"\", `-${PAGE_EXPORT_BASENAME}`);\n }\n\n getFileStreams(): FileStreamDetails[] {\n return this.keys.map(key => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${path.basename(key)}`\n };\n });\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const fileStreamDetails = this.getFileStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n"],"mappings":";;;;;;;;;;;;;AACA;;AAEA;;AACA;;AACA;;AACA;;AACA;;AAPA;AA0Be,MAAMA,MAAN,CAAa;EAMxBC,WAAW,CAACC,MAAD,EAAuB;IAAA,qDALD,KAKC;IAAA,oDAJF,QAIE;IAAA;IAAA;IAC9B,KAAKA,MAAL,GAAcA,MAAd;IACA,KAAKC,eAAL,GAAuB,IAAAC,eAAA,EAClB,GAAE,KAAKF,MAAL,CAAYG,cAAe,GADX,EAElB,IAAG,IAAAC,kBAAA,EAAU,KAAKJ,MAAL,CAAYK,UAAZ,CAAuBC,SAAjC,CAA4C,MAF7B,CAAvB;EAIH;;EAEDC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,KAAKL,MAAL,CAAYK,UAA/B;IACA,MAAMG,MAAM,GAAG,IAAAN,eAAA,EAAS,EAAT,EAAc,IAAG,IAAAE,kBAAA,EAAUC,UAAU,CAACC,SAArB,CAAgC,EAAjD,CAAf;IACA,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAX,CAAiBC,GAAjB,CAAqB,CAAC;MAAEC;IAAF,CAAD,KAAa;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEP,MAAO,KAAI,KAAKQ,YAAa,KAAIC,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF9D,CAAP;IAIH,CALa,CAAd;IAOA,OAAO,CACH,GAAGF,KADA,EAEH;MACIG,MAAM,EAAEO,gBAAA,CAASC,IAAT,CAAcf,UAAU,CAACgB,cAAzB,CADZ;MAEIN,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,SAAU;IAFjD,CAFG,CAAP;EAOH;;EAEDgB,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAMyB,cAAc,GAAG,KAAKnB,iBAAL,EAAvB,CAN0C,CAQ1C;;IACA,MAAMoB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAX,cAAc,CAACY,OAAf,CAAwBC,aAAD,IACnBZ,OAAO,CAACa,MAAR,CAAeD,aAAa,CAAC3B,MAA7B,EAAqC;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACc,IAAR,CAAalB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACe,QAAR,GA1B0C,CA4B1C;;IACA,OAAOlB,8BAAP;EACH;;AA/DuB;;;AAkE5B,MAAMmB,oBAAoB,GAAI,wBAA9B;;AAEO,MAAMC,QAAN,CAAe;EAKlB7C,WAAW,CAAC8C,IAAD,EAAiB;IAAA,qDAJK,KAIL;IAAA;IAAA;IACxB,KAAKA,IAAL
,GAAYA,IAAZ;IACA,KAAK5C,eAAL,GAAuB,IAAAC,eAAA,EAAS,EAAT,EAAc,IAAGyC,oBAAqB,EAAtC,CAAvB;EACH;;EAEDG,cAAc,GAAwB;IAClC,OAAO,KAAKD,IAAL,CAAUnC,GAAV,CAAcC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEE,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF7B,CAAP;IAIH,CALM,CAAP;EAMH;;EAEDW,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAM8C,iBAAiB,GAAG,KAAKD,cAAL,EAA1B,CAN0C,CAQ1C;;IACA,MAAMnB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,IAAK,IAAGH,KAAK,CAACI,OAAQ,IAAGJ,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACK,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAU,iBAAiB,CAACT,OAAlB,CAA2BC,aAAD,IACtBZ,OAAO,CAACa,MAAR,CAAeD,aAAa,CAAC3B,MAA7B,EAAqC;MAAEsB,IAAI,EAAEK,aAAa,CAACxB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACc,IAAR,CAAalB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACe,QAAR,GA1B0C,CA4B1C;;IACA,OAAOlB,8BAAP;EACH;;AAjDiB"}
package/graphql/crud/pageImportExportTasks.crud.js
@@ -1,21 +1,34 @@
  "use strict";
  
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = void 0;
+
  var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
+
  var _mdbid = _interopRequireDefault(require("mdbid"));
+
  var _fields = require("@commodo/fields");
+
  var _commodoFieldsObject = require("commodo-fields-object");
+
  var _validation = require("@webiny/validation");
+
  var _api = require("@webiny/api");
+
  var _checkBasePermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkBasePermissions"));
+
  var _checkOwnPermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkOwnPermissions"));
+
  var _handlerGraphql = require("@webiny/handler-graphql");
+
  var _types = require("../../types");
+
  var _error = _interopRequireDefault(require("@webiny/error"));
+
  /**
  * Package mdbid does not have types.
  */
@@ -30,7 +43,6 @@ var _error = _interopRequireDefault(require("@webiny/error"));
  * Package commodo-fields-object does not have types.
  */
  // @ts-ignore
-
  const validStatus = `${_types.PageImportExportTaskStatus.PENDING}:${_types.PageImportExportTaskStatus.PROCESSING}:${_types.PageImportExportTaskStatus.COMPLETED}:${_types.PageImportExportTaskStatus.FAILED}`;
  const CreateDataModel = (0, _fields.withFields)({
  status: (0, _fields.string)({
@@ -51,6 +63,7 @@ const UpdateDataModel = (0, _fields.withFields)({
  error: (0, _commodoFieldsObject.object)()
  })();
  const PERMISSION_NAME = "pb.page";
+
  var _default = ({
  storageOperations
  }) => new _api.ContextPlugin(async context => {
@@ -61,17 +74,21 @@ var _default = ({
  console.log("Missing pageBuilder on context. Skipping Page ImportExportTasks crud.");
  return;
  }
+
  const getLocale = () => {
  const locale = context.i18n.getContentLocale();
+
  if (!locale) {
  throw new _error.default("Missing content locale in pageImportExportTasks.crud.ts", "LOCALE_ERROR");
  }
+
  return locale;
- };
+ }; // Modify context
+
  
- // Modify context
  context.pageBuilder.pageImportExportTask = {
  storageOperations,
+
  async getTask(id) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "r"
@@ -86,8 +103,10 @@ var _default = ({
  }
  };
  let pageImportExportTask = null;
+
  try {
  pageImportExportTask = await storageOperations.getTask(params);
+
  if (!pageImportExportTask) {
  return null;
  }
@@ -96,10 +115,12 @@ var _default = ({
  params
  }));
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);
  return pageImportExportTask;
  },
+
  async listTasks(params) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "r"
@@ -117,13 +138,13 @@ var _default = ({
  },
  sort: Array.isArray(sort) && sort.length > 0 ? sort : ["createdOn_ASC"],
  limit: limit
- };
+ }; // If user can only manage own records, let's add that to the listing.
  
- // If user can only manage own records, let's add that to the listing.
  if (permission.own) {
  const identity = context.security.getIdentity();
  listParams.where.createdBy = identity.id;
  }
+
  try {
  const [items] = await storageOperations.listTasks(listParams);
  return items;
@@ -133,6 +154,7 @@ var _default = ({
  });
  }
  },
+
  async createTask(input) {
  await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
@@ -153,6 +175,7 @@ var _default = ({
  displayName: identity.displayName
  }
  });
+
  try {
  return await storageOperations.createTask({
  input: data,
@@ -164,14 +187,17 @@ var _default = ({
  }));
  }
  },
+
  async updateTask(id, input) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
  });
  const original = await context.pageBuilder.pageImportExportTask.getTask(id);
+
  if (!original) {
  throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, original);
  const updateDataModel = new UpdateDataModel().populate(input);
@@ -180,6 +206,7 @@ var _default = ({
  onlyDirty: true
  });
  const pageImportExportTask = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, original), data);
+
  try {
  return await storageOperations.updateTask({
  input: data,
@@ -193,16 +220,20 @@ var _default = ({
  }));
  }
  },
+
  async deleteTask(id) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "d"
  });
  const pageImportExportTask = await context.pageBuilder.pageImportExportTask.getTask(id);
+
  if (!pageImportExportTask) {
  throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, pageImportExportTask);
+
  try {
  return await storageOperations.deleteTask({
  task: pageImportExportTask
@@ -213,16 +244,20 @@ var _default = ({
  }));
  }
  },
+
  async updateStats(id, input) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
  });
  const original = await context.pageBuilder.pageImportExportTask.getTask(id);
+
  if (!original) {
  throw new _handlerGraphql.NotFoundError(`PageImportExportTask "${id}" not found.`);
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, original);
+
  try {
  return await storageOperations.updateTaskStats({
  input,
@@ -234,6 +269,7 @@ var _default = ({
  }));
  }
  },
+
  async createSubTask(parent, id, input) {
  await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
@@ -254,6 +290,7 @@ var _default = ({
  displayName: identity.displayName
  }
  });
+
  try {
  return await storageOperations.createSubTask({
  input: data,
@@ -265,23 +302,27 @@ var _default = ({
  }));
  }
  },
+
  async updateSubTask(parent, subTaskId, input) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
  });
  const original = await context.pageBuilder.pageImportExportTask.getSubTask(parent, subTaskId);
+
  if (!original) {
  throw new _handlerGraphql.NotFoundError(`PageImportExportTask parent: "${parent}" and id: "${subTaskId}" not found.`);
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, original);
  const updateDataModel = new UpdateDataModel().populate(input);
  await updateDataModel.validate();
  const data = await updateDataModel.toJSON({
  onlyDirty: true
- });
- // TODO: Merge recursively
+ }); // TODO: Merge recursively
+
  const pageImportExportSubTask = (0, _objectSpread2.default)((0, _objectSpread2.default)({}, original), data);
+
  try {
  return await storageOperations.updateSubTask({
  input: data,
@@ -295,6 +336,7 @@ var _default = ({
  }));
  }
  },
+
  async getSubTask(parent, subTaskId) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "r"
@@ -310,8 +352,10 @@ var _default = ({
  }
  };
  let pageImportExportSubTask = null;
+
  try {
  pageImportExportSubTask = await storageOperations.getSubTask(params);
+
  if (!pageImportExportSubTask) {
  return null;
  }
@@ -320,10 +364,12 @@ var _default = ({
  params
  }));
  }
+
  const identity = context.security.getIdentity();
  (0, _checkOwnPermissions.default)(identity, permission, pageImportExportSubTask);
  return pageImportExportSubTask;
  },
+
  async listSubTasks(parent, status, limit) {
  const permission = await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "r"
@@ -338,13 +384,13 @@ var _default = ({
  status
  },
  limit
- };
+ }; // If user can only manage own records, let's add that to the listing.
  
- // If user can only manage own records, let's add that to the listing.
  if (permission.own) {
  const identity = context.security.getIdentity();
  listParams.where.createdBy = identity.id;
  }
+
  try {
  const [items] = await storageOperations.listSubTasks(listParams);
  return items;
@@ -358,6 +404,8 @@ var _default = ({
  });
  }
  }
+
  };
  });
+
  exports.default = _default;