@webiny/api-page-builder-import-export 0.0.0-unstable.3386f66516 → 0.0.0-unstable.496cf268ac

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/client.js +2 -6
  2. package/client.js.map +1 -1
  3. package/export/combine/blocksHandler.js +11 -18
  4. package/export/combine/blocksHandler.js.map +1 -1
  5. package/export/combine/formsHandler.d.ts +6 -0
  6. package/export/combine/formsHandler.js +99 -0
  7. package/export/combine/formsHandler.js.map +1 -0
  8. package/export/combine/index.js +5 -8
  9. package/export/combine/index.js.map +1 -1
  10. package/export/combine/pagesHandler.js +11 -18
  11. package/export/combine/pagesHandler.js.map +1 -1
  12. package/export/combine/templatesHandler.js +11 -18
  13. package/export/combine/templatesHandler.js.map +1 -1
  14. package/export/process/blocksHandler.js +18 -32
  15. package/export/process/blocksHandler.js.map +1 -1
  16. package/export/process/formsHandler.d.ts +6 -0
  17. package/export/process/formsHandler.js +187 -0
  18. package/export/process/formsHandler.js.map +1 -0
  19. package/export/process/index.js +5 -8
  20. package/export/process/index.js.map +1 -1
  21. package/export/process/pagesHandler.js +19 -34
  22. package/export/process/pagesHandler.js.map +1 -1
  23. package/export/process/templatesHandler.js +23 -33
  24. package/export/process/templatesHandler.js.map +1 -1
  25. package/export/s3Stream.d.ts +5 -2
  26. package/export/s3Stream.js +1 -20
  27. package/export/s3Stream.js.map +1 -1
  28. package/export/utils.d.ts +8 -1
  29. package/export/utils.js +59 -44
  30. package/export/utils.js.map +1 -1
  31. package/export/zipper.js +31 -43
  32. package/export/zipper.js.map +1 -1
  33. package/graphql/crud/blocks.crud.js +20 -39
  34. package/graphql/crud/blocks.crud.js.map +1 -1
  35. package/graphql/crud/forms.crud.d.ts +4 -0
  36. package/graphql/crud/forms.crud.js +129 -0
  37. package/graphql/crud/forms.crud.js.map +1 -0
  38. package/graphql/crud/importExportTasks.crud.js +9 -57
  39. package/graphql/crud/importExportTasks.crud.js.map +1 -1
  40. package/graphql/crud/pages.crud.js +21 -39
  41. package/graphql/crud/pages.crud.js.map +1 -1
  42. package/graphql/crud/templates.crud.js +17 -33
  43. package/graphql/crud/templates.crud.js.map +1 -1
  44. package/graphql/crud.js +2 -8
  45. package/graphql/crud.js.map +1 -1
  46. package/graphql/graphql/blocks.gql.js +1 -6
  47. package/graphql/graphql/blocks.gql.js.map +1 -1
  48. package/graphql/graphql/forms.gql.d.ts +4 -0
  49. package/graphql/graphql/forms.gql.js +60 -0
  50. package/graphql/graphql/forms.gql.js.map +1 -0
  51. package/graphql/graphql/importExportTasks.gql.js +1 -6
  52. package/graphql/graphql/importExportTasks.gql.js.map +1 -1
  53. package/graphql/graphql/pages.gql.js +1 -6
  54. package/graphql/graphql/pages.gql.js.map +1 -1
  55. package/graphql/graphql/templates.gql.js +1 -6
  56. package/graphql/graphql/templates.gql.js.map +1 -1
  57. package/graphql/graphql/utils/resolve.js +0 -3
  58. package/graphql/graphql/utils/resolve.js.map +1 -1
  59. package/graphql/graphql.js +2 -7
  60. package/graphql/graphql.js.map +1 -1
  61. package/graphql/index.js +0 -5
  62. package/graphql/index.js.map +1 -1
  63. package/graphql/types.d.ts +23 -0
  64. package/graphql/types.js.map +1 -1
  65. package/import/constants.d.ts +3 -0
  66. package/import/constants.js +14 -0
  67. package/import/constants.js.map +1 -0
  68. package/import/create/blocksHandler.js +11 -20
  69. package/import/create/blocksHandler.js.map +1 -1
  70. package/import/create/formsHandler.d.ts +3 -0
  71. package/import/create/formsHandler.js +103 -0
  72. package/import/create/formsHandler.js.map +1 -0
  73. package/import/create/index.js +5 -8
  74. package/import/create/index.js.map +1 -1
  75. package/import/create/pagesHandler.js +10 -19
  76. package/import/create/pagesHandler.js.map +1 -1
  77. package/import/create/templatesHandler.js +10 -19
  78. package/import/create/templatesHandler.js.map +1 -1
  79. package/import/process/blocks/blocksHandler.d.ts +3 -0
  80. package/import/process/blocks/blocksHandler.js +169 -0
  81. package/import/process/blocks/blocksHandler.js.map +1 -0
  82. package/import/process/blocks/importBlock.d.ts +11 -0
  83. package/import/process/blocks/importBlock.js +89 -0
  84. package/import/process/blocks/importBlock.js.map +1 -0
  85. package/import/process/forms/formsHandler.d.ts +3 -0
  86. package/import/process/forms/formsHandler.js +176 -0
  87. package/import/process/forms/formsHandler.js.map +1 -0
  88. package/import/process/forms/importForm.d.ts +9 -0
  89. package/import/process/forms/importForm.js +43 -0
  90. package/import/process/forms/importForm.js.map +1 -0
  91. package/import/process/index.js +8 -11
  92. package/import/process/index.js.map +1 -1
  93. package/import/process/pages/importPage.d.ts +11 -0
  94. package/import/process/pages/importPage.js +92 -0
  95. package/import/process/pages/importPage.js.map +1 -0
  96. package/import/process/pages/pagesHandler.d.ts +3 -0
  97. package/import/process/pages/pagesHandler.js +183 -0
  98. package/import/process/pages/pagesHandler.js.map +1 -0
  99. package/import/process/pagesHandler.js +2 -3
  100. package/import/process/pagesHandler.js.map +1 -1
  101. package/import/process/templates/importTemplate.d.ts +11 -0
  102. package/import/process/templates/importTemplate.js +66 -0
  103. package/import/process/templates/importTemplate.js.map +1 -0
  104. package/import/process/{templatesHandler.d.ts → templates/templatesHandler.d.ts} +2 -2
  105. package/import/process/{templatesHandler.js → templates/templatesHandler.js} +31 -33
  106. package/import/process/templates/templatesHandler.js.map +1 -0
  107. package/import/utils/deleteS3Folder.d.ts +1 -0
  108. package/import/utils/deleteS3Folder.js +19 -0
  109. package/import/utils/deleteS3Folder.js.map +1 -0
  110. package/import/utils/extractAndUploadZipFileContents.d.ts +7 -0
  111. package/import/utils/extractAndUploadZipFileContents.js +122 -0
  112. package/import/utils/extractAndUploadZipFileContents.js.map +1 -0
  113. package/import/utils/extractZipAndUploadToS3.d.ts +2 -0
  114. package/import/utils/extractZipAndUploadToS3.js +98 -0
  115. package/import/utils/extractZipAndUploadToS3.js.map +1 -0
  116. package/import/utils/getFileNameWithoutExt.d.ts +1 -0
  117. package/import/utils/getFileNameWithoutExt.js +11 -0
  118. package/import/utils/getFileNameWithoutExt.js.map +1 -0
  119. package/import/utils/index.d.ts +9 -0
  120. package/import/utils/index.js +104 -0
  121. package/import/utils/index.js.map +1 -0
  122. package/import/utils/initialStats.d.ts +7 -0
  123. package/import/utils/initialStats.js +16 -0
  124. package/import/utils/initialStats.js.map +1 -0
  125. package/import/utils/prepareDataDirMap.d.ts +6 -0
  126. package/import/utils/prepareDataDirMap.js +29 -0
  127. package/import/utils/prepareDataDirMap.js.map +1 -0
  128. package/import/utils/updateFilesInData.d.ts +8 -0
  129. package/import/utils/updateFilesInData.js +48 -0
  130. package/import/utils/updateFilesInData.js.map +1 -0
  131. package/import/utils/uploadAssets.d.ts +10 -0
  132. package/import/utils/uploadAssets.js +51 -0
  133. package/import/utils/uploadAssets.js.map +1 -0
  134. package/import/utils/uploadFilesFromS3.d.ts +3 -0
  135. package/import/utils/uploadFilesFromS3.js +19 -0
  136. package/import/utils/uploadFilesFromS3.js.map +1 -0
  137. package/import/utils.d.ts +1 -8
  138. package/import/utils.js +0 -55
  139. package/import/utils.js.map +1 -1
  140. package/mockSecurity.js +0 -2
  141. package/mockSecurity.js.map +1 -1
  142. package/package.json +25 -24
  143. package/types.d.ts +21 -0
  144. package/types.js +0 -5
  145. package/types.js.map +1 -1
  146. package/import/process/templatesHandler.js.map +0 -1
package/export/utils.js CHANGED
@@ -1,44 +1,41 @@
  "use strict";

  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
- exports.EXPORT_TEMPLATES_FOLDER_KEY = exports.EXPORT_PAGES_FOLDER_KEY = exports.EXPORT_BLOCKS_FOLDER_KEY = void 0;
+ exports.EXPORT_TEMPLATES_FOLDER_KEY = exports.EXPORT_PAGES_FOLDER_KEY = exports.EXPORT_FORMS_FOLDER_KEY = exports.EXPORT_BLOCKS_FOLDER_KEY = void 0;
  exports.exportBlock = exportBlock;
+ exports.exportForm = exportForm;
  exports.exportPage = exportPage;
  exports.exportTemplate = exportTemplate;
  exports.extractFilesFromData = extractFilesFromData;
-
  var _get = _interopRequireDefault(require("lodash/get"));
-
  var _zipper = _interopRequireDefault(require("./zipper"));
-
  const EXPORT_PAGES_FOLDER_KEY = "WEBINY_PB_EXPORT_PAGES";
  exports.EXPORT_PAGES_FOLDER_KEY = EXPORT_PAGES_FOLDER_KEY;
  const EXPORT_BLOCKS_FOLDER_KEY = "WEBINY_PB_EXPORT_BLOCK";
  exports.EXPORT_BLOCKS_FOLDER_KEY = EXPORT_BLOCKS_FOLDER_KEY;
  const EXPORT_TEMPLATES_FOLDER_KEY = "WEBINY_PB_EXPORT_TEMPLATE";
  exports.EXPORT_TEMPLATES_FOLDER_KEY = EXPORT_TEMPLATES_FOLDER_KEY;
-
+ const EXPORT_FORMS_FOLDER_KEY = "WEBINY_FB_EXPORT_FORM";
+ exports.EXPORT_FORMS_FOLDER_KEY = EXPORT_FORMS_FOLDER_KEY;
  async function exportPage(page, exportPagesDataKey, fileManager) {
  // Extract all files
- const files = extractFilesFromData(page.content || {}); // Extract images from page settings
-
+ const files = extractFilesFromData(page.content || {});
+ // Extract images from page settings
  const pageSettingsImages = [(0, _get.default)(page, "settings.general.image"), (0, _get.default)(page, "settings.social.image")].filter(image => image && image.src);
- const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id); // Get file data for all images
-
+ const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id);
+ // Get file data for all images
  const imageFilesData = [];
-
  if (fileIds.length > 0) {
- const [filesData] = await fileManager.files.listFiles({
+ const [filesData] = await fileManager.listFiles({
  ids: fileIds
  });
  imageFilesData.push(...filesData);
- } // Extract the page data in a json file and upload it to S3
-
+ }

+ // Extract the page data in a json file and upload it to S3
  const pageData = {
  page: {
  content: page.content,
@@ -61,27 +58,24 @@ async function exportPage(page, exportPagesDataKey, fileManager) {
  });
  return zipper.process();
  }
-
  async function exportBlock(block, exportBlocksDataKey, fileManager) {
  // Extract all files
  const files = extractFilesFromData(block.content || {});
- const fileIds = files.map(imageFile => imageFile.id); // Get file data for all images
-
+ const fileIds = files.map(imageFile => imageFile.id);
+ // Get file data for all images
  const imageFilesData = [];
-
  if (fileIds.length > 0) {
- const [filesData] = await fileManager.files.listFiles({
+ const [filesData] = await fileManager.listFiles({
  ids: fileIds
  });
  imageFilesData.push(...filesData);
- } // Add block preview image file data
-
-
+ }
+ // Add block preview image file data
  if (block.preview.id) {
- imageFilesData.push(await fileManager.files.getFile(block.preview.id));
- } // Extract the block data in a json file and upload it to S3
-
+ imageFilesData.push(await fileManager.getFile(block.preview.id));
+ }

+ // Extract the block data in a json file and upload it to S3
  const blockData = {
  block: {
  name: block.name,
@@ -101,27 +95,29 @@ async function exportBlock(block, exportBlocksDataKey, fileManager) {
  });
  return zipper.process();
  }
-
  async function exportTemplate(template, exportTemplatesDataKey, fileManager) {
  // Extract all files
  const files = extractFilesFromData(template.content || {});
- const fileIds = files.map(imageFile => imageFile.id); // Get file data for all images
-
+ const fileIds = files.map(imageFile => imageFile.id);
+ // Get file data for all images
  const imageFilesData = [];
-
  if (fileIds.length > 0) {
- const [filesData] = await fileManager.files.listFiles({
+ const [filesData] = await fileManager.listFiles({
  ids: fileIds
  });
  imageFilesData.push(...filesData);
- } // Extract the template data in a json file and upload it to S3
-
+ }

+ // Extract the template data in a json file and upload it to S3
  const templateData = {
  template: {
  title: template.title,
+ slug: template.slug,
+ tags: template.tags,
  description: template.description,
- content: template.content
+ content: template.content,
+ layout: template.layout,
+ pageCategory: template.pageCategory
  },
  files: imageFilesData
  };
@@ -136,29 +132,25 @@ async function exportTemplate(template, exportTemplatesDataKey, fileManager) {
  });
  return zipper.process();
  }
-
  function extractFilesFromData(data, files = []) {
  // Base case: termination
  if (!data || typeof data !== "object") {
  return files;
- } // Recursively call function for each element
-
-
+ }
+ // Recursively call function for each element
  if (Array.isArray(data)) {
  for (let i = 0; i < data.length; i++) {
  const element = data[i];
  extractFilesFromData(element, files);
  }
-
  return files;
- } // Main
-
+ }

+ // Main
  const tuple = Object.entries(data);
-
  for (let i = 0; i < tuple.length; i++) {
- const [key, value] = tuple[i]; // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
-
+ const [key, value] = tuple[i];
+ // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.
  if (key === "file" && value) {
  files.push(value);
  } else if (key === "images" && Array.isArray(value)) {
@@ -168,6 +160,29 @@ function extractFilesFromData(data, files = []) {
  extractFilesFromData(value, files);
  }
  }
-
  return files;
+ }
+ async function exportForm(form, exportFormsDataKey) {
+ // Extract the form data in a json file and upload it to S3
+ const formData = {
+ form: {
+ name: form.name,
+ status: form.status,
+ version: form.version,
+ fields: form.fields,
+ layout: form.layout,
+ settings: form.settings,
+ triggers: form.triggers
+ }
+ };
+ const formDataBuffer = Buffer.from(JSON.stringify(formData));
+ const zipper = new _zipper.default({
+ exportInfo: {
+ files: [],
+ name: form.name,
+ dataBuffer: formDataBuffer
+ },
+ archiveFileKey: exportFormsDataKey
+ });
+ return zipper.process();
  }
package/export/utils.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","EXPORT_BLOCKS_FOLDER_KEY","EXPORT_TEMPLATES_FOLDER_KEY","exportPage","page","exportPagesDataKey","fileManager","files","extractFilesFromData","content","pageSettingsImages","get","filter","image","src","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","ids","push","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exportBlock","block","exportBlocksDataKey","preview","getFile","blockData","blockDataBuffer","exportTemplate","template","exportTemplatesDataKey","templateData","description","templateDataBuffer","data","Array","isArray","i","element","tuple","Object","entries","key","value"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, PageBlock, PageTemplate } from \"@webiny/api-page-builder/types\";\nimport { FileManagerContext, File } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\nexport const EXPORT_BLOCKS_FOLDER_KEY = \"WEBINY_PB_EXPORT_BLOCK\";\nexport const EXPORT_TEMPLATES_FOLDER_KEY = \"WEBINY_PB_EXPORT_TEMPLATE\";\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: File[];\n}\n\nexport async function exportPage(\n page: Page,\n exportPagesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(page.content || {});\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n\n const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.files.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: imageFilesData\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: page.title,\n dataBuffer: pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\" | \"preview\">;\n files: File[];\n}\n\nexport async function exportBlock(\n block: PageBlock,\n exportBlocksDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.files.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n // Add block preview image file data\n if (block.preview.id) {\n imageFilesData.push(await 
fileManager.files.getFile(block.preview.id));\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content,\n preview: block.preview\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ExportedTemplateData {\n template: Pick<PageTemplate, \"title\" | \"description\" | \"content\">;\n files: File[];\n}\n\nexport async function exportTemplate(\n template: PageTemplate,\n exportTemplatesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(template.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.files.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n\n // Extract the template data in a json file and upload it to S3\n const templateData = {\n template: {\n title: template.title,\n description: template.description,\n content: template.content\n },\n files: imageFilesData\n };\n const templateDataBuffer = Buffer.from(JSON.stringify(templateData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: template.title,\n dataBuffer: templateDataBuffer\n },\n archiveFileKey: exportTemplatesDataKey\n });\n\n return zipper.process();\n}\n\nexport function extractFilesFromData(data: Record<string, any>, files: any[] = []): File[] {\n // Base case: termination\n if (!data || typeof data !== \"object\") {\n return files;\n }\n // Recursively call function for each element\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n extractFilesFromData(element, files);\n }\n return files;\n }\n\n // Main\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n if (key === \"file\" && value) {\n files.push(value);\n } else if (key === \"images\" && Array.isArray(value)) {\n // Handle case for \"images-list\" component\n files.push(...value);\n } else {\n extractFilesFromData(value, files);\n }\n }\n return 
files;\n}\n"],"mappings":";;;;;;;;;;;;;AAGA;;AACA;;AAEO,MAAMA,uBAAuB,GAAG,wBAAhC;;AACA,MAAMC,wBAAwB,GAAG,wBAAjC;;AACA,MAAMC,2BAA2B,GAAG,2BAApC;;;AAOA,eAAeC,UAAf,CACHC,IADG,EAEHC,kBAFG,EAGHC,WAHG,EAI+B;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAACJ,IAAI,CAACK,OAAL,IAAgB,EAAjB,CAAlC,CAFkC,CAGlC;;EACA,MAAMC,kBAAkB,GAAG,CACvB,IAAAC,YAAA,EAAIP,IAAJ,EAAU,wBAAV,CADuB,EAEvB,IAAAO,YAAA,EAAIP,IAAJ,EAAU,uBAAV,CAFuB,EAGzBQ,MAHyB,CAGlBC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAHN,CAA3B;EAKA,MAAMC,OAAO,GAAG,CAAC,GAAGR,KAAJ,EAAW,GAAGG,kBAAd,EAAkCM,GAAlC,CAAsCC,SAAS,IAAIA,SAAS,CAACC,EAA7D,CAAhB,CATkC,CAUlC;;EACA,MAAMC,cAAc,GAAG,EAAvB;;EACA,IAAIJ,OAAO,CAACK,MAAR,GAAiB,CAArB,EAAwB;IACpB,MAAM,CAACC,SAAD,IAAc,MAAMf,WAAW,CAACC,KAAZ,CAAkBe,SAAlB,CAA4B;MAAEC,GAAG,EAAER;IAAP,CAA5B,CAA1B;IACAI,cAAc,CAACK,IAAf,CAAoB,GAAGH,SAAvB;EACH,CAfiC,CAiBlC;;;EACA,MAAMI,QAAQ,GAAG;IACbrB,IAAI,EAAE;MACFK,OAAO,EAAEL,IAAI,CAACK,OADZ;MAEFiB,KAAK,EAAEtB,IAAI,CAACsB,KAFV;MAGFC,IAAI,EAAEvB,IAAI,CAACuB,IAHT;MAIFC,OAAO,EAAExB,IAAI,CAACwB,OAJZ;MAKFC,MAAM,EAAEzB,IAAI,CAACyB,MALX;MAMFC,QAAQ,EAAE1B,IAAI,CAAC0B;IANb,CADO;IASbvB,KAAK,EAAEY;EATM,CAAjB;EAWA,MAAMY,cAAc,GAAGC,MAAM,CAACC,IAAP,CAAYC,IAAI,CAACC,SAAL,CAAeV,QAAf,CAAZ,CAAvB;EAEA,MAAMW,MAAM,GAAG,IAAIC,eAAJ,CAAW;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cADC;MAERoB,IAAI,EAAEnC,IAAI,CAACsB,KAFH;MAGRc,UAAU,EAAET;IAHJ,CADU;IAMtBU,cAAc,EAAEpC;EANM,CAAX,CAAf;EASA,OAAO+B,MAAM,CAACM,OAAP,EAAP;AACH;;AAOM,eAAeC,WAAf,CACHC,KADG,EAEHC,mBAFG,EAGHvC,WAHG,EAI+B;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAACoC,KAAK,CAACnC,OAAN,IAAiB,EAAlB,CAAlC;EACA,MAAMM,OAAO,GAAGR,KAAK,CAACS,GAAN,CAAUC,SAAS,IAAIA,SAAS,CAACC,EAAjC,CAAhB,CAHkC,CAIlC;;EACA,MAAMC,cAAc,GAAG,EAAvB;;EACA,IAAIJ,OAAO,CAACK,MAAR,GAAiB,CAArB,EAAwB;IACpB,MAAM,CAACC,SAAD,IAAc,MAAMf,WAAW,CAACC,KAAZ,CAAkBe,SAAlB,CAA4B;MAAEC,GAAG,EAAER;IAAP,CAA5B,CAA1B;IACAI,cAAc,CAACK,IAAf,CAAoB,GAAGH,SAAvB;EACH,CATiC,CAUlC;;;EACA,IAAIuB,KAAK,CAACE,OAAN,CAAc5B,EAAlB,EAAsB;IAClBC,cAAc,CAACK,IAAf,CAAoB,MAAMlB,WAAW,CAACC,KAAZ,CAAkBwC,OAAlB,CAA0BH,KAAK,CAACE,OAAN,CAAc5B,EAAxC,CAA1B;EACH,CAbiC,CAelC;;;EACA,MAAM8B,SAAS,GAAG;IACdJ,KAAK,EAAE;MACHL,IAAI,EAAEK,KAAK,CAACL,IADT;MAEH9B,OAAO,EAAEmC,KAAK,CAACnC,OAFZ;MAGHqC,OAAO,EAAEF,KAAK,CAACE;IAHZ,CADO;IAMdvC,KAAK,EAAEY;EANO,CAAlB;EAQA,MAAM8B,eAAe,GAAGjB,MAAM,CAACC,IAAP,CAAYC,IAAI,CAACC,SAAL,CAAea,SAAf,CAAZ,CAAxB;EAEA,MAAMZ,MAAM,GAAG,IAAIC,eAAJ,CAAW;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cADC;MAERoB,IAAI,EAAEK,KAAK,CAACL,IAFJ;MAGRC,UAAU,EAAES;IAHJ,CADU;IAMtBR,cAAc,EAAEI;EANM,CAAX,CAAf;EASA,OAAOT,MAAM,CAACM,OAAP,EAAP;AACH;;AAOM,eAAeQ,cAAf,CACHC,QADG,EAEHC,sBAFG,EAGH9C,WAHG,EAI+B;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAAC2C,QAAQ,CAAC1C,OAAT,IAAoB,EAArB,CAAlC;EACA,MAAMM,OAAO,GAAGR,KAAK,CAACS,GAAN,CAAUC,SAAS,IAAIA,SAAS,CAACC,EAAjC,CAAhB,CAHkC,CAIlC;;EACA,MAAMC,cAAc,GAAG,EAAvB;;EACA,IAAIJ,OAAO,CAACK,MAAR,GAAiB,CAArB,EAAwB;IACpB,MAAM,CAACC,SAAD,IAAc,MAAMf,WAAW,CAACC,KAAZ,CAAkBe,SAAlB,CAA4B;MAAEC,GAAG,EAAER;IAAP,CAA5B,CAA1B;IACAI,cAAc,CAACK,IAAf,CAAoB,GAAGH,SAAvB;EACH,CATiC,CAWlC;;;EACA,MAAMgC,YAAY,GAAG;IACjBF,QAAQ,EAAE;MACNzB,KAAK,EAAEyB,QAAQ,CAACzB,KADV;MAEN4B,WAAW,EAAEH,QAAQ,CAACG,WAFhB;MAGN7C,OAAO,EAAE0C,QAAQ,CAAC1C;IAHZ,CADO;IAMjBF,KAAK,EAAEY;EANU,CAArB;EAQA,MAAMoC,kBAAkB,GAAGvB,MAAM,CAACC,IAAP,CAAYC,IAAI,CAACC,SAAL,CAAekB,YAAf,CAAZ,CAA3B;EAEA,MAAMjB,MAAM,GAAG,IAAIC,eAAJ,CAAW;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cADC;MAERoB,IAAI,EAAEY,QAAQ,CAACzB,KAFP;MAGRc,UAAU,EAAEe;IAHJ,CADU;IAMtBd,cAAc,EAAEW;EANM,CAAX,CAAf;EASA,OAAOhB,MAAM,CAACM,OAAP,EAAP;AACH;;AAEM,SAASlC,oBAAT,CAA8BgD,IAA9B,EAAyDjD,KAAY,GAAG,EAAxE,EAAoF;EACvF;EACA,IAAI,CAACiD,IAAD,IAAS,OAAOA,IAAP,KAAgB,QAA7B,
EAAuC;IACnC,OAAOjD,KAAP;EACH,CAJsF,CAKvF;;;EACA,IAAIkD,KAAK,CAACC,OAAN,CAAcF,IAAd,CAAJ,EAAyB;IACrB,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGH,IAAI,CAACpC,MAAzB,EAAiCuC,CAAC,EAAlC,EAAsC;MAClC,MAAMC,OAAO,GAAGJ,IAAI,CAACG,CAAD,CAApB;MACAnD,oBAAoB,CAACoD,OAAD,EAAUrD,KAAV,CAApB;IACH;;IACD,OAAOA,KAAP;EACH,CAZsF,CAcvF;;;EACA,MAAMsD,KAAK,GAAGC,MAAM,CAACC,OAAP,CAAeP,IAAf,CAAd;;EACA,KAAK,IAAIG,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGE,KAAK,CAACzC,MAA1B,EAAkCuC,CAAC,EAAnC,EAAuC;IACnC,MAAM,CAACK,GAAD,EAAMC,KAAN,IAAeJ,KAAK,CAACF,CAAD,CAA1B,CADmC,CAEnC;;IACA,IAAIK,GAAG,KAAK,MAAR,IAAkBC,KAAtB,EAA6B;MACzB1D,KAAK,CAACiB,IAAN,CAAWyC,KAAX;IACH,CAFD,MAEO,IAAID,GAAG,KAAK,QAAR,IAAoBP,KAAK,CAACC,OAAN,CAAcO,KAAd,CAAxB,EAA8C;MACjD;MACA1D,KAAK,CAACiB,IAAN,CAAW,GAAGyC,KAAd;IACH,CAHM,MAGA;MACHzD,oBAAoB,CAACyD,KAAD,EAAQ1D,KAAR,CAApB;IACH;EACJ;;EACD,OAAOA,KAAP;AACH"}
+ {"version":3,"names":["EXPORT_PAGES_FOLDER_KEY","EXPORT_BLOCKS_FOLDER_KEY","EXPORT_TEMPLATES_FOLDER_KEY","EXPORT_FORMS_FOLDER_KEY","exportPage","page","exportPagesDataKey","fileManager","files","extractFilesFromData","content","pageSettingsImages","get","filter","image","src","fileIds","map","imageFile","id","imageFilesData","length","filesData","listFiles","ids","push","pageData","title","path","version","status","settings","pageDataBuffer","Buffer","from","JSON","stringify","zipper","Zipper","exportInfo","name","dataBuffer","archiveFileKey","process","exportBlock","block","exportBlocksDataKey","preview","getFile","blockData","blockDataBuffer","exportTemplate","template","exportTemplatesDataKey","templateData","slug","tags","description","layout","pageCategory","templateDataBuffer","data","Array","isArray","i","element","tuple","Object","entries","key","value","exportForm","form","exportFormsDataKey","formData","fields","triggers","formDataBuffer"],"sources":["utils.ts"],"sourcesContent":["import S3 from \"aws-sdk/clients/s3\";\nimport { Page, PageBlock, PageTemplate } from \"@webiny/api-page-builder/types\";\nimport { FbForm } from \"@webiny/api-form-builder/types\";\nimport { FileManagerContext, File } from \"@webiny/api-file-manager/types\";\nimport get from \"lodash/get\";\nimport Zipper from \"./zipper\";\n\nexport const EXPORT_PAGES_FOLDER_KEY = \"WEBINY_PB_EXPORT_PAGES\";\nexport const EXPORT_BLOCKS_FOLDER_KEY = \"WEBINY_PB_EXPORT_BLOCK\";\nexport const EXPORT_TEMPLATES_FOLDER_KEY = \"WEBINY_PB_EXPORT_TEMPLATE\";\nexport const EXPORT_FORMS_FOLDER_KEY = \"WEBINY_FB_EXPORT_FORM\";\n\nexport interface ExportedPageData {\n page: Pick<Page, \"content\" | \"title\" | \"version\" | \"status\" | \"settings\" | \"path\">;\n files: File[];\n}\n\nexport async function exportPage(\n page: Page,\n exportPagesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(page.content || {});\n // Extract images from page settings\n const pageSettingsImages = [\n get(page, \"settings.general.image\") as unknown as File,\n get(page, \"settings.social.image\") as unknown as File\n ].filter(image => image && image.src);\n\n const fileIds = [...files, ...pageSettingsImages].map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n\n // Extract the page data in a json file and upload it to S3\n const pageData = {\n page: {\n content: page.content,\n title: page.title,\n path: page.path,\n version: page.version,\n status: page.status,\n settings: page.settings\n },\n files: imageFilesData\n };\n const pageDataBuffer = Buffer.from(JSON.stringify(pageData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: page.title,\n dataBuffer: pageDataBuffer\n },\n archiveFileKey: exportPagesDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ExportedBlockData {\n block: Pick<PageBlock, \"name\" | \"content\" | \"preview\">;\n files: File[];\n}\n\nexport async function exportBlock(\n block: PageBlock,\n exportBlocksDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(block.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all 
images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n // Add block preview image file data\n if (block.preview.id) {\n imageFilesData.push(await fileManager.getFile(block.preview.id));\n }\n\n // Extract the block data in a json file and upload it to S3\n const blockData = {\n block: {\n name: block.name,\n content: block.content,\n preview: block.preview\n },\n files: imageFilesData\n };\n const blockDataBuffer = Buffer.from(JSON.stringify(blockData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: block.name,\n dataBuffer: blockDataBuffer\n },\n archiveFileKey: exportBlocksDataKey\n });\n\n return zipper.process();\n}\n\nexport interface ExportedTemplateData {\n template: Pick<\n PageTemplate,\n \"title\" | \"slug\" | \"tags\" | \"description\" | \"content\" | \"layout\" | \"pageCategory\"\n >;\n files: File[];\n}\n\nexport async function exportTemplate(\n template: PageTemplate,\n exportTemplatesDataKey: string,\n fileManager: FileManagerContext[\"fileManager\"]\n): Promise<S3.ManagedUpload.SendData> {\n // Extract all files\n const files = extractFilesFromData(template.content || {});\n const fileIds = files.map(imageFile => imageFile.id);\n // Get file data for all images\n const imageFilesData = [];\n if (fileIds.length > 0) {\n const [filesData] = await fileManager.listFiles({ ids: fileIds });\n imageFilesData.push(...filesData);\n }\n\n // Extract the template data in a json file and upload it to S3\n const templateData = {\n template: {\n title: template.title,\n slug: template.slug,\n tags: template.tags,\n description: template.description,\n content: template.content,\n layout: template.layout,\n pageCategory: template.pageCategory\n },\n files: imageFilesData\n };\n const templateDataBuffer = Buffer.from(JSON.stringify(templateData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: imageFilesData,\n name: template.title,\n dataBuffer: templateDataBuffer\n },\n archiveFileKey: exportTemplatesDataKey\n });\n\n return zipper.process();\n}\n\nexport function extractFilesFromData(data: Record<string, any>, files: any[] = []): File[] {\n // Base case: termination\n if (!data || typeof data !== \"object\") {\n return files;\n }\n // Recursively call function for each element\n if (Array.isArray(data)) {\n for (let i = 0; i < data.length; i++) {\n const element = data[i];\n extractFilesFromData(element, files);\n }\n return files;\n }\n\n // Main\n const tuple = Object.entries(data);\n for (let i = 0; i < tuple.length; i++) {\n const [key, value] = tuple[i];\n // TODO: @ashutosh extract it to plugins, so that, we can handle cases for other components too.\n if (key === \"file\" && value) {\n files.push(value);\n } else if (key === \"images\" && Array.isArray(value)) {\n // Handle case for \"images-list\" component\n files.push(...value);\n } else {\n extractFilesFromData(value, files);\n }\n }\n return files;\n}\n\nexport interface ExportedFormData {\n form: Pick<\n FbForm,\n \"name\" | \"status\" | \"version\" | \"fields\" | \"layout\" | \"settings\" | \"triggers\"\n >;\n files: File[];\n}\n\nexport async function exportForm(\n form: FbForm,\n exportFormsDataKey: string\n): Promise<S3.ManagedUpload.SendData> {\n // Extract the form data in a json file and upload it to S3\n const formData = {\n form: {\n name: form.name,\n status: form.status,\n version: form.version,\n fields: form.fields,\n layout: form.layout,\n 
settings: form.settings,\n triggers: form.triggers\n }\n };\n const formDataBuffer = Buffer.from(JSON.stringify(formData));\n\n const zipper = new Zipper({\n exportInfo: {\n files: [],\n name: form.name,\n dataBuffer: formDataBuffer\n },\n archiveFileKey: exportFormsDataKey\n });\n\n return zipper.process();\n}\n"],"mappings":";;;;;;;;;;;;AAIA;AACA;AAEO,MAAMA,uBAAuB,GAAG,wBAAwB;AAAC;AACzD,MAAMC,wBAAwB,GAAG,wBAAwB;AAAC;AAC1D,MAAMC,2BAA2B,GAAG,2BAA2B;AAAC;AAChE,MAAMC,uBAAuB,GAAG,uBAAuB;AAAC;AAOxD,eAAeC,UAAU,CAC5BC,IAAU,EACVC,kBAA0B,EAC1BC,WAA8C,EACZ;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAACJ,IAAI,CAACK,OAAO,IAAI,CAAC,CAAC,CAAC;EACtD;EACA,MAAMC,kBAAkB,GAAG,CACvB,IAAAC,YAAG,EAACP,IAAI,EAAE,wBAAwB,CAAC,EACnC,IAAAO,YAAG,EAACP,IAAI,EAAE,uBAAuB,CAAC,CACrC,CAACQ,MAAM,CAACC,KAAK,IAAIA,KAAK,IAAIA,KAAK,CAACC,GAAG,CAAC;EAErC,MAAMC,OAAO,GAAG,CAAC,GAAGR,KAAK,EAAE,GAAGG,kBAAkB,CAAC,CAACM,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;EAChF;EACA,MAAMC,cAAc,GAAG,EAAE;EACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;IACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAMf,WAAW,CAACgB,SAAS,CAAC;MAAEC,GAAG,EAAER;IAAQ,CAAC,CAAC;IACjEI,cAAc,CAACK,IAAI,CAAC,GAAGH,SAAS,CAAC;EACrC;;EAEA;EACA,MAAMI,QAAQ,GAAG;IACbrB,IAAI,EAAE;MACFK,OAAO,EAAEL,IAAI,CAACK,OAAO;MACrBiB,KAAK,EAAEtB,IAAI,CAACsB,KAAK;MACjBC,IAAI,EAAEvB,IAAI,CAACuB,IAAI;MACfC,OAAO,EAAExB,IAAI,CAACwB,OAAO;MACrBC,MAAM,EAAEzB,IAAI,CAACyB,MAAM;MACnBC,QAAQ,EAAE1B,IAAI,CAAC0B;IACnB,CAAC;IACDvB,KAAK,EAAEY;EACX,CAAC;EACD,MAAMY,cAAc,GAAGC,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACV,QAAQ,CAAC,CAAC;EAE5D,MAAMW,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cAAc;MACrBoB,IAAI,EAAEnC,IAAI,CAACsB,KAAK;MAChBc,UAAU,EAAET;IAChB,CAAC;IACDU,cAAc,EAAEpC;EACpB,CAAC,CAAC;EAEF,OAAO+B,MAAM,CAACM,OAAO,EAAE;AAC3B;AAOO,eAAeC,WAAW,CAC7BC,KAAgB,EAChBC,mBAA2B,EAC3BvC,WAA8C,EACZ;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAACoC,KAAK,CAACnC,OAAO,IAAI,CAAC,CAAC,CAAC;EACvD,MAAMM,OAAO,GAAGR,KAAK,CAACS,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;EACpD;EACA,MAAMC,cAAc,GAAG,EAAE;EACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;IACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAMf,WAAW,CAACgB,SAAS,CAAC;MAAEC,GAAG,EAAER;IAAQ,CAAC,CAAC;IACjEI,cAAc,CAACK,IAAI,CAAC,GAAGH,SAAS,CAAC;EACrC;EACA;EACA,IAAIuB,KAAK,CAACE,OAAO,CAAC5B,EAAE,EAAE;IAClBC,cAAc,CAACK,IAAI,CAAC,MAAMlB,WAAW,CAACyC,OAAO,CAACH,KAAK,CAACE,OAAO,CAAC5B,EAAE,CAAC,CAAC;EACpE;;EAEA;EACA,MAAM8B,SAAS,GAAG;IACdJ,KAAK,EAAE;MACHL,IAAI,EAAEK,KAAK,CAACL,IAAI;MAChB9B,OAAO,EAAEmC,KAAK,CAACnC,OAAO;MACtBqC,OAAO,EAAEF,KAAK,CAACE;IACnB,CAAC;IACDvC,KAAK,EAAEY;EACX,CAAC;EACD,MAAM8B,eAAe,GAAGjB,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACa,SAAS,CAAC,CAAC;EAE9D,MAAMZ,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cAAc;MACrBoB,IAAI,EAAEK,KAAK,CAACL,IAAI;MAChBC,UAAU,EAAES;IAChB,CAAC;IACDR,cAAc,EAAEI;EACpB,CAAC,CAAC;EAEF,OAAOT,MAAM,CAACM,OAAO,EAAE;AAC3B;AAUO,eAAeQ,cAAc,CAChCC,QAAsB,EACtBC,sBAA8B,EAC9B9C,WAA8C,EACZ;EAClC;EACA,MAAMC,KAAK,GAAGC,oBAAoB,CAAC2C,QAAQ,CAAC1C,OAAO,IAAI,CAAC,CAAC,CAAC;EAC1D,MAAMM,OAAO,GAAGR,KAAK,CAACS,GAAG,CAACC,SAAS,IAAIA,SAAS,CAACC,EAAE,CAAC;EACpD;EACA,MAAMC,cAAc,GAAG,EAAE;EACzB,IAAIJ,OAAO,CAACK,MAAM,GAAG,CAAC,EAAE;IACpB,MAAM,CAACC,SAAS,CAAC,GAAG,MAAMf,WAAW,CAACgB,SAAS,CAAC;MAAEC,GAAG,EAAER;IAAQ,CAAC,CAAC;IACjEI,cAAc,CAACK,IAAI,CAAC,GAAGH,SAAS,CAAC;EACrC;;EAEA;EACA,MAAMgC,YAAY,GAAG;IACjBF,QAAQ,EAAE;MACNzB,KAAK,EAAEyB,QAAQ,CAACzB,KAAK;MACrB4B,IAAI,EAAEH,QAAQ,CAACG,IAAI;MACnBC,IAAI,EAAEJ,QAAQ,CAACI,IAAI;MACnBC,WAAW,EAAEL,QAAQ,CAACK,WAAW;MACjC/C,OAAO,EAAE0C,QAAQ,CAAC1C,OAAO;MACzBgD,MAAM,EAAEN,QAAQ,CAACM,MAAM;MACvBC,YAAY,EAAEP,QAAQ,CAACO;IAC3B,CAAC;IACDnD,KAAK,EAAEY;EACX,CAA
C;EACD,MAAMwC,kBAAkB,GAAG3B,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACkB,YAAY,CAAC,CAAC;EAEpE,MAAMjB,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAEY,cAAc;MACrBoB,IAAI,EAAEY,QAAQ,CAACzB,KAAK;MACpBc,UAAU,EAAEmB;IAChB,CAAC;IACDlB,cAAc,EAAEW;EACpB,CAAC,CAAC;EAEF,OAAOhB,MAAM,CAACM,OAAO,EAAE;AAC3B;AAEO,SAASlC,oBAAoB,CAACoD,IAAyB,EAAErD,KAAY,GAAG,EAAE,EAAU;EACvF;EACA,IAAI,CAACqD,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IACnC,OAAOrD,KAAK;EAChB;EACA;EACA,IAAIsD,KAAK,CAACC,OAAO,CAACF,IAAI,CAAC,EAAE;IACrB,KAAK,IAAIG,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,IAAI,CAACxC,MAAM,EAAE2C,CAAC,EAAE,EAAE;MAClC,MAAMC,OAAO,GAAGJ,IAAI,CAACG,CAAC,CAAC;MACvBvD,oBAAoB,CAACwD,OAAO,EAAEzD,KAAK,CAAC;IACxC;IACA,OAAOA,KAAK;EAChB;;EAEA;EACA,MAAM0D,KAAK,GAAGC,MAAM,CAACC,OAAO,CAACP,IAAI,CAAC;EAClC,KAAK,IAAIG,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGE,KAAK,CAAC7C,MAAM,EAAE2C,CAAC,EAAE,EAAE;IACnC,MAAM,CAACK,GAAG,EAAEC,KAAK,CAAC,GAAGJ,KAAK,CAACF,CAAC,CAAC;IAC7B;IACA,IAAIK,GAAG,KAAK,MAAM,IAAIC,KAAK,EAAE;MACzB9D,KAAK,CAACiB,IAAI,CAAC6C,KAAK,CAAC;IACrB,CAAC,MAAM,IAAID,GAAG,KAAK,QAAQ,IAAIP,KAAK,CAACC,OAAO,CAACO,KAAK,CAAC,EAAE;MACjD;MACA9D,KAAK,CAACiB,IAAI,CAAC,GAAG6C,KAAK,CAAC;IACxB,CAAC,MAAM;MACH7D,oBAAoB,CAAC6D,KAAK,EAAE9D,KAAK,CAAC;IACtC;EACJ;EACA,OAAOA,KAAK;AAChB;AAUO,eAAe+D,UAAU,CAC5BC,IAAY,EACZC,kBAA0B,EACQ;EAClC;EACA,MAAMC,QAAQ,GAAG;IACbF,IAAI,EAAE;MACFhC,IAAI,EAAEgC,IAAI,CAAChC,IAAI;MACfV,MAAM,EAAE0C,IAAI,CAAC1C,MAAM;MACnBD,OAAO,EAAE2C,IAAI,CAAC3C,OAAO;MACrB8C,MAAM,EAAEH,IAAI,CAACG,MAAM;MACnBjB,MAAM,EAAEc,IAAI,CAACd,MAAM;MACnB3B,QAAQ,EAAEyC,IAAI,CAACzC,QAAQ;MACvB6C,QAAQ,EAAEJ,IAAI,CAACI;IACnB;EACJ,CAAC;EACD,MAAMC,cAAc,GAAG5C,MAAM,CAACC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACsC,QAAQ,CAAC,CAAC;EAE5D,MAAMrC,MAAM,GAAG,IAAIC,eAAM,CAAC;IACtBC,UAAU,EAAE;MACR/B,KAAK,EAAE,EAAE;MACTgC,IAAI,EAAEgC,IAAI,CAAChC,IAAI;MACfC,UAAU,EAAEoC;IAChB,CAAC;IACDnC,cAAc,EAAE+B;EACpB,CAAC,CAAC;EAEF,OAAOpC,MAAM,CAACM,OAAO,EAAE;AAC3B"}
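Note: in export/utils.js above, this release adds an exportForm helper and an EXPORT_FORMS_FOLDER_KEY constant, switches file-manager calls from fileManager.files.listFiles / fileManager.files.getFile to fileManager.listFiles / fileManager.getFile, and extends the exported template payload with slug, tags, layout and pageCategory. A minimal sketch of how the new helper could be called is shown below; the deep import path, the exportSingleForm wrapper and the S3 key layout are illustrative assumptions, not part of the package.

// Illustrative sketch only — not shipped in this package.
import { exportForm, EXPORT_FORMS_FOLDER_KEY } from "@webiny/api-page-builder-import-export/export/utils";
import { FbForm } from "@webiny/api-form-builder/types";

async function exportSingleForm(form: FbForm) {
    // Hypothetical S3 folder key for this export; the real handlers build their own keys.
    const exportFormsDataKey = `${EXPORT_FORMS_FOLDER_KEY}/${form.id}`;
    // exportForm serializes the form's name, status, version, fields, layout, settings and
    // triggers to JSON, zips the payload and uploads it to S3, resolving with the upload result.
    return exportForm(form, exportFormsDataKey);
}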
package/export/zipper.js CHANGED
@@ -1,29 +1,20 @@
  "use strict";

  var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard").default;
-
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = exports.ZipOfZip = void 0;
-
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
  var _archiver = _interopRequireDefault(require("archiver"));
-
  var _stream = require("stream");
-
  var path = _interopRequireWildcard(require("path"));
-
  var _kebabCase = _interopRequireDefault(require("lodash/kebabCase"));
-
  var _uniqid = _interopRequireDefault(require("uniqid"));
-
  var _s3Stream = require("./s3Stream");
-
  // TODO: Move "archive" in layer
+
  class Zipper {
  constructor(config) {
  (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -33,7 +24,6 @@ class Zipper {
  this.config = config;
  this.archiveFileName = (0, _uniqid.default)(`${this.config.archiveFileKey}/`, `-${(0, _kebabCase.default)(this.config.exportInfo.name)}.zip`);
  }
-
  s3DownloadStreams() {
  const exportInfo = this.config.exportInfo;
  const prefix = (0, _uniqid.default)("", `-${(0, _kebabCase.default)(exportInfo.name)}`);
@@ -42,7 +32,7 @@ class Zipper {
  }) => {
  return {
  stream: _s3Stream.s3Stream.readStream(key),
- filename: `${prefix}\\${this.filesDirName}\\${path.basename(key)}`
+ filename: `${prefix}\\${this.filesDirName}\\${key}`
  };
  });
  return [...files, {
@@ -50,39 +40,38 @@ class Zipper {
  filename: `${prefix}\\${exportInfo.name}.json`
  }];
  }
-
  process() {
  const {
  streamPassThrough,
  streamPassThroughUploadPromise
- } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
-
-
- const s3FilesStreams = this.s3DownloadStreams(); // 2. Prepare zip from the file stream.
-
- const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+ } = _s3Stream.s3Stream.writeStream(this.archiveFileName);

+ // 1. Read all files from S3 using stream.
+ const s3FilesStreams = this.s3DownloadStreams();

+ // 2. Prepare zip from the file stream.
+ const archive = _archiver.default.create(this.archiveFormat);
+ // Handle archive events.
  archive.on("error", error => {
  throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
- }); // Append all file streams to archive.
+ });

+ // Append all file streams to archive.
  s3FilesStreams.forEach(streamDetails => archive.append(streamDetails.stream, {
  name: streamDetails.filename
- })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+ }));

- archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+ // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+ archive.pipe(streamPassThrough);
+ // Finalize the archive (ie we are done appending files but streams have to finish yet)
  // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+ archive.finalize();

- archive.finalize(); // 3. Return upload stream promise.
-
+ // 3. Return upload stream promise.
  return streamPassThroughUploadPromise;
  }
-
  }
-
  exports.default = Zipper;
-
  class ZipOfZip {
  constructor(keys, filename) {
  (0, _defineProperty2.default)(this, "archiveFormat", "zip");
@@ -91,9 +80,8 @@ class ZipOfZip {
  (0, _defineProperty2.default)(this, "filename", void 0);
  this.keys = keys;
  this.filename = filename;
- this.archiveFileName = (0, _uniqid.default)("", `-${filename}`);
+ this.archiveFileName = (0, _uniqid.default)("EXPORTS/", `-${filename}`);
  }
-
  getFileStreams() {
  return this.keys.map(key => {
  return {
@@ -102,35 +90,35 @@ class ZipOfZip {
  };
  });
  }
-
  process() {
  const {
  streamPassThrough,
  streamPassThroughUploadPromise
- } = _s3Stream.s3Stream.writeStream(this.archiveFileName); // 1. Read all files from S3 using stream.
-
-
- const fileStreamDetails = this.getFileStreams(); // 2. Prepare zip from the file stream.
-
- const archive = _archiver.default.create(this.archiveFormat); // Handle archive events.
+ } = _s3Stream.s3Stream.writeStream(this.archiveFileName);

+ // 1. Read all files from S3 using stream.
+ const fileStreamDetails = this.getFileStreams();

+ // 2. Prepare zip from the file stream.
+ const archive = _archiver.default.create(this.archiveFormat);
+ // Handle archive events.
  archive.on("error", error => {
  throw new Error(`${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`);
- }); // Append all file streams to archive.
+ });

+ // Append all file streams to archive.
  fileStreamDetails.forEach(streamDetails => archive.append(streamDetails.stream, {
  name: streamDetails.filename
- })); // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+ }));

- archive.pipe(streamPassThrough); // Finalize the archive (ie we are done appending files but streams have to finish yet)
+ // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.
+ archive.pipe(streamPassThrough);
+ // Finalize the archive (ie we are done appending files but streams have to finish yet)
  // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+ archive.finalize();

- archive.finalize(); // 3. Return upload stream promise.
-
+ // 3. Return upload stream promise.
  return streamPassThroughUploadPromise;
  }
-
  }
-
  exports.ZipOfZip = ZipOfZip;
package/export/zipper.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","name","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","path","basename","Readable","from","dataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","code","message","stack","forEach","streamDetails","append","pipe","finalize","ZipOfZip","keys","getFileStreams","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { File } from \"@webiny/api-file-manager/types\";\n\ninterface FileStreamDetails {\n stream: Readable;\n filename: string;\n}\n\ninterface ExportInfo {\n files: File[];\n name: string;\n dataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n exportInfo: ExportInfo;\n archiveFileKey: string;\n}\n\nexport default class Zipper {\n private readonly archiveFormat = \"zip\";\n private readonly filesDirName = \"assets\";\n private readonly archiveFileName: string;\n config: ZipperConfig;\n\n constructor(config: ZipperConfig) {\n this.config = config;\n this.archiveFileName = uniqueId(\n `${this.config.archiveFileKey}/`,\n `-${kebabCase(this.config.exportInfo.name)}.zip`\n );\n }\n\n s3DownloadStreams(): FileStreamDetails[] {\n const exportInfo = this.config.exportInfo;\n const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.name)}`);\n const files = exportInfo.files.map(({ key }) => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${prefix}\\\\${this.filesDirName}\\\\${path.basename(key)}`\n };\n });\n\n return [\n ...files,\n {\n stream: Readable.from(exportInfo.dataBuffer),\n filename: `${prefix}\\\\${exportInfo.name}.json`\n }\n ];\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const s3FilesStreams = this.s3DownloadStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. 
Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n\nexport class ZipOfZip {\n private readonly archiveFormat = \"zip\";\n private readonly archiveFileName: string;\n keys: string[];\n filename: string;\n\n constructor(keys: string[], filename: string) {\n this.keys = keys;\n this.filename = filename;\n this.archiveFileName = uniqueId(\"\", `-${filename}`);\n }\n\n getFileStreams(): FileStreamDetails[] {\n return this.keys.map(key => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${path.basename(key)}`\n };\n });\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const fileStreamDetails = this.getFileStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n"],"mappings":";;;;;;;;;;;;;AACA;;AAEA;;AACA;;AACA;;AACA;;AACA;;AAPA;AA0Be,MAAMA,MAAN,CAAa;EAMxBC,WAAW,CAACC,MAAD,EAAuB;IAAA,qDALD,KAKC;IAAA,oDAJF,QAIE;IAAA;IAAA;IAC9B,KAAKA,MAAL,GAAcA,MAAd;IACA,KAAKC,eAAL,GAAuB,IAAAC,eAAA,EAClB,GAAE,KAAKF,MAAL,CAAYG,cAAe,GADX,EAElB,IAAG,IAAAC,kBAAA,EAAU,KAAKJ,MAAL,CAAYK,UAAZ,CAAuBC,IAAjC,CAAuC,MAFxB,CAAvB;EAIH;;EAEDC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,KAAKL,MAAL,CAAYK,UAA/B;IACA,MAAMG,MAAM,GAAG,IAAAN,eAAA,EAAS,EAAT,EAAc,IAAG,IAAAE,kBAAA,EAAUC,UAAU,CAACC,IAArB,CAA2B,EAA5C,CAAf;IACA,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAX,CAAiBC,GAAjB,CAAqB,CAAC;MAAEC;IAAF,CAAD,KAAa;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEP,MAAO,KAAI,KAAKQ,YAAa,KAAIC,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF9D,CAAP;IAIH,CALa,CAAd;IAOA,OAAO,CACH,GAAGF,KADA,EAEH;MACIG,MAAM,EAAEO,gBAAA,CAASC,IAAT,CAAcf,UAAU,CAACgB,UAAzB,CADZ;MAEIN,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,IAAK;IAF5C,CAFG,CAAP;EAOH;;EAEDgB,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAMyB,cAAc,GAAG,KAAKnB,iBAAL,EAAvB,CAN0C,CAQ1C;;IACA,MAAMoB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAAC1B,IAAK,IAAG0B,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,OAAQ,IAAGH,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACI,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAV,cAAc,CAACW,OAAf,CAAwBC,aAAD,IACnBX,OAAO,CAACY,MAAR,CAAeD,aAAa,CAAC1B,MAA7B,EAAqC;MAAEN,IAAI,EAAEgC,aAAa,CAACvB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACa,IAAR,CAAajB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACc,QAAR,GA1B0C,CA4B1C;;IACA,OAAOjB,8BAAP;EACH;;AA/DuB;;;;AAkErB,MAAMkB,QAAN,CAAe;EAMlB3C,WAAW,CAAC4C,IAAD,EAAiB5B,QAAjB,EAAmC;IAAA,qDALb,KAKa;IAAA;IAAA;IAAA;IAC1C,KAAK4B,IAAL,GAAYA,IAAZ;
IACA,KAAK5B,QAAL,GAAgBA,QAAhB;IACA,KAAKd,eAAL,GAAuB,IAAAC,eAAA,EAAS,EAAT,EAAc,IAAGa,QAAS,EAA1B,CAAvB;EACH;;EAED6B,cAAc,GAAwB;IAClC,OAAO,KAAKD,IAAL,CAAUjC,GAAV,CAAcC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAA,CAASC,UAAT,CAAoBH,GAApB,CADL;QAEHI,QAAQ,EAAG,GAAEE,IAAI,CAACC,QAAL,CAAcP,GAAd,CAAmB;MAF7B,CAAP;IAIH,CALM,CAAP;EAMH;;EAEDW,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAF;MAAqBC;IAArB,IAAwDX,kBAAA,CAASY,WAAT,CAC1D,KAAKxB,eADqD,CAA9D,CAD0C,CAK1C;;;IACA,MAAM4C,iBAAiB,GAAG,KAAKD,cAAL,EAA1B,CAN0C,CAQ1C;;IACA,MAAMjB,OAAO,GAAGC,iBAAA,CAAQC,MAAR,CAAe,KAAKC,aAApB,CAAhB,CAT0C,CAU1C;;;IACAH,OAAO,CAACI,EAAR,CAAW,OAAX,EAAqBC,KAAD,IAA0B;MAC1C,MAAM,IAAIC,KAAJ,CACD,GAAED,KAAK,CAAC1B,IAAK,IAAG0B,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,OAAQ,IAAGH,KAAK,CAACf,IAAK,IAAGe,KAAK,CAACI,KAAM,EADxE,CAAN;IAGH,CAJD,EAX0C,CAiB1C;;IACAS,iBAAiB,CAACR,OAAlB,CAA2BC,aAAD,IACtBX,OAAO,CAACY,MAAR,CAAeD,aAAa,CAAC1B,MAA7B,EAAqC;MAAEN,IAAI,EAAEgC,aAAa,CAACvB;IAAtB,CAArC,CADJ,EAlB0C,CAsB1C;;IACAY,OAAO,CAACa,IAAR,CAAajB,iBAAb,EAvB0C,CAwB1C;IACA;;IACAI,OAAO,CAACc,QAAR,GA1B0C,CA4B1C;;IACA,OAAOjB,8BAAP;EACH;;AAnDiB"}
+ {"version":3,"names":["Zipper","constructor","config","archiveFileName","uniqueId","archiveFileKey","kebabCase","exportInfo","name","s3DownloadStreams","prefix","files","map","key","stream","s3Stream","readStream","filename","filesDirName","Readable","from","dataBuffer","process","streamPassThrough","streamPassThroughUploadPromise","writeStream","s3FilesStreams","archive","vending","create","archiveFormat","on","error","Error","code","message","path","stack","forEach","streamDetails","append","pipe","finalize","ZipOfZip","keys","getFileStreams","basename","fileStreamDetails"],"sources":["zipper.ts"],"sourcesContent":["// TODO: Move \"archive\" in layer\nimport vending, { ArchiverError } from \"archiver\";\nimport S3 from \"aws-sdk/clients/s3\";\nimport { Readable } from \"stream\";\nimport * as path from \"path\";\nimport kebabCase from \"lodash/kebabCase\";\nimport uniqueId from \"uniqid\";\nimport { s3Stream } from \"./s3Stream\";\nimport { File } from \"@webiny/api-file-manager/types\";\n\ninterface FileStreamDetails {\n stream: Readable;\n filename: string;\n}\n\ninterface ExportInfo {\n files: File[];\n name: string;\n dataBuffer: Buffer;\n}\n\nexport interface ZipperConfig {\n exportInfo: ExportInfo;\n archiveFileKey: string;\n}\n\nexport default class Zipper {\n private readonly archiveFormat = \"zip\";\n private readonly filesDirName = \"assets\";\n private readonly archiveFileName: string;\n config: ZipperConfig;\n\n constructor(config: ZipperConfig) {\n this.config = config;\n this.archiveFileName = uniqueId(\n `${this.config.archiveFileKey}/`,\n `-${kebabCase(this.config.exportInfo.name)}.zip`\n );\n }\n\n s3DownloadStreams(): FileStreamDetails[] {\n const exportInfo = this.config.exportInfo;\n const prefix = uniqueId(\"\", `-${kebabCase(exportInfo.name)}`);\n const files = exportInfo.files.map(({ key }) => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${prefix}\\\\${this.filesDirName}\\\\${key}`\n };\n });\n\n return [\n ...files,\n {\n stream: Readable.from(exportInfo.dataBuffer),\n filename: `${prefix}\\\\${exportInfo.name}.json`\n }\n ];\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const s3FilesStreams = this.s3DownloadStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n s3FilesStreams.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. 
Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n\nexport class ZipOfZip {\n private readonly archiveFormat = \"zip\";\n private readonly archiveFileName: string;\n keys: string[];\n filename: string;\n\n constructor(keys: string[], filename: string) {\n this.keys = keys;\n this.filename = filename;\n this.archiveFileName = uniqueId(\"EXPORTS/\", `-${filename}`);\n }\n\n getFileStreams(): FileStreamDetails[] {\n return this.keys.map(key => {\n return {\n stream: s3Stream.readStream(key),\n filename: `${path.basename(key)}`\n };\n });\n }\n\n process(): Promise<S3.ManagedUpload.SendData> {\n const { streamPassThrough, streamPassThroughUploadPromise } = s3Stream.writeStream(\n this.archiveFileName\n );\n\n // 1. Read all files from S3 using stream.\n const fileStreamDetails = this.getFileStreams();\n\n // 2. Prepare zip from the file stream.\n const archive = vending.create(this.archiveFormat);\n // Handle archive events.\n archive.on(\"error\", (error: ArchiverError) => {\n throw new Error(\n `${error.name} ${error.code} ${error.message} ${error.path} ${error.stack}`\n );\n });\n\n // Append all file streams to archive.\n fileStreamDetails.forEach((streamDetails: FileStreamDetails) =>\n archive.append(streamDetails.stream, { name: streamDetails.filename })\n );\n\n // Pipe archive output to streamPassThrough (Transform Stream) which will be uploaded to S3.\n archive.pipe(streamPassThrough);\n // Finalize the archive (ie we are done appending files but streams have to finish yet)\n // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand\n archive.finalize();\n\n // 3. Return upload stream promise.\n return streamPassThroughUploadPromise;\n }\n}\n"],"mappings":";;;;;;;;;AACA;AAEA;AACA;AACA;AACA;AACA;AAPA;;AA0Be,MAAMA,MAAM,CAAC;EAMxBC,WAAW,CAACC,MAAoB,EAAE;IAAA,qDALD,KAAK;IAAA,oDACN,QAAQ;IAAA;IAAA;IAKpC,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,eAAe,GAAG,IAAAC,eAAQ,EAC1B,GAAE,IAAI,CAACF,MAAM,CAACG,cAAe,GAAE,EAC/B,IAAG,IAAAC,kBAAS,EAAC,IAAI,CAACJ,MAAM,CAACK,UAAU,CAACC,IAAI,CAAE,MAAK,CACnD;EACL;EAEAC,iBAAiB,GAAwB;IACrC,MAAMF,UAAU,GAAG,IAAI,CAACL,MAAM,CAACK,UAAU;IACzC,MAAMG,MAAM,GAAG,IAAAN,eAAQ,EAAC,EAAE,EAAG,IAAG,IAAAE,kBAAS,EAACC,UAAU,CAACC,IAAI,CAAE,EAAC,CAAC;IAC7D,MAAMG,KAAK,GAAGJ,UAAU,CAACI,KAAK,CAACC,GAAG,CAAC,CAAC;MAAEC;IAAI,CAAC,KAAK;MAC5C,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEP,MAAO,KAAI,IAAI,CAACQ,YAAa,KAAIL,GAAI;MACtD,CAAC;IACL,CAAC,CAAC;IAEF,OAAO,CACH,GAAGF,KAAK,EACR;MACIG,MAAM,EAAEK,gBAAQ,CAACC,IAAI,CAACb,UAAU,CAACc,UAAU,CAAC;MAC5CJ,QAAQ,EAAG,GAAEP,MAAO,KAAIH,UAAU,CAACC,IAAK;IAC5C,CAAC,CACJ;EACL;EAEAc,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGT,kBAAQ,CAACU,WAAW,CAC9E,IAAI,CAACtB,eAAe,CACvB;;IAED;IACA,MAAMuB,cAAc,GAAG,IAAI,CAACjB,iBAAiB,EAAE;;IAE/C;IACA,MAAMkB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACxB,IAAK,IAAGwB,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,OAAQ,IAAGH,KAAK,CAACI,IAAK,IAAGJ,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAX,cAAc,CAACY,OAAO,CAAEC,aAAgC,IACpDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAACzB,MAAM,EAAE;MAAEN,IAAI,EAAE+B,aAAa,CAACtB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAU,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC;AAEM,MAAMmB,QAAQ,CAAC;EAMlB1C,WAAW,CAAC2C,IAAc,EAAE3B,QAAgB,EAAE;IAAA,qDALb,KAAK;IAAA;IAAA;IAAA;IAMlC,IAAI,CAAC2B,IAAI,GAAGA,IAAI;IAChB,IAAI,CAAC3B,QAAQ,GAAGA,QAAQ;
IACxB,IAAI,CAACd,eAAe,GAAG,IAAAC,eAAQ,EAAC,UAAU,EAAG,IAAGa,QAAS,EAAC,CAAC;EAC/D;EAEA4B,cAAc,GAAwB;IAClC,OAAO,IAAI,CAACD,IAAI,CAAChC,GAAG,CAACC,GAAG,IAAI;MACxB,OAAO;QACHC,MAAM,EAAEC,kBAAQ,CAACC,UAAU,CAACH,GAAG,CAAC;QAChCI,QAAQ,EAAG,GAAEmB,IAAI,CAACU,QAAQ,CAACjC,GAAG,CAAE;MACpC,CAAC;IACL,CAAC,CAAC;EACN;EAEAS,OAAO,GAAuC;IAC1C,MAAM;MAAEC,iBAAiB;MAAEC;IAA+B,CAAC,GAAGT,kBAAQ,CAACU,WAAW,CAC9E,IAAI,CAACtB,eAAe,CACvB;;IAED;IACA,MAAM4C,iBAAiB,GAAG,IAAI,CAACF,cAAc,EAAE;;IAE/C;IACA,MAAMlB,OAAO,GAAGC,iBAAO,CAACC,MAAM,CAAC,IAAI,CAACC,aAAa,CAAC;IAClD;IACAH,OAAO,CAACI,EAAE,CAAC,OAAO,EAAGC,KAAoB,IAAK;MAC1C,MAAM,IAAIC,KAAK,CACV,GAAED,KAAK,CAACxB,IAAK,IAAGwB,KAAK,CAACE,IAAK,IAAGF,KAAK,CAACG,OAAQ,IAAGH,KAAK,CAACI,IAAK,IAAGJ,KAAK,CAACK,KAAM,EAAC,CAC9E;IACL,CAAC,CAAC;;IAEF;IACAU,iBAAiB,CAACT,OAAO,CAAEC,aAAgC,IACvDZ,OAAO,CAACa,MAAM,CAACD,aAAa,CAACzB,MAAM,EAAE;MAAEN,IAAI,EAAE+B,aAAa,CAACtB;IAAS,CAAC,CAAC,CACzE;;IAED;IACAU,OAAO,CAACc,IAAI,CAAClB,iBAAiB,CAAC;IAC/B;IACA;IACAI,OAAO,CAACe,QAAQ,EAAE;;IAElB;IACA,OAAOlB,8BAA8B;EACzC;AACJ;AAAC"}
@@ -1,34 +1,22 @@
  "use strict";

  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.default = void 0;
-
  var _error = _interopRequireDefault(require("@webiny/error"));
-
  var _handlerGraphql = require("@webiny/handler-graphql");
-
  var _api = require("@webiny/api");
-
  var _checkBasePermissions = _interopRequireDefault(require("@webiny/api-page-builder/graphql/crud/utils/checkBasePermissions"));
-
  var _types = require("../../types");
-
  var _client = require("../../client");
-
  var _utils = require("../../import/utils");
-
  var _utils2 = require("../../export/utils");
-
  var _utils3 = require("@webiny/utils");
-
  const PERMISSION_NAME = "pb.block";
  const EXPORT_BLOCKS_PROCESS_HANDLER = process.env.EXPORT_PROCESS_HANDLER;
  const IMPORT_BLOCKS_CREATE_HANDLER = process.env.IMPORT_CREATE_HANDLER;
-
  var _default = new _api.ContextPlugin(context => {
  const importExportCrud = {
  async importBlocks({
@@ -37,15 +25,15 @@ var _default = new _api.ContextPlugin(context => {
  }) {
  await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
- }); // Bail out early if category not found
+ });

+ // Bail out early if category not found
  const category = await context.pageBuilder.getBlockCategory(categorySlug);
-
  if (!category) {
  throw new _handlerGraphql.NotFoundError(`Category with slug "${categorySlug}" not found.`);
- } // Create a task for import block
-
+ }

+ // Create a task for import block
  const task = await context.pageBuilder.importExportTask.createTask({
  status: _types.ImportExportTaskStatus.PENDING,
  input: {
@@ -58,7 +46,6 @@ var _default = new _api.ContextPlugin(context => {
  * ImportBlocks
  * importBlocks
  */
-
  await (0, _client.invokeHandlerClient)({
  context,
  name: IMPORT_BLOCKS_CREATE_HANDLER,
@@ -75,7 +62,6 @@ var _default = new _api.ContextPlugin(context => {
  task
  };
  },
-
  async exportBlocks({
  ids: initialBlockIds,
  where
@@ -83,30 +69,28 @@ var _default = new _api.ContextPlugin(context => {
  await (0, _checkBasePermissions.default)(context, PERMISSION_NAME, {
  rwd: "w"
  });
- let blockIds = initialBlockIds || []; // If no ids are provided then it means we want to export all blocks
-
+ let blockIds = initialBlockIds || [];
+ // If no ids are provided then it means we want to export all blocks
  if (!initialBlockIds || Array.isArray(initialBlockIds) && initialBlockIds.length === 0) {
- blockIds = [];
  const blocks = await context.pageBuilder.listPageBlocks({
  where
- }); // Save block ids
-
- blocks.forEach(block => blockIds.push(block.id));
+ });
+ // Save block ids
+ blockIds = blocks.map(block => block.id);
  }
-
  if (blockIds.length === 0) {
  throw new _error.default("Cannot export blocks - no blocks found for provided inputs.", "EMPTY_EXPORT_NO_BLOCKS_FOUND");
- } // Create the main task for blocks export.
-
+ }

+ // Create the main task for blocks export.
  const task = await context.pageBuilder.importExportTask.createTask({
  status: _types.ImportExportTaskStatus.PENDING
  });
- const exportBlocksDataKey = `${_utils2.EXPORT_BLOCKS_FOLDER_KEY}/${task.id}`; // For each block create a sub task and invoke the process handler.
-
+ const exportBlocksDataKey = `${_utils2.EXPORT_BLOCKS_FOLDER_KEY}/${task.id}`;
+ // For each block create a sub task and invoke the process handler.
  for (let i = 0; i < blockIds.length; i++) {
- const blockId = blockIds[i]; // Create sub task.
-
+ const blockId = blockIds[i];
+ // Create sub task.
  await context.pageBuilder.importExportTask.createSubTask(task.id, (0, _utils3.zeroPad)(i + 1, 5), {
  status: _types.ImportExportTaskStatus.PENDING,
  input: {
@@ -114,9 +98,8 @@ var _default = new _api.ContextPlugin(context => {
  exportBlocksDataKey
  }
  });
- } // Update main task status.
-
-
+ }
+ // Update main task status.
  await context.pageBuilder.importExportTask.updateTask(task.id, {
  status: _types.ImportExportTaskStatus.PROCESSING,
  stats: (0, _utils.initialStats)(blockIds.length),
@@ -124,13 +107,13 @@ var _default = new _api.ContextPlugin(context => {
  exportBlocksDataKey
  }
  });
+
  /**
  * Export Blocks
  * ExportBlocks
  * exportBlocks
  */
  // Invoke handler.
-
  await (0, _client.invokeHandlerClient)({
  context,
  name: EXPORT_BLOCKS_PROCESS_HANDLER,
@@ -146,10 +129,8 @@ var _default = new _api.ContextPlugin(context => {
  task
  };
  }
-
- }; // Modify context
-
+ };
+ // Modify context
  context.pageBuilder.blocks = importExportCrud;
  });
-
  exports.default = _default;